Actual source code: sliced.c

  1: #include <petscdmsliced.h>
  2: #include <petscmat.h>
  3: #include <petsc/private/dmimpl.h>

  5: /* CSR storage of the nonzero structure of a bs*bs matrix */
  6: typedef struct {
  7:   PetscInt bs,nz,*i,*j;
  8: } DMSlicedBlockFills;
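/* Illustrative example (not used by the code below): for bs = 2 and the pattern
   [1 0; 1 1], the stored structure would be nz = 3, i = {0,1,3}, j = {0,0,1}. */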

 10: typedef struct  {
 11:   PetscInt           bs,n,N,Nghosts,*ghosts;
 12:   PetscInt           d_nz,o_nz,*d_nnz,*o_nnz;
 13:   DMSlicedBlockFills *dfill,*ofill;
 14: } DM_Sliced;

 16: PetscErrorCode  DMCreateMatrix_Sliced(DM dm, Mat *J)
 17: {
 18:   PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
 19:   ISLocalToGlobalMapping lmap;
 20:   void                   (*aij)(void) = NULL;
 21:   DM_Sliced              *slice = (DM_Sliced*)dm->data;

 23:   bs   = slice->bs;
 24:   MatCreate(PetscObjectComm((PetscObject)dm),J);
 25:   MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);
 26:   MatSetBlockSize(*J,bs);
 27:   MatSetType(*J,dm->mattype);
 28:   MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);
 29:   MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);
 30:   /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices, so we check whether it will do any
 31:   * good before going on with it. */
 32:   PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);
 33:   if (!aij) {
 34:     PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);
 35:   }
 36:   if (aij) {
 37:     if (bs == 1) {
 38:       MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);
 39:       MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);
 40:     } else if (!slice->d_nnz) {
 41:       MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,NULL);
 42:       MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,NULL,slice->o_nz*bs,NULL);
 43:     } else {
 44:       /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
 45:       PetscMalloc2(slice->n*bs,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,&so_nnz);
 46:       for (i=0; i<slice->n*bs; i++) {
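        /* Each block row i/bs contributes d_nnz[i/bs] blocks to the diagonal portion: the diagonal
           block itself follows the dfill pattern, while the remaining d_nnz[i/bs]-1 blocks follow the
           ofill pattern (or a dense row of bs entries when no fill pattern was set). */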
 47:         sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
 48:                                            + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
 49:         if (so_nnz) {
 50:           so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
 51:         }
 52:       }
 53:       MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);
 54:       MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);
 55:       PetscFree2(sd_nnz,so_nnz);
 56:     }
 57:   }

 59:   /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
 60:   PetscMalloc1(slice->n+slice->Nghosts,&globals);
 61:   MatGetOwnershipRange(*J,&rstart,NULL);
 62:   for (i=0; i<slice->n; i++) globals[i] = rstart/bs + i;

 64:   for (i=0; i<slice->Nghosts; i++) {
 65:     globals[slice->n+i] = slice->ghosts[i];
 66:   }
 67:   ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,bs,slice->n+slice->Nghosts,globals,PETSC_OWN_POINTER,&lmap);
 68:   MatSetLocalToGlobalMapping(*J,lmap,lmap);
 69:   ISLocalToGlobalMappingDestroy(&lmap);
 70:   MatSetDM(*J,dm);
 71:   return 0;
 72: }

 74: /*@C
 75:     DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
 76:       be ghosts on this process

 78:     Not Collective

 80:     Input Parameters:
 81: +    dm - the DM object
 82: .    bs - block size
 83: .    nlocal - number of local (owned, non-ghost) blocks
 84: .    Nghosts - number of ghost blocks on this process
 85: -    ghosts - global indices of each ghost block
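
    Example:
    A minimal calling sketch; the block size, block counts, and the contents of gidx[] below are
    illustrative values only.
.vb
    PetscInt gidx[] = {12,17,42};          /* global indices of the 3 ghost blocks (made-up) */
    DMSlicedSetGhosts(dm,2,5,3,gidx);      /* bs = 2, 5 owned blocks, 3 ghost blocks */
.ve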

 87:     Level: advanced

 89: .seealso: DMDestroy(), DMCreateGlobalVector()

 91: @*/
 92: PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
 93: {
 94:   DM_Sliced      *slice = (DM_Sliced*)dm->data;

 97:   PetscFree(slice->ghosts);
 98:   PetscMalloc1(Nghosts,&slice->ghosts);
 99:   PetscArraycpy(slice->ghosts,ghosts,Nghosts);
100:   slice->bs      = bs;
101:   slice->n       = nlocal;
102:   slice->Nghosts = Nghosts;
103:   return 0;
104: }

106: /*@C
107:     DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced

109:     Not Collective

111:     Input Parameters:
 112: +    dm - the DM object
 113: .    d_nz  - number of block nonzeros per block row in the diagonal portion of the local
 114:            submatrix (same for all local block rows)
 115: .    d_nnz - array containing the number of block nonzeros in the various block rows
 116:            of the diagonal portion of the local submatrix (possibly different for each
 117:            block row), or NULL
 118: .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of the local
 119:            submatrix (same for all local block rows)
 120: -    o_nnz - array containing the number of block nonzeros in the various block rows of the
 121:            off-diagonal portion of the local submatrix (possibly different for
 122:            each block row), or NULL

124:     Notes:
125:     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
 126:     obtained with DMCreateMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
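
    Example:
    A minimal sketch for a DM with 3 local block rows; the counts below are illustrative only.
    Note that the implementation below stores the nnz arrays by reference (they are not copied),
    so they must remain valid until the matrix is created.
.vb
    const PetscInt d_nnz[] = {3,2,3};      /* block nonzeros per block row, diagonal portion (made-up) */
    const PetscInt o_nnz[] = {1,0,2};      /* block nonzeros per block row, off-diagonal portion (made-up) */
    DMSlicedSetPreallocation(dm,0,d_nnz,0,o_nnz);
.ve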

128:     Level: advanced

 130: .seealso: DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
 131:          MatMPIBAIJSetPreallocation(), DMCreateMatrix(), DMSlicedSetBlockFills()

133: @*/
134: PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
135: {
136:   DM_Sliced *slice = (DM_Sliced*)dm->data;

139:   slice->d_nz  = d_nz;
140:   slice->d_nnz = (PetscInt*)d_nnz;
141:   slice->o_nz  = o_nz;
142:   slice->o_nnz = (PetscInt*)o_nnz;
143:   return 0;
144: }

146: static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
147: {
148:   PetscInt           i,j,nz,*fi,*fj;
149:   DMSlicedBlockFills *f;

152:   if (*inf) PetscFree3(*inf,(*inf)->i,(*inf)->j);
153:   if (!fill) return 0;
154:   for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
155:   PetscMalloc3(1,&f,bs+1,&fi,nz,&fj);
156:   f->bs = bs;
157:   f->nz = nz;
158:   f->i  = fi;
159:   f->j  = fj;
160:   for (i=0,nz=0; i<bs; i++) {
161:     fi[i] = nz;
162:     for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
163:   }
164:   fi[i] = nz;
165:   *inf  = f;
166:   return 0;
167: }

169: /*@C
170:     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
 171:     of the matrix returned by DMCreateMatrix().

173:     Logically Collective on dm

175:     Input Parameters:
 176: +   dm - the DM object
 177: .   dfill - the fill pattern in the diagonal block (may be NULL, meaning use a dense block)
 178: -   ofill - the fill pattern in the off-diagonal blocks (may be NULL, meaning use dense blocks)

180:     Notes:
181:     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
182:     See DMDASetBlockFills() for example usage.
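
    Example:
    A minimal sketch for bs = 3 (the block size must already have been set with DMSlicedSetGhosts());
    the 0/1 patterns below are illustrative only.  A nonzero at row i, column j of a pattern
    preallocates matrix entries coupling component i to component j.
.vb
    const PetscInt dfill[9] = {1,0,1,
                               0,1,0,
                               1,0,1};     /* fill inside the diagonal block (made-up) */
    const PetscInt ofill[9] = {1,0,0,
                               0,1,0,
                               0,0,1};     /* fill in the off-diagonal blocks (made-up) */
    DMSlicedSetBlockFills(dm,dfill,ofill);
.ve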

184:     Level: advanced

 186: .seealso: DMCreateMatrix(), DMDASetBlockFills()
187: @*/
188: PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
189: {
190:   DM_Sliced      *slice = (DM_Sliced*)dm->data;

193:   DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);
194:   DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);
195:   return 0;
196: }

198: static PetscErrorCode  DMDestroy_Sliced(DM dm)
199: {
200:   DM_Sliced      *slice = (DM_Sliced*)dm->data;

202:   PetscFree(slice->ghosts);
203:   if (slice->dfill) PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);
204:   if (slice->ofill) PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);
205:   /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
206:   PetscFree(slice);
207:   return 0;
208: }

210: static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
211: {
212:   DM_Sliced      *slice = (DM_Sliced*)dm->data;

216:   *gvec = NULL;
217:   VecCreateGhostBlock(PetscObjectComm((PetscObject)dm),slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);
218:   VecSetDM(*gvec,dm);
219:   return 0;
220: }

222: static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
223: {
224:   PetscBool      flg;

226:   VecGhostIsLocalForm(g,l,&flg);
228:   VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);
229:   VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);
230:   return 0;
231: }

233: static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
234: {
235:   PetscBool      flg;

237:   VecGhostIsLocalForm(g,l,&flg);
239:   VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);
240:   return 0;
241: }

243: /*MC
 244:    DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses ghosted vectors, created with VecCreateGhostBlock(), for storing the fields.

 246:    See DMSlicedCreate() for details.
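
   A DMSLICED is usually created with DMSlicedCreate(); a minimal sketch of setting one up directly
   (the bs, nlocal, Nghosts, and ghosts values are placeholders the caller would provide):
.vb
   DMCreate(comm,&dm);
   DMSetType(dm,DMSLICED);
   DMSlicedSetGhosts(dm,bs,nlocal,Nghosts,ghosts);
.ve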

248:   Level: intermediate

 250: .seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
251: M*/

253: PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
254: {
255:   DM_Sliced      *slice;

257:   PetscNewLog(p,&slice);
258:   p->data = slice;

260:   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
261:   p->ops->creatematrix       = DMCreateMatrix_Sliced;
262:   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
263:   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
264:   p->ops->destroy            = DMDestroy_Sliced;
265:   return 0;
266: }

268: /*@C
 269:     DMSlicedCreate - Creates a DM object used to manage data for an unstructured problem

271:     Collective

273:     Input Parameters:
274: +   comm - the processors that will share the global vector
275: .   bs - the block size
 276: .   nlocal - number of local (owned, non-ghost) blocks on this process
 277: .   Nghosts - number of ghost blocks needed on this process
 278: .   ghosts - global indices of each ghost block for this process
279: .   d_nnz - matrix preallocation information representing coupling within this process
280: -   o_nnz - matrix preallocation information representing coupling between this process and other processes

 282:     Output Parameter:
 283: .   dm - the new DM object

285:     Notes:
 286:         This DM does not support DMCreateLocalVector(); instead one directly uses VecGhostGetLocalForm() and VecGhostRestoreLocalForm()
 287:         to access the local representation, and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
 288:         the ghost points.

 290:         One can use DMGlobalToLocalBegin() and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
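
    Example:
    A minimal usage sketch; the bs, nlocal, Nghosts, ghosts, d_nnz, and o_nnz values are placeholders
    that the caller would provide.
.vb
    DM  dm;
    Vec X,Xlocal;
    DMSlicedCreate(PETSC_COMM_WORLD,bs,nlocal,Nghosts,ghosts,d_nnz,o_nnz,&dm);
    DMCreateGlobalVector(dm,&X);
    /* ... set owned entries of X ... */
    VecGhostUpdateBegin(X,INSERT_VALUES,SCATTER_FORWARD);
    VecGhostUpdateEnd(X,INSERT_VALUES,SCATTER_FORWARD);
    VecGhostGetLocalForm(X,&Xlocal);
    /* ... read owned and ghost entries through Xlocal ... */
    VecGhostRestoreLocalForm(X,&Xlocal);
    VecDestroy(&X);
    DMDestroy(&dm);
.ve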

292:     Level: advanced

 294: .seealso: DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
295:          VecGhostGetLocalForm(), VecGhostRestoreLocalForm()

297: @*/
298: PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
299: {
301:   DMCreate(comm,dm);
302:   DMSetType(*dm,DMSLICED);
303:   DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);
304:   if (d_nnz) {
305:     DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz);
306:   }
307:   return 0;
308: }