Actual source code: gcreate.c

  1: #include <petsc/private/matimpl.h>

  3: PETSC_INTERN PetscErrorCode MatSetBlockSizes_Default(Mat mat, PetscInt rbs, PetscInt cbs)
  4: {
  5:   PetscFunctionBegin;
  6:   if (!mat->preallocated) PetscFunctionReturn(PETSC_SUCCESS);
  7:   PetscCheck(mat->rmap->bs <= 0 || mat->rmap->bs == rbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot change row block size %" PetscInt_FMT " to %" PetscInt_FMT, mat->rmap->bs, rbs);
  8:   PetscCheck(mat->cmap->bs <= 0 || mat->cmap->bs == cbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot change column block size %" PetscInt_FMT " to %" PetscInt_FMT, mat->cmap->bs, cbs);
  9:   PetscFunctionReturn(PETSC_SUCCESS);
 10: }

 12: PETSC_INTERN PetscErrorCode MatShift_Basic(Mat Y, PetscScalar a)
 13: {
 14:   PetscInt    i, start, end;
 15:   PetscScalar alpha = a;
 16:   PetscBool   prevoption;

 18:   PetscFunctionBegin;
 19:   PetscCall(MatGetOption(Y, MAT_NO_OFF_PROC_ENTRIES, &prevoption));
 20:   PetscCall(MatSetOption(Y, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
 21:   PetscCall(MatGetOwnershipRange(Y, &start, &end));
 22:   for (i = start; i < end; i++) {
 23:     if (i < Y->cmap->N) PetscCall(MatSetValues(Y, 1, &i, 1, &i, &alpha, ADD_VALUES));
 24:   }
 25:   PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY));
 26:   PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY));
 27:   PetscCall(MatSetOption(Y, MAT_NO_OFF_PROC_ENTRIES, prevoption));
 28:   PetscFunctionReturn(PETSC_SUCCESS);
 29: }

 31: /*@
 32:   MatCreate - Creates a matrix where the type is determined
 33:   from either a call to `MatSetType()` or from the options database
 34:   with a call to `MatSetFromOptions()`.

 36:   Collective

 38:   Input Parameter:
 39: . comm - MPI communicator

 41:   Output Parameter:
 42: . A - the matrix

 44:   Options Database Keys:
 45: + -mat_type seqaij   - `MATSEQAIJ` type, uses `MatCreateSeqAIJ()`
 46: . -mat_type mpiaij   - `MATMPIAIJ` type, uses `MatCreateAIJ()`
 47: . -mat_type seqdense - `MATSEQDENSE` type, uses `MatCreateSeqDense()`
 48: . -mat_type mpidense - `MATMPIDENSE` type, uses `MatCreateDense()`
 49: . -mat_type seqbaij  - `MATSEQBAIJ` type, uses `MatCreateSeqBAIJ()`
 50: - -mat_type mpibaij  - `MATMPIBAIJ` type, uses `MatCreateBAIJ()`

 52:    See the manpages for particular formats (e.g., `MATSEQAIJ`)
 53:    for additional format-specific options.

 55:   Level: beginner

 57:   Notes:
 58:   The default matrix type is `MATAIJ`, using the routines `MatCreateSeqAIJ()` or
 59:   `MatCreateAIJ()` if you do not set a type in the options database. If you never call
 60:   `MatSetType()` or `MatSetFromOptions()` it will generate an error when you try to use the
 61:   matrix.

 63: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MatCreateAIJ()`,
 64:           `MatCreateSeqDense()`, `MatCreateDense()`,
 65:           `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`,
 66:           `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`,
 67:           `MatConvert()`
 68: @*/
 69: PetscErrorCode MatCreate(MPI_Comm comm, Mat *A)
 70: {
 71:   Mat B;

 73:   PetscFunctionBegin;
 74:   PetscAssertPointer(A, 2);

 76:   *A = NULL;
 77:   PetscCall(MatInitializePackage());

 79:   PetscCall(PetscHeaderCreate(B, MAT_CLASSID, "Mat", "Matrix", "Mat", comm, MatDestroy, MatView));
 80:   PetscCall(PetscLayoutCreate(comm, &B->rmap));
 81:   PetscCall(PetscLayoutCreate(comm, &B->cmap));
 82:   PetscCall(PetscStrallocpy(VECSTANDARD, &B->defaultvectype));
 83:   PetscCall(PetscStrallocpy(PETSCRANDER48, &B->defaultrandtype));

 85:   B->symmetric                   = PETSC_BOOL3_UNKNOWN;
 86:   B->hermitian                   = PETSC_BOOL3_UNKNOWN;
 87:   B->structurally_symmetric      = PETSC_BOOL3_UNKNOWN;
 88:   B->spd                         = PETSC_BOOL3_UNKNOWN;
 89:   B->symmetry_eternal            = PETSC_FALSE;
 90:   B->structural_symmetry_eternal = PETSC_FALSE;

 92:   B->congruentlayouts = PETSC_DECIDE;
 93:   B->preallocated     = PETSC_FALSE;
 94: #if defined(PETSC_HAVE_DEVICE)
 95:   B->boundtocpu = PETSC_TRUE;
 96: #endif
 97:   *A = B;
 98:   PetscFunctionReturn(PETSC_SUCCESS);
 99: }
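
/* Usage sketch (illustrative, not part of the original gcreate.c): the standard
   MatCreate()/MatSetSizes()/MatSetFromOptions()/MatSetUp() workflow described above,
   assembling a global identity. The global size N and the inserted values are example choices. */
static PetscErrorCode ExampleCreateIdentity(MPI_Comm comm, PetscInt N, Mat *A)
{
  PetscInt rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, PETSC_DECIDE, PETSC_DECIDE, N, N));
  PetscCall(MatSetFromOptions(*A)); /* type chosen with -mat_type, MATAIJ by default */
  PetscCall(MatSetUp(*A));
  PetscCall(MatGetOwnershipRange(*A, &rstart, &rend));
  for (PetscInt i = rstart; i < rend; i++) PetscCall(MatSetValue(*A, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}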

101: /*@C
102:   MatCreateFromOptions - Creates a matrix whose type is set from the options database

104:   Collective

106:   Input Parameters:
107: + comm   - MPI communicator
108: . prefix - [optional] prefix for the options database
109: . bs     - the blocksize (commonly 1)
110: . m      - the local number of rows (or `PETSC_DECIDE`)
111: . n      - the local number of columns (or `PETSC_DECIDE` or `PETSC_DETERMINE`)
112: . M      - the global number of rows (or `PETSC_DETERMINE`)
113: - N      - the global number of columns (or `PETSC_DETERMINE`)

115:   Output Parameter:
116: . A - the matrix

118:   Options Database Key:
119: . -mat_type - see `MatType`, for example `aij`, `aijcusparse`, `baij`, `sbaij`, `dense`, defaults to `aij`

121:   Level: beginner

123: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MatCreateAIJ()`,
124:           `MatCreateSeqDense()`, `MatCreateDense()`,
125:           `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`,
126:           `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`,
127:           `MatConvert()`, `MatCreate()`
128: @*/
129: PetscErrorCode MatCreateFromOptions(MPI_Comm comm, const char *prefix, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, Mat *A)
130: {
131:   PetscFunctionBegin;
132:   PetscAssertPointer(A, 8);
133:   PetscCall(MatCreate(comm, A));
134:   if (prefix) PetscCall(MatSetOptionsPrefix(*A, prefix));
135:   PetscCall(MatSetBlockSize(*A, bs));
136:   PetscCall(MatSetSizes(*A, m, n, M, N));
137:   PetscCall(MatSetFromOptions(*A));
138:   PetscFunctionReturn(PETSC_SUCCESS);
139: }
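
/* Usage sketch (illustrative, not part of the original gcreate.c): MatCreateFromOptions()
   collapses the MatCreate()/MatSetOptionsPrefix()/MatSetBlockSize()/MatSetSizes()/
   MatSetFromOptions() sequence above into one call. The prefix "sub_", block size 1, and
   global size 128 are example choices. */
static PetscErrorCode ExampleCreateFromOptions(MPI_Comm comm, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreateFromOptions(comm, "sub_", 1, PETSC_DECIDE, PETSC_DECIDE, 128, 128, A));
  /* the type can now be selected at run time with -sub_mat_type <type> */
  PetscFunctionReturn(PETSC_SUCCESS);
}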

141: /*@
142:   MatSetErrorIfFailure - Causes `Mat` to generate an immediate error when a problem, for example a zero pivot, is detected.

144:   Logically Collective

146:   Input Parameters:
147: + mat - matrix obtained from `MatCreate()`
148: - flg - `PETSC_TRUE` indicates you want the error generated

150:   Level: advanced

152:   Note:
153:   If this flag is not set then the matrix operation will note the error and continue. The error may cause a later `PC` or `KSP` error
154:   or result in a `KSPConvergedReason` indicating the method did not converge.

156: .seealso: [](ch_matrices), `Mat`, `PCSetErrorIfFailure()`, `KSPConvergedReason`, `SNESConvergedReason`
157: @*/
158: PetscErrorCode MatSetErrorIfFailure(Mat mat, PetscBool flg)
159: {
160:   PetscFunctionBegin;
163:   mat->erroriffailure = flg;
164:   PetscFunctionReturn(PETSC_SUCCESS);
165: }

167: /*@
168:   MatSetSizes - Sets the local and global sizes, and checks to determine compatibility

170:   Collective

172:   Input Parameters:
173: + A - the matrix
174: . m - number of local rows (or `PETSC_DECIDE`)
175: . n - number of local columns (or `PETSC_DECIDE`)
176: . M - number of global rows (or `PETSC_DETERMINE`)
177: - N - number of global columns (or `PETSC_DETERMINE`)

179:   Level: beginner

181:   Notes:
182:   `m` (`n`) and `M` (`N`) cannot both be `PETSC_DECIDE`.
183:   If one processor calls this with `M` (`N`) of `PETSC_DETERMINE` then all processors must; otherwise the program will hang.

185:   If `PETSC_DECIDE` is not used for the arguments `m` and `n`, then the
186:   user must ensure that they are chosen to be compatible with the
187:   vectors. To do this, one first considers the matrix-vector product
188:   `y = A x`. The `m` that is used in the above routine must match the
189:   local size used in the vector creation routine `VecCreateMPI()` for `y`.
190:   Likewise, the `n` used must match that used as the local size in
191:   `VecCreateMPI()` for `x`.

193:   You cannot change the sizes once they have been set.

195:   The sizes must be set before `MatSetUp()` or `MatXXXSetPreallocation()` is called.

197: .seealso: [](ch_matrices), `Mat`, `MatGetSize()`, `PetscSplitOwnership()`
198: @*/
199: PetscErrorCode MatSetSizes(Mat A, PetscInt m, PetscInt n, PetscInt M, PetscInt N)
200: {
201:   PetscFunctionBegin;
205:   PetscCheck(M <= 0 || m <= M, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local row size %" PetscInt_FMT " cannot be larger than global row size %" PetscInt_FMT, m, M);
206:   PetscCheck(N <= 0 || n <= N, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local column size %" PetscInt_FMT " cannot be larger than global column size %" PetscInt_FMT, n, N);
207:   PetscCheck((A->rmap->n < 0 || A->rmap->N < 0) || (A->rmap->n == m && (M <= 0 || A->rmap->N == M)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot change/reset row sizes to %" PetscInt_FMT " local %" PetscInt_FMT " global after previously setting them to %" PetscInt_FMT " local %" PetscInt_FMT " global", m, M,
208:              A->rmap->n, A->rmap->N);
209:   PetscCheck((A->cmap->n < 0 || A->cmap->N < 0) || (A->cmap->n == n && (N <= 0 || A->cmap->N == N)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot change/reset column sizes to %" PetscInt_FMT " local %" PetscInt_FMT " global after previously setting them to %" PetscInt_FMT " local %" PetscInt_FMT " global", n, N,
210:              A->cmap->n, A->cmap->N);
211:   A->rmap->n = m;
212:   A->cmap->n = n;
213:   A->rmap->N = M > -1 ? M : A->rmap->N;
214:   A->cmap->N = N > -1 ? N : A->cmap->N;
215:   PetscFunctionReturn(PETSC_SUCCESS);
216: }
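
/* Usage sketch (illustrative, not part of the original gcreate.c): choosing local sizes that are
   compatible with the vectors in y = A x, as the notes above require. mloc and nloc are assumed
   to be the local sizes already chosen for y and x respectively. */
static PetscErrorCode ExampleSetCompatibleSizes(Mat A, PetscInt mloc, PetscInt nloc)
{
  Vec x, y;

  PetscFunctionBegin;
  PetscCall(MatSetSizes(A, mloc, nloc, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  /* MatCreateVecs() returns vectors whose local sizes match the matrix layouts by construction */
  PetscCall(MatCreateVecs(A, &x, &y));
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y)); /* y has local size mloc, x has local size nloc */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscFunctionReturn(PETSC_SUCCESS);
}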

218: /*@
219:   MatSetFromOptions - Creates a matrix where the type is determined
220:   from the options database.

222:   Collective

224:   Input Parameter:
225: . B - the matrix

227:   Options Database Keys:
228: + -mat_type seqaij   - `MATSEQAIJ` type, uses `MatCreateSeqAIJ()`
229: . -mat_type mpiaij   - `MATMPIAIJ` type, uses `MatCreateAIJ()`
230: . -mat_type seqdense - `MATSEQDENSE` type, uses `MatCreateSeqDense()`
231: . -mat_type mpidense - `MATMPIDENSE` type, uses `MatCreateDense()`
232: . -mat_type seqbaij  - `MATSEQBAIJ` type, uses `MatCreateSeqBAIJ()`
233: - -mat_type mpibaij  - `MATMPIBAIJ` type, uses `MatCreateBAIJ()`

235:    See the manpages for particular formats (e.g., `MATSEQAIJ`)
236:    for additional format-specific options.

238:   Level: beginner

240:   Notes:
241:   Generates a parallel MPI matrix if the communicator has more than one processor.  The default
242:   matrix type is `MATAIJ`, using the routines `MatCreateSeqAIJ()` and `MatCreateAIJ()` if you
243:   do not select a type in the options database.

245: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MatCreateAIJ()`,
246:           `MatCreateSeqDense()`, `MatCreateDense()`,
247:           `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`,
248:           `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`,
249:           `MatConvert()`
250: @*/
251: PetscErrorCode MatSetFromOptions(Mat B)
252: {
253:   const char *deft = MATAIJ;
254:   char        type[256];
255:   PetscBool   flg, set;
256:   PetscInt    bind_below = 0;

258:   PetscFunctionBegin;

261:   PetscObjectOptionsBegin((PetscObject)B);

263:   if (B->rmap->bs < 0) {
264:     PetscInt newbs = -1;
265:     PetscCall(PetscOptionsInt("-mat_block_size", "Set the blocksize used to store the matrix", "MatSetBlockSize", newbs, &newbs, &flg));
266:     if (flg) {
267:       PetscCall(PetscLayoutSetBlockSize(B->rmap, newbs));
268:       PetscCall(PetscLayoutSetBlockSize(B->cmap, newbs));
269:     }
270:   }

272:   PetscCall(PetscOptionsFList("-mat_type", "Matrix type", "MatSetType", MatList, deft, type, 256, &flg));
273:   if (flg) {
274:     PetscCall(MatSetType(B, type));
275:   } else if (!((PetscObject)B)->type_name) {
276:     PetscCall(MatSetType(B, deft));
277:   }

279:   PetscCall(PetscOptionsName("-mat_is_symmetric", "Checks if mat is symmetric on MatAssemblyEnd()", "MatIsSymmetric", &B->checksymmetryonassembly));
280:   PetscCall(PetscOptionsReal("-mat_is_symmetric", "Checks if mat is symmetric on MatAssemblyEnd()", "MatIsSymmetric", B->checksymmetrytol, &B->checksymmetrytol, NULL));
281:   PetscCall(PetscOptionsBool("-mat_null_space_test", "Checks if provided null space is correct in MatAssemblyEnd()", "MatSetNullSpaceTest", B->checknullspaceonassembly, &B->checknullspaceonassembly, NULL));
282:   PetscCall(PetscOptionsBool("-mat_error_if_failure", "Generate an error if an error occurs when factoring the matrix", "MatSetErrorIfFailure", B->erroriffailure, &B->erroriffailure, NULL));

284:   PetscTryTypeMethod(B, setfromoptions, PetscOptionsObject);

286:   flg = PETSC_FALSE;
287:   PetscCall(PetscOptionsBool("-mat_new_nonzero_location_err", "Generate an error if new nonzeros are created in the matrix structure (useful to test preallocation)", "MatSetOption", flg, &flg, &set));
288:   if (set) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, flg));
289:   flg = PETSC_FALSE;
290:   PetscCall(PetscOptionsBool("-mat_new_nonzero_allocation_err", "Generate an error if new nonzeros are allocated in the matrix structure (useful to test preallocation)", "MatSetOption", flg, &flg, &set));
291:   if (set) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, flg));
292:   flg = PETSC_FALSE;
293:   PetscCall(PetscOptionsBool("-mat_ignore_zero_entries", "For AIJ/IS matrices this will stop zero values from creating a zero location in the matrix", "MatSetOption", flg, &flg, &set));
294:   if (set) PetscCall(MatSetOption(B, MAT_IGNORE_ZERO_ENTRIES, flg));

296:   flg = PETSC_FALSE;
297:   PetscCall(PetscOptionsBool("-mat_form_explicit_transpose", "Hint to form an explicit transpose for operations like MatMultTranspose", "MatSetOption", flg, &flg, &set));
298:   if (set) PetscCall(MatSetOption(B, MAT_FORM_EXPLICIT_TRANSPOSE, flg));

300:   /* Bind to CPU if below a user-specified size threshold.
301:    * This perhaps belongs in the options for the GPU Mat types, but MatBindToCPU() does nothing when called on non-GPU types,
302:    * and putting it here makes it more maintainable than duplicating this for all. */
303:   PetscCall(PetscOptionsInt("-mat_bind_below", "Set the size threshold (in local rows) below which the Mat is bound to the CPU", "MatBindToCPU", bind_below, &bind_below, &flg));
304:   if (flg && B->rmap->n < bind_below) PetscCall(MatBindToCPU(B, PETSC_TRUE));

306:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
307:   PetscCall(PetscObjectProcessOptionsHandlers((PetscObject)B, PetscOptionsObject));
308:   PetscOptionsEnd();
309:   PetscFunctionReturn(PETSC_SUCCESS);
310: }
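
/* Illustrative sketch (not part of the original gcreate.c): programmatic equivalents of some of
   the options parsed above, for codes that want the same behavior without relying on the options
   database. The particular choices below are examples only. */
static PetscErrorCode ExampleConfigureWithoutOptionsDatabase(Mat B)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(B, MATAIJ));                                       /* -mat_type aij */
  PetscCall(MatSetBlockSize(B, 1));                                       /* -mat_block_size 1 */
  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); /* -mat_new_nonzero_allocation_err */
  PetscCall(MatSetOption(B, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE));        /* -mat_ignore_zero_entries */
  PetscCall(MatBindToCPU(B, PETSC_TRUE));                                 /* what -mat_bind_below does when the local size is below the threshold */
  PetscFunctionReturn(PETSC_SUCCESS);
}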

312: /*@C
313:   MatXAIJSetPreallocation - set preallocation for serial and parallel `MATAIJ`, `MATBAIJ`, and `MATSBAIJ` matrices and their unassembled versions.

315:   Collective

317:   Input Parameters:
318: + A     - matrix being preallocated
319: . bs    - block size
320: . dnnz  - number of nonzero column blocks per block row of diagonal part of parallel matrix
321: . onnz  - number of nonzero column blocks per block row of off-diagonal part of parallel matrix
322: . dnnzu - number of nonzero column blocks per block row of upper-triangular part of diagonal part of parallel matrix
323: - onnzu - number of nonzero column blocks per block row of upper-triangular part of off-diagonal part of parallel matrix

325:   Level: beginner

327: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocation()`,
328:           `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`,
329:           `PetscSplitOwnership()`
330: @*/
331: PetscErrorCode MatXAIJSetPreallocation(Mat A, PetscInt bs, const PetscInt dnnz[], const PetscInt onnz[], const PetscInt dnnzu[], const PetscInt onnzu[])
332: {
333:   PetscInt cbs;
334:   void (*aij)(void);
335:   void (*is)(void);
336:   void (*hyp)(void) = NULL;

338:   PetscFunctionBegin;
339:   if (bs != PETSC_DECIDE) { /* don't mess with an already set block size */
340:     PetscCall(MatSetBlockSize(A, bs));
341:   }
342:   PetscCall(PetscLayoutSetUp(A->rmap));
343:   PetscCall(PetscLayoutSetUp(A->cmap));
344:   PetscCall(MatGetBlockSizes(A, &bs, &cbs));
345:   /* these routines assume bs == cbs; this should be checked somehow */
346:   PetscCall(MatSeqBAIJSetPreallocation(A, bs, 0, dnnz));
347:   PetscCall(MatMPIBAIJSetPreallocation(A, bs, 0, dnnz, 0, onnz));
348:   PetscCall(MatSeqSBAIJSetPreallocation(A, bs, 0, dnnzu));
349:   PetscCall(MatMPISBAIJSetPreallocation(A, bs, 0, dnnzu, 0, onnzu));
350:   /*
351:     In general, we have to do extra work to preallocate for scalar (AIJ) or unassembled (IS) matrices so we check whether it will do any
352:     good before going on with it.
353:   */
354:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatMPIAIJSetPreallocation_C", &aij));
355:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatISSetPreallocation_C", &is));
356: #if defined(PETSC_HAVE_HYPRE)
357:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatHYPRESetPreallocation_C", &hyp));
358: #endif
359:   if (!aij && !is && !hyp) PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSeqAIJSetPreallocation_C", &aij));
360:   if (aij || is || hyp) {
361:     if (bs == cbs && bs == 1) {
362:       PetscCall(MatSeqAIJSetPreallocation(A, 0, dnnz));
363:       PetscCall(MatMPIAIJSetPreallocation(A, 0, dnnz, 0, onnz));
364:       PetscCall(MatISSetPreallocation(A, 0, dnnz, 0, onnz));
365: #if defined(PETSC_HAVE_HYPRE)
366:       PetscCall(MatHYPRESetPreallocation(A, 0, dnnz, 0, onnz));
367: #endif
368:     } else { /* Convert block-row preallocation to scalar-row */
369:       PetscInt i, m, *sdnnz, *sonnz;
370:       PetscCall(MatGetLocalSize(A, &m, NULL));
371:       PetscCall(PetscMalloc2((!!dnnz) * m, &sdnnz, (!!onnz) * m, &sonnz));
372:       for (i = 0; i < m; i++) {
373:         if (dnnz) sdnnz[i] = dnnz[i / bs] * cbs;
374:         if (onnz) sonnz[i] = onnz[i / bs] * cbs;
375:       }
376:       PetscCall(MatSeqAIJSetPreallocation(A, 0, dnnz ? sdnnz : NULL));
377:       PetscCall(MatMPIAIJSetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL));
378:       PetscCall(MatISSetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL));
379: #if defined(PETSC_HAVE_HYPRE)
380:       PetscCall(MatHYPRESetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL));
381: #endif
382:       PetscCall(PetscFree2(sdnnz, sonnz));
383:     }
384:   }
385:   PetscFunctionReturn(PETSC_SUCCESS);
386: }
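
/* Usage sketch (illustrative, not part of the original gcreate.c): MatXAIJSetPreallocation() lets
   assembly code preallocate once, regardless of whether -mat_type later selects AIJ, BAIJ, SBAIJ,
   IS, or HYPRE. Here bs is the block size and nblockrows the number of locally owned block rows,
   both assumed known to the caller; the uniform estimate of 5 diagonal and 2 off-diagonal (block)
   nonzeros per (block) row is purely for illustration. */
static PetscErrorCode ExampleTypeAgnosticPreallocation(Mat A, PetscInt bs, PetscInt nblockrows)
{
  PetscInt *dnnz, *onnz;

  PetscFunctionBegin;
  PetscCall(PetscMalloc2(nblockrows, &dnnz, nblockrows, &onnz));
  for (PetscInt i = 0; i < nblockrows; i++) {
    dnnz[i] = 5;
    onnz[i] = 2;
  }
  /* reuse the full row counts as upper-triangular estimates; SBAIJ types simply overallocate */
  PetscCall(MatXAIJSetPreallocation(A, bs, dnnz, onnz, dnnz, onnz));
  PetscCall(PetscFree2(dnnz, onnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}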

388: /*
389:         Merges some information from C's header into A; the C object is then destroyed

391:         This is somewhat different from MatHeaderReplace(); it would be nice to merge the code
392: */
393: PetscErrorCode MatHeaderMerge(Mat A, Mat *C)
394: {
395:   PetscInt         refct;
396:   PetscOps         Abops;
397:   struct _MatOps   Aops;
398:   char            *mtype, *mname, *mprefix;
399:   Mat_Product     *product;
400:   Mat_Redundant   *redundant;
401:   PetscObjectState state;

403:   PetscFunctionBegin;
406:   if (A == *C) PetscFunctionReturn(PETSC_SUCCESS);
407:   PetscCheckSameComm(A, 1, *C, 2);
408:   /* save the parts of A we need */
409:   Abops     = ((PetscObject)A)->bops[0];
410:   Aops      = A->ops[0];
411:   refct     = ((PetscObject)A)->refct;
412:   mtype     = ((PetscObject)A)->type_name;
413:   mname     = ((PetscObject)A)->name;
414:   state     = ((PetscObject)A)->state;
415:   mprefix   = ((PetscObject)A)->prefix;
416:   product   = A->product;
417:   redundant = A->redundant;

419:   /* zero these so the destroy below does not free them */
420:   ((PetscObject)A)->type_name = NULL;
421:   ((PetscObject)A)->name      = NULL;

423:   /*
424:      free all the interior data structures from mat
425:      cannot use PetscUseTypeMethod(A,destroy); because compiler
426:      thinks it may print NULL type_name and name
427:   */
428:   PetscTryTypeMethod(A, destroy);

430:   PetscCall(PetscFree(A->defaultvectype));
431:   PetscCall(PetscFree(A->defaultrandtype));
432:   PetscCall(PetscLayoutDestroy(&A->rmap));
433:   PetscCall(PetscLayoutDestroy(&A->cmap));
434:   PetscCall(PetscFunctionListDestroy(&((PetscObject)A)->qlist));
435:   PetscCall(PetscObjectListDestroy(&((PetscObject)A)->olist));
436:   PetscCall(PetscComposedQuantitiesDestroy((PetscObject)A));

438:   /* copy C over to A */
439:   PetscCall(PetscFree(A->factorprefix));
440:   PetscCall(PetscMemcpy(A, *C, sizeof(struct _p_Mat)));

442:   /* return the parts of A we saved */
443:   ((PetscObject)A)->bops[0]   = Abops;
444:   A->ops[0]                   = Aops;
445:   ((PetscObject)A)->refct     = refct;
446:   ((PetscObject)A)->type_name = mtype;
447:   ((PetscObject)A)->name      = mname;
448:   ((PetscObject)A)->prefix    = mprefix;
449:   ((PetscObject)A)->state     = state + 1;
450:   A->product                  = product;
451:   A->redundant                = redundant;

453:   /* since these two are copied into A we do not want them destroyed in C */
454:   ((PetscObject)*C)->qlist = NULL;
455:   ((PetscObject)*C)->olist = NULL;

457:   PetscCall(PetscHeaderDestroy(C));
458:   PetscFunctionReturn(PETSC_SUCCESS);
459: }
460: /*
461:         Replace A's header with that of C; the C object is then destroyed

463:         This is essentially code moved from MatDestroy()

465:         This is somewhat different from MatHeaderMerge(); it would be nice to merge the code

467:         Used in DM, hence it is declared PETSC_EXTERN
468: */
469: PETSC_EXTERN PetscErrorCode MatHeaderReplace(Mat A, Mat *C)
470: {
471:   PetscInt         refct;
472:   PetscObjectState state;
473:   struct _p_Mat    buffer;
474:   MatStencilInfo   stencil;

476:   PetscFunctionBegin;
479:   if (A == *C) PetscFunctionReturn(PETSC_SUCCESS);
480:   PetscCheckSameComm(A, 1, *C, 2);
481:   PetscCheck(((PetscObject)*C)->refct == 1, PetscObjectComm((PetscObject)*C), PETSC_ERR_ARG_WRONGSTATE, "Object C has refct %" PetscInt_FMT " > 1, would leave hanging reference", ((PetscObject)*C)->refct);

483:   /* swap C and A */
484:   refct   = ((PetscObject)A)->refct;
485:   state   = ((PetscObject)A)->state;
486:   stencil = A->stencil;
487:   PetscCall(PetscMemcpy(&buffer, A, sizeof(struct _p_Mat)));
488:   PetscCall(PetscMemcpy(A, *C, sizeof(struct _p_Mat)));
489:   PetscCall(PetscMemcpy(*C, &buffer, sizeof(struct _p_Mat)));
490:   ((PetscObject)A)->refct = refct;
491:   ((PetscObject)A)->state = state + 1;
492:   A->stencil              = stencil;

494:   ((PetscObject)*C)->refct = 1;
495:   PetscCall(MatShellSetOperation(*C, MATOP_DESTROY, (void (*)(void))NULL));
496:   PetscCall(MatDestroy(C));
497:   PetscFunctionReturn(PETSC_SUCCESS);
498: }

500: /*@
501:   MatBindToCPU - marks a matrix to temporarily stay on the CPU and perform computations on the CPU

503:   Logically Collective

505:   Input Parameters:
506: + A   - the matrix
507: - flg - bind to the CPU if value of `PETSC_TRUE`

509:   Level: intermediate

511: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`
512: @*/
513: PetscErrorCode MatBindToCPU(Mat A, PetscBool flg)
514: {
515:   PetscFunctionBegin;
518: #if defined(PETSC_HAVE_DEVICE)
519:   if (A->boundtocpu == flg) PetscFunctionReturn(PETSC_SUCCESS);
520:   A->boundtocpu = flg;
521:   PetscTryTypeMethod(A, bindtocpu, flg);
522: #endif
523:   PetscFunctionReturn(PETSC_SUCCESS);
524: }

526: /*@
527:   MatBoundToCPU - query if a matrix is bound to the CPU

529:   Input Parameter:
530: . A - the matrix

532:   Output Parameter:
533: . flg - the logical flag

535:   Level: intermediate

537: .seealso: [](ch_matrices), `Mat`, `MatBindToCPU()`
538: @*/
539: PetscErrorCode MatBoundToCPU(Mat A, PetscBool *flg)
540: {
541:   PetscFunctionBegin;
543:   PetscAssertPointer(flg, 2);
544: #if defined(PETSC_HAVE_DEVICE)
545:   *flg = A->boundtocpu;
546: #else
547:   *flg = PETSC_TRUE;
548: #endif
549:   PetscFunctionReturn(PETSC_SUCCESS);
550: }
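
/* Illustrative sketch (not part of the original gcreate.c): temporarily binding a (possibly GPU)
   matrix to the CPU around a phase known to run better there, then restoring the previous state
   queried with MatBoundToCPU(). */
static PetscErrorCode ExampleBindAroundCPUPhase(Mat A)
{
  PetscBool wasbound;

  PetscFunctionBegin;
  PetscCall(MatBoundToCPU(A, &wasbound));
  PetscCall(MatBindToCPU(A, PETSC_TRUE));
  /* ... CPU-only work on A ... */
  PetscCall(MatBindToCPU(A, wasbound));
  PetscFunctionReturn(PETSC_SUCCESS);
}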

552: PetscErrorCode MatSetValuesCOO_Basic(Mat A, const PetscScalar coo_v[], InsertMode imode)
553: {
554:   IS              is_coo_i, is_coo_j;
555:   const PetscInt *coo_i, *coo_j;
556:   PetscInt        n, n_i, n_j;
557:   PetscScalar     zero = 0.;

559:   PetscFunctionBegin;
560:   PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_coo_i", (PetscObject *)&is_coo_i));
561:   PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_coo_j", (PetscObject *)&is_coo_j));
562:   PetscCheck(is_coo_i, PetscObjectComm((PetscObject)A), PETSC_ERR_COR, "Missing coo_i IS");
563:   PetscCheck(is_coo_j, PetscObjectComm((PetscObject)A), PETSC_ERR_COR, "Missing coo_j IS");
564:   PetscCall(ISGetLocalSize(is_coo_i, &n_i));
565:   PetscCall(ISGetLocalSize(is_coo_j, &n_j));
566:   PetscCheck(n_i == n_j, PETSC_COMM_SELF, PETSC_ERR_COR, "Wrong local size %" PetscInt_FMT " != %" PetscInt_FMT, n_i, n_j);
567:   PetscCall(ISGetIndices(is_coo_i, &coo_i));
568:   PetscCall(ISGetIndices(is_coo_j, &coo_j));
569:   if (imode != ADD_VALUES) PetscCall(MatZeroEntries(A));
570:   for (n = 0; n < n_i; n++) PetscCall(MatSetValue(A, coo_i[n], coo_j[n], coo_v ? coo_v[n] : zero, ADD_VALUES));
571:   PetscCall(ISRestoreIndices(is_coo_i, &coo_i));
572:   PetscCall(ISRestoreIndices(is_coo_j, &coo_j));
573:   PetscFunctionReturn(PETSC_SUCCESS);
574: }

576: PetscErrorCode MatSetPreallocationCOO_Basic(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
577: {
578:   Mat         preallocator;
579:   IS          is_coo_i, is_coo_j;
580:   PetscScalar zero = 0.0;

582:   PetscFunctionBegin;
583:   PetscCall(PetscLayoutSetUp(A->rmap));
584:   PetscCall(PetscLayoutSetUp(A->cmap));
585:   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &preallocator));
586:   PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
587:   PetscCall(MatSetSizes(preallocator, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
588:   PetscCall(MatSetLayouts(preallocator, A->rmap, A->cmap));
589:   PetscCall(MatSetUp(preallocator));
590:   for (PetscCount n = 0; n < ncoo; n++) PetscCall(MatSetValue(preallocator, coo_i[n], coo_j[n], zero, INSERT_VALUES));
591:   PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
592:   PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
593:   PetscCall(MatPreallocatorPreallocate(preallocator, PETSC_TRUE, A));
594:   PetscCall(MatDestroy(&preallocator));
595:   PetscCheck(ncoo <= PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
596:   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncoo, coo_i, PETSC_COPY_VALUES, &is_coo_i));
597:   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncoo, coo_j, PETSC_COPY_VALUES, &is_coo_j));
598:   PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc_coo_i", (PetscObject)is_coo_i));
599:   PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc_coo_j", (PetscObject)is_coo_j));
600:   PetscCall(ISDestroy(&is_coo_i));
601:   PetscCall(ISDestroy(&is_coo_j));
602:   PetscFunctionReturn(PETSC_SUCCESS);
603: }

605: /*@C
606:   MatSetPreallocationCOO - set preallocation for matrices using a coordinate format of the entries with global indices

608:   Collective

610:   Input Parameters:
611: + A     - matrix being preallocated
612: . ncoo  - number of entries
613: . coo_i - row indices
614: - coo_j - column indices

616:   Level: beginner

618:   Notes:
619:   The indices `coo_i` and `coo_j` may be modified within this function. The caller should not rely on them
620:   having any specific value after this function returns. The arrays can be freed or reused immediately
621:   after this function returns.

623:   Entries can be repeated, see `MatSetValuesCOO()`. Entries with negative row or column indices are allowed
624:   but will be ignored. The corresponding entries in `MatSetValuesCOO()` will be ignored too. Remote entries
625:   are allowed and will be properly added or inserted to the matrix, unless the matrix option `MAT_IGNORE_OFF_PROC_ENTRIES`
626:   is set, in which case remote entries are ignored, or `MAT_NO_OFF_PROC_ENTRIES` is set, in which case an error will be generated.

628:   If you just want to create a sequential AIJ matrix (`MATSEQAIJ`), and your matrix entries in COO format are unique, you can also use
629:   `MatCreateSeqAIJFromTriple()`. But that is not recommended for iterative applications.

631: .seealso: [](ch_matrices), `Mat`, `MatSetValuesCOO()`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`,
632:           `MatMPIBAIJSetPreallocation()`, `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`, `MatSetPreallocationCOOLocal()`,
633:           `DMSetMatrixPreallocateSkip()`, `MatCreateSeqAIJFromTriple()`
634: @*/
635: PetscErrorCode MatSetPreallocationCOO(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
636: {
637:   PetscErrorCode (*f)(Mat, PetscCount, PetscInt[], PetscInt[]) = NULL;

639:   PetscFunctionBegin;
642:   if (ncoo) PetscAssertPointer(coo_i, 3);
643:   if (ncoo) PetscAssertPointer(coo_j, 4);
644:   PetscCall(PetscLayoutSetUp(A->rmap));
645:   PetscCall(PetscLayoutSetUp(A->cmap));
646:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetPreallocationCOO_C", &f));

648:   PetscCall(PetscLogEventBegin(MAT_PreallCOO, A, 0, 0, 0));
649:   if (f) {
650:     PetscCall((*f)(A, ncoo, coo_i, coo_j));
651:   } else { /* allow fallback, very slow */
652:     PetscCall(MatSetPreallocationCOO_Basic(A, ncoo, coo_i, coo_j));
653:   }
654:   PetscCall(PetscLogEventEnd(MAT_PreallCOO, A, 0, 0, 0));
655:   A->preallocated = PETSC_TRUE;
656:   A->nonzerostate++;
657:   PetscFunctionReturn(PETSC_SUCCESS);
658: }
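
/* Usage sketch (illustrative, not part of the original gcreate.c): the two-phase COO assembly
   described above, on a matrix assumed to be sequential with global size at least 3. The repeated
   (0,0) entry and the negative row index are included only to show the behaviors in the notes. */
static PetscErrorCode ExampleCOOAssembly(Mat A)
{
  PetscInt    coo_i[] = {0, 0, -1};
  PetscInt    coo_j[] = {0, 0, 2};
  PetscScalar coo_v[] = {1.0, 2.0, 5.0};

  PetscFunctionBegin;
  /* the index arrays may be modified by this call and can be reused or freed afterwards */
  PetscCall(MatSetPreallocationCOO(A, 3, coo_i, coo_j));
  /* values follow the original index order; the two (0,0) entries are summed to 3.0 and the
     entry with row index -1 is ignored */
  PetscCall(MatSetValuesCOO(A, coo_v, INSERT_VALUES));
  PetscFunctionReturn(PETSC_SUCCESS);
}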

660: /*@C
661:   MatSetPreallocationCOOLocal - set preallocation for matrices using a coordinate format of the entries with local indices

663:   Collective

665:   Input Parameters:
666: + A     - matrix being preallocated
667: . ncoo  - number of entries
668: . coo_i - row indices (local numbering; may be modified)
669: - coo_j - column indices (local numbering; may be modified)

671:   Level: beginner

673:   Notes:
674:   The local indices are translated using the local to global mapping, thus `MatSetLocalToGlobalMapping()` must have been
675:   called prior to this function. For matrices created with `DMCreateMatrix()` the local to global mapping is often already provided.

677:   The indices `coo_i` and `coo_j` may be modified within this function. They might be translated to corresponding global
678:   indices, but the caller should not rely on them having any specific value after this function returns. The arrays
679:   can be freed or reused immediately after this function returns.

681:   Entries can be repeated, see `MatSetValuesCOO()`. Entries with negative row or column indices are allowed
682:   but will be ignored. The corresponding entries in `MatSetValuesCOO()` will be ignored too. Remote entries
683:   are allowed and will be properly added or inserted to the matrix.

685: .seealso: [](ch_matrices), `Mat`, `MatSetValuesCOO()`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`,
686:           `MatMPIBAIJSetPreallocation()`, `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`, `MatSetPreallocationCOO()`,
687:           `DMSetMatrixPreallocateSkip()`
688: @*/
689: PetscErrorCode MatSetPreallocationCOOLocal(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
690: {
691:   PetscErrorCode (*f)(Mat, PetscCount, PetscInt[], PetscInt[]) = NULL;

693:   PetscFunctionBegin;
696:   if (ncoo) PetscAssertPointer(coo_i, 3);
697:   if (ncoo) PetscAssertPointer(coo_j, 4);
698:   PetscCheck(ncoo <= PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
699:   PetscCall(PetscLayoutSetUp(A->rmap));
700:   PetscCall(PetscLayoutSetUp(A->cmap));

702:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", &f));
703:   if (f) {
704:     PetscCall((*f)(A, ncoo, coo_i, coo_j));
705:     A->nonzerostate++;
706:   } else {
707:     ISLocalToGlobalMapping ltog_row, ltog_col;
708:     PetscCall(MatGetLocalToGlobalMapping(A, &ltog_row, &ltog_col));
709:     if (ltog_row) PetscCall(ISLocalToGlobalMappingApply(ltog_row, ncoo, coo_i, coo_i));
710:     if (ltog_col) PetscCall(ISLocalToGlobalMappingApply(ltog_col, ncoo, coo_j, coo_j));
711:     PetscCall(MatSetPreallocationCOO(A, ncoo, coo_i, coo_j));
712:   }
713:   A->preallocated = PETSC_TRUE;
714:   PetscFunctionReturn(PETSC_SUCCESS);
715: }
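
/* Usage sketch (illustrative, not part of the original gcreate.c): COO preallocation with local
   indices. The matrix is assumed to already carry a local-to-global mapping, for example because
   it was created with DMCreateMatrix() or MatSetLocalToGlobalMapping() was called; the single
   (0,0) local entry is an example only. */
static PetscErrorCode ExampleCOOLocalAssembly(Mat A)
{
  PetscInt    coo_i[] = {0};
  PetscInt    coo_j[] = {0};
  PetscScalar coo_v[] = {1.0};

  PetscFunctionBegin;
  PetscCall(MatSetPreallocationCOOLocal(A, 1, coo_i, coo_j)); /* local indices, possibly translated in place */
  PetscCall(MatSetValuesCOO(A, coo_v, INSERT_VALUES));
  PetscFunctionReturn(PETSC_SUCCESS);
}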

717: /*@
718:   MatSetValuesCOO - set values at once in a matrix preallocated using `MatSetPreallocationCOO()`

720:   Collective

722:   Input Parameters:
723: + A     - matrix being preallocated
724: . coo_v - the matrix values (can be `NULL`)
725: - imode - the insert mode

727:   Level: beginner

729:   Notes:
730:   The values must follow the order of the indices prescribed with `MatSetPreallocationCOO()` or `MatSetPreallocationCOOLocal()`.

732:   When repeated entries are specified in the COO indices the `coo_v` values are first properly summed, regardless of the value of `imode`.
733:   The `imode` flag indicates if `coo_v` must be added to the current values of the matrix (`ADD_VALUES`) or overwritten (`INSERT_VALUES`).

735:   `MatAssemblyBegin()` and `MatAssemblyEnd()` do not need to be called after this routine. It automatically handles the assembly process.

737: .seealso: [](ch_matrices), `Mat`, `MatSetPreallocationCOO()`, `MatSetPreallocationCOOLocal()`, `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
738: @*/
739: PetscErrorCode MatSetValuesCOO(Mat A, const PetscScalar coo_v[], InsertMode imode)
740: {
741:   PetscErrorCode (*f)(Mat, const PetscScalar[], InsertMode) = NULL;
742:   PetscBool oldFlg;

744:   PetscFunctionBegin;
747:   MatCheckPreallocated(A, 1);
749:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetValuesCOO_C", &f));
750:   PetscCall(PetscLogEventBegin(MAT_SetVCOO, A, 0, 0, 0));
751:   if (f) {
752:     PetscCall((*f)(A, coo_v, imode)); // all known COO implementations do not use MatStash. They do their own off-proc communication
753:     PetscCall(MatGetOption(A, MAT_NO_OFF_PROC_ENTRIES, &oldFlg));
754:     PetscCall(MatSetOption(A, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); // set A->nooffprocentries to avoid costly MatStash scatter in MatAssembly
755:   } else {
756:     PetscCall(MatSetValuesCOO_Basic(A, coo_v, imode)); // fall back to MatSetValues, which might use MatStash
757:   }
758:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
759:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
760:   if (f) PetscCall(MatSetOption(A, MAT_NO_OFF_PROC_ENTRIES, oldFlg));
761:   PetscCall(PetscLogEventEnd(MAT_SetVCOO, A, 0, 0, 0));
762:   PetscFunctionReturn(PETSC_SUCCESS);
763: }
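
/* Illustrative sketch (not part of the original gcreate.c): once the sparsity pattern is fixed
   with MatSetPreallocationCOO() or MatSetPreallocationCOOLocal(), MatSetValuesCOO() can be called
   repeatedly with fresh values, the intended use in time-stepping or nonlinear loops. nsteps and
   the value update below are placeholders. */
static PetscErrorCode ExampleCOORepeatedAssembly(Mat A, PetscCount ncoo, PetscScalar coo_v[], PetscInt nsteps)
{
  PetscFunctionBegin;
  for (PetscInt step = 0; step < nsteps; step++) {
    for (PetscCount k = 0; k < ncoo; k++) coo_v[k] *= 0.5; /* stand-in for recomputing coefficients */
    PetscCall(MatSetValuesCOO(A, coo_v, INSERT_VALUES));   /* no MatAssemblyBegin()/End() needed */
    /* ... use A, for example in a linear solve ... */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}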

765: /*@
766:   MatSetBindingPropagates - Sets whether the state of being bound to the CPU for a GPU matrix type propagates to child and some other associated objects

768:   Input Parameters:
769: + A   - the matrix
770: - flg - flag indicating whether the boundtocpu flag should be propagated

772:   Level: developer

774:   Notes:
775:   If the value of `flg` is set to `PETSC_TRUE`, the following will occur
776: +   `MatCreateSubMatrices()` and `MatCreateRedundantMatrix()` - bind created matrices to CPU if the input matrix is bound to the CPU.
777: -   `MatCreateVecs()` - bind created vectors to CPU if the input matrix is bound to the CPU.

779:   The bindingpropagates flag itself is also propagated by the above routines.

781:   Developer Notes:
782:   If the fine-scale `DMDA` has the `-dm_bind_below` option set to true, then `DMCreateInterpolationScale()` calls `MatSetBindingPropagates()`
783:   on the restriction/interpolation operator to set the bindingpropagates flag to true.

785: .seealso: [](ch_matrices), `Mat`, `VecSetBindingPropagates()`, `MatGetBindingPropagates()`
786: @*/
787: PetscErrorCode MatSetBindingPropagates(Mat A, PetscBool flg)
788: {
789:   PetscFunctionBegin;
791: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
792:   A->bindingpropagates = flg;
793: #endif
794:   PetscFunctionReturn(PETSC_SUCCESS);
795: }

797: /*@
798:   MatGetBindingPropagates - Gets whether the state of being bound to the CPU for a GPU matrix type propagates to child and some other associated objects

800:   Input Parameter:
801: . A - the matrix

803:   Output Parameter:
804: . flg - flag indicating whether the boundtocpu flag will be propagated

806:   Level: developer

808: .seealso: [](ch_matrices), `Mat`, `MatSetBindingPropagates()`
809: @*/
810: PetscErrorCode MatGetBindingPropagates(Mat A, PetscBool *flg)
811: {
812:   PetscFunctionBegin;
814:   PetscAssertPointer(flg, 2);
815: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
816:   *flg = A->bindingpropagates;
817: #else
818:   *flg = PETSC_FALSE;
819: #endif
820:   PetscFunctionReturn(PETSC_SUCCESS);
821: }