Actual source code: strumpack.c

  1: #include <../src/mat/impls/aij/seq/aij.h>
  2: #include <../src/mat/impls/aij/mpi/mpiaij.h>
  3: #include <StrumpackSparseSolver.h>

  5: static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v)
  6: {
  7:   PetscFunctionBegin;
  8:   SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  9:   PetscFunctionReturn(PETSC_SUCCESS);
 10: }

 12: static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
 13: {
 14:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;

 16:   PetscFunctionBegin;
 17:   /* Deallocate STRUMPACK storage */
 18:   PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
 19:   PetscCall(PetscFree(A->data));

 21:   /* clear composed functions */
 22:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
 23:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL));
 24:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetReordering_C", NULL));
 25:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL));
 26:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetColPerm_C", NULL));
 27:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricNxyz_C", NULL));
 28:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricComponents_C", NULL));
 29:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricWidth_C", NULL));
 30:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGPU_C", NULL));
 31:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetGPU_C", NULL));
 32:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompression_C", NULL));
 33:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompression_C", NULL));
 34:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompRelTol_C", NULL));
 35:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompRelTol_C", NULL));
 36:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompAbsTol_C", NULL));
 37:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompAbsTol_C", NULL));
 38:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMaxRank_C", NULL));
 39:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMaxRank_C", NULL));
 40:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLeafSize_C", NULL));
 41:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLeafSize_C", NULL));
 42:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMinSepSize_C", NULL));
 43:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMinSepSize_C", NULL));
 44:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLossyPrecision_C", NULL));
 45:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLossyPrecision_C", NULL));
 46:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompButterflyLevels_C", NULL));
 47:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompButterflyLevels_C", NULL));

 49:   PetscFunctionReturn(PETSC_SUCCESS);
 50: }

 52: static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering)
 53: {
 54:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

 56:   PetscFunctionBegin;
 57:   PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
 58:   PetscFunctionReturn(PETSC_SUCCESS);
 59: }
 60: static PetscErrorCode MatSTRUMPACKGetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering *reordering)
 61: {
 62:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

 64:   PetscFunctionBegin;
 65:   PetscStackCallExternalVoid("STRUMPACK_reordering_method", *reordering = (MatSTRUMPACKReordering)STRUMPACK_reordering_method(*S));
 66:   PetscFunctionReturn(PETSC_SUCCESS);
 67: }

 69: /*@
 70:   MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering

 72:   Logically Collective

 74:   Input Parameters:
 75: + F          - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
 76: - reordering - the code to be used to find the fill-reducing reordering

 78:   Options Database Key:
 79: . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering, see `MatSTRUMPACKReordering`

 81:   Level: intermediate

 83:   References:
 84: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

 86: .seealso: [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`, `MatSTRUMPACKGetReordering()`
 87: @*/
 88: PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering)
 89: {
 90:   PetscFunctionBegin;
 93:   PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
 94:   PetscFunctionReturn(PETSC_SUCCESS);
 95: }
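/*
   A minimal usage sketch for the setter documented above (not part of this source file). It assumes a
   PC object `pc` already configured with PCLU (or PCILU); PCFactorSetUpMatSolverType() forces creation
   of the factor matrix so that PCFactorGetMatrix() can return it before the reordering is selected:

     Mat F;
     PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERSTRUMPACK));
     PetscCall(PCFactorSetUpMatSolverType(pc));
     PetscCall(PCFactorGetMatrix(pc, &F));
     PetscCall(MatSTRUMPACKSetReordering(F, MAT_STRUMPACK_METIS));

   The same choice can be made from the command line with -mat_strumpack_reordering METIS.
*/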
 96: /*@
 97:   MatSTRUMPACKGetReordering - Get STRUMPACK fill-reducing reordering

 99:   Logically Collective

101:   Input Parameter:
102: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

104:   Output Parameter:
105: . reordering - the code to be used to find the fill-reducing reordering

107:   Level: intermediate

109:   References:
110: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

112: .seealso: [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
113: @*/
114: PetscErrorCode MatSTRUMPACKGetReordering(Mat F, MatSTRUMPACKReordering *reordering)
115: {
116:   PetscFunctionBegin;
118:   PetscTryMethod(F, "MatSTRUMPACKGetReordering_C", (Mat, MatSTRUMPACKReordering *), (F, reordering));
120:   PetscFunctionReturn(PETSC_SUCCESS);
121: }

123: static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm)
124: {
125:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

127:   PetscFunctionBegin;
128:   PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, cperm ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));
129:   PetscFunctionReturn(PETSC_SUCCESS);
130: }
131: static PetscErrorCode MatSTRUMPACKGetColPerm_STRUMPACK(Mat F, PetscBool *cperm)
132: {
133:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

135:   PetscFunctionBegin;
136:   PetscStackCallExternalVoid("STRUMPACK_matching", *cperm = (PetscBool)(STRUMPACK_matching(*S) != STRUMPACK_MATCHING_NONE));
137:   PetscFunctionReturn(PETSC_SUCCESS);
138: }

140: /*@
141:   MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal

143:   Logically Collective

145:   Input Parameters:
146: + F     - the factored matrix obtained by calling `MatGetFactor()`
147: - cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix

149:   Options Database Key:
150: . -mat_strumpack_colperm <cperm> - true to use the permutation

152:   Level: intermediate

154:   References:
155: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

157: .seealso: [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetColPerm()`
158: @*/
159: PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm)
160: {
161:   PetscFunctionBegin;
164:   PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
165:   PetscFunctionReturn(PETSC_SUCCESS);
166: }
167: /*@
168:   MatSTRUMPACKGetColPerm - Get whether STRUMPACK will try to permute the columns of the matrix in order to get a nonzero diagonal

170:   Logically Collective

172:   Input Parameter:
173: . F - the factored matrix obtained by calling `MatGetFactor()`

175:   Output Parameter:
176: . cperm - Indicates whether STRUMPACK will permute columns

178:   Level: intermediate

180:   References:
181: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

183: .seealso: [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`
184: @*/
185: PetscErrorCode MatSTRUMPACKGetColPerm(Mat F, PetscBool *cperm)
186: {
187:   PetscFunctionBegin;
189:   PetscTryMethod(F, "MatSTRUMPACKGetColPerm_C", (Mat, PetscBool *), (F, cperm));
191:   PetscFunctionReturn(PETSC_SUCCESS);
192: }

194: static PetscErrorCode MatSTRUMPACKSetGPU_STRUMPACK(Mat F, PetscBool gpu)
195: {
196:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

198:   PetscFunctionBegin;
199:   if (gpu) {
200: #if !(defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL))
201:     PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Warning: strumpack was not configured with GPU support\n"));
202: #endif
203:     PetscStackCallExternalVoid("STRUMPACK_enable_gpu", STRUMPACK_enable_gpu(*S));
204:   } else PetscStackCallExternalVoid("STRUMPACK_disable_gpu", STRUMPACK_disable_gpu(*S));
205:   PetscFunctionReturn(PETSC_SUCCESS);
206: }
207: static PetscErrorCode MatSTRUMPACKGetGPU_STRUMPACK(Mat F, PetscBool *gpu)
208: {
209:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

211:   PetscFunctionBegin;
212:   PetscStackCallExternalVoid("STRUMPACK_use_gpu", *gpu = (PetscBool)STRUMPACK_use_gpu(*S));
213:   PetscFunctionReturn(PETSC_SUCCESS);
214: }

216: /*@
217:   MatSTRUMPACKSetGPU - Set whether STRUMPACK should enable GPU acceleration (not supported for all compression types)

219:   Logically Collective

221:   Input Parameters:
222: + F   - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
223: - gpu - whether or not to use GPU acceleration

225:   Options Database Key:
226: . -mat_strumpack_gpu <gpu> - true to use gpu offload

228:   Level: intermediate

230:   References:
231: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

233: .seealso: `MatGetFactor()`, `MatSTRUMPACKGetGPU()`
234: @*/
235: PetscErrorCode MatSTRUMPACKSetGPU(Mat F, PetscBool gpu)
236: {
237:   PetscFunctionBegin;
240:   PetscTryMethod(F, "MatSTRUMPACKSetGPU_C", (Mat, PetscBool), (F, gpu));
241:   PetscFunctionReturn(PETSC_SUCCESS);
242: }
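/*
   A short sketch (assuming a factor matrix F obtained via PCFactorGetMatrix() as shown earlier) that
   requests GPU acceleration from code instead of via the -mat_strumpack_gpu option:

     PetscCall(MatSTRUMPACKSetGPU(F, PETSC_TRUE));
*/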
243: /*@
244:   MatSTRUMPACKGetGPU - Get whether STRUMPACK will try to use GPU acceleration (not supported for all compression types)

246:   Logically Collective

248:   Input Parameter:
249: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

251:   Output Parameter:
252: . gpu - whether or not STRUMPACK will try to use GPU acceleration

254:   Level: intermediate

256:   References:
257: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

259: .seealso: `MatGetFactor()`, `MatSTRUMPACKSetGPU()`
260: @*/
261: PetscErrorCode MatSTRUMPACKGetGPU(Mat F, PetscBool *gpu)
262: {
263:   PetscFunctionBegin;
265:   PetscTryMethod(F, "MatSTRUMPACKGetGPU_C", (Mat, PetscBool *), (F, gpu));
267:   PetscFunctionReturn(PETSC_SUCCESS);
268: }

270: static PetscErrorCode MatSTRUMPACKSetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType comp)
271: {
272:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

274:   PetscFunctionBegin;
275: #if !defined(STRUMPACK_HAVE_BPACK)
276:   PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ButterflyPACK, please reconfigure with --download-butterflypack");
277: #endif
278: #if !defined(STRUMPACK_HAVE_ZFP)
279:   PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSLESS && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSY, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ZFP, please reconfigure with --download-zfp");
280: #endif
281:   PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, (STRUMPACK_COMPRESSION_TYPE)comp));
282:   PetscFunctionReturn(PETSC_SUCCESS);
283: }
284: static PetscErrorCode MatSTRUMPACKGetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType *comp)
285: {
286:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

288:   PetscFunctionBegin;
289:   PetscStackCallExternalVoid("STRUMPACK_compression", *comp = (MatSTRUMPACKCompressionType)STRUMPACK_compression(*S));
290:   PetscFunctionReturn(PETSC_SUCCESS);
291: }

293: /*@
294:   MatSTRUMPACKSetCompression - Set STRUMPACK compression type

296:   Input Parameters:
297: + F    - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
298: - comp - Type of compression to be used in the approximate sparse factorization

300:   Options Database Key:
301: . -mat_strumpack_compression <NONE> - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY

303:   Level: intermediate

305:   Notes:
306:   Default for `comp` is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR`
307:   for `-pc_type ilu`

309:   References:
310: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

312: .seealso: `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKGetCompression()`
313: @*/
314: PetscErrorCode MatSTRUMPACKSetCompression(Mat F, MatSTRUMPACKCompressionType comp)
315: {
316:   PetscFunctionBegin;
319:   PetscTryMethod(F, "MatSTRUMPACKSetCompression_C", (Mat, MatSTRUMPACKCompressionType), (F, comp));
320:   PetscFunctionReturn(PETSC_SUCCESS);
321: }
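/*
   A brief sketch (again assuming a factor matrix F from PCFactorGetMatrix()) that selects BLR
   compression and tightens the relative tolerance; equivalent command-line options are
   -mat_strumpack_compression BLR -mat_strumpack_compression_rel_tol 1e-6:

     PetscCall(MatSTRUMPACKSetCompression(F, MAT_STRUMPACK_COMPRESSION_TYPE_BLR));
     PetscCall(MatSTRUMPACKSetCompRelTol(F, 1e-6));
*/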
322: /*@
323:   MatSTRUMPACKGetCompression - Get STRUMPACK compression type

325:   Input Parameter:
326: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

328:   Output Parameter:
329: . comp - Type of compression to be used in the approximate sparse factorization

331:   Level: intermediate

333:   Notes:
334:   Default is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR` for `-pc_type ilu`

336:   References:
337: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

339: .seealso: `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetCompression()`
340: @*/
341: PetscErrorCode MatSTRUMPACKGetCompression(Mat F, MatSTRUMPACKCompressionType *comp)
342: {
343:   PetscFunctionBegin;
345:   PetscTryMethod(F, "MatSTRUMPACKGetCompression_C", (Mat, MatSTRUMPACKCompressionType *), (F, comp));
347:   PetscFunctionReturn(PETSC_SUCCESS);
348: }

350: static PetscErrorCode MatSTRUMPACKSetCompRelTol_STRUMPACK(Mat F, PetscReal rtol)
351: {
352:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

354:   PetscFunctionBegin;
355:   PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, rtol));
356:   PetscFunctionReturn(PETSC_SUCCESS);
357: }
358: static PetscErrorCode MatSTRUMPACKGetCompRelTol_STRUMPACK(Mat F, PetscReal *rtol)
359: {
360:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

362:   PetscFunctionBegin;
363:   PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", *rtol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
364:   PetscFunctionReturn(PETSC_SUCCESS);
365: }

367: /*@
368:   MatSTRUMPACKSetCompRelTol - Set STRUMPACK relative tolerance for compression

370:   Logically Collective

372:   Input Parameters:
373: + F    - the factored matrix obtained by calling `MatGetFactor()`
374: - rtol - relative compression tolerance

376:   Options Database Key:
377: . -mat_strumpack_compression_rel_tol <1e-4> - Relative compression tolerance, when using `-pc_type ilu`

379:   Level: intermediate

381:   References:
382: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

384: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
385: @*/
386: PetscErrorCode MatSTRUMPACKSetCompRelTol(Mat F, PetscReal rtol)
387: {
388:   PetscFunctionBegin;
391:   PetscTryMethod(F, "MatSTRUMPACKSetCompRelTol_C", (Mat, PetscReal), (F, rtol));
392:   PetscFunctionReturn(PETSC_SUCCESS);
393: }
394: /*@
395:   MatSTRUMPACKGetCompRelTol - Get STRUMPACK relative tolerance for compression

397:   Logically Collective

399:   Input Parameter:
400: . F - the factored matrix obtained by calling `MatGetFactor()`

402:   Output Parameter:
403: . rtol - relative compression tolerance

405:   Level: intermediate

407:   References:
408: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

410: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
411: @*/
412: PetscErrorCode MatSTRUMPACKGetCompRelTol(Mat F, PetscReal *rtol)
413: {
414:   PetscFunctionBegin;
416:   PetscTryMethod(F, "MatSTRUMPACKGetCompRelTol_C", (Mat, PetscReal *), (F, rtol));
418:   PetscFunctionReturn(PETSC_SUCCESS);
419: }

421: static PetscErrorCode MatSTRUMPACKSetCompAbsTol_STRUMPACK(Mat F, PetscReal atol)
422: {
423:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

425:   PetscFunctionBegin;
426:   PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, atol));
427:   PetscFunctionReturn(PETSC_SUCCESS);
428: }
429: static PetscErrorCode MatSTRUMPACKGetCompAbsTol_STRUMPACK(Mat F, PetscReal *atol)
430: {
431:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

433:   PetscFunctionBegin;
434:   PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", *atol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
435:   PetscFunctionReturn(PETSC_SUCCESS);
436: }

438: /*@
439:   MatSTRUMPACKSetCompAbsTol - Set STRUMPACK absolute tolerance for compression

441:   Logically Collective

443:   Input Parameters:
444: + F    - the factored matrix obtained by calling `MatGetFactor()`
445: - atol - absolute compression tolerance

447:   Options Database Key:
448: . -mat_strumpack_compression_abs_tol <1e-10> - Absolute compression tolerance, when using `-pc_type ilu`

450:   Level: intermediate

452:   References:
453: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

455: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
456: @*/
457: PetscErrorCode MatSTRUMPACKSetCompAbsTol(Mat F, PetscReal atol)
458: {
459:   PetscFunctionBegin;
462:   PetscTryMethod(F, "MatSTRUMPACKSetCompAbsTol_C", (Mat, PetscReal), (F, atol));
463:   PetscFunctionReturn(PETSC_SUCCESS);
464: }
465: /*@
466:   MatSTRUMPACKGetCompAbsTol - Get STRUMPACK absolute tolerance for compression

468:   Logically Collective

470:   Input Parameter:
471: . F - the factored matrix obtained by calling `MatGetFactor()`

473:   Output Parameter:
474: . atol - absolute compression tolerance

476:   Level: intermediate

478:   References:
479: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

481: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
482: @*/
483: PetscErrorCode MatSTRUMPACKGetCompAbsTol(Mat F, PetscReal *atol)
484: {
485:   PetscFunctionBegin;
487:   PetscTryMethod(F, "MatSTRUMPACKGetCompAbsTol_C", (Mat, PetscReal *), (F, atol));
489:   PetscFunctionReturn(PETSC_SUCCESS);
490: }

492: static PetscErrorCode MatSTRUMPACKSetCompLeafSize_STRUMPACK(Mat F, PetscInt leaf_size)
493: {
494:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

496:   PetscFunctionBegin;
497:   PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, leaf_size));
498:   PetscFunctionReturn(PETSC_SUCCESS);
499: }
500: static PetscErrorCode MatSTRUMPACKGetCompLeafSize_STRUMPACK(Mat F, PetscInt *leaf_size)
501: {
502:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

504:   PetscFunctionBegin;
505:   PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", *leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
506:   PetscFunctionReturn(PETSC_SUCCESS);
507: }

509: /*@
510:   MatSTRUMPACKSetCompLeafSize - Set STRUMPACK leaf size for HSS, BLR, HODLR...

512:   Logically Collective

514:   Input Parameters:
515: + F         - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
516: - leaf_size - Size of diagonal blocks in rank-structured approximation

518:   Options Database Key:
519: . -mat_strumpack_compression_leaf_size - Size of diagonal blocks in rank-structured approximation, when using `-pc_type ilu`

521:   Level: intermediate

523:   References:
524: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

526: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
527: @*/
528: PetscErrorCode MatSTRUMPACKSetCompLeafSize(Mat F, PetscInt leaf_size)
529: {
530:   PetscFunctionBegin;
533:   PetscTryMethod(F, "MatSTRUMPACKSetCompLeafSize_C", (Mat, PetscInt), (F, leaf_size));
534:   PetscFunctionReturn(PETSC_SUCCESS);
535: }
536: /*@
537:   MatSTRUMPACKGetCompLeafSize - Get STRUMPACK leaf size for HSS, BLR, HODLR...

539:   Logically Collective

541:   Input Parameter:
542: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

544:   Output Parameter:
545: . leaf_size - Size of diagonal blocks in rank-structured approximation

547:   Level: intermediate

549:   References:
550: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

552: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
553: @*/
554: PetscErrorCode MatSTRUMPACKGetCompLeafSize(Mat F, PetscInt *leaf_size)
555: {
556:   PetscFunctionBegin;
558:   PetscTryMethod(F, "MatSTRUMPACKGetCompLeafSize_C", (Mat, PetscInt *), (F, leaf_size));
560:   PetscFunctionReturn(PETSC_SUCCESS);
561: }

563: static PetscErrorCode MatSTRUMPACKSetGeometricNxyz_STRUMPACK(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
564: {
565:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

567:   PetscFunctionBegin;
568:   if (nx < 1) {
569:     PetscCheck(nx == PETSC_DECIDE || nx == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nx < 1");
570:     nx = 1;
571:   }
572:   PetscStackCallExternalVoid("STRUMPACK_set_nx", STRUMPACK_set_nx(*S, nx));
573:   if (ny < 1) {
574:     PetscCheck(ny == PETSC_DECIDE || ny == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "ny < 1");
575:     ny = 1;
576:   }
577:   PetscStackCallExternalVoid("STRUMPACK_set_ny", STRUMPACK_set_ny(*S, ny));
578:   if (nz < 1) {
579:     PetscCheck(nz == PETSC_DECIDE || nz == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nz < 1");
580:     nz = 1;
581:   }
582:   PetscStackCallExternalVoid("STRUMPACK_set_nz", STRUMPACK_set_nz(*S, nz));
583:   PetscFunctionReturn(PETSC_SUCCESS);
584: }
585: static PetscErrorCode MatSTRUMPACKSetGeometricComponents_STRUMPACK(Mat F, PetscInt nc)
586: {
587:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

589:   PetscFunctionBegin;
590:   PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, nc));
591:   PetscFunctionReturn(PETSC_SUCCESS);
592: }
593: static PetscErrorCode MatSTRUMPACKSetGeometricWidth_STRUMPACK(Mat F, PetscInt w)
594: {
595:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

597:   PetscFunctionBegin;
598:   PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, w));
599:   PetscFunctionReturn(PETSC_SUCCESS);
600: }

602: /*@
603:   MatSTRUMPACKSetGeometricNxyz - Set STRUMPACK mesh x, y and z dimensions, for use with GEOMETRIC ordering.

605:   Logically Collective

607:   Input Parameters:
608: + F  - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
609: . nx - x dimension of the mesh
610: . ny - y dimension of the mesh
611: - nz - z dimension of the mesh

613:   Level: intermediate

615:   Notes:
616:   If the mesh is two (or one) dimensional, one can use 1, `PETSC_DECIDE`, or `PETSC_DEFAULT`
617:   for the missing z (and y) dimensions.

619:   References:
620: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

622: .seealso: `MatGetFactor()`
623: @*/
624: PetscErrorCode MatSTRUMPACKSetGeometricNxyz(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
625: {
626:   PetscFunctionBegin;
631:   PetscTryMethod(F, "MatSTRUMPACKSetGeometricNxyz_C", (Mat, PetscInt, PetscInt, PetscInt), (F, nx, ny, nz));
632:   PetscFunctionReturn(PETSC_SUCCESS);
633: }
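/*
   A sketch of the GEOMETRIC ordering setup for a structured 100 x 100 2D grid with 2 unknowns per grid
   point and a standard 5-point stencil (separator width 1), assuming a factor matrix F obtained from
   PCFactorGetMatrix():

     PetscCall(MatSTRUMPACKSetReordering(F, MAT_STRUMPACK_GEOMETRIC));
     PetscCall(MatSTRUMPACKSetGeometricNxyz(F, 100, 100, PETSC_DECIDE));
     PetscCall(MatSTRUMPACKSetGeometricComponents(F, 2));
     PetscCall(MatSTRUMPACKSetGeometricWidth(F, 1));
*/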
634: /*@
635:   MatSTRUMPACKSetGeometricComponents - Set STRUMPACK number of degrees of freedom per mesh point, for use with GEOMETRIC ordering.

637:   Logically Collective

639:   Input Parameters:
640: + F  - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
641: - nc - Number of components/dof's per grid point

643:   Options Database Key:
644: . -mat_strumpack_geometric_components <1> - Number of components per mesh point, for geometric nested dissection ordering

646:   Level: intermediate

648:   References:
649: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

651: .seealso: `MatGetFactor()`
652: @*/
653: PetscErrorCode MatSTRUMPACKSetGeometricComponents(Mat F, PetscInt nc)
654: {
655:   PetscFunctionBegin;
658:   PetscTryMethod(F, "MatSTRUMPACKSetGeometricComponents_C", (Mat, PetscInt), (F, nc));
659:   PetscFunctionReturn(PETSC_SUCCESS);
660: }
661: /*@
662:   MatSTRUMPACKSetGeometricWidth - Set STRUMPACK width of the separator, for use with GEOMETRIC ordering.

664:   Logically Collective

666:   Input Parameters:
667: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
668: - w - width of the separator

670:   Options Database Key:
671: . -mat_strumpack_geometric_width <1> - Width of the separator of the mesh, for geometric nested dissection ordering

673:   Level: intermediate

675:   References:
676: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

678: .seealso: `MatGetFactor()`
679: @*/
680: PetscErrorCode MatSTRUMPACKSetGeometricWidth(Mat F, PetscInt w)
681: {
682:   PetscFunctionBegin;
685:   PetscTryMethod(F, "MatSTRUMPACKSetGeometricWidth_C", (Mat, PetscInt), (F, w));
686:   PetscFunctionReturn(PETSC_SUCCESS);
687: }

689: static PetscErrorCode MatSTRUMPACKSetCompMinSepSize_STRUMPACK(Mat F, PetscInt min_sep_size)
690: {
691:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

693:   PetscFunctionBegin;
694:   PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, min_sep_size));
695:   PetscFunctionReturn(PETSC_SUCCESS);
696: }
697: static PetscErrorCode MatSTRUMPACKGetCompMinSepSize_STRUMPACK(Mat F, PetscInt *min_sep_size)
698: {
699:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

701:   PetscFunctionBegin;
702:   PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", *min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
703:   PetscFunctionReturn(PETSC_SUCCESS);
704: }

706: /*@
707:   MatSTRUMPACKSetCompMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation

709:   Logically Collective

711:   Input Parameters:
712: + F            - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
713: - min_sep_size - minimum dense matrix size for low-rank approximation

715:   Options Database Key:
716: . -mat_strumpack_compression_min_sep_size <min_sep_size> - Minimum size of dense sub-block for low-rank compression

718:   Level: intermediate

720:   References:
721: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

723: .seealso: `MatGetFactor()`, `MatSTRUMPACKGetCompMinSepSize()`
724: @*/
725: PetscErrorCode MatSTRUMPACKSetCompMinSepSize(Mat F, PetscInt min_sep_size)
726: {
727:   PetscFunctionBegin;
730:   PetscTryMethod(F, "MatSTRUMPACKSetCompMinSepSize_C", (Mat, PetscInt), (F, min_sep_size));
731:   PetscFunctionReturn(PETSC_SUCCESS);
732: }
733: /*@
734:   MatSTRUMPACKGetCompMinSepSize - Get STRUMPACK minimum separator size for low-rank approximation

736:   Logically Collective

738:   Input Parameter:
739: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

741:   Output Parameter:
742: . min_sep_size - minimum dense matrix size for low-rank approximation

744:   Level: intermediate

746:   References:
747: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

749: .seealso: `MatGetFactor()`, `MatSTRUMPACKSetCompMinSepSize()`
750: @*/
751: PetscErrorCode MatSTRUMPACKGetCompMinSepSize(Mat F, PetscInt *min_sep_size)
752: {
753:   PetscFunctionBegin;
755:   PetscTryMethod(F, "MatSTRUMPACKGetCompMinSepSize_C", (Mat, PetscInt *), (F, min_sep_size));
757:   PetscFunctionReturn(PETSC_SUCCESS);
758: }

760: static PetscErrorCode MatSTRUMPACKSetCompLossyPrecision_STRUMPACK(Mat F, PetscInt lossy_prec)
761: {
762:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

764:   PetscFunctionBegin;
765:   PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, lossy_prec));
766:   PetscFunctionReturn(PETSC_SUCCESS);
767: }
768: static PetscErrorCode MatSTRUMPACKGetCompLossyPrecision_STRUMPACK(Mat F, PetscInt *lossy_prec)
769: {
770:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

772:   PetscFunctionBegin;
773:   PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", *lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
774:   PetscFunctionReturn(PETSC_SUCCESS);
775: }

777: /*@
778:   MatSTRUMPACKSetCompLossyPrecision - Set STRUMPACK precision for lossy compression (requires ZFP support)

780:   Logically Collective

782:   Input Parameters:
783: + F          - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
784: - lossy_prec - Number of bitplanes to use in lossy compression

786:   Options Database Key:
787: . -mat_strumpack_compression_lossy_precision <lossy_prec> - Precision when using lossy compression [1-64], when using `-pc_type ilu -mat_strumpack_compression LOSSY`

789:   Level: intermediate

791:   References:
792: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

794: .seealso: `MatGetFactor()`, `MatSTRUMPACKGetCompLossyPrecision()`
795: @*/
796: PetscErrorCode MatSTRUMPACKSetCompLossyPrecision(Mat F, PetscInt lossy_prec)
797: {
798:   PetscFunctionBegin;
801:   PetscTryMethod(F, "MatSTRUMPACKSetCompLossyPrecision_C", (Mat, PetscInt), (F, lossy_prec));
802:   PetscFunctionReturn(PETSC_SUCCESS);
803: }
804: /*@
805:   MatSTRUMPACKGetCompLossyPrecision - Get STRUMPACK precision for lossy compression (requires ZFP support)

807:   Logically Collective

809:   Input Parameter:
810: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

812:   Output Parameter:
813: . lossy_prec - Number of bitplanes to use in lossy compression

815:   Level: intermediate

817:   References:
818: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

820: .seealso: `MatGetFactor()`, `MatSTRUMPACKSetCompLossyPrecision()`
821: @*/
822: PetscErrorCode MatSTRUMPACKGetCompLossyPrecision(Mat F, PetscInt *lossy_prec)
823: {
824:   PetscFunctionBegin;
826:   PetscTryMethod(F, "MatSTRUMPACKGetCompLossyPrecision_C", (Mat, PetscInt *), (F, lossy_prec));
828:   PetscFunctionReturn(PETSC_SUCCESS);
829: }

831: static PetscErrorCode MatSTRUMPACKSetCompButterflyLevels_STRUMPACK(Mat F, PetscInt bfly_lvls)
832: {
833:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

835:   PetscFunctionBegin;
836:   PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, bfly_lvls));
837:   PetscFunctionReturn(PETSC_SUCCESS);
838: }
839: static PetscErrorCode MatSTRUMPACKGetCompButterflyLevels_STRUMPACK(Mat F, PetscInt *bfly_lvls)
840: {
841:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

843:   PetscFunctionBegin;
844:   PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", *bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
845:   PetscFunctionReturn(PETSC_SUCCESS);
846: }

848: /*@
849:   MatSTRUMPACKSetCompButterflyLevels - Set STRUMPACK number of butterfly levels in HODLR compression (requires ButterflyPACK support)

851:   Logically Collective

853:   Input Parameters:
854: + F         - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
855: - bfly_lvls - Number of levels of butterfly compression in HODLR compression

857:   Options Database Key:
858: . -mat_strumpack_compression_butterfly_levels <bfly_lvls> - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly, when using `-pc_type ilu` with (BLR_)HODLR compression

860:   Level: intermediate

862:   References:
863: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

865: .seealso: `MatGetFactor()`, `MatSTRUMPACKGetCompButterflyLevels()`
866: @*/
867: PetscErrorCode MatSTRUMPACKSetCompButterflyLevels(Mat F, PetscInt bfly_lvls)
868: {
869:   PetscFunctionBegin;
872:   PetscTryMethod(F, "MatSTRUMPACKSetButterflyLevels_C", (Mat, PetscInt), (F, bfly_lvls));
873:   PetscFunctionReturn(PETSC_SUCCESS);
874: }
875: /*@
876:   MatSTRUMPACKGetCompButterflyLevels - Get STRUMPACK number of butterfly levels in HODLR compression (requires ButterflyPACK support)

878:   Logically Collective

880:   Input Parameter:
881: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface

883:   Output Parameter:
884: . bfly_lvls - Number of levels of butterfly compression in HODLR compression

886:   Level: intermediate

888:   References:
889: .  * - STRUMPACK documentation: https://portal.nersc.gov/project/sparse/strumpack/master/

891: .seealso: `MatGetFactor()`, `MatSTRUMPACKSetCompButterflyLevels()`
892: @*/
893: PetscErrorCode MatSTRUMPACKGetCompButterflyLevels(Mat F, PetscInt *bfly_lvls)
894: {
895:   PetscFunctionBegin;
897:   PetscTryMethod(F, "MatSTRUMPACKGetButterflyLevels_C", (Mat, PetscInt *), (F, bfly_lvls));
899:   PetscFunctionReturn(PETSC_SUCCESS);
900: }

902: static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x)
903: {
904:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
905:   STRUMPACK_RETURN_CODE   sp_err;
906:   const PetscScalar      *bptr;
907:   PetscScalar            *xptr;

909:   PetscFunctionBegin;
910:   PetscCall(VecGetArray(x, &xptr));
911:   PetscCall(VecGetArrayRead(b_mpi, &bptr));

913:   PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
914:   switch (sp_err) {
915:   case STRUMPACK_SUCCESS:
916:     break;
917:   case STRUMPACK_MATRIX_NOT_SET: {
918:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
919:     break;
920:   }
921:   case STRUMPACK_REORDERING_ERROR: {
922:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
923:     break;
924:   }
925:   default:
926:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
927:   }
928:   PetscCall(VecRestoreArray(x, &xptr));
929:   PetscCall(VecRestoreArrayRead(b_mpi, &bptr));
930:   PetscFunctionReturn(PETSC_SUCCESS);
931: }

933: static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X)
934: {
935:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
936:   STRUMPACK_RETURN_CODE   sp_err;
937:   PetscBool               flg;
938:   PetscInt                m = A->rmap->n, nrhs;
939:   const PetscScalar      *bptr;
940:   PetscScalar            *xptr;

942:   PetscFunctionBegin;
943:   PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
944:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
945:   PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
946:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");

948:   PetscCall(MatGetSize(B_mpi, NULL, &nrhs));
949:   PetscCall(MatDenseGetArray(X, &xptr));
950:   PetscCall(MatDenseGetArrayRead(B_mpi, &bptr));

952:   PetscStackCallExternalVoid("STRUMPACK_matsolve", sp_err = STRUMPACK_matsolve(*S, nrhs, bptr, m, xptr, m, 0));
953:   switch (sp_err) {
954:   case STRUMPACK_SUCCESS:
955:     break;
956:   case STRUMPACK_MATRIX_NOT_SET: {
957:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
958:     break;
959:   }
960:   case STRUMPACK_REORDERING_ERROR: {
961:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
962:     break;
963:   }
964:   default:
965:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
966:   }
967:   PetscCall(MatDenseRestoreArrayRead(B_mpi, &bptr));
968:   PetscCall(MatDenseRestoreArray(X, &xptr));

970:   PetscFunctionReturn(PETSC_SUCCESS);
971: }

973: static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer)
974: {
975:   PetscFunctionBegin;
976:   /* check if matrix is strumpack type */
977:   if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(PETSC_SUCCESS);
978:   PetscCall(PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n"));
979:   PetscFunctionReturn(PETSC_SUCCESS);
980: }

982: static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer)
983: {
984:   PetscBool         iascii;
985:   PetscViewerFormat format;

987:   PetscFunctionBegin;
988:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
989:   if (iascii) {
990:     PetscCall(PetscViewerGetFormat(viewer, &format));
991:     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A, viewer));
992:   }
993:   PetscFunctionReturn(PETSC_SUCCESS);
994: }

996: static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info)
997: {
998:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
999:   STRUMPACK_RETURN_CODE   sp_err;
1000:   Mat                     Aloc;
1001:   const PetscScalar      *av;
1002:   const PetscInt         *ai = NULL, *aj = NULL;
1003:   PetscInt                M = A->rmap->N, m = A->rmap->n, dummy;
1004:   PetscBool               ismpiaij, isseqaij, flg;

1006:   PetscFunctionBegin;
1007:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
1008:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
1009:   if (ismpiaij) {
1010:     PetscCall(MatMPIAIJGetLocalMat(A, MAT_INITIAL_MATRIX, &Aloc));
1011:   } else if (isseqaij) {
1012:     PetscCall(PetscObjectReference((PetscObject)A));
1013:     Aloc = A;
1014:   } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)A)->type_name);

1016:   PetscCall(MatGetRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
1017:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "GetRowIJ failed");
1018:   PetscCall(MatSeqAIJGetArrayRead(Aloc, &av));

1020:   if (ismpiaij) {
1021:     const PetscInt *dist = NULL;
1022:     PetscCall(MatGetOwnershipRanges(A, &dist));
1023:     PetscStackCallExternalVoid("STRUMPACK_set_distributed_csr_matrix", STRUMPACK_set_distributed_csr_matrix(*S, &m, ai, aj, av, dist, 0));
1024:   } else if (isseqaij) {
1025:     PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, ai, aj, av, 0));
1026:   }

1028:   PetscCall(MatRestoreRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
1029:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "RestoreRowIJ failed");
1030:   PetscCall(MatSeqAIJRestoreArrayRead(Aloc, &av));
1031:   PetscCall(MatDestroy(&Aloc));

1033:   /* Reorder and Factor the matrix. */
1034:   /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
1035:   PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
1036:   PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
1037:   switch (sp_err) {
1038:   case STRUMPACK_SUCCESS:
1039:     break;
1040:   case STRUMPACK_MATRIX_NOT_SET: {
1041:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
1042:     break;
1043:   }
1044:   case STRUMPACK_REORDERING_ERROR: {
1045:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
1046:     break;
1047:   }
1048:   default:
1049:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
1050:   }
1051:   F->assembled    = PETSC_TRUE;
1052:   F->preallocated = PETSC_TRUE;
1053:   PetscFunctionReturn(PETSC_SUCCESS);
1054: }

1056: static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
1057: {
1058:   PetscFunctionBegin;
1059:   F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
1060:   F->ops->solve           = MatSolve_STRUMPACK;
1061:   F->ops->matsolve        = MatMatSolve_STRUMPACK;
1062:   PetscFunctionReturn(PETSC_SUCCESS);
1063: }

1065: static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type)
1066: {
1067:   PetscFunctionBegin;
1068:   *type = MATSOLVERSTRUMPACK;
1069:   PetscFunctionReturn(PETSC_SUCCESS);
1070: }

1072: /*MC
1073:   MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
1074:   and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK.

1076:   Consult the STRUMPACK manual for more info,
1077:     https://portal.nersc.gov/project/sparse/strumpack/master/

1079:   Use `./configure --download-strumpack --download-metis` to have PETSc installed with STRUMPACK.

1081:   For full functionality, add `--download-slate --download-magma --download-parmetis --download-ptscotch --download-zfp --download-butterflypack`.
1082:   SLATE provides GPU support in the multi-GPU setting, supplying ScaLAPACK functionality with GPU acceleration.
1083:   MAGMA can optionally be used for on-node GPU support instead of cuBLAS/cuSOLVER, and typically performs slightly better.
1084:   ParMETIS and PTScotch can be used for parallel fill-reducing ordering.
1085:   ZFP is used for floating point compression of the sparse factors (LOSSY or LOSSLESS compression).
1086:   ButterflyPACK is used for HODLR (Hierarchically Off-Diagonal Low Rank) and HODBF (Hierarchically Off-Diagonal Butterfly) compression of the sparse factors.

1088:   Recommended use is 1 MPI rank per GPU.

1090:   Use `-pc_type lu` `-pc_factor_mat_solver_type strumpack` to use this as an exact (direct) solver.

1092:   Use `-pc_type ilu` `-pc_factor_mat_solver_type strumpack` to enable low-rank compression (i.e., use it as a preconditioner), by default using block low-rank (BLR) compression.

1094:   Works with `MATAIJ` matrices

1096:   Options Database Keys:
1097: + -mat_strumpack_verbose                      - Enable verbose output
1098: . -mat_strumpack_compression                  - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY
1099: . -mat_strumpack_compression_rel_tol          - Relative compression tolerance, when using `-pc_type ilu`
1100: . -mat_strumpack_compression_abs_tol          - Absolute compression tolerance, when using `-pc_type ilu`
1101: . -mat_strumpack_compression_min_sep_size     - Minimum size of separator for rank-structured compression, when using `-pc_type ilu`
1102: . -mat_strumpack_compression_leaf_size        - Size of diagonal blocks in rank-structured approximation, when using `-pc_type ilu`
1103: . -mat_strumpack_compression_lossy_precision  - Precision when using lossy compression [1-64], when using `-pc_type ilu` with LOSSY compression (requires ZFP support)
1104: . -mat_strumpack_compression_butterfly_levels - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly, when using `-pc_type ilu` with (BLR_)HODLR compression (requires ButterflyPACK support)
1105: . -mat_strumpack_gpu                          - Enable GPU acceleration in numerical factorization (not supported for all compression types)
1106: . -mat_strumpack_colperm <TRUE>               - Permute matrix to make diagonal nonzeros
1107: . -mat_strumpack_reordering <METIS>           - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM GEOMETRIC AMD MMD AND MLF SPECTRAL
1108: . -mat_strumpack_geometric_xyz <1,1,1>        - Mesh x,y,z dimensions, for use with GEOMETRIC ordering
1109: . -mat_strumpack_geometric_components <1>     - Number of components per mesh point, for geometric nested dissection ordering
1110: . -mat_strumpack_geometric_width <1>          - Width of the separator of the mesh, for geometric nested dissection ordering
1111: - -mat_strumpack_metis_nodeNDP                - Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree

1113:  Level: beginner

1115:  HODLR, BLR_HODLR, and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ButterflyPACK support (`--download-butterflypack`).

1117:  LOSSY, LOSSLESS and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ZFP support (`--download-zfp`).

1119: .seealso: [](ch_matrices), `Mat`, `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`,
1120:           `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKReordering`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetColPerm()`.
1121: M*/
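/*
   A minimal sketch of typical use through KSP/PC (assuming an assembled MATAIJ matrix A and vectors b
   and x); running with -pc_type ilu instead of PCLU enables the compression-based approximate
   factorization described above:

     KSP ksp;
     PC  pc;
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPSetType(ksp, KSPPREONLY));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCLU));
     PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERSTRUMPACK));
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));
*/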
1122: static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F)
1123: {
1124:   Mat       B;
1125:   PetscInt  M = A->rmap->N, N = A->cmap->N;
1126:   PetscBool verb, flg, set;
1127:   PetscReal ctol;
1128:   PetscInt  min_sep_size, leaf_size, nxyz[3], nrdims, nc, w;
1129: #if defined(STRUMPACK_USE_ZFP)
1130:   PetscInt lossy_prec;
1131: #endif
1132: #if defined(STRUMPACK_USE_BPACK)
1133:   PetscInt bfly_lvls;
1134: #endif
1135: #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1136:   PetscMPIInt mpithreads;
1137: #endif
1138:   STRUMPACK_SparseSolver       *S;
1139:   STRUMPACK_INTERFACE           iface;
1140:   STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
1141:   STRUMPACK_COMPRESSION_TYPE    compcurrent, compvalue;
1142:   const STRUMPACK_PRECISION     table[2][2][2] = {
1143:     {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
1144:     {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX},       {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}      }
1145:   };
1146:   const STRUMPACK_PRECISION prec               = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];
1147:   const char *const         STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "GEOMETRIC", "AMD", "MMD", "AND", "MLF", "SPECTRAL", "STRUMPACKNDTypes", "", 0};
1148:   const char *const         CompTypes[]        = {"NONE", "HSS", "BLR", "HODLR", "BLR_HODLR", "ZFP_BLR_HODLR", "LOSSLESS", "LOSSY", "CompTypes", "", 0};

1150:   PetscFunctionBegin;
1151: #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1152:   PetscCallMPI(MPI_Query_thread(&mpithreads));
1153:   PetscCheck(mpithreads == MPI_THREAD_MULTIPLE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP_SYS, "SLATE requires MPI_THREAD_MULTIPLE");
1154: #endif
1155:   /* Create the factorization matrix */
1156:   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
1157:   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
1158:   PetscCall(PetscStrallocpy("strumpack", &((PetscObject)B)->type_name));
1159:   PetscCall(MatSetUp(B));
1160:   PetscCall(MatSeqAIJSetPreallocation(B, 0, NULL));
1161:   PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
1162:   B->trivialsymbolic = PETSC_TRUE;
1163:   if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
1164:     B->ops->lufactorsymbolic  = MatLUFactorSymbolic_STRUMPACK;
1165:     B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
1166:   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
1167:   B->ops->getinfo     = MatGetInfo_External;
1168:   B->ops->view        = MatView_STRUMPACK;
1169:   B->ops->destroy     = MatDestroy_STRUMPACK;
1170:   B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
1171:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack));
1172:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK));
1173:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetReordering_C", MatSTRUMPACKGetReordering_STRUMPACK));
1174:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK));
1175:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetColPerm_C", MatSTRUMPACKGetColPerm_STRUMPACK));
1176:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricNxyz_C", MatSTRUMPACKSetGeometricNxyz_STRUMPACK));
1177:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricComponents_C", MatSTRUMPACKSetGeometricComponents_STRUMPACK));
1178:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricWidth_C", MatSTRUMPACKSetGeometricWidth_STRUMPACK));
1179:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGPU_C", MatSTRUMPACKSetGPU_STRUMPACK));
1180:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetGPU_C", MatSTRUMPACKGetGPU_STRUMPACK));
1181:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompression_C", MatSTRUMPACKSetCompression_STRUMPACK));
1182:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompression_C", MatSTRUMPACKGetCompression_STRUMPACK));
1183:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompRelTol_C", MatSTRUMPACKSetCompRelTol_STRUMPACK));
1184:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompRelTol_C", MatSTRUMPACKGetCompRelTol_STRUMPACK));
1185:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompAbsTol_C", MatSTRUMPACKSetCompAbsTol_STRUMPACK));
1186:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompAbsTol_C", MatSTRUMPACKGetCompAbsTol_STRUMPACK));
1187:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLeafSize_C", MatSTRUMPACKSetCompLeafSize_STRUMPACK));
1188:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLeafSize_C", MatSTRUMPACKGetCompLeafSize_STRUMPACK));
1189:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompMinSepSize_C", MatSTRUMPACKSetCompMinSepSize_STRUMPACK));
1190:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompMinSepSize_C", MatSTRUMPACKGetCompMinSepSize_STRUMPACK));
1191:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLossyPrecision_C", MatSTRUMPACKSetCompLossyPrecision_STRUMPACK));
1192:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLossyPrecision_C", MatSTRUMPACKGetCompLossyPrecision_STRUMPACK));
1193:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompButterflyLevels_C", MatSTRUMPACKSetCompButterflyLevels_STRUMPACK));
1194:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompButterflyLevels_C", MatSTRUMPACKGetCompButterflyLevels_STRUMPACK));
1195:   B->factortype = ftype;

1197:   /* set solvertype */
1198:   PetscCall(PetscFree(B->solvertype));
1199:   PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype));

1201:   PetscCall(PetscNew(&S));
1202:   B->data = S;

1204:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg)); /* A might be MATSEQAIJCUSPARSE */
1205:   iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;

1207:   PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
1208:   verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
1209:   PetscCall(PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL));

1211:   PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));

1213:   /* By default, no compression is done. Compression is enabled when the user sets                   */
1214:   /* -mat_strumpack_compression to anything other than NONE, or when selecting ilu                    */
1215:   /* preconditioning, in which case we default to STRUMPACK_BLR compression.                         */
1216:   /* When compression is enabled, the STRUMPACK solver becomes an incomplete                         */
1217:   /* (or approximate) LU factorization.                                                              */
1218:   PetscStackCallExternalVoid("STRUMPACK_compression", compcurrent = STRUMPACK_compression(*S));
1219:   PetscCall(PetscOptionsEnum("-mat_strumpack_compression", "Rank-structured compression type", "None", CompTypes, (PetscEnum)compcurrent, (PetscEnum *)&compvalue, &set));
1220:   if (set) {
1221:     PetscCall(MatSTRUMPACKSetCompression(B, (MatSTRUMPACKCompressionType)compvalue));
1222:   } else {
1223:     if (ftype == MAT_FACTOR_ILU) { PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, STRUMPACK_BLR)); }
1224:   }

1226:   PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", ctol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
1227:   PetscCall(PetscOptionsReal("-mat_strumpack_compression_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set));
1228:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, (double)ctol));

1230:   PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", ctol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
1231:   PetscCall(PetscOptionsReal("-mat_strumpack_compression_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set));
1232:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, (double)ctol));

1234:   PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
1235:   PetscCall(PetscOptionsInt("-mat_strumpack_compression_min_sep_size", "Minimum size of separator for compression", "None", min_sep_size, &min_sep_size, &set));
1236:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, (int)min_sep_size));

1238:   PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
1239:   PetscCall(PetscOptionsInt("-mat_strumpack_compression_leaf_size", "Size of diagonal blocks in rank-structured approximation", "None", leaf_size, &leaf_size, &set));
1240:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, (int)leaf_size));

1242: #if defined(STRUMPACK_USE_ZFP)
1243:   PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
1244:   PetscCall(PetscOptionsInt("-mat_strumpack_compression_lossy_precision", "Number of bitplanes to use in lossy compression", "None", lossy_prec, &lossy_prec, &set));
1245:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, (int)lossy_prec));
1246: #endif

1248: #if defined(STRUMPACK_USE_BPACK)
1249:   PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
1250:   PetscCall(PetscOptionsInt("-mat_strumpack_compression_butterfly_levels", "Number of levels in the HODLR matrix for which to use butterfly compression", "None", bfly_lvls, &bfly_lvls, &set));
1251:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, (int)bfly_lvls));
1252: #endif

1254: #if defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL)
1255:   PetscStackCallExternalVoid("STRUMPACK_use_gpu", flg = (STRUMPACK_use_gpu(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1256:   PetscCall(PetscOptionsBool("-mat_strumpack_gpu", "Enable GPU acceleration (not supported for all compression types)", "None", flg, &flg, &set));
1257:   if (set) PetscCall(MatSTRUMPACKSetGPU(B, flg));
1258: #endif

1260:   PetscStackCallExternalVoid("STRUMPACK_matching", flg = (STRUMPACK_matching(*S) == STRUMPACK_MATCHING_NONE) ? PETSC_FALSE : PETSC_TRUE);
1261:   PetscCall(PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set));
1262:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, flg ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));

1264:   PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
1265:   PetscCall(PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set));
1266:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));

1268:   /* geometric ordering, for a regular 1D/2D/3D mesh in the natural ordering, */
1269:   /* with nc DOF's per gridpoint, and possibly a wider stencil                */
1270:   nrdims  = 3;
1271:   nxyz[0] = nxyz[1] = nxyz[2] = 1;
1272:   PetscCall(PetscOptionsIntArray("-mat_strumpack_geometric_xyz", "Mesh sizes nx,ny,nz (Use 1 for default)", "", nxyz, &nrdims, &set));
1273:   if (set) {
1274:     PetscCheck(nrdims == 1 || nrdims == 2 || nrdims == 3, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_OUTOFRANGE, "'-mat_strumpack_geometric_xyz' requires 1, 2, or 3 values.");
1275:     PetscCall(MatSTRUMPACKSetGeometricNxyz(B, (int)nxyz[0], (int)nxyz[1], (int)nxyz[2]));
1276:   }
1277:   PetscCall(PetscOptionsInt("-mat_strumpack_geometric_components", "Number of components per mesh point, for geometric nested dissection ordering", "None", 1, &nc, &set));
1278:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, (int)nc));
1279:   PetscCall(PetscOptionsInt("-mat_strumpack_geometric_width", "Width of the separator (for instance a 1D 3-point wide stencil needs a 1 point wide separator, a 1D 5-point stencil needs a 2 point wide separator), for geometric nested dissection ordering", "None", 1, &w, &set));
1280:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, (int)w));

1282:   PetscStackCallExternalVoid("STRUMPACK_use_METIS_NodeNDP", flg = (STRUMPACK_use_METIS_NodeNDP(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1283:   PetscCall(PetscOptionsBool("-mat_strumpack_metis_nodeNDP", "Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree", "None", flg, &flg, &set));
1284:   if (set) {
1285:     if (flg) {
1286:       PetscStackCallExternalVoid("STRUMPACK_enable_METIS_NodeNDP", STRUMPACK_enable_METIS_NodeNDP(*S));
1287:     } else {
1288:       PetscStackCallExternalVoid("STRUMPACK_disable_METIS_NodeNDP", STRUMPACK_disable_METIS_NodeNDP(*S));
1289:     }
1290:   }

1292:   /* Disable the outer iterative solver from STRUMPACK.                                       */
1293:   /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement.   */
1294:   /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling       */
1295:   /* low-rank compression), it will use its own preconditioned GMRES. Here we can disable     */
1296:   /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP.                   */
1297:   PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));

1299:   PetscOptionsEnd();

1301:   *F = B;
1302:   PetscFunctionReturn(PETSC_SUCCESS);
1303: }

1305: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
1306: {
1307:   PetscFunctionBegin;
1308:   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1309:   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1310:   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1311:   PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1312:   PetscFunctionReturn(PETSC_SUCCESS);
1313: }