Actual source code: itfunc.c

  1: /*
  2:       Interface KSP routines that the user calls.
  3: */

  5: #include <petsc/private/kspimpl.h>
  6: #include <petsc/private/matimpl.h>
  7: #include <petscdm.h>

  9: /* number of nested levels of KSPSetUp/Solve(). This is used to determine if KSP_DIVERGED_ITS should be fatal. */
 10: static PetscInt level = 0;

 12: static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
 13: {
 14:   PetscCall(PetscViewerPushFormat(viewer, format));
 15:   PetscCall(PetscObjectView(obj, viewer));
 16:   PetscCall(PetscViewerPopFormat(viewer));
 17:   return PETSC_SUCCESS;
 18: }

 20: /*@
 21:   KSPComputeExtremeSingularValues - Computes the extreme singular values
 22:   for the preconditioned operator. Called after or during `KSPSolve()`.

 24:   Not Collective

 26:   Input Parameter:
 27: . ksp - iterative context obtained from `KSPCreate()`

 29:   Output Parameters:
 30: + emax - maximum estimated singular value
 31: - emin - minimum estimated singular value

 33:   Options Database Key:
 34: . -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.

 36:   Level: advanced

 38:   Notes:
 39:   One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
 40:   (or use the option -ksp_view_singularvalues) in order for this routine to work correctly.

 42:   Many users may just want to use the monitoring routine
 43:   `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
 44:   to print the extreme singular values at each iteration of the linear solve.

 46:   Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
 47:   The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
 48:   intended for eigenanalysis. Consider the excellent package `SLEPc` if accurate values are required.

 50:   Disable restarts if using `KSPGMRES`, otherwise this estimate uses only the iterations performed after the last
 51:   restart. See `KSPGMRESSetRestart()` for more details.

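  Example Usage (a minimal sketch; assumes `A`, `b`, and `x` are an already assembled `Mat` and compatible `Vec`s):
.vb
  KSP       ksp;
  PetscReal emax, emin;

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSetComputeSingularValues(ksp, PETSC_TRUE)); /* must be set before KSPSetUp()/KSPSolve() */
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPComputeExtremeSingularValues(ksp, &emax, &emin));
  PetscCall(KSPDestroy(&ksp));
.ve
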
 53: .seealso: [](ch_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`
 54: @*/
 55: PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
 56: {
 57:   PetscFunctionBegin;
 59:   PetscAssertPointer(emax, 2);
 60:   PetscAssertPointer(emin, 3);
 61:   PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Singular values not requested before KSPSetUp()");

 63:   if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
 64:   else {
 65:     *emin = -1.0;
 66:     *emax = -1.0;
 67:   }
 68:   PetscFunctionReturn(PETSC_SUCCESS);
 69: }

 71: /*@
 72:   KSPComputeEigenvalues - Computes the extreme eigenvalues for the
 73:   preconditioned operator. Called after or during `KSPSolve()`.

 75:   Not Collective

 77:   Input Parameters:
 78: + ksp - iterative context obtained from `KSPCreate()`
 79: - n   - size of arrays r and c. The number of eigenvalues computed (neig) will, in
 80:        general, be less than this.

 82:   Output Parameters:
 83: + r    - real part of computed eigenvalues, provided by user with a dimension of at least n
 84: . c    - complex part of computed eigenvalues, provided by user with a dimension of at least n
 85: - neig - actual number of eigenvalues computed (will be less than or equal to n)

 87:   Options Database Key:
 88: . -ksp_view_eigenvalues - Prints eigenvalues to stdout

 90:   Level: advanced

 92:   Notes:
 93:   The number of eigenvalues estimated depends on the size of the Krylov space
 94:   generated during the `KSPSolve()`; for example, with
 95:   CG it corresponds to the number of CG iterations, while for GMRES it is the number
 96:   of GMRES iterations SINCE the last restart. Any extra space in r[] and c[]
 97:   will be ignored.

 99:   `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
100:   intended only for assistance in understanding the convergence of iterative
101:   methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
102:   the excellent package SLEPc.

104:   One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
105:   in order for this routine to work correctly.

107:   Many users may just want to use the monitoring routine
108:   `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
109:   to print the singular values at each iteration of the linear solve.

111:   `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.

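  Example Usage (a sketch; assumes `ksp`, `b`, and `x` are already set up and `n` bounds the Krylov space size used):
.vb
  PetscInt   n = 100, neig, i;
  PetscReal *r, *c;

  PetscCall(KSPSetComputeEigenvalues(ksp, PETSC_TRUE)); /* must be called before KSPSetUp() */
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(PetscMalloc2(n, &r, n, &c));
  PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
  for (i = 0; i < neig; i++) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%g + %gi\n", (double)r[i], (double)c[i]));
  PetscCall(PetscFree2(r, c));
.ve
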
113: .seealso: [](ch_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
114: @*/
115: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
116: {
117:   PetscFunctionBegin;
119:   if (n) PetscAssertPointer(r, 3);
120:   if (n) PetscAssertPointer(c, 4);
121:   PetscCheck(n >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Requested < 0 Eigenvalues");
122:   PetscAssertPointer(neig, 5);
123:   PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Eigenvalues not requested before KSPSetUp()");

125:   if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
126:   else *neig = 0;
127:   PetscFunctionReturn(PETSC_SUCCESS);
128: }

130: /*@
131:   KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the
132:   smallest or largest (harmonic) Ritz values in modulus, for the preconditioned operator.

134:   Not Collective

136:   Input Parameters:
137: + ksp   - iterative context obtained from `KSPCreate()`
138: . ritz  - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
139: - small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively

141:   Output Parameters:
142: + nrit  - on input, the number of (harmonic) Ritz pairs to compute; on output, the actual number of computed (harmonic) Ritz pairs
143: . S     - an array of the Ritz vectors, pass in an array of vectors of size nrit
144: . tetar - real part of the Ritz values, pass in an array of size nrit
145: - tetai - imaginary part of the Ritz values, pass in an array of size nrit

147:   Level: advanced

149:   Notes:
150:   This only works with a `KSPType` of `KSPGMRES`.

152:   One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.

154:   This routine must be called after `KSPSolve()`.

156:   In GMRES, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
157:   the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
158:   a restart (that is a complete GMRES cycle was never achieved).

160:   The number of (harmonic) Ritz pairs actually computed is less than or equal to the restart
161:   parameter for GMRES if a complete cycle has been performed, and otherwise less than or equal to the number of GMRES
162:   iterations.

164:   `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).

166:   For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
167:   the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
168:   vectors S are equal to the real and the imaginary parts of the associated vectors.
169:   When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
170:   values are still returned in tetar and tetai, as is done in `KSPComputeEigenvalues()`, but
171:   the Ritz vectors S are complex.

173:   The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.

175:   The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator; consider the
176:   excellent package `SLEPc` if accurate values are required.

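  Example Usage (a sketch; assumes a `KSPGMRES` solver and vectors `b`, `x`; the array sizes are illustrative):
.vb
  PetscInt  nrit = 10;
  Vec      *S;
  PetscReal tetar[10], tetai[10];

  PetscCall(KSPSetComputeRitz(ksp, PETSC_TRUE)); /* must be called before KSPSetUp() */
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(VecDuplicateVecs(x, nrit, &S));
  PetscCall(KSPComputeRitz(ksp, PETSC_TRUE, PETSC_TRUE, &nrit, S, tetar, tetai)); /* smallest Ritz pairs */
  PetscCall(VecDestroyVecs(10, &S));
.ve
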
178: .seealso: [](ch_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
179: @*/
180: PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
181: {
182:   PetscFunctionBegin;
184:   PetscCheck(ksp->calc_ritz, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Ritz pairs not requested before KSPSetUp()");
185:   PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
186:   PetscFunctionReturn(PETSC_SUCCESS);
187: }

188: /*@
189:   KSPSetUpOnBlocks - Sets up the preconditioner for each block in
190:   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
191:   methods.

193:   Collective

195:   Input Parameter:
196: . ksp - the `KSP` context

198:   Level: advanced

200:   Notes:
201:   `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
202:   more precise profiling (via -log_view) of the setup phase for these
203:   block preconditioners.  If the user does not call `KSPSetUpOnBlocks()`,
204:   it will automatically be called from within `KSPSolve()`.

206:   Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
207:   on the PC context within the `KSP` context.

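  Example Usage (a sketch of the optional explicit call; assumes `ksp` has its operators set and `b`, `x` are compatible vectors):
.vb
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp)); /* otherwise called automatically inside KSPSolve() */
  PetscCall(KSPSolve(ksp, b, x));
.ve
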
209: .seealso: [](ch_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
210: @*/
211: PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
212: {
213:   PC             pc;
214:   PCFailedReason pcreason;

216:   PetscFunctionBegin;
218:   level++;
219:   PetscCall(KSPGetPC(ksp, &pc));
220:   PetscCall(PCSetUpOnBlocks(pc));
221:   PetscCall(PCGetFailedReasonRank(pc, &pcreason));
222:   level--;
223:   /*
224:      This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
225:      this flag and initializing an appropriate vector with VecSetInf() so that the first norm computation can
226:      produce a result at KSPCheckNorm(), thus communicating the known problem to all MPI ranks so that they may
227:      terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual().
228:   */
229:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
230:   PetscFunctionReturn(PETSC_SUCCESS);
231: }

233: /*@
234:   KSPSetReusePreconditioner - reuse the current preconditioner; do not construct a new one even if the operator changes

236:   Collective

238:   Input Parameters:
239: + ksp  - iterative context obtained from `KSPCreate()`
240: - flag - `PETSC_TRUE` to reuse the current preconditioner

242:   Level: intermediate

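  Example Usage (a sketch; keeps the current preconditioner while the entries of `A` change between solves):
.vb
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPSetReusePreconditioner(ksp, PETSC_TRUE));
  /* ... modify the numerical values in A ... */
  PetscCall(KSPSolve(ksp, b, x)); /* the preconditioner is not rebuilt */
.ve
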
244: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
245: @*/
246: PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
247: {
248:   PC pc;

250:   PetscFunctionBegin;
252:   PetscCall(KSPGetPC(ksp, &pc));
253:   PetscCall(PCSetReusePreconditioner(pc, flag));
254:   PetscFunctionReturn(PETSC_SUCCESS);
255: }

257: /*@
258:   KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the operator in the preconditioner has changed.

260:   Collective

262:   Input Parameter:
263: . ksp - iterative context obtained from `KSPCreate()`

265:   Output Parameter:
266: . flag - the boolean flag

268:   Level: intermediate

270: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
271: @*/
272: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
273: {
274:   PetscFunctionBegin;
276:   PetscAssertPointer(flag, 2);
277:   *flag = PETSC_FALSE;
278:   if (ksp->pc) PetscCall(PCGetReusePreconditioner(ksp->pc, flag));
279:   PetscFunctionReturn(PETSC_SUCCESS);
280: }

282: /*@
283:   KSPSetSkipPCSetFromOptions - prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`. This is used if the same `PC` is shared by more than one `KSP`, so that its options are not reset repeatedly for each `KSP`.

285:   Collective

287:   Input Parameters:
288: + ksp  - iterative context obtained from `KSPCreate()`
289: - flag - `PETSC_TRUE` to skip calling `PCSetFromOptions()`

291:   Level: intermediate

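  Example Usage (a sketch; two `KSP` solvers sharing one `PC`):
.vb
  PC pc;

  PetscCall(KSPGetPC(ksp1, &pc));
  PetscCall(KSPSetPC(ksp2, pc));
  PetscCall(KSPSetSkipPCSetFromOptions(ksp2, PETSC_TRUE)); /* avoid reconfiguring the shared PC a second time */
  PetscCall(KSPSetFromOptions(ksp1));
  PetscCall(KSPSetFromOptions(ksp2));
.ve
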
293: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
294: @*/
295: PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
296: {
297:   PetscFunctionBegin;
299:   ksp->skippcsetfromoptions = flag;
300:   PetscFunctionReturn(PETSC_SUCCESS);
301: }

303: /*@
304:   KSPSetUp - Sets up the internal data structures for the
305:   later use of an iterative solver.

307:   Collective

309:   Input Parameter:
310: . ksp - iterative context obtained from `KSPCreate()`

312:   Level: developer

314: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`
315: @*/
316: PetscErrorCode KSPSetUp(KSP ksp)
317: {
318:   Mat            A, B;
319:   Mat            mat, pmat;
320:   MatNullSpace   nullsp;
321:   PCFailedReason pcreason;
322:   PC             pc;
323:   PetscBool      pcmpi;

325:   PetscFunctionBegin;
327:   PetscCall(KSPGetPC(ksp, &pc));
328:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCMPI, &pcmpi));
329:   if (pcmpi) {
330:     PetscBool ksppreonly;
331:     PetscCall(PetscObjectTypeCompare((PetscObject)ksp, KSPPREONLY, &ksppreonly));
332:     if (!ksppreonly) PetscCall(KSPSetType(ksp, KSPPREONLY));
333:   }
334:   level++;

336:   /* reset the convergence flag from the previous solves */
337:   ksp->reason = KSP_CONVERGED_ITERATING;

339:   if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES));
340:   PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));

342:   if (ksp->dmActive && !ksp->setupstage) {
343:     /* first time in so build matrix and vector data structures using DM */
344:     if (!ksp->vec_rhs) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs));
345:     if (!ksp->vec_sol) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_sol));
346:     PetscCall(DMCreateMatrix(ksp->dm, &A));
347:     PetscCall(KSPSetOperators(ksp, A, A));
348:     PetscCall(PetscObjectDereference((PetscObject)A));
349:   }

351:   if (ksp->dmActive) {
352:     DMKSP kdm;
353:     PetscCall(DMGetDMKSP(ksp->dm, &kdm));

355:     if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS) {
356:       /* only computes initial guess the first time through */
357:       PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
358:       PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
359:     }
360:     if (kdm->ops->computerhs) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));

362:     if (ksp->setupstage != KSP_SETUP_NEWRHS) {
363:       PetscCheck(kdm->ops->computeoperators, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp,PETSC_FALSE);");
364:       PetscCall(KSPGetOperators(ksp, &A, &B));
365:       PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
366:     }
367:   }

369:   if (ksp->setupstage == KSP_SETUP_NEWRHS) {
370:     level--;
371:     PetscFunctionReturn(PETSC_SUCCESS);
372:   }
373:   PetscCall(PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));

375:   switch (ksp->setupstage) {
376:   case KSP_SETUP_NEW:
377:     PetscUseTypeMethod(ksp, setup);
378:     break;
379:   case KSP_SETUP_NEWMATRIX: /* This should be replaced with a more general mechanism */
380:     if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
381:     break;
382:   default:
383:     break;
384:   }

386:   if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
387:   PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
388:   /* scale the matrix if requested */
389:   if (ksp->dscale) {
390:     PetscScalar *xx;
391:     PetscInt     i, n;
392:     PetscBool    zeroflag = PETSC_FALSE;

394:     if (!ksp->diagonal) { /* allocate vector to hold diagonal */
395:       PetscCall(MatCreateVecs(pmat, &ksp->diagonal, NULL));
396:     }
397:     PetscCall(MatGetDiagonal(pmat, ksp->diagonal));
398:     PetscCall(VecGetLocalSize(ksp->diagonal, &n));
399:     PetscCall(VecGetArray(ksp->diagonal, &xx));
400:     for (i = 0; i < n; i++) {
401:       if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
402:       else {
403:         xx[i]    = 1.0;
404:         zeroflag = PETSC_TRUE;
405:       }
406:     }
407:     PetscCall(VecRestoreArray(ksp->diagonal, &xx));
408:     if (zeroflag) PetscCall(PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n"));
409:     PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
410:     if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
411:     ksp->dscalefix2 = PETSC_FALSE;
412:   }
413:   PetscCall(PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
414:   PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
415:   PetscCall(PCSetUp(ksp->pc));
416:   PetscCall(PCGetFailedReasonRank(ksp->pc, &pcreason));
417:   /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; there is no code to handle a ksp->reason on only some ranks */
418:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;

420:   PetscCall(MatGetNullSpace(mat, &nullsp));
421:   if (nullsp) {
422:     PetscBool test = PETSC_FALSE;
423:     PetscCall(PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL));
424:     if (test) PetscCall(MatNullSpaceTest(nullsp, mat, NULL));
425:   }
426:   ksp->setupstage = KSP_SETUP_NEWRHS;
427:   level--;
428:   PetscFunctionReturn(PETSC_SUCCESS);
429: }

431: /*@C
432:   KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged to a viewer

434:   Collective

436:   Input Parameters:
437: + ksp    - iterative context obtained from `KSPCreate()`
438: - viewer - the viewer to display the reason

440:   Options Database Keys:
441: + -ksp_converged_reason          - print reason for converged or diverged, also prints number of iterations
442: - -ksp_converged_reason ::failed - only print reason and number of iterations when diverged

444:   Level: beginner

446:   Notes:
447:   To change the format of the output, call `PetscViewerPushFormat()` with the desired format before this call. Use `PETSC_VIEWER_DEFAULT` for the default;
448:   use `PETSC_VIEWER_FAILED` to display the reason only if the solve failed.

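  Example Usage (a sketch; prints the reason only when the solve failed):
.vb
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_FAILED));
  PetscCall(KSPConvergedReasonView(ksp, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
.ve
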
450: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
451:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
452: @*/
453: PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
454: {
455:   PetscBool         isAscii;
456:   PetscViewerFormat format;

458:   PetscFunctionBegin;
459:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
460:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
461:   if (isAscii) {
462:     PetscCall(PetscViewerGetFormat(viewer, &format));
463:     PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
464:     if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
465:       if (((PetscObject)ksp)->prefix) {
466:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
467:       } else {
468:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
469:       }
470:     } else if (ksp->reason <= 0) {
471:       if (((PetscObject)ksp)->prefix) {
472:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
473:       } else {
474:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
475:       }
476:       if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
477:         PCFailedReason reason;
478:         PetscCall(PCGetFailedReason(ksp->pc, &reason));
479:         PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]));
480:       }
481:     }
482:     PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
483:   }
484:   PetscFunctionReturn(PETSC_SUCCESS);
485: }

487: /*@C
488:   KSPConvergedReasonViewSet - Sets an ADDITIONAL function that is to be called at the
489:   end of the linear solve to display the convergence reason of the linear solver.

491:   Logically Collective

493:   Input Parameters:
494: + ksp               - the `KSP` context
495: . f                 - the ksp converged reason view function
496: . vctx              - [optional] user-defined context for private data for the
497:           ksp converged reason view routine (use `NULL` if no context is desired)
498: - reasonviewdestroy - [optional] routine that frees reasonview context
499:           (may be `NULL`)

501:   Options Database Keys:
502: + -ksp_converged_reason             - sets a default `KSPConvergedReasonView()`
503: - -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have
504:                             been hardwired into a code by
505:                             calls to `KSPConvergedReasonViewSet()`, but
506:                             does not cancel those set via
507:                             the options database.

509:   Level: intermediate

511:   Notes:
512:   Several different converged reason view routines may be set by calling
513:   `KSPConvergedReasonViewSet()` multiple times; all will be called in the
514:   order in which they were set.

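  Example Usage (a sketch; `MyReasonView` is a hypothetical user callback matching the required signature):
.vb
  static PetscErrorCode MyReasonView(KSP ksp, void *ctx)
  {
    KSPConvergedReason reason;

    PetscFunctionBeginUser;
    PetscCall(KSPGetConvergedReason(ksp, &reason));
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "custom reason view: %s\n", KSPConvergedReasons[reason]));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(KSPConvergedReasonViewSet(ksp, MyReasonView, NULL, NULL)); /* invoked at the end of each KSPSolve() */
.ve
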
516: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewCancel()`
517: @*/
518: PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, PetscErrorCode (*f)(KSP, void *), void *vctx, PetscErrorCode (*reasonviewdestroy)(void **))
519: {
520:   PetscInt  i;
521:   PetscBool identical;

523:   PetscFunctionBegin;
525:   for (i = 0; i < ksp->numberreasonviews; i++) {
526:     PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))f, vctx, reasonviewdestroy, (PetscErrorCode(*)(void))ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical));
527:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
528:   }
529:   PetscCheck(ksp->numberreasonviews < MAXKSPREASONVIEWS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP reasonview set");
530:   ksp->reasonview[ksp->numberreasonviews]          = f;
531:   ksp->reasonviewdestroy[ksp->numberreasonviews]   = reasonviewdestroy;
532:   ksp->reasonviewcontext[ksp->numberreasonviews++] = (void *)vctx;
533:   PetscFunctionReturn(PETSC_SUCCESS);
534: }

536: /*@
537:   KSPConvergedReasonViewCancel - Clears all the reasonview functions for a `KSP` object.

539:   Collective

541:   Input Parameter:
542: . ksp - iterative context obtained from `KSPCreate()`

544:   Level: intermediate

546: .seealso: [](ch_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`
547: @*/
548: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
549: {
550:   PetscInt i;

552:   PetscFunctionBegin;
554:   for (i = 0; i < ksp->numberreasonviews; i++) {
555:     if (ksp->reasonviewdestroy[i]) PetscCall((*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]));
556:   }
557:   ksp->numberreasonviews = 0;
558:   PetscFunctionReturn(PETSC_SUCCESS);
559: }

561: /*@
562:   KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how the `KSP` converged reason is to be viewed.

564:   Collective

566:   Input Parameter:
567: . ksp - the `KSP` object

569:   Level: intermediate

571: .seealso: [](ch_ksp), `KSPConvergedReasonView()`
572: @*/
573: PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
574: {
575:   PetscViewer       viewer;
576:   PetscBool         flg;
577:   PetscViewerFormat format;
578:   PetscInt          i;

580:   PetscFunctionBegin;

582:   /* Call all user-provided reason view routines */
583:   for (i = 0; i < ksp->numberreasonviews; i++) PetscCall((*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]));

585:   /* Call the default PETSc routine */
586:   PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ksp), ((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_converged_reason", &viewer, &format, &flg));
587:   if (flg) {
588:     PetscCall(PetscViewerPushFormat(viewer, format));
589:     PetscCall(KSPConvergedReasonView(ksp, viewer));
590:     PetscCall(PetscViewerPopFormat(viewer));
591:     PetscCall(PetscViewerDestroy(&viewer));
592:   }
593:   PetscFunctionReturn(PETSC_SUCCESS);
594: }

596: /*@C
597:   KSPConvergedRateView - Displays the reason a `KSP` solve converged or diverged and the estimated convergence rate to a viewer

599:   Collective

601:   Input Parameters:
602: + ksp    - iterative context obtained from `KSPCreate()`
603: - viewer - the viewer to display the reason

605:   Options Database Key:
606: . -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)

608:   Level: intermediate

610:   Notes:
611:   To change the format of the output, call `PetscViewerPushFormat()` before this call.

613:   Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
614:   the slope is $\log c$, so the convergence rate is recovered as $c = e^{\mathrm{slope}}$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$.

616:   References:
617: .  * -  `https://en.wikipedia.org/wiki/Coefficient_of_determination`

619: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
620: @*/
621: PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
622: {
623:   PetscViewerFormat format;
624:   PetscBool         isAscii;
625:   PetscReal         rrate, rRsq, erate = 0.0, eRsq = 0.0;
626:   PetscInt          its;
627:   const char       *prefix, *reason = KSPConvergedReasons[ksp->reason];

629:   PetscFunctionBegin;
630:   PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
631:   PetscCall(KSPGetIterationNumber(ksp, &its));
632:   PetscCall(KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq));
633:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
634:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
635:   if (isAscii) {
636:     PetscCall(PetscViewerGetFormat(viewer, &format));
637:     PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
638:     if (ksp->reason > 0) {
639:       if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its));
640:       else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its));
641:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
642:       if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
643:       if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
644:       PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
645:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
646:     } else if (ksp->reason <= 0) {
647:       if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its));
648:       else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its));
649:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
650:       if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
651:       if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
652:       PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
653:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
654:       if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
655:         PCFailedReason reason;
656:         PetscCall(PCGetFailedReason(ksp->pc, &reason));
657:         PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]));
658:       }
659:     }
660:     PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
661:   }
662:   PetscFunctionReturn(PETSC_SUCCESS);
663: }

665: #include <petscdraw.h>

667: static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
668: {
669:   PetscReal  *r, *c;
670:   PetscInt    n, i, neig;
671:   PetscBool   isascii, isdraw;
672:   PetscMPIInt rank;

674:   PetscFunctionBegin;
675:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank));
676:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
677:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
678:   if (isExplicit) {
679:     PetscCall(VecGetSize(ksp->vec_sol, &n));
680:     PetscCall(PetscMalloc2(n, &r, n, &c));
681:     PetscCall(KSPComputeEigenvaluesExplicitly(ksp, n, r, c));
682:     neig = n;
683:   } else {
684:     PetscInt nits;

686:     PetscCall(KSPGetIterationNumber(ksp, &nits));
687:     n = nits + 2;
688:     if (!nits) {
689:       PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n"));
690:       PetscFunctionReturn(PETSC_SUCCESS);
691:     }
692:     PetscCall(PetscMalloc2(n, &r, n, &c));
693:     PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
694:   }
695:   if (isascii) {
696:     PetscCall(PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively"));
697:     for (i = 0; i < neig; ++i) {
698:       if (c[i] >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]));
699:       else PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]));
700:     }
701:   } else if (isdraw && rank == 0) {
702:     PetscDraw   draw;
703:     PetscDrawSP drawsp;

705:     if (format == PETSC_VIEWER_DRAW_CONTOUR) {
706:       PetscCall(KSPPlotEigenContours_Private(ksp, neig, r, c));
707:     } else {
708:       PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
709:       PetscCall(PetscDrawSPCreate(draw, 1, &drawsp));
710:       PetscCall(PetscDrawSPReset(drawsp));
711:       for (i = 0; i < neig; ++i) PetscCall(PetscDrawSPAddPoint(drawsp, r + i, c + i));
712:       PetscCall(PetscDrawSPDraw(drawsp, PETSC_TRUE));
713:       PetscCall(PetscDrawSPSave(drawsp));
714:       PetscCall(PetscDrawSPDestroy(&drawsp));
715:     }
716:   }
717:   PetscCall(PetscFree2(r, c));
718:   PetscFunctionReturn(PETSC_SUCCESS);
719: }

721: static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
722: {
723:   PetscReal smax, smin;
724:   PetscInt  nits;
725:   PetscBool isascii;

727:   PetscFunctionBegin;
728:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
729:   PetscCall(KSPGetIterationNumber(ksp, &nits));
730:   if (!nits) {
731:     PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n"));
732:     PetscFunctionReturn(PETSC_SUCCESS);
733:   }
734:   PetscCall(KSPComputeExtremeSingularValues(ksp, &smax, &smin));
735:   if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme singular values: max %g min %g max/min %g\n", (double)smax, (double)smin, (double)(smax / smin)));
736:   PetscFunctionReturn(PETSC_SUCCESS);
737: }

739: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
740: {
741:   PetscBool isascii;

743:   PetscFunctionBegin;
744:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
745:   PetscCheck(!ksp->dscale || ksp->dscalefix, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Cannot compute final residual with -ksp_diagonal_scale unless -ksp_diagonal_scale_fix is also used");
746:   if (isascii) {
747:     Mat       A;
748:     Vec       t;
749:     PetscReal norm;

751:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
752:     PetscCall(VecDuplicate(ksp->vec_rhs, &t));
753:     PetscCall(KSP_MatMult(ksp, A, ksp->vec_sol, t));
754:     PetscCall(VecAYPX(t, -1.0, ksp->vec_rhs));
755:     PetscCall(VecViewFromOptions(t, (PetscObject)ksp, "-ksp_view_final_residual_vec"));
756:     PetscCall(VecNorm(t, NORM_2, &norm));
757:     PetscCall(VecDestroy(&t));
758:     PetscCall(PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm));
759:   }
760:   PetscFunctionReturn(PETSC_SUCCESS);
761: }

763: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
764: {
765:   PetscInt i;

767:   PetscFunctionBegin;
768:   if (!ksp->pauseFinal) PetscFunctionReturn(PETSC_SUCCESS);
769:   for (i = 0; i < ksp->numbermonitors; ++i) {
770:     PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ksp->monitorcontext[i];
771:     PetscDraw             draw;
772:     PetscReal             lpause;

774:     if (!vf) continue;
775:     if (vf->lg) {
776:       if (!PetscCheckPointer(vf->lg, PETSC_OBJECT)) continue;
777:       if (((PetscObject)vf->lg)->classid != PETSC_DRAWLG_CLASSID) continue;
778:       PetscCall(PetscDrawLGGetDraw(vf->lg, &draw));
779:       PetscCall(PetscDrawGetPause(draw, &lpause));
780:       PetscCall(PetscDrawSetPause(draw, -1.0));
781:       PetscCall(PetscDrawPause(draw));
782:       PetscCall(PetscDrawSetPause(draw, lpause));
783:     } else {
784:       PetscBool isdraw;

786:       if (!PetscCheckPointer(vf->viewer, PETSC_OBJECT)) continue;
787:       if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
788:       PetscCall(PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw));
789:       if (!isdraw) continue;
790:       PetscCall(PetscViewerDrawGetDraw(vf->viewer, 0, &draw));
791:       PetscCall(PetscDrawGetPause(draw, &lpause));
792:       PetscCall(PetscDrawSetPause(draw, -1.0));
793:       PetscCall(PetscDrawPause(draw));
794:       PetscCall(PetscDrawSetPause(draw, lpause));
795:     }
796:   }
797:   PetscFunctionReturn(PETSC_SUCCESS);
798: }

800: static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
801: {
802:   PetscBool    flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
803:   Mat          mat, pmat;
804:   MPI_Comm     comm;
805:   MatNullSpace nullsp;
806:   Vec          btmp, vec_rhs = NULL;

808:   PetscFunctionBegin;
809:   level++;
810:   comm = PetscObjectComm((PetscObject)ksp);
811:   if (x && x == b) {
812:     PetscCheck(ksp->guess_zero, comm, PETSC_ERR_ARG_INCOMP, "Cannot use x == b with nonzero initial guess");
813:     PetscCall(VecDuplicate(b, &x));
814:     inXisinB = PETSC_TRUE;
815:   }
816:   if (b) {
817:     PetscCall(PetscObjectReference((PetscObject)b));
818:     PetscCall(VecDestroy(&ksp->vec_rhs));
819:     ksp->vec_rhs = b;
820:   }
821:   if (x) {
822:     PetscCall(PetscObjectReference((PetscObject)x));
823:     PetscCall(VecDestroy(&ksp->vec_sol));
824:     ksp->vec_sol = x;
825:   }

827:   if (ksp->viewPre) PetscCall(ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre));

829:   if (ksp->presolve) PetscCall((*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx));

831:   /* reset the residual history list if requested */
832:   if (ksp->res_hist_reset) ksp->res_hist_len = 0;
833:   if (ksp->err_hist_reset) ksp->err_hist_len = 0;

835:   /* KSPSetUp() scales the matrix if needed */
836:   PetscCall(KSPSetUp(ksp));
837:   PetscCall(KSPSetUpOnBlocks(ksp));

839:   if (ksp->guess) {
840:     PetscObjectState ostate, state;

842:     PetscCall(KSPGuessSetUp(ksp->guess));
843:     PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate));
844:     PetscCall(KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
845:     PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &state));
846:     if (state != ostate) {
847:       ksp->guess_zero = PETSC_FALSE;
848:     } else {
849:       PetscCall(PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n"));
850:       ksp->guess_zero = PETSC_TRUE;
851:     }
852:   }

854:   PetscCall(VecSetErrorIfLocked(ksp->vec_sol, 3));

856:   PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
857:   PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
858:   /* diagonal scale RHS if called for */
859:   if (ksp->dscale) {
860:     PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
861:     /* second time in, but matrix was scaled back to original */
862:     if (ksp->dscalefix && ksp->dscalefix2) {
863:       Mat mat, pmat;

865:       PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
866:       PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
867:       if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
868:     }

870:     /* scale initial guess */
871:     if (!ksp->guess_zero) {
872:       if (!ksp->truediagonal) {
873:         PetscCall(VecDuplicate(ksp->diagonal, &ksp->truediagonal));
874:         PetscCall(VecCopy(ksp->diagonal, ksp->truediagonal));
875:         PetscCall(VecReciprocal(ksp->truediagonal));
876:       }
877:       PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal));
878:     }
879:   }
880:   PetscCall(PCPreSolve(ksp->pc, ksp));

882:   if (ksp->guess_zero && !ksp->guess_not_read) PetscCall(VecSet(ksp->vec_sol, 0.0));
883:   if (ksp->guess_knoll) { /* The Knoll trick is independent of the KSPGuess specified */
884:     PetscCall(PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol));
885:     PetscCall(KSP_RemoveNullSpace(ksp, ksp->vec_sol));
886:     ksp->guess_zero = PETSC_FALSE;
887:   }

889:   /* can we mark the initial guess as zero for this solve? */
890:   guess_zero = ksp->guess_zero;
891:   if (!ksp->guess_zero) {
892:     PetscReal norm;

894:     PetscCall(VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm));
895:     if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
896:   }
897:   if (ksp->transpose_solve) {
898:     PetscCall(MatGetNullSpace(pmat, &nullsp));
899:   } else {
900:     PetscCall(MatGetTransposeNullSpace(pmat, &nullsp));
901:   }
902:   if (nullsp) {
903:     PetscCall(VecDuplicate(ksp->vec_rhs, &btmp));
904:     PetscCall(VecCopy(ksp->vec_rhs, btmp));
905:     PetscCall(MatNullSpaceRemove(nullsp, btmp));
906:     vec_rhs      = ksp->vec_rhs;
907:     ksp->vec_rhs = btmp;
908:   }
909:   PetscCall(VecLockReadPush(ksp->vec_rhs));
910:   PetscUseTypeMethod(ksp, solve);
911:   PetscCall(KSPMonitorPauseFinal_Internal(ksp));

913:   PetscCall(VecLockReadPop(ksp->vec_rhs));
914:   if (nullsp) {
915:     ksp->vec_rhs = vec_rhs;
916:     PetscCall(VecDestroy(&btmp));
917:   }

919:   ksp->guess_zero = guess_zero;

921:   PetscCheck(ksp->reason, comm, PETSC_ERR_PLIB, "Internal error, solver returned without setting converged reason");
922:   ksp->totalits += ksp->its;

924:   PetscCall(KSPConvergedReasonViewFromOptions(ksp));

926:   if (ksp->viewRate) {
927:     PetscCall(PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate));
928:     PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
929:     PetscCall(PetscViewerPopFormat(ksp->viewerRate));
930:   }
931:   PetscCall(PCPostSolve(ksp->pc, ksp));

933:   /* diagonal scale solution if called for */
934:   if (ksp->dscale) {
935:     PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal));
936:     /* unscale right hand side and matrix */
937:     if (ksp->dscalefix) {
938:       Mat mat, pmat;

940:       PetscCall(VecReciprocal(ksp->diagonal));
941:       PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
942:       PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
943:       PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
944:       if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
945:       PetscCall(VecReciprocal(ksp->diagonal));
946:       ksp->dscalefix2 = PETSC_TRUE;
947:     }
948:   }
949:   PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
950:   if (ksp->guess) PetscCall(KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
951:   if (ksp->postsolve) PetscCall((*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx));

953:   PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
954:   if (ksp->viewEV) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV));
955:   if (ksp->viewEVExp) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp));
956:   if (ksp->viewSV) PetscCall(KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV));
957:   if (ksp->viewFinalRes) PetscCall(KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes));
958:   if (ksp->viewMat) PetscCall(ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat));
959:   if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat));
960:   if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs));
961:   if (ksp->viewSol) PetscCall(ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol));
962:   if (ksp->view) PetscCall(ObjectView((PetscObject)ksp, ksp->viewer, ksp->format));
963:   if (ksp->viewDScale) PetscCall(ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale));
964:   if (ksp->viewMatExp) {
965:     Mat A, B;

967:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
968:     if (ksp->transpose_solve) {
969:       Mat AT;

971:       PetscCall(MatCreateTranspose(A, &AT));
972:       PetscCall(MatComputeOperator(AT, MATAIJ, &B));
973:       PetscCall(MatDestroy(&AT));
974:     } else {
975:       PetscCall(MatComputeOperator(A, MATAIJ, &B));
976:     }
977:     PetscCall(ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp));
978:     PetscCall(MatDestroy(&B));
979:   }
980:   if (ksp->viewPOpExp) {
981:     Mat B;

983:     PetscCall(KSPComputeOperator(ksp, MATAIJ, &B));
984:     PetscCall(ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp));
985:     PetscCall(MatDestroy(&B));
986:   }

988:   if (inXisinB) {
989:     PetscCall(VecCopy(x, b));
990:     PetscCall(VecDestroy(&x));
991:   }
992:   PetscCall(PetscObjectSAWsBlock((PetscObject)ksp));
993:   if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
994:     PCFailedReason reason;

996:     PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
997:     PetscCall(PCGetFailedReason(ksp->pc, &reason));
998:     SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
999:   }
1000:   level--;
1001:   PetscFunctionReturn(PETSC_SUCCESS);
1002: }

1004: /*@
1005:   KSPSolve - Solves a linear system.

1007:   Collective

1009:   Input Parameters:
1010: + ksp - iterative context obtained from `KSPCreate()`
1011: . b   - the right hand side vector
1012: - x   - the solution (this may be the same vector as b, then b will be overwritten with answer)

1014:   Options Database Keys:
1015: + -ksp_view_eigenvalues                      - compute the eigenvalues of the preconditioned operator
1016: . -ksp_view_eigenvalues_explicit             - compute the eigenvalues by forming the dense operator and using LAPACK
1017: . -ksp_view_mat binary                       - save matrix to the default binary viewer
1018: . -ksp_view_pmat binary                      - save matrix used to build preconditioner to the default binary viewer
1019: . -ksp_view_rhs binary                       - save right hand side vector to the default binary viewer
1020: . -ksp_view_solution binary                  - save computed solution vector to the default binary viewer
1021:            (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
1022: . -ksp_view_mat_explicit                     - for matrix-free operators, computes the matrix entries and views them
1023: . -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
1024: . -ksp_converged_reason                      - print reason for converged or diverged, also prints number of iterations
1025: . -ksp_view_final_residual                   - print 2-norm of true linear system residual at the end of the solution process
1026: . -ksp_error_if_not_converged                - stop the program as soon as an error is detected in a `KSPSolve()`
1027: - -ksp_view                                  - print the ksp data structure at the end of the system solution

1029:   Level: beginner

1031:   Notes:
1032:   If one uses `KSPSetDM()` then x or b need not be passed. Use `KSPGetSolution()` to access the solution in this case.

1034:   The operator is specified with `KSPSetOperators()`.

1036:   `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or whether constructing the preconditioner failed.
1037:   Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or the function `KSPSetErrorIfNotConverged()`
1038:   will cause `KSPSolve()` to error as soon as an error occurs in the linear solver.  In inner `KSPSolve()` calls, `KSP_DIVERGED_ITS` is not treated as an error because when using nested solvers
1039:   it may be fine that inner solvers in the preconditioner do not converge during the solution process.

1041:   The number of iterations can be obtained from `KSPGetIterationNumber()`.

1043:   If you provide a matrix for which `MatSetNullSpace()` and `MatSetTransposeNullSpace()` have been called, this will use that information to solve singular systems
1044:   in the least squares sense with a norm-minimizing solution.

1046:   A x = b   where b = b_p + b_t and b_t is not in the range of A (and hence, by the fundamental theorem of linear algebra, is in the nullspace(A')); see `MatSetNullSpace()`.

1048:   `KSP` first removes b_t, producing the linear system  A x = b_p (which has multiple solutions), and solves this to find the ||x||-minimizing solution (and hence
1049:   it finds the solution x orthogonal to the nullspace(A)). The algorithm is simple: in each iteration of the Krylov method the nullspace(A) is removed from the search
1050:   direction, and thus the solution, which is a linear combination of the search directions, has no component in the nullspace(A).

1052:   We recommend always using `KSPGMRES` for such singular systems.
1053:   If nullspace(A) = nullspace(A') (note that symmetric matrices always satisfy this property) then both left and right preconditioning will work.
1054:   If nullspace(A) != nullspace(A') then left preconditioning will work but right preconditioning may not work (or it may).

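  A sketch of providing the null space for such a singular system (assumes `nullvec` is a normalized `Vec` spanning nullspace(A)):
.vb
  MatNullSpace nullsp;

  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_FALSE, 1, &nullvec, &nullsp));
  PetscCall(MatSetNullSpace(A, nullsp));
  PetscCall(MatNullSpaceDestroy(&nullsp));
  PetscCall(KSPSolve(ksp, b, x)); /* b_t is removed and the minimum-norm solution is returned */
.ve
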
1056:   Developer Notes:
1057:   The reason we cannot always solve  nullspace(A) != nullspace(A') systems with right preconditioning is that we need to remove at each iteration
1058:   the nullspace(AB) from the search direction. While we know the nullspace(A), the nullspace(AB) equals B^-1 times the nullspace(A), but except for trivial preconditioners
1059:   such as diagonal scaling we cannot apply the inverse of the preconditioner to a vector and thus cannot compute the nullspace(AB).

1061:   If using a direct method (e.g., via the `KSP` solver
1062:   `KSPPREONLY` and a preconditioner such as `PCLU` or `PCILU`),
1063:   then its=1.  See `KSPSetTolerances()` and `KSPConvergedDefault()`
1064:   for more details.

1066:   Understanding Convergence\:
1067:   The routines `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1068:   `KSPComputeEigenvaluesExplicitly()` provide information on additional
1069:   options to monitor convergence and print eigenvalue information.

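  Example Usage (a minimal sketch; assumes `A`, `b`, and `x` are an assembled `Mat` and compatible `Vec`s on `PETSC_COMM_WORLD`):
.vb
  KSP                ksp;
  KSPConvergedReason reason;

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPGetConvergedReason(ksp, &reason)); /* by default KSPSolve() does not error on non-convergence */
  PetscCall(KSPDestroy(&ksp));
.ve
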
1071: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1072:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1073:           `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1074: @*/
1075: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1076: {
1077:   PetscFunctionBegin;
1081:   ksp->transpose_solve = PETSC_FALSE;
1082:   PetscCall(KSPSolve_Private(ksp, b, x));
1083:   PetscFunctionReturn(PETSC_SUCCESS);
1084: }

1086: /*@
1087:   KSPSolveTranspose - Solves a linear system with the transposed matrix.

1089:   Collective

1091:   Input Parameters:
1092: + ksp - iterative context obtained from `KSPCreate()`
1093: . b   - right hand side vector
1094: - x   - solution vector

1096:   Level: developer

1098:   Note:
1099:   For complex numbers this solves the non-Hermitian transposed system.

1101:   Developer Notes:
1102:   We need to implement a `KSPSolveHermitianTranspose()`

1104: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1105:           `KSPSolve()`, `KSP`
1106: @*/
1107: PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
1108: {
1109:   PetscFunctionBegin;
1113:   if (ksp->transpose.use_explicittranspose) {
1114:     Mat J, Jpre;
1115:     PetscCall(KSPGetOperators(ksp, &J, &Jpre));
1116:     if (!ksp->transpose.reuse_transpose) {
1117:       PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
1118:       if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
1119:       ksp->transpose.reuse_transpose = PETSC_TRUE;
1120:     } else {
1121:       PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
1122:       if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
1123:     }
1124:     if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1125:       PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
1126:       ksp->transpose.BT = ksp->transpose.AT;
1127:     }
1128:     PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
1129:   } else {
1130:     ksp->transpose_solve = PETSC_TRUE;
1131:   }
1132:   PetscCall(KSPSolve_Private(ksp, b, x));
1133:   PetscFunctionReturn(PETSC_SUCCESS);
1134: }

1136: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1137: {
1138:   Mat        A, R;
1139:   PetscReal *norms;
1140:   PetscInt   i, N;
1141:   PetscBool  flg;

1143:   PetscFunctionBegin;
1144:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg));
1145:   if (flg) {
1146:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
1147:     if (!ksp->transpose_solve) PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &R));
1148:     else PetscCall(MatTransposeMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &R));
1149:     PetscCall(MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN));
1150:     PetscCall(MatGetSize(R, NULL, &N));
1151:     PetscCall(PetscMalloc1(N, &norms));
1152:     PetscCall(MatGetColumnNorms(R, NORM_2, norms));
1153:     PetscCall(MatDestroy(&R));
1154:     for (i = 0; i < N; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : "                          ", shift + i, (double)norms[i]));
1155:     PetscCall(PetscFree(norms));
1156:   }
1157:   PetscFunctionReturn(PETSC_SUCCESS);
1158: }

1160: static PetscErrorCode KSPMatSolve_Private(KSP ksp, Mat B, Mat X)
1161: {
1162:   Mat       A, P, vB, vX;
1163:   Vec       cb, cx;
1164:   PetscInt  n1, N1, n2, N2, Bbn = PETSC_DECIDE;
1165:   PetscBool match;

1167:   PetscFunctionBegin;
1171:   PetscCheckSameComm(ksp, 1, B, 2);
1172:   PetscCheckSameComm(ksp, 1, X, 3);
1173:   PetscCheckSameType(B, 2, X, 3);
1174:   PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
1175:   MatCheckPreallocated(X, 3);
1176:   if (!X->assembled) {
1177:     PetscCall(MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
1178:     PetscCall(MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY));
1179:     PetscCall(MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY));
1180:   }
1181:   PetscCheck(B != X, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_IDN, "B and X must be different matrices");
1182:   PetscCheck(!ksp->transpose_solve || !ksp->transpose.use_explicittranspose, PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "KSPMatSolveTranspose() does not support -ksp_use_explicittranspose");
1183:   PetscCall(KSPGetOperators(ksp, &A, &P));
1184:   PetscCall(MatGetLocalSize(B, NULL, &n2));
1185:   PetscCall(MatGetLocalSize(X, NULL, &n1));
1186:   PetscCall(MatGetSize(B, NULL, &N2));
1187:   PetscCall(MatGetSize(X, NULL, &N1));
1188:   PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of right-hand sides (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of solutions (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
1189:   PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, ""));
1190:   PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of right-hand sides not stored in a dense Mat");
1191:   PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
1192:   PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of solutions not stored in a dense Mat");
1193:   PetscCall(KSPSetUp(ksp));
1194:   PetscCall(KSPSetUpOnBlocks(ksp));
1195:   if (ksp->ops->matsolve) {
1196:     level++;
1197:     if (ksp->guess_zero) PetscCall(MatZeroEntries(X));
1198:     PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
1199:     PetscCall(KSPGetMatSolveBatchSize(ksp, &Bbn));
1200:     /* by default, do a single solve with all columns */
1201:     if (Bbn == PETSC_DECIDE) Bbn = N2;
1202:     else PetscCheck(Bbn >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "KSPMatSolve() batch size %" PetscInt_FMT " must be positive", Bbn);
1203:     PetscCall(PetscInfo(ksp, "KSP type %s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, Bbn));
1204:     /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
1205:     if (Bbn >= N2) {
1206:       PetscUseTypeMethod(ksp, matsolve, B, X);
1207:       if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0));

1209:       PetscCall(KSPConvergedReasonViewFromOptions(ksp));

1211:       if (ksp->viewRate) {
1212:         PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1213:         PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1214:         PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1215:       }
1216:     } else {
1217:       for (n2 = 0; n2 < N2; n2 += Bbn) {
1218:         PetscCall(MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB));
1219:         PetscCall(MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX));
1220:         PetscUseTypeMethod(ksp, matsolve, vB, vX);
1221:         if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2));

1223:         PetscCall(KSPConvergedReasonViewFromOptions(ksp));

1225:         if (ksp->viewRate) {
1226:           PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1227:           PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1228:           PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1229:         }
1230:         PetscCall(MatDenseRestoreSubMatrix(B, &vB));
1231:         PetscCall(MatDenseRestoreSubMatrix(X, &vX));
1232:       }
1233:     }
1234:     if (ksp->viewMat) PetscCall(ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat));
1235:     if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat));
1236:     if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs));
1237:     if (ksp->viewSol) PetscCall(ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol));
1238:     if (ksp->view) PetscCall(KSPView(ksp, ksp->viewer));
1239:     PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
1240:     if (ksp->errorifnotconverged && ksp->reason < 0 && (level == 1 || ksp->reason != KSP_DIVERGED_ITS)) {
1241:       PCFailedReason reason;

1243:       PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
1244:       PetscCall(PCGetFailedReason(ksp->pc, &reason));
1245:       SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
1246:     }
1247:     level--;
1248:   } else {
1249:     PetscCall(PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name));
1250:     for (n2 = 0; n2 < N2; ++n2) {
1251:       PetscCall(MatDenseGetColumnVecRead(B, n2, &cb));
1252:       PetscCall(MatDenseGetColumnVecWrite(X, n2, &cx));
1253:       PetscCall(KSPSolve_Private(ksp, cb, cx));
1254:       PetscCall(MatDenseRestoreColumnVecWrite(X, n2, &cx));
1255:       PetscCall(MatDenseRestoreColumnVecRead(B, n2, &cb));
1256:     }
1257:   }
1258:   PetscFunctionReturn(PETSC_SUCCESS);
1259: }

1261: /*@
1262:   KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a `MATDENSE`. Unlike `KSPSolve()`, `B` and `X` must be different matrices.

1264:   Input Parameters:
1265: + ksp - iterative context
1266: - B   - block of right-hand sides

1268:   Output Parameter:
1269: . X - block of solutions

1271:   Level: intermediate

1273:   Note:
1274:   This is a stripped-down version of `KSPSolve()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.
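
  For example, a minimal sketch of a multiple right-hand side solve; the communicator `comm`, the local/global row sizes `n`/`N`,
  and the number of right-hand sides `nrhs` are assumed to be defined by the caller, and the operators of `ksp` to be already set:
.vb
  Mat B, X;
  // right-hand sides and solutions are stored as dense matrices with nrhs columns
  MatCreateDense(comm, n, PETSC_DECIDE, N, nrhs, NULL, &B);
  MatCreateDense(comm, n, PETSC_DECIDE, N, nrhs, NULL, &X);
  // ... fill and assemble the columns of B ...
  KSPMatSolve(ksp, B, X);
  MatDestroy(&B);
  MatDestroy(&X);
.ve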

1276: .seealso: [](ch_ksp), `KSPSolve()`, `MatMatSolve()`, `KSPMatSolveTranspose()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1277: @*/
1278: PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
1279: {
1280:   PetscFunctionBegin;
1281:   ksp->transpose_solve = PETSC_FALSE;
1282:   PetscCall(KSPMatSolve_Private(ksp, B, X));
1283:   PetscFunctionReturn(PETSC_SUCCESS);
1284: }

1286: /*@
1287:   KSPMatSolveTranspose - Solves a linear system with the transposed matrix with multiple right-hand sides stored as a `MATDENSE`. Unlike `KSPSolveTranspose()`, `B` and `X` must be different matrices and the transposed matrix cannot be explicitly assembled on the user's behalf (`-ksp_use_explicittranspose` is not supported).

1289:   Input Parameters:
1290: + ksp - iterative context
1291: - B   - block of right-hand sides

1293:   Output Parameter:
1294: . X - block of solutions

1296:   Level: intermediate

1298:   Notes:
1299:   This is a stripped-down version of `KSPSolveTranspose()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.

1301: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `MatMatTransposeSolve()`, `KSPMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1302: @*/
1303: PetscErrorCode KSPMatSolveTranspose(KSP ksp, Mat B, Mat X)
1304: {
1305:   PetscFunctionBegin;
1306:   ksp->transpose_solve = PETSC_TRUE;
1307:   PetscCall(KSPMatSolve_Private(ksp, B, X));
1308:   PetscFunctionReturn(PETSC_SUCCESS);
1309: }

1311: /*@
1312:   KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1314:   Logically Collective

1316:   Input Parameters:
1317: + ksp - iterative context
1318: - bs  - batch size

1320:   Level: advanced
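
  Note:
  A minimal sketch (the value 32 is only an illustration); with this setting `KSPMatSolve()` processes the columns of its
  right-hand side matrix in batches of at most 32:
.vb
  KSPSetMatSolveBatchSize(ksp, 32);
  KSPMatSolve(ksp, B, X);
.ve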

1322: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1323: @*/
1324: PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
1325: {
1326:   PetscFunctionBegin;
1329:   ksp->nmax = bs;
1330:   PetscFunctionReturn(PETSC_SUCCESS);
1331: }

1333: /*@
1334:   KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1336:   Input Parameter:
1337: . ksp - iterative context

1339:   Output Parameter:
1340: . bs - batch size

1342:   Level: advanced

1344: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1345: @*/
1346: PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
1347: {
1348:   PetscFunctionBegin;
1350:   PetscAssertPointer(bs, 2);
1351:   *bs = ksp->nmax;
1352:   PetscFunctionReturn(PETSC_SUCCESS);
1353: }

1355: /*@
1356:   KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`

1358:   Collective

1360:   Input Parameter:
1361: . ksp - iterative context obtained from `KSPCreate()`

1363:   Level: beginner

1365: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1366: @*/
1367: PetscErrorCode KSPResetViewers(KSP ksp)
1368: {
1369:   PetscFunctionBegin;
1371:   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1372:   PetscCall(PetscViewerDestroy(&ksp->viewer));
1373:   PetscCall(PetscViewerDestroy(&ksp->viewerPre));
1374:   PetscCall(PetscViewerDestroy(&ksp->viewerRate));
1375:   PetscCall(PetscViewerDestroy(&ksp->viewerMat));
1376:   PetscCall(PetscViewerDestroy(&ksp->viewerPMat));
1377:   PetscCall(PetscViewerDestroy(&ksp->viewerRhs));
1378:   PetscCall(PetscViewerDestroy(&ksp->viewerSol));
1379:   PetscCall(PetscViewerDestroy(&ksp->viewerMatExp));
1380:   PetscCall(PetscViewerDestroy(&ksp->viewerEV));
1381:   PetscCall(PetscViewerDestroy(&ksp->viewerSV));
1382:   PetscCall(PetscViewerDestroy(&ksp->viewerEVExp));
1383:   PetscCall(PetscViewerDestroy(&ksp->viewerFinalRes));
1384:   PetscCall(PetscViewerDestroy(&ksp->viewerPOpExp));
1385:   PetscCall(PetscViewerDestroy(&ksp->viewerDScale));
1386:   ksp->view         = PETSC_FALSE;
1387:   ksp->viewPre      = PETSC_FALSE;
1388:   ksp->viewMat      = PETSC_FALSE;
1389:   ksp->viewPMat     = PETSC_FALSE;
1390:   ksp->viewRhs      = PETSC_FALSE;
1391:   ksp->viewSol      = PETSC_FALSE;
1392:   ksp->viewMatExp   = PETSC_FALSE;
1393:   ksp->viewEV       = PETSC_FALSE;
1394:   ksp->viewSV       = PETSC_FALSE;
1395:   ksp->viewEVExp    = PETSC_FALSE;
1396:   ksp->viewFinalRes = PETSC_FALSE;
1397:   ksp->viewPOpExp   = PETSC_FALSE;
1398:   ksp->viewDScale   = PETSC_FALSE;
1399:   PetscFunctionReturn(PETSC_SUCCESS);
1400: }

1402: /*@
1403:   KSPReset - Resets a `KSP` context to its pre-`KSPSetUp()` state and removes any allocated Vecs and Mats

1405:   Collective

1407:   Input Parameter:
1408: . ksp - iterative context obtained from `KSPCreate()`

1410:   Level: beginner

1412: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1413: @*/
1414: PetscErrorCode KSPReset(KSP ksp)
1415: {
1416:   PetscFunctionBegin;
1418:   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1419:   PetscTryTypeMethod(ksp, reset);
1420:   if (ksp->pc) PetscCall(PCReset(ksp->pc));
1421:   if (ksp->guess) {
1422:     KSPGuess guess = ksp->guess;
1423:     PetscTryTypeMethod(guess, reset);
1424:   }
1425:   PetscCall(VecDestroyVecs(ksp->nwork, &ksp->work));
1426:   PetscCall(VecDestroy(&ksp->vec_rhs));
1427:   PetscCall(VecDestroy(&ksp->vec_sol));
1428:   PetscCall(VecDestroy(&ksp->diagonal));
1429:   PetscCall(VecDestroy(&ksp->truediagonal));

1431:   PetscCall(KSPResetViewers(ksp));

1433:   ksp->setupstage = KSP_SETUP_NEW;
1434:   ksp->nmax       = PETSC_DECIDE;
1435:   PetscFunctionReturn(PETSC_SUCCESS);
1436: }

1438: /*@C
1439:   KSPDestroy - Destroys a `KSP` context.

1441:   Collective

1443:   Input Parameter:
1444: . ksp - iterative context obtained from `KSPCreate()`

1446:   Level: beginner

1448: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1449: @*/
1450: PetscErrorCode KSPDestroy(KSP *ksp)
1451: {
1452:   PC pc;

1454:   PetscFunctionBegin;
1455:   if (!*ksp) PetscFunctionReturn(PETSC_SUCCESS);
1457:   if (--((PetscObject)(*ksp))->refct > 0) {
1458:     *ksp = NULL;
1459:     PetscFunctionReturn(PETSC_SUCCESS);
1460:   }

1462:   PetscCall(PetscObjectSAWsViewOff((PetscObject)*ksp));

1464:   /*
1465:    Avoid a cascading call to PCReset(ksp->pc) from the KSPReset() call below:
1466:    PCReset() shouldn't be called from KSPDestroy() since the pc may be shared by other KSPs and
1467:    is not protected here by its reference count.
1468:    */
1469:   pc         = (*ksp)->pc;
1470:   (*ksp)->pc = NULL;
1471:   PetscCall(KSPReset((*ksp)));
1472:   (*ksp)->pc = pc;
1473:   PetscTryTypeMethod((*ksp), destroy);

1475:   if ((*ksp)->transpose.use_explicittranspose) {
1476:     PetscCall(MatDestroy(&(*ksp)->transpose.AT));
1477:     PetscCall(MatDestroy(&(*ksp)->transpose.BT));
1478:     (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
1479:   }

1481:   PetscCall(KSPGuessDestroy(&(*ksp)->guess));
1482:   PetscCall(DMDestroy(&(*ksp)->dm));
1483:   PetscCall(PCDestroy(&(*ksp)->pc));
1484:   PetscCall(PetscFree((*ksp)->res_hist_alloc));
1485:   PetscCall(PetscFree((*ksp)->err_hist_alloc));
1486:   if ((*ksp)->convergeddestroy) PetscCall((*(*ksp)->convergeddestroy)((*ksp)->cnvP));
1487:   PetscCall(KSPMonitorCancel((*ksp)));
1488:   PetscCall(KSPConvergedReasonViewCancel((*ksp)));
1489:   PetscCall(PetscHeaderDestroy(ksp));
1490:   PetscFunctionReturn(PETSC_SUCCESS);
1491: }

1493: /*@
1494:   KSPSetPCSide - Sets the preconditioning side.

1496:   Logically Collective

1498:   Input Parameters:
1499: + ksp  - iterative context obtained from `KSPCreate()`
1502: - side - the preconditioning side, where side is one of
1503: .vb
1504:       PC_LEFT - left preconditioning (default)
1505:       PC_RIGHT - right preconditioning
1506:       PC_SYMMETRIC - symmetric preconditioning
1507: .ve

1509:   Options Database Key:
1510: . -ksp_pc_side <right,left,symmetric> - `KSP` preconditioner side

1512:   Level: intermediate

1514:   Notes:
1515:   Left preconditioning is used by default for most Krylov methods except `KSPFGMRES`, which only supports right preconditioning.

1517:   For some methods, changing the side of the preconditioner also changes the norm type that is used; see `KSPSetNormType()`.

1519:   Symmetric preconditioning is currently available only for the `KSPQCG` method. However, note that
1520:   symmetric preconditioning can be emulated by using either right or left
1521:   preconditioning and a pre- or post-processing step.

1523:   Setting the `PCSide` often affects the default norm type.  See `KSPSetNormType()` for details.
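
  For example, a sketch of selecting right preconditioning for GMRES (assuming `ksp` has already been created):
.vb
  KSPSetType(ksp, KSPGMRES);
  KSPSetPCSide(ksp, PC_RIGHT); // with right preconditioning the monitored residual is typically the true (unpreconditioned) one
.ve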

1525: .seealso: [](ch_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`
1526: @*/
1527: PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
1528: {
1529:   PetscFunctionBegin;
1532:   ksp->pc_side = ksp->pc_side_set = side;
1533:   PetscFunctionReturn(PETSC_SUCCESS);
1534: }

1536: /*@
1537:   KSPGetPCSide - Gets the preconditioning side.

1539:   Not Collective

1541:   Input Parameter:
1542: . ksp - iterative context obtained from `KSPCreate()`

1544:   Output Parameter:
1545: . side - the preconditioning side, where side is one of
1546: .vb
1547:       PC_LEFT - left preconditioning (default)
1548:       PC_RIGHT - right preconditioning
1549:       PC_SYMMETRIC - symmetric preconditioning
1550: .ve

1552:   Level: intermediate

1554: .seealso: [](ch_ksp), `KSPSetPCSide()`, `KSP`
1555: @*/
1556: PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
1557: {
1558:   PetscFunctionBegin;
1560:   PetscAssertPointer(side, 2);
1561:   PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
1562:   *side = ksp->pc_side;
1563:   PetscFunctionReturn(PETSC_SUCCESS);
1564: }

1566: /*@
1567:   KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1568:   iteration tolerances used by the default `KSP` convergence tests.

1570:   Not Collective

1572:   Input Parameter:
1573: . ksp - the Krylov subspace context

1575:   Output Parameters:
1576: + rtol   - the relative convergence tolerance
1577: . abstol - the absolute convergence tolerance
1578: . dtol   - the divergence tolerance
1579: - maxits - maximum number of iterations

1581:   Level: intermediate

1583:   Note:
1584:   The user can specify `NULL` for any parameter that is not needed.

1586: .seealso: [](ch_ksp), `KSPSetTolerances()`, `KSP`, `KSPSetMinimumIterations()`, `KSPGetMinimumIterations()`
1587: @*/
1588: PetscErrorCode KSPGetTolerances(KSP ksp, PetscReal *rtol, PetscReal *abstol, PetscReal *dtol, PetscInt *maxits)
1589: {
1590:   PetscFunctionBegin;
1592:   if (abstol) *abstol = ksp->abstol;
1593:   if (rtol) *rtol = ksp->rtol;
1594:   if (dtol) *dtol = ksp->divtol;
1595:   if (maxits) *maxits = ksp->max_it;
1596:   PetscFunctionReturn(PETSC_SUCCESS);
1597: }

1599: /*@
1600:   KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1601:   iteration tolerances used by the default `KSP` convergence testers.

1603:   Logically Collective

1605:   Input Parameters:
1606: + ksp    - the Krylov subspace context
1607: . rtol   - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1608: . abstol - the absolute convergence tolerance, the absolute size of the (possibly preconditioned) residual norm
1609: . dtol   - the divergence tolerance, the amount the (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1610: - maxits - maximum number of iterations to use

1612:   Options Database Keys:
1613: + -ksp_atol <abstol>   - Sets `abstol`
1614: . -ksp_rtol <rtol>     - Sets `rtol`
1615: . -ksp_divtol <dtol>   - Sets `dtol`
1616: - -ksp_max_it <maxits> - Sets `maxits`

1618:   Level: intermediate

1620:   Notes:
1621:   Use `PETSC_DEFAULT` to leave the current value of any of the tolerances unchanged.

1623:   See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test.  See also `KSPSetConvergenceTest()`
1624:   for setting user-defined stopping criteria.
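
  For example, a typical call (the values shown are only an illustration):
.vb
  // tighten the relative tolerance, leave the absolute and divergence tolerances unchanged, and cap the iterations at 500
  KSPSetTolerances(ksp, 1.e-10, PETSC_DEFAULT, PETSC_DEFAULT, 500);
.ve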

1626: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetMinimumIterations()`
1627: @*/
1628: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1629: {
1630:   PetscFunctionBegin;

1637:   if (rtol != (PetscReal)PETSC_DEFAULT) {
1638:     PetscCheck(rtol >= 0.0 && rtol < 1.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Relative tolerance %g must be non-negative and less than 1.0", (double)rtol);
1639:     ksp->rtol = rtol;
1640:   }
1641:   if (abstol != (PetscReal)PETSC_DEFAULT) {
1642:     PetscCheck(abstol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Absolute tolerance %g must be non-negative", (double)abstol);
1643:     ksp->abstol = abstol;
1644:   }
1645:   if (dtol != (PetscReal)PETSC_DEFAULT) {
1646:     PetscCheck(dtol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Divergence tolerance %g must be non-negative", (double)dtol);
1647:     ksp->divtol = dtol;
1648:   }
1649:   if (maxits != PETSC_DEFAULT) {
1650:     PetscCheck(maxits >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Maximum number of iterations %" PetscInt_FMT " must be non-negative", maxits);
1651:     ksp->max_it = maxits;
1652:   }
1653:   PetscFunctionReturn(PETSC_SUCCESS);
1654: }

1656: /*@
1657:   KSPSetMinimumIterations - Sets the minimum number of iterations to use, regardless of the tolerances

1659:   Logically Collective

1661:   Input Parameters:
1662: + ksp   - the Krylov subspace context
1663: - minit - minimum number of iterations to use

1665:   Options Database Key:
1666: . -ksp_min_it <minit> - Sets `minit`

1668:   Level: intermediate

1670:   Notes:
1671:   Use `KSPSetTolerances()` to set a variety of other tolerances

1673:   See `KSPConvergedDefault()` for details on how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
1674:   for setting user-defined stopping criteria.

1676: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPGetMinimumIterations()`
1677: @*/
1678: PetscErrorCode KSPSetMinimumIterations(KSP ksp, PetscInt minit)
1679: {
1680:   PetscFunctionBegin;

1684:   PetscCheck(minit >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Minimum number of iterations %" PetscInt_FMT " must be non-negative", minit);
1685:   ksp->min_it = minit;
1686:   PetscFunctionReturn(PETSC_SUCCESS);
1687: }

1689: /*@
1690:   KSPGetMinimumIterations - Gets the minimum number of iterations to use, regardless of the tolerances, that was set with `KSPSetMinimumIterations()` or `-ksp_min_it`

1692:   Not Collective

1694:   Input Parameter:
1695: . ksp - the Krylov subspace context

1697:   Output Parameter:
1698: . minit - minimum number of iterations to use

1700:   Level: intermediate

1702: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPSetMinimumIterations()`
1703: @*/
1704: PetscErrorCode KSPGetMinimumIterations(KSP ksp, PetscInt *minit)
1705: {
1706:   PetscFunctionBegin;
1708:   PetscAssertPointer(minit, 2);

1710:   *minit = ksp->min_it;
1711:   PetscFunctionReturn(PETSC_SUCCESS);
1712: }

1714: /*@
1715:   KSPSetInitialGuessNonzero - Tells the iterative solver that the
1716:   initial guess is nonzero; otherwise `KSP` assumes the initial guess
1717:   is to be zero (and thus zeros it out before solving).

1719:   Logically Collective

1721:   Input Parameters:
1722: + ksp - iterative context obtained from `KSPCreate()`
1723: - flg - `PETSC_TRUE` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero

1725:   Options Database Key:
1726: . -ksp_initial_guess_nonzero <true,false> - use nonzero initial guess

1728:   Level: beginner

1730:   Note:
1731:   If this is not called, the solution vector passed to `KSPSolve()` is zeroed before the solve.
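
  For example, a sketch of reusing a previously computed solution as the starting guess (assuming `ksp`, `b`, and `x` exist and `x` already holds the guess):
.vb
  KSPSetInitialGuessNonzero(ksp, PETSC_TRUE); // do not zero x before iterating
  KSPSolve(ksp, b, x);
.ve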

1733: .seealso: [](ch_ksp), `KSPGetInitialGuessNonzero()`, `KSPSetGuessType()`, `KSPGuessType`, `KSP`
1734: @*/
1735: PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
1736: {
1737:   PetscFunctionBegin;
1740:   ksp->guess_zero = (PetscBool) !(int)flg;
1741:   PetscFunctionReturn(PETSC_SUCCESS);
1742: }

1744: /*@
1745:   KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1746:   a zero initial guess.

1748:   Not Collective

1750:   Input Parameter:
1751: . ksp - iterative context obtained from `KSPCreate()`

1753:   Output Parameter:
1754: . flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`

1756:   Level: intermediate

1758: .seealso: [](ch_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1759: @*/
1760: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1761: {
1762:   PetscFunctionBegin;
1764:   PetscAssertPointer(flag, 2);
1765:   if (ksp->guess_zero) *flag = PETSC_FALSE;
1766:   else *flag = PETSC_TRUE;
1767:   PetscFunctionReturn(PETSC_SUCCESS);
1768: }

1770: /*@
1771:   KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error as soon as it detects that the solver has failed to converge.

1773:   Logically Collective

1775:   Input Parameters:
1776: + ksp - iterative context obtained from `KSPCreate()`
1777: - flg - `PETSC_TRUE` indicates you want the error generated

1779:   Options Database Key:
1780: . -ksp_error_if_not_converged <true,false> - generate an error and stop the program

1782:   Level: intermediate

1784:   Notes:
1785:   Normally PETSc continues if a linear solver fails to converge; you can call `KSPGetConvergedReason()` after a `KSPSolve()`
1786:   to determine if it has converged.

1788:   A `KSP_DIVERGED_ITS` reason will not generate an error in a `KSPSolve()` inside a nested linear solver.
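
  For example, a sketch contrasting the two ways of handling a failed solve (assuming `ksp`, `b`, and `x` exist):
.vb
  KSPSetErrorIfNotConverged(ksp, PETSC_TRUE); // KSPSolve() now generates an error on failure
  KSPSolve(ksp, b, x);

  // alternatively, leave the flag at PETSC_FALSE and inspect the reason yourself
  KSPConvergedReason reason;
  KSPGetConvergedReason(ksp, &reason);
  if (reason < 0) {
    // handle the failure
  }
.ve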

1790: .seealso: [](ch_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
1791: @*/
1792: PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
1793: {
1794:   PetscFunctionBegin;
1797:   ksp->errorifnotconverged = flg;
1798:   PetscFunctionReturn(PETSC_SUCCESS);
1799: }

1801: /*@
1802:   KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?

1804:   Not Collective

1806:   Input Parameter:
1807: . ksp - iterative context obtained from `KSPCreate()`

1809:   Output Parameter:
1810: . flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`

1812:   Level: intermediate

1814: .seealso: [](ch_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
1815: @*/
1816: PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
1817: {
1818:   PetscFunctionBegin;
1820:   PetscAssertPointer(flag, 2);
1821:   *flag = ksp->errorifnotconverged;
1822:   PetscFunctionReturn(PETSC_SUCCESS);
1823: }

1825: /*@
1826:   KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` to compute the initial guess (the Knoll trick)

1828:   Logically Collective

1830:   Input Parameters:
1831: + ksp - iterative context obtained from `KSPCreate()`
1832: - flg - `PETSC_TRUE` or `PETSC_FALSE`

1834:   Level: advanced

1836:   Developer Notes:
1837:   The Knoll trick is not currently implemented using the `KSPGuess` class

1839: .seealso: [](ch_ksp), `KSPGetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1840: @*/
1841: PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
1842: {
1843:   PetscFunctionBegin;
1846:   ksp->guess_knoll = flg;
1847:   PetscFunctionReturn(PETSC_SUCCESS);
1848: }

1850: /*@
1851:   KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using `PCApply(pc,b,...)` to compute
1852:   the initial guess)

1854:   Not Collective

1856:   Input Parameter:
1857: . ksp - iterative context obtained from `KSPCreate()`

1859:   Output Parameter:
1860: . flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`

1862:   Level: advanced

1864: .seealso: [](ch_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1865: @*/
1866: PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
1867: {
1868:   PetscFunctionBegin;
1870:   PetscAssertPointer(flag, 2);
1871:   *flag = ksp->guess_knoll;
1872:   PetscFunctionReturn(PETSC_SUCCESS);
1873: }

1875: /*@
1876:   KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
1877:   values will be calculated via a Lanczos or Arnoldi process as the linear
1878:   system is solved.

1880:   Not Collective

1882:   Input Parameter:
1883: . ksp - iterative context obtained from `KSPCreate()`

1885:   Output Parameter:
1886: . flg - `PETSC_TRUE` or `PETSC_FALSE`

1888:   Options Database Key:
1889: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1891:   Level: advanced

1893:   Notes:
1894:   Currently this option is not valid for all iterative methods.

1896:   Many users may just want to use the monitoring routine
1897:   `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1898:   to print the singular values at each iteration of the linear solve.

1900: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1901: @*/
1902: PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
1903: {
1904:   PetscFunctionBegin;
1906:   PetscAssertPointer(flg, 2);
1907:   *flg = ksp->calc_sings;
1908:   PetscFunctionReturn(PETSC_SUCCESS);
1909: }

1911: /*@
1912:   KSPSetComputeSingularValues - Sets a flag so that the extreme singular
1913:   values will be calculated via a Lanczos or Arnoldi process as the linear
1914:   system is solved.

1916:   Logically Collective

1918:   Input Parameters:
1919: + ksp - iterative context obtained from `KSPCreate()`
1920: - flg - `PETSC_TRUE` or `PETSC_FALSE`

1922:   Options Database Key:
1923: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1925:   Level: advanced

1927:   Notes:
1928:   Currently this option is not valid for all iterative methods.

1930:   Many users may just want to use the monitoring routine
1931:   `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1932:   to print the singular values at each iteration of the linear solve.
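
  For example, a sketch of requesting the estimates and retrieving them after the solve (assuming `ksp`, `b`, and `x` exist):
.vb
  PetscReal emax, emin;
  KSPSetComputeSingularValues(ksp, PETSC_TRUE); // request the estimates before KSPSetUp()/KSPSolve()
  KSPSolve(ksp, b, x);
  KSPComputeExtremeSingularValues(ksp, &emax, &emin);
.ve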

1934: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1935: @*/
1936: PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
1937: {
1938:   PetscFunctionBegin;
1941:   ksp->calc_sings = flg;
1942:   PetscFunctionReturn(PETSC_SUCCESS);
1943: }

1945: /*@
1946:   KSPGetComputeEigenvalues - Gets the flag indicating that the extreme eigenvalues
1947:   will be calculated via a Lanczos or Arnoldi process as the linear
1948:   system is solved.

1950:   Not Collective

1952:   Input Parameter:
1953: . ksp - iterative context obtained from `KSPCreate()`

1955:   Output Parameter:
1956: . flg - `PETSC_TRUE` or `PETSC_FALSE`

1958:   Level: advanced

1960:   Note:
1961:   Currently this option is not valid for all iterative methods.

1963: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1964: @*/
1965: PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
1966: {
1967:   PetscFunctionBegin;
1969:   PetscAssertPointer(flg, 2);
1970:   *flg = ksp->calc_sings;
1971:   PetscFunctionReturn(PETSC_SUCCESS);
1972: }

1974: /*@
1975:   KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
1976:   will be calculated via a Lanczos or Arnoldi process as the linear
1977:   system is solved.

1979:   Logically Collective

1981:   Input Parameters:
1982: + ksp - iterative context obtained from `KSPCreate()`
1983: - flg - `PETSC_TRUE` or `PETSC_FALSE`

1985:   Level: advanced

1987:   Note:
1988:   Currently this option is not valid for all iterative methods.

1990: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1991: @*/
1992: PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
1993: {
1994:   PetscFunctionBegin;
1997:   ksp->calc_sings = flg;
1998:   PetscFunctionReturn(PETSC_SUCCESS);
1999: }

2001: /*@
2002:   KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
2003:   will be calculated via a Lanczos or Arnoldi process as the linear
2004:   system is solved.

2006:   Logically Collective

2008:   Input Parameters:
2009: + ksp - iterative context obtained from `KSPCreate()`
2010: - flg - `PETSC_TRUE` or `PETSC_FALSE`

2012:   Level: advanced

2014:   Note:
2015:   Currently this option is only valid for the GMRES method.

2017: .seealso: [](ch_ksp), `KSPComputeRitz()`, `KSP`
2018: @*/
2019: PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
2020: {
2021:   PetscFunctionBegin;
2024:   ksp->calc_ritz = flg;
2025:   PetscFunctionReturn(PETSC_SUCCESS);
2026: }

2028: /*@
2029:   KSPGetRhs - Gets the right-hand-side vector for the linear system to
2030:   be solved.

2032:   Not Collective

2034:   Input Parameter:
2035: . ksp - iterative context obtained from `KSPCreate()`

2037:   Output Parameter:
2038: . r - right-hand-side vector

2040:   Level: developer

2042: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
2043: @*/
2044: PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
2045: {
2046:   PetscFunctionBegin;
2048:   PetscAssertPointer(r, 2);
2049:   *r = ksp->vec_rhs;
2050:   PetscFunctionReturn(PETSC_SUCCESS);
2051: }

2053: /*@
2054:   KSPGetSolution - Gets the location of the solution for the
2055:   linear system to be solved.  Note that this may not be where the solution
2056:   is stored during the iterative process; see `KSPBuildSolution()`.

2058:   Not Collective

2060:   Input Parameter:
2061: . ksp - iterative context obtained from `KSPCreate()`

2063:   Output Parameter:
2064: . v - solution vector

2066:   Level: developer

2068: .seealso: [](ch_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
2069: @*/
2070: PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
2071: {
2072:   PetscFunctionBegin;
2074:   PetscAssertPointer(v, 2);
2075:   *v = ksp->vec_sol;
2076:   PetscFunctionReturn(PETSC_SUCCESS);
2077: }

2079: /*@
2080:   KSPSetPC - Sets the preconditioner (`PC`) object that the `KSP` solver uses when applying
2081:   the preconditioner to a vector.

2083:   Collective

2085:   Input Parameters:
2086: + ksp - iterative context obtained from `KSPCreate()`
2087: - pc  - the preconditioner object (can be `NULL`)

2089:   Level: developer

2091:   Note:
2092:   Use `KSPGetPC()` to retrieve the preconditioner context.
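
  A more common pattern is to retrieve the `PC` created by the `KSP` and configure it directly, for example (a sketch, assuming `ksp` exists):
.vb
  PC pc;
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCJACOBI);
.ve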

2094: .seealso: [](ch_ksp), `KSPGetPC()`, `KSP`
2095: @*/
2096: PetscErrorCode KSPSetPC(KSP ksp, PC pc)
2097: {
2098:   PetscFunctionBegin;
2100:   if (pc) {
2102:     PetscCheckSameComm(ksp, 1, pc, 2);
2103:   }
2104:   if (ksp->pc != pc && ksp->setupstage) ksp->setupstage = KSP_SETUP_NEWMATRIX;
2105:   PetscCall(PetscObjectReference((PetscObject)pc));
2106:   PetscCall(PCDestroy(&ksp->pc));
2107:   ksp->pc = pc;
2108:   PetscFunctionReturn(PETSC_SUCCESS);
2109: }

2111: PETSC_INTERN PetscErrorCode PCCreate_MPI(PC);

2113: // PetscClangLinter pragma disable: -fdoc-internal-linkage
2114: /*@C
2115:    KSPCheckPCMPI - Checks if `-mpi_linear_solver_server` is active and the `PC` should be changed to `PCMPI`

2117:    Collective

2119:    Input Parameter:
2120: .  ksp - iterative context obtained from `KSPCreate()`

2122:    Level: developer

2124: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PCMPIServerBegin()`, `PCMPIServerEnd()`
2125: @*/
2126: PETSC_INTERN PetscErrorCode KSPCheckPCMPI(KSP ksp)
2127: {
2128:   PetscBool isPCMPI;

2130:   PetscFunctionBegin;
2132:   PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
2133:   if (PCMPIServerActive && ksp->nestlevel == 0 && !isPCMPI) {
2134:     const char *prefix;
2135:     char       *found = NULL;

2137:     PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
2138:     if (prefix) PetscCall(PetscStrstr(prefix, "mpi_linear_solver_server_", &found));
2139:     if (!found) PetscCall(KSPAppendOptionsPrefix(ksp, "mpi_linear_solver_server_"));
2140:     PetscCall(PCSetType(ksp->pc, PCMPI));
2141:   }
2142:   PetscFunctionReturn(PETSC_SUCCESS);
2143: }

2145: /*@
2146:   KSPGetPC - Returns a pointer to the preconditioner context
2147:   set with `KSPSetPC()`.

2149:   Not Collective

2151:   Input Parameter:
2152: . ksp - iterative context obtained from `KSPCreate()`

2154:   Output Parameter:
2155: . pc - preconditioner context

2157:   Level: developer

2159: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`
2160: @*/
2161: PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
2162: {
2163:   PetscFunctionBegin;
2165:   PetscAssertPointer(pc, 2);
2166:   if (!ksp->pc) {
2167:     PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc));
2168:     PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0));
2169:     PetscCall(PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options));
2170:     PetscCall(PCSetKSPNestLevel(ksp->pc, ksp->nestlevel));
2171:   }
2172:   PetscCall(KSPCheckPCMPI(ksp));
2173:   *pc = ksp->pc;
2174:   PetscFunctionReturn(PETSC_SUCCESS);
2175: }

2177: /*@
2178:   KSPMonitor - runs the user provided monitor routines, if they exist

2180:   Collective

2182:   Input Parameters:
2183: + ksp   - iterative context obtained from `KSPCreate()`
2184: . it    - iteration number
2185: - rnorm - relative norm of the residual

2187:   Level: developer

2189:   Notes:
2190:   This routine is called by the `KSP` implementations.
2191:   It does not typically need to be called by the user.

2193: .seealso: [](ch_ksp), `KSPMonitorSet()`
2194: @*/
2195: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2196: {
2197:   PetscInt i, n = ksp->numbermonitors;

2199:   PetscFunctionBegin;
2200:   for (i = 0; i < n; i++) PetscCall((*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]));
2201:   PetscFunctionReturn(PETSC_SUCCESS);
2202: }

2204: /*@C
2205:   KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor
2206:   the residual/error etc.

2208:   Logically Collective

2210:   Input Parameters:
2211: + ksp            - iterative context obtained from `KSPCreate()`
2212: . monitor        - pointer to function (if this is `NULL`, it turns off monitoring)
2213: . ctx            - [optional] context for private data for the monitor routine (use `NULL` if no context is needed)
2214: - monitordestroy - [optional] routine that frees monitor context (may be `NULL`)

2216:   Calling sequence of `monitor`:
2217: + ksp   - iterative context obtained from `KSPCreate()`
2218: . it    - iteration number
2219: . rnorm - (estimated) 2-norm of (preconditioned) residual
2220: - ctx   - optional monitoring context, as set by `KSPMonitorSet()`

2222:   Calling sequence of `monitordestroy`:
2223: . ctx - optional monitoring context, as set by `KSPMonitorSet()`

2225:   Options Database Keys:
2226: + -ksp_monitor                             - sets `KSPMonitorResidual()`
2227: . -ksp_monitor draw                        - sets `KSPMonitorResidualDraw()` and plots residual
2228: . -ksp_monitor draw::draw_lg               - sets `KSPMonitorResidualDrawLG()` and plots residual
2229: . -ksp_monitor_pause_final                 - Pauses any graphics when the solve finishes (only works for internal monitors)
2230: . -ksp_monitor_true_residual               - sets `KSPMonitorTrueResidual()`
2231: . -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2232: . -ksp_monitor_max                         - sets `KSPMonitorTrueResidualMax()`
2233: . -ksp_monitor_singular_value              - sets `KSPMonitorSingularValue()`
2234: - -ksp_monitor_cancel                      - cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.

2240:   Level: beginner

2242:   Notes:
2243:   The default is to do nothing.  To print the residual, or preconditioned
2244:   residual if `KSPSetNormType(ksp, KSP_NORM_PRECONDITIONED)` was called, use
2245:   `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2246:   context.

2248:   Several different monitoring routines may be set by calling
2249:   `KSPMonitorSet()` multiple times; all will be called in the
2250:   order in which they were set.
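
  For example, a sketch of a user-provided monitor and how it is attached; the name `MyKSPMonitor` is only illustrative:
.vb
  PetscErrorCode MyKSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "iteration %" PetscInt_FMT ", residual norm %g\n", it, (double)rnorm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  // in the calling code, after KSPCreate()
  KSPMonitorSet(ksp, MyKSPMonitor, NULL, NULL);
.ve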

2252:   Fortran Notes:
2253:   Only a single monitor function can be set for each `KSP` object

2255: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorCancel()`, `KSP`
2256: @*/
2257: PetscErrorCode KSPMonitorSet(KSP ksp, PetscErrorCode (*monitor)(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx), void *ctx, PetscErrorCode (*monitordestroy)(void **ctx))
2258: {
2259:   PetscInt  i;
2260:   PetscBool identical;

2262:   PetscFunctionBegin;
2264:   for (i = 0; i < ksp->numbermonitors; i++) {
2265:     PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))monitor, ctx, monitordestroy, (PetscErrorCode(*)(void))ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical));
2266:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
2267:   }
2268:   PetscCheck(ksp->numbermonitors < MAXKSPMONITORS, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP monitors set");
2269:   ksp->monitor[ksp->numbermonitors]          = monitor;
2270:   ksp->monitordestroy[ksp->numbermonitors]   = monitordestroy;
2271:   ksp->monitorcontext[ksp->numbermonitors++] = (void *)ctx;
2272:   PetscFunctionReturn(PETSC_SUCCESS);
2273: }

2275: /*@
2276:   KSPMonitorCancel - Clears all monitors for a `KSP` object.

2278:   Logically Collective

2280:   Input Parameter:
2281: . ksp - iterative context obtained from `KSPCreate()`

2283:   Options Database Key:
2284: . -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.

2286:   Level: intermediate

2288: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2289: @*/
2290: PetscErrorCode KSPMonitorCancel(KSP ksp)
2291: {
2292:   PetscInt i;

2294:   PetscFunctionBegin;
2296:   for (i = 0; i < ksp->numbermonitors; i++) {
2297:     if (ksp->monitordestroy[i]) PetscCall((*ksp->monitordestroy[i])(&ksp->monitorcontext[i]));
2298:   }
2299:   ksp->numbermonitors = 0;
2300:   PetscFunctionReturn(PETSC_SUCCESS);
2301: }

2303: /*@C
2304:   KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.

2306:   Not Collective

2308:   Input Parameter:
2309: . ksp - iterative context obtained from `KSPCreate()`

2311:   Output Parameter:
2312: . ctx - monitoring context

2314:   Level: intermediate

2316: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSP`
2317: @*/
2318: PetscErrorCode KSPGetMonitorContext(KSP ksp, void *ctx)
2319: {
2320:   PetscFunctionBegin;
2322:   *(void **)ctx = ksp->monitorcontext[0];
2323:   PetscFunctionReturn(PETSC_SUCCESS);
2324: }

2326: /*@
2327:   KSPSetResidualHistory - Sets the array used to hold the residual history.
2328:   If set, this array will contain the residual norms computed at each
2329:   iteration of the solver.

2331:   Not Collective

2333:   Input Parameters:
2334: + ksp   - iterative context obtained from `KSPCreate()`
2335: . a     - array to hold history
2336: . na    - size of a
2337: - reset - `PETSC_TRUE` indicates the history counter is reset to zero
2338:            for each new linear solve

2340:   Level: advanced

2342:   Notes:
2343:   If provided, the array is NOT freed by PETSc, so the user needs to keep track of it and destroy it once the `KSP` object is destroyed.
2344:   If `a` is `NULL` then space is allocated for the history. If `na` is `PETSC_DECIDE` or `PETSC_DEFAULT` then a
2345:   default array of length 10000 is allocated.

2347:   If the array is not long enough, then once the number of iterations exceeds the array length, `KSPSolve()` stops recording the history.
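
  For example, a sketch that lets PETSc allocate the storage and then retrieves the recorded norms after the solve (assuming `ksp`, `b`, and `x` exist):
.vb
  const PetscReal *hist;
  PetscInt         nhist;
  KSPSetResidualHistory(ksp, NULL, PETSC_DECIDE, PETSC_TRUE); // PETSc allocates a default-sized array
  KSPSolve(ksp, b, x);
  KSPGetResidualHistory(ksp, &hist, &nhist); // the first nhist entries of hist are valid
.ve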

2349: .seealso: [](ch_ksp), `KSPGetResidualHistory()`, `KSP`
2350: @*/
2351: PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2352: {
2353:   PetscFunctionBegin;

2356:   PetscCall(PetscFree(ksp->res_hist_alloc));
2357:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2358:     ksp->res_hist     = a;
2359:     ksp->res_hist_max = (size_t)na;
2360:   } else {
2361:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
2362:     else ksp->res_hist_max = 10000; /* like default ksp->max_it */
2363:     PetscCall(PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc));

2365:     ksp->res_hist = ksp->res_hist_alloc;
2366:   }
2367:   ksp->res_hist_len   = 0;
2368:   ksp->res_hist_reset = reset;
2369:   PetscFunctionReturn(PETSC_SUCCESS);
2370: }

2372: /*@C
2373:   KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.

2375:   Not Collective

2377:   Input Parameter:
2378: . ksp - iterative context obtained from `KSPCreate()`

2380:   Output Parameters:
2381: + a  - pointer to array to hold history (or `NULL`)
2382: - na - number of used entries in a (or `NULL`)

2384:   Level: advanced

2386:   Note:
2387:   This array is borrowed and should not be freed by the caller.

2389:   Can only be called after a `KSPSetResidualHistory()` otherwise `a` and `na` are set to `NULL` and zero

2391:   Fortran Notes:
2392:   The Fortran version of this routine has a calling sequence
2393: $   call KSPGetResidualHistory(KSP ksp, integer na, integer ierr)
2394:   Note that you must have previously passed a Fortran array into `KSPSetResidualHistory()` and you
2395:   access the residual values from that Fortran array you provided. Only `na` (the number of
2396:   residual norms currently held) is set.

2398: .seealso: [](ch_ksp), `KSPSetResidualHistory()`, `KSP`
2399: @*/
2400: PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2401: {
2402:   PetscFunctionBegin;
2404:   if (a) *a = ksp->res_hist;
2405:   if (na) *na = (PetscInt)ksp->res_hist_len;
2406:   PetscFunctionReturn(PETSC_SUCCESS);
2407: }

2409: /*@
2410:   KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.

2412:   Not Collective

2414:   Input Parameters:
2415: + ksp   - iterative context obtained from `KSPCreate()`
2416: . a     - array to hold history
2417: . na    - size of `a`
2418: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve

2420:   Level: advanced

2422:   Notes:
2423:   If provided, the array is NOT freed by PETSc so the user needs to keep track of it and destroy it once the `KSP` object is destroyed.
2424:   If `a` is `NULL` then space is allocated for the history. If `na` is `PETSC_DECIDE` or `PETSC_DEFAULT` then a default array of length 10000 is allocated.

2426:   If the array is not long enough, then once the number of iterations exceeds the array length, `KSPSolve()` stops recording the history.

2428: .seealso: [](ch_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2429: @*/
2430: PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2431: {
2432:   PetscFunctionBegin;

2435:   PetscCall(PetscFree(ksp->err_hist_alloc));
2436:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2437:     ksp->err_hist     = a;
2438:     ksp->err_hist_max = (size_t)na;
2439:   } else {
2440:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
2441:     else ksp->err_hist_max = 10000; /* like default ksp->max_it */
2442:     PetscCall(PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc));

2444:     ksp->err_hist = ksp->err_hist_alloc;
2445:   }
2446:   ksp->err_hist_len   = 0;
2447:   ksp->err_hist_reset = reset;
2448:   PetscFunctionReturn(PETSC_SUCCESS);
2449: }

2451: /*@C
2452:   KSPGetErrorHistory - Gets the array used to hold the error history and the number of error norms it contains.

2454:   Not Collective

2456:   Input Parameter:
2457: . ksp - iterative context obtained from `KSPCreate()`

2459:   Output Parameters:
2460: + a  - pointer to array to hold history (or `NULL`)
2461: - na - number of used entries in a (or `NULL`)

2463:   Level: advanced

2465:   Notes:
2466:   This array is borrowed and should not be freed by the caller.
2467:   Can only be called after a `KSPSetErrorHistory()` otherwise `a` and `na` are set to `NULL` and zero

2469:   Fortran Notes:
2470:   The Fortran version of this routine has a calling sequence
2471: $   call KSPGetErrorHistory(KSP ksp, integer na, integer ierr)
2472:   Note that you must have previously passed a Fortran array into `KSPSetErrorHistory()` and you
2473:   access the error values from that Fortran array you provided. Only `na` (the number of
2474:   error norms currently held) is set.

2476: .seealso: [](ch_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2477: @*/
2478: PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2479: {
2480:   PetscFunctionBegin;
2482:   if (a) *a = ksp->err_hist;
2483:   if (na) *na = (PetscInt)ksp->err_hist_len;
2484:   PetscFunctionReturn(PETSC_SUCCESS);
2485: }

2487: /*@
2488:   KSPComputeConvergenceRate - Compute the convergence rate for the iteration

2490:   Not collective

2492:   Input Parameter:
2493: . ksp - The `KSP`

2495:   Output Parameters:
2496: + cr   - The residual contraction rate
2497: . rRsq - The coefficient of determination, R^2, indicating the linearity of the residual data
2498: . ce   - The error contraction rate
2499: - eRsq - The coefficient of determination, R^2, indicating the linearity of the error data

2501:   Level: advanced

2503:   Note:
2504:   Suppose that the residual is reduced geometrically, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
2505:   the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$.
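
  For example, a sketch of computing only the residual contraction rate after a solve; the residual history must have been
  recorded, for instance with `KSPSetResidualHistory()`:
.vb
  PetscReal cr, rRsq;
  KSPComputeConvergenceRate(ksp, &cr, &rRsq, NULL, NULL); // the error-based quantities are not requested
.ve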

2507:   References:
2508: . * - https://en.wikipedia.org/wiki/Coefficient_of_determination

2510: .seealso: [](ch_ksp), `KSP`, `KSPConvergedRateView()`
2511: @*/
2512: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2513: {
2514:   PetscReal const *hist;
2515:   PetscReal       *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2516:   PetscInt         n, k;

2518:   PetscFunctionBegin;
2519:   if (cr || rRsq) {
2520:     PetscCall(KSPGetResidualHistory(ksp, &hist, &n));
2521:     if (!n) {
2522:       if (cr) *cr = 0.0;
2523:       if (rRsq) *rRsq = -1.0;
2524:     } else {
2525:       PetscCall(PetscMalloc2(n, &x, n, &y));
2526:       for (k = 0; k < n; ++k) {
2527:         x[k] = k;
2528:         y[k] = PetscLogReal(hist[k]);
2529:         mean += y[k];
2530:       }
2531:       mean /= n;
2532:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2533:       for (k = 0; k < n; ++k) {
2534:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2535:         var += PetscSqr(y[k] - mean);
2536:       }
2537:       PetscCall(PetscFree2(x, y));
2538:       if (cr) *cr = PetscExpReal(slope);
2539:       if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2540:     }
2541:   }
2542:   if (ce || eRsq) {
2543:     PetscCall(KSPGetErrorHistory(ksp, &hist, &n));
2544:     if (!n) {
2545:       if (ce) *ce = 0.0;
2546:       if (eRsq) *eRsq = -1.0;
2547:     } else {
2548:       PetscCall(PetscMalloc2(n, &x, n, &y));
2549:       for (k = 0; k < n; ++k) {
2550:         x[k] = k;
2551:         y[k] = PetscLogReal(hist[k]);
2552:         mean += y[k];
2553:       }
2554:       mean /= n;
2555:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2556:       for (k = 0; k < n; ++k) {
2557:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2558:         var += PetscSqr(y[k] - mean);
2559:       }
2560:       PetscCall(PetscFree2(x, y));
2561:       if (ce) *ce = PetscExpReal(slope);
2562:       if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2563:     }
2564:   }
2565:   PetscFunctionReturn(PETSC_SUCCESS);
2566: }

2568: /*@C
2569:   KSPSetConvergenceTest - Sets the function to be used to determine convergence.

2571:   Logically Collective

2573:   Input Parameters:
2574: + ksp      - iterative context obtained from `KSPCreate()`
2575: . converge - pointer to the function
2576: . ctx      - context for private data for the convergence routine (may be `NULL`)
2577: - destroy  - a routine for destroying the context (may be `NULL`)

2579:   Calling sequence of `converge`:
2580: + ksp    - iterative context obtained from `KSPCreate()`
2581: . it     - iteration number
2582: . rnorm  - (estimated) 2-norm of (preconditioned) residual
2583: . reason - the reason why it has converged or diverged
2584: - ctx    - optional convergence context, as set by `KSPSetConvergenceTest()`

2586:   Calling sequence of `destroy`:
2587: . ctx - the context

2589:   Level: advanced

2591:   Notes:
2592:   Must be called after the `KSP` type has been set so put this after
2593:   a call to `KSPSetType()`, or `KSPSetFromOptions()`.

2595:   The default convergence test, `KSPConvergedDefault()`, aborts if the
2596:   residual grows to more than 10000 times the initial residual.

2598:   The default is a combination of relative and absolute tolerances.
2599:   The residual value that is tested may be an approximation; routines
2600:   that need exact values should compute them.

2602:   In the default PETSc convergence test, the precise values of reason
2603:   are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.
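
  For example, a sketch of a user-defined test that stops once the residual norm drops below a fixed value; the name
  `MyConvergenceTest` and the thresholds are only illustrative:
.vb
  PetscErrorCode MyConvergenceTest(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx)
  {
    PetscFunctionBeginUser;
    *reason = KSP_CONVERGED_ITERATING;
    if (rnorm < 1.e-10) *reason = KSP_CONVERGED_ATOL;
    else if (it > 200) *reason = KSP_DIVERGED_ITS;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  // in the calling code, after the KSP type has been set
  KSPSetConvergenceTest(ksp, MyConvergenceTest, NULL, NULL);
.ve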

2605: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2606: @*/
2607: PetscErrorCode KSPSetConvergenceTest(KSP ksp, PetscErrorCode (*converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void *ctx, PetscErrorCode (*destroy)(void *ctx))
2608: {
2609:   PetscFunctionBegin;
2611:   if (ksp->convergeddestroy) PetscCall((*ksp->convergeddestroy)(ksp->cnvP));
2612:   ksp->converged        = converge;
2613:   ksp->convergeddestroy = destroy;
2614:   ksp->cnvP             = (void *)ctx;
2615:   PetscFunctionReturn(PETSC_SUCCESS);
2616: }

2618: /*@C
2619:   KSPGetConvergenceTest - Gets the function to be used to determine convergence.

2621:   Logically Collective

2623:   Input Parameter:
2624: . ksp - iterative context obtained from `KSPCreate()`

2626:   Output Parameters:
2627: + converge - pointer to convergence test function
2628: . ctx      - context for private data for the convergence routine (may be `NULL`)
2629: - destroy  - a routine for destroying the context (may be `NULL`)

2631:   Calling sequence of `converge`:
2632: + ksp    - iterative context obtained from `KSPCreate()`
2633: . it     - iteration number
2634: . rnorm  - (estimated) 2-norm of (preconditioned) residual
2635: . reason - the reason why it has converged or diverged
2636: - ctx    - optional convergence context, as set by `KSPSetConvergenceTest()`

2638:   Calling sequence of `destroy`:
2639: . ctx - the convergence test context

2641:   Level: advanced

2643: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2644: @*/
2645: PetscErrorCode KSPGetConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void **ctx, PetscErrorCode (**destroy)(void *ctx))
2646: {
2647:   PetscFunctionBegin;
2649:   if (converge) *converge = ksp->converged;
2650:   if (destroy) *destroy = ksp->convergeddestroy;
2651:   if (ctx) *ctx = ksp->cnvP;
2652:   PetscFunctionReturn(PETSC_SUCCESS);
2653: }

2655: /*@C
2656:   KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context

2658:   Logically Collective

2660:   Input Parameter:
2661: . ksp - iterative context obtained from `KSPCreate()`

2663:   Output Parameters:
2664: + converge - pointer to convergence test function
2665: . ctx      - context for private data for the convergence routine
2666: - destroy  - a routine for destroying the context

2668:   Calling sequence of `converge`:
2669: + ksp    - iterative context obtained from `KSPCreate()`
2670: . it     - iteration number
2671: . rnorm  - (estimated) 2-norm of (preconditioned) residual
2672: . reason - the reason why it has converged or diverged
2673: - ctx    - optional convergence context, as set by `KSPSetConvergenceTest()`

2675:   Calling sequence of `destroy`:
2676: . ctx - the convergence test context

2678:   Level: advanced

2680:   Note:
2681:   This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`)
2682:   and then calling `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed
2683:   by `KSPSetConvergenceTest()`, the original context information
2684:   would be destroyed and hence the transferred context would be invalid and trigger a crash on use.

2686: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2687: @*/
2688: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void **ctx, PetscErrorCode (**destroy)(void *ctx))
2689: {
2690:   PetscFunctionBegin;
2692:   *converge             = ksp->converged;
2693:   *destroy              = ksp->convergeddestroy;
2694:   *ctx                  = ksp->cnvP;
2695:   ksp->converged        = NULL;
2696:   ksp->cnvP             = NULL;
2697:   ksp->convergeddestroy = NULL;
2698:   PetscFunctionReturn(PETSC_SUCCESS);
2699: }

2701: /*@C
2702:   KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.

2704:   Not Collective

2706:   Input Parameter:
2707: . ksp - iterative context obtained from `KSPCreate()`

2709:   Output Parameter:
2710: . ctx - monitoring context

2712:   Level: advanced

2714: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2715: @*/
2716: PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
2717: {
2718:   PetscFunctionBegin;
2720:   *(void **)ctx = ksp->cnvP;
2721:   PetscFunctionReturn(PETSC_SUCCESS);
2722: }

2724: /*@C
2725:   KSPBuildSolution - Builds the approximate solution in a vector provided.

2727:   Collective

2729:   Input Parameter:
2730: . ksp - iterative context obtained from `KSPCreate()`

2732:   Output Parameter:
2733:    Provide exactly one of
2734: + v - location to stash solution.
2735: - V - the solution is returned in this location. This vector is created
2736:        internally. This vector should NOT be destroyed by the user with
2737:        `VecDestroy()`.

2739:   Level: developer

2741:   Notes:
2742:   This routine can be used in one of two ways
2743: .vb
2744:       KSPBuildSolution(ksp,NULL,&V);
2745:    or
2746:       KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2747: .ve
2748:   In the first case an internal vector is allocated to store the solution
2749:   (the user cannot destroy this vector). In the second case the solution
2750:   is generated in the vector that the user provides. Note that for certain
2751:   methods, such as `KSPCG`, the second case requires a copy of the solution,
2752:   while in the first case the call is essentially free since it simply
2753:   returns the vector where the solution already is stored. For some methods
2754:   like `KSPGMRES` this is a reasonably expensive operation and should only be
2755:   used if truly needed.
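
  For example, a sketch of the first form, which for many methods simply returns the internally stored solution (assuming `ksp` exists):
.vb
  Vec x;
  KSPBuildSolution(ksp, NULL, &x); // x is owned by the KSP; do not destroy it
.ve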

2757: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2758: @*/
2759: PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
2760: {
2761:   PetscFunctionBegin;
2763:   PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
2764:   if (!V) V = &v;
2765:   PetscUseTypeMethod(ksp, buildsolution, v, V);
2766:   PetscFunctionReturn(PETSC_SUCCESS);
2767: }

2769: /*@C
2770:   KSPBuildResidual - Builds the residual in a vector provided.

2772:   Collective

2774:   Input Parameter:
2775: . ksp - iterative context obtained from `KSPCreate()`

2777:   Output Parameters:
2778: + v - optional location to stash residual.  If `v` is not provided,
2779:        then a location is generated.
2780: . t - work vector.  If not provided then one is generated.
2781: - V - the residual

2783:   Level: advanced

2785:   Note:
2786:   Regardless of whether or not `v` is provided, the residual is
2787:   returned in `V`.

2789: .seealso: [](ch_ksp), `KSP`, `KSPBuildSolution()`
2790: @*/
2791: PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
2792: {
2793:   PetscBool flag = PETSC_FALSE;
2794:   Vec       w = v, tt = t;

2796:   PetscFunctionBegin;
2798:   if (!w) PetscCall(VecDuplicate(ksp->vec_rhs, &w));
2799:   if (!tt) {
2800:     PetscCall(VecDuplicate(ksp->vec_sol, &tt));
2801:     flag = PETSC_TRUE;
2802:   }
2803:   PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
2804:   if (flag) PetscCall(VecDestroy(&tt));
2805:   PetscFunctionReturn(PETSC_SUCCESS);
2806: }

2808: /*@
2809:   KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2810:   before solving. This actually CHANGES the matrix (and right hand side).

2812:   Logically Collective

2814:   Input Parameters:
2815: + ksp   - the `KSP` context
2816: - scale - `PETSC_TRUE` or `PETSC_FALSE`

2818:   Options Database Keys:
2819: + -ksp_diagonal_scale     - perform a diagonal scaling before the solve
2820: - -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve

2822:   Level: advanced

2824:   Notes:
2825:   Scales the system to  (D^(-1/2) A D^(-1/2)) (D^(1/2) x) = D^(-1/2) b,
2826:   where D_{ii} = 1/|A_{ii}| unless A_{ii} is zero, in which case D_{ii} = 1.

2828:   BE CAREFUL with this routine: it actually scales the matrix and right
2829:   hand side that define the system. After the system is solved the matrix
2830:   and right hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`

2832:   This should NOT be used within the `SNES` solves if you are using a line
2833:   search.

2835:   If you use this with the `PCType` `PCEISENSTAT` preconditioner then you can
2836:   use the `PCEisenstatSetNoDiagonalScaling()` option, or -pc_eisenstat_no_diagonal_scaling,
2837:   to avoid the redundant flops.
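
  Example Usage:
  A minimal sketch; `ksp`, `b`, and `x` are assumed to be set up already. The system is scaled for
  the solve and scaled back afterwards so the original matrix and right-hand side are recovered:
.vb
  PetscCall(KSPSetDiagonalScale(ksp, PETSC_TRUE));
  PetscCall(KSPSetDiagonalScaleFix(ksp, PETSC_TRUE));
  PetscCall(KSPSolve(ksp, b, x));
.ve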

2839: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2840: @*/
2841: PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
2842: {
2843:   PetscFunctionBegin;
2846:   ksp->dscale = scale;
2847:   PetscFunctionReturn(PETSC_SUCCESS);
2848: }

2850: /*@
2851:   KSPGetDiagonalScale - Checks whether the `KSP` solver scales the matrix and right-hand side, that is, whether `KSPSetDiagonalScale()` has been called

2853:   Not Collective

2855:   Input Parameter:
2856: . ksp - the `KSP` context

2858:   Output Parameter:
2859: . scale - `PETSC_TRUE` or `PETSC_FALSE`

2861:   Level: intermediate

2863: .seealso: [](ch_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`
2864: @*/
2865: PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
2866: {
2867:   PetscFunctionBegin;
2869:   PetscAssertPointer(scale, 2);
2870:   *scale = ksp->dscale;
2871:   PetscFunctionReturn(PETSC_SUCCESS);
2872: }

2874: /*@
2875:   KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.

2877:   Logically Collective

2879:   Input Parameters:
2880: + ksp - the `KSP` context
2881: - fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2882:          rescale (default)

2884:   Level: intermediate

2886:   Notes:
2887:   Must be called after `KSPSetDiagonalScale()`

2889:   Using this will slow things down, because it rescales the matrix before and
2890:   after each linear solve. This is intended mainly for testing to allow one
2891:   to easily get back the original system to make sure the solution computed is
2892:   accurate enough.

2894: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2895: @*/
2896: PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
2897: {
2898:   PetscFunctionBegin;
2901:   ksp->dscalefix = fix;
2902:   PetscFunctionReturn(PETSC_SUCCESS);
2903: }

2905: /*@
2906:   KSPGetDiagonalScaleFix - Determines whether `KSP` diagonally scales the system back after solving, that is, whether `KSPSetDiagonalScaleFix()` has been called

2908:   Not Collective

2910:   Input Parameter:
2911: . ksp - the `KSP` context

2913:   Output Parameter:
2914: . fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2915:          rescale (default)

2917:   Level: intermediate

2919: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2920: @*/
2921: PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
2922: {
2923:   PetscFunctionBegin;
2925:   PetscAssertPointer(fix, 2);
2926:   *fix = ksp->dscalefix;
2927:   PetscFunctionReturn(PETSC_SUCCESS);
2928: }

2930: /*@C
2931:   KSPSetComputeOperators - set routine to compute the linear operators

2933:   Logically Collective

2935:   Input Parameters:
2936: + ksp  - the `KSP` context
2937: . func - function to compute the operators
2938: - ctx  - optional context

2940:   Calling sequence of `func`:
2941: + ksp - the `KSP` context
2942: . A   - the linear operator
2943: . B   - the matrix from which the preconditioner is built, often `A`
2944: - ctx - optional user-provided context

2946:   Level: beginner

2948:   Notes:
2949:   The user-provided `func()` will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
2950:   unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()`. This allows the same system to be solved several times
2951:   with different right-hand sides, but it is a confusing API since one might expect the function to be called for each `KSPSolve()`.

2953:   To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`
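
  Example Usage:
  A minimal sketch; `ComputeMatrix()` is a hypothetical user callback that assembles the operator:
.vb
  static PetscErrorCode ComputeMatrix(KSP ksp, Mat A, Mat B, void *ctx)
  {
    PetscFunctionBeginUser;
    /* set the entries of B (and of A, if it is a different matrix) here */
    PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, NULL));
.ve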

2955:   Developer Notes:
2956:   Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes clear when new matrices are computed without requiring a call to this
2957:   routine to indicate when the new matrix should be computed.

2959: .seealso: [](ch_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`
2960: @*/
2961: PetscErrorCode KSPSetComputeOperators(KSP ksp, PetscErrorCode (*func)(KSP ksp, Mat A, Mat B, void *ctx), void *ctx)
2962: {
2963:   DM dm;

2965:   PetscFunctionBegin;
2967:   PetscCall(KSPGetDM(ksp, &dm));
2968:   PetscCall(DMKSPSetComputeOperators(dm, func, ctx));
2969:   if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX;
2970:   PetscFunctionReturn(PETSC_SUCCESS);
2971: }

2973: /*@C
2974:   KSPSetComputeRHS - set routine to compute the right hand side of the linear system

2976:   Logically Collective

2978:   Input Parameters:
2979: + ksp  - the `KSP` context
2980: . func - function to compute the right hand side
2981: - ctx  - optional context

2983:   Calling sequence of `func`:
2984: + ksp - the `KSP` context
2985: . b   - right hand side of linear system
2986: - ctx - optional user-provided context

2988:   Level: beginner

2990:   Notes:
2991:   The routine you provide will be called EACH time you call `KSPSolve()` to prepare the new right-hand side for that solve
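
  Example Usage:
  A minimal sketch; `ComputeRHS()` is a hypothetical user callback and the constant value is purely
  illustrative:
.vb
  static PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(VecSet(b, 1.0));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, NULL));
.ve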

2993: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`
2994: @*/
2995: PetscErrorCode KSPSetComputeRHS(KSP ksp, PetscErrorCode (*func)(KSP ksp, Vec b, void *ctx), void *ctx)
2996: {
2997:   DM dm;

2999:   PetscFunctionBegin;
3001:   PetscCall(KSPGetDM(ksp, &dm));
3002:   PetscCall(DMKSPSetComputeRHS(dm, func, ctx));
3003:   PetscFunctionReturn(PETSC_SUCCESS);
3004: }

3006: /*@C
3007:   KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system

3009:   Logically Collective

3011:   Input Parameters:
3012: + ksp  - the `KSP` context
3013: . func - function to compute the initial guess
3014: - ctx  - optional context

3016:   Calling sequence of `func`:
3017: + ksp - the `KSP` context
3018: . x   - solution vector
3019: - ctx - optional user-provided context

3021:   Level: beginner

3023:   Notes:
3024:   This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`, otherwise
3025:   call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver
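
  Example Usage:
  A minimal sketch; `ComputeInitialGuess()` is a hypothetical user callback and the explicit zero
  guess is purely illustrative:
.vb
  static PetscErrorCode ComputeInitialGuess(KSP ksp, Vec x, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(VecSet(x, 0.0));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(KSPSetComputeInitialGuess(ksp, ComputeInitialGuess, NULL));
.ve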

3027: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`
3028: @*/
3029: PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, PetscErrorCode (*func)(KSP ksp, Vec x, void *ctx), void *ctx)
3030: {
3031:   DM dm;

3033:   PetscFunctionBegin;
3035:   PetscCall(KSPGetDM(ksp, &dm));
3036:   PetscCall(DMKSPSetComputeInitialGuess(dm, func, ctx));
3037:   PetscFunctionReturn(PETSC_SUCCESS);
3038: }

3040: /*@
3041:   KSPSetUseExplicitTranspose - Determines whether the explicit transpose of the operator is formed in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
3042:   be worthwhile to form it explicitly when possible, since the transpose solve is then much more efficient.

3044:   Logically Collective

3046:   Input Parameters:
3047: + ksp - the `KSP` context
3050: - flg - `PETSC_TRUE` to use the explicitly formed transpose in `KSPSolveTranspose()`, `PETSC_FALSE` to not form it explicitly (default)

3052:   Level: advanced
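
  Example Usage:
  A minimal sketch; `ksp`, `b`, and `x` are assumed to be set up already:
.vb
  PetscCall(KSPSetUseExplicitTranspose(ksp, PETSC_TRUE));
  PetscCall(KSPSolveTranspose(ksp, b, x));
.ve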

3054: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `KSP`
3055: @*/
3056: PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
3057: {
3058:   PetscFunctionBegin;
3061:   ksp->transpose.use_explicittranspose = flg;
3062:   PetscFunctionReturn(PETSC_SUCCESS);
3063: }