/* itfunc.c */

  1: /*
  2:       Interface KSP routines that the user calls.
  3: */

  5: #include <petsc/private/kspimpl.h>
  6: #include <petsc/private/matimpl.h>
  7: #include <petscdm.h>

/* Number of nested levels of KSPSetUp()/KSPSolve() currently active. Used to determine whether KSP_DIVERGED_ITS should be treated as fatal. */
static PetscInt level = 0;

/* View obj on viewer using the requested format; the push/pop pair restores the viewer's previous format so callers' state is unchanged. */
static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
{
  PetscCall(PetscViewerPushFormat(viewer, format));
  PetscCall(PetscObjectView(obj, viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  return PETSC_SUCCESS;
}

/*@
  KSPComputeExtremeSingularValues - Computes the extreme singular values
  for the preconditioned operator. Called after or during `KSPSolve()`.

  Not Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Output Parameters:
+ emax - maximum estimated singular value
- emin - minimum estimated singular value

  Options Database Key:
. -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.

  Level: advanced

  Notes:
  One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
  (or use the option `-ksp_view_singularvalues`) in order for this routine to work correctly.

  Many users may just want to use the monitoring routine
  `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
  to print the extreme singular values at each iteration of the linear solve.

  Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
  The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
  intended for eigenanalysis. Consider the excellent package SLEPc if accurate values are required.

  Disable restarts if using `KSPGMRES`, otherwise this estimate will only be using those iterations after the last
  restart. See `KSPGMRESSetRestart()` for more details.

.seealso: [](ch_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`, `KSPComputeRitz()`
@*/
PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
{
  PetscFunctionBegin;
  PetscAssertPointer(emax, 2);
  PetscAssertPointer(emin, 3);
  /* calc_sings is set by KSPSetComputeSingularValues(); without it the Krylov method did not record the data this routine needs */
  PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Singular values not requested before KSPSetUp()");

  if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
  else {
    /* -1.0 signals that this KSP implementation cannot compute singular values */
    *emin = -1.0;
    *emax = -1.0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

 71: /*@
 72:   KSPComputeEigenvalues - Computes the extreme eigenvalues for the
 73:   preconditioned operator. Called after or during `KSPSolve()`.

 75:   Not Collective

 77:   Input Parameters:
 78: + ksp - iterative solver obtained from `KSPCreate()`
 79: - n   - size of arrays `r` and `c`. The number of eigenvalues computed `neig` will, in general, be less than this.

 81:   Output Parameters:
 82: + r    - real part of computed eigenvalues, provided by user with a dimension of at least `n`
 83: . c    - complex part of computed eigenvalues, provided by user with a dimension of at least `n`
 84: - neig - actual number of eigenvalues computed (will be less than or equal to `n`)

 86:   Options Database Key:
 87: . -ksp_view_eigenvalues - Prints eigenvalues to stdout

 89:   Level: advanced

 91:   Notes:
 92:   The number of eigenvalues estimated depends on the size of the Krylov space
 93:   generated during the `KSPSolve()` ; for example, with
 94:   `KSPCG` it corresponds to the number of CG iterations, for `KSPGMRES` it is the number
 95:   of GMRES iterations SINCE the last restart. Any extra space in `r` and `c`
 96:   will be ignored.

 98:   `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
 99:   intended only for assistance in understanding the convergence of iterative
100:   methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
101:   the excellent package SLEPc.

103:   One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
104:   in order for this routine to work correctly.

106:   Many users may just want to use the monitoring routine
107:   `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
108:   to print the singular values at each iteration of the linear solve.

110:   `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.

112: .seealso: [](ch_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
113: @*/
114: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
115: {
116:   PetscFunctionBegin;
118:   if (n) PetscAssertPointer(r, 3);
119:   if (n) PetscAssertPointer(c, 4);
120:   PetscCheck(n >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Requested < 0 Eigenvalues");
121:   PetscAssertPointer(neig, 5);
122:   PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Eigenvalues not requested before KSPSetUp()");

124:   if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
125:   else *neig = 0;
126:   PetscFunctionReturn(PETSC_SUCCESS);
127: }

/*@
  KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the
  Ritz values of smallest or largest modulus, for the preconditioned operator.

  Not Collective

  Input Parameters:
+ ksp   - iterative solver obtained from `KSPCreate()`
. ritz  - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
- small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively

  Output Parameters:
+ nrit  - On input number of (harmonic) Ritz pairs to compute; on output, actual number of computed (harmonic) Ritz pairs
. S     - an array of the Ritz vectors, pass in an array of vectors of size `nrit`
. tetar - real part of the Ritz values, pass in an array of size `nrit`
- tetai - imaginary part of the Ritz values, pass in an array of size `nrit`

  Level: advanced

  Notes:
  This only works with a `KSPType` of `KSPGMRES`.

  One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.

  This routine must be called after `KSPSolve()`.

  In `KSPGMRES`, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
  the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
  a restart (that is a complete GMRES cycle was never achieved).

  The number of actual (harmonic) Ritz pairs computed is less than or equal to the restart
  parameter for GMRES if a complete cycle has been performed or less or equal to the number of GMRES
  iterations.

  `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).

  For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
  the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
  vectors `S` are equal to the real and the imaginary parts of the associated vectors.
  When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
  values are still returned in `tetar` and `tetai`, as is done in `KSPComputeEigenvalues()`, but
  the Ritz vectors S are complex.

  The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.

  The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator, consider the
  excellent package SLEPc if accurate values are required.

.seealso: [](ch_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
@*/
PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
{
  PetscFunctionBegin;
  PetscCheck(ksp->calc_ritz, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Ritz pairs not requested before KSPSetUp()");
  /* PetscTryTypeMethod: silently a no-op for KSP implementations (anything but GMRES) that do not provide computeritz */
  PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  KSPSetUpOnBlocks - Sets up the preconditioner for each block in
  the block Jacobi `PCJACOBI`, overlapping Schwarz `PCASM`, and fieldsplit `PCFIELDSPLIT` preconditioners

  Collective

  Input Parameter:
. ksp - the `KSP` context

  Level: advanced

  Notes:
  `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
  more precise profiling (via `-log_view`) of the setup phase for these
  block preconditioners.  If the user does not call `KSPSetUpOnBlocks()`,
  it will automatically be called from within `KSPSolve()`.

  Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
  on the `PC` context within the `KSP` context.

.seealso: [](ch_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
@*/
PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
{
  PC             pc;
  PCFailedReason pcreason;

  PetscFunctionBegin;
  /* bracket the nested setup with the global level counter so KSP_DIVERGED_ITS fatality can be decided by nesting depth */
  level++;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetUpOnBlocks(pc));
  PetscCall(PCGetFailedReason(pc, &pcreason));
  level--;
  /*
     This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
     this flag and initializing an appropriate vector with VecFlag() so that the first norm computation can
     produce a result at KSPCheckNorm() thus communicating the known problem to all MPI ranks so they may
     terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual()
  */
  if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
  PetscFunctionReturn(PETSC_SUCCESS);
}

232: /*@
233:   KSPSetReusePreconditioner - reuse the current preconditioner for future `KSPSolve()`, do not construct a new preconditioner even if the `Mat` operator
234:   in the `KSP` has different values

236:   Collective

238:   Input Parameters:
239: + ksp  - iterative solver obtained from `KSPCreate()`
240: - flag - `PETSC_TRUE` to reuse the current preconditioner, or `PETSC_FALSE` to construct a new preconditioner

242:   Options Database Key:
243: . -ksp_reuse_preconditioner (true|false) - reuse the previously computed preconditioner

245:   Level: intermediate

247:   Notes:
248:   When using `SNES` one can use `SNESSetLagPreconditioner()` to determine when preconditioners are reused.

250:   Reusing the preconditioner reduces the time needed to form new preconditioners but may (significantly) increase the number
251:   of iterations needed for future solves depending on how much the matrix entries have changed.

253: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGetReusePreconditioner()`,
254:           `SNESSetLagPreconditioner()`, `SNES`
255: @*/
256: PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
257: {
258:   PC pc;

260:   PetscFunctionBegin;
262:   PetscCall(KSPGetPC(ksp, &pc));
263:   PetscCall(PCSetReusePreconditioner(pc, flag));
264:   PetscFunctionReturn(PETSC_SUCCESS);
265: }

267: /*@
268:   KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the `Mat` operator in the `KSP` has changed.

270:   Collective

272:   Input Parameter:
273: . ksp - iterative solver obtained from `KSPCreate()`

275:   Output Parameter:
276: . flag - the boolean flag indicating if the current preconditioner should be reused

278:   Level: intermediate

280: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
281: @*/
282: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
283: {
284:   PetscFunctionBegin;
286:   PetscAssertPointer(flag, 2);
287:   *flag = PETSC_FALSE;
288:   if (ksp->pc) PetscCall(PCGetReusePreconditioner(ksp->pc, flag));
289:   PetscFunctionReturn(PETSC_SUCCESS);
290: }

/*@
  KSPSetSkipPCSetFromOptions - prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`.
  This is used if the same `PC` is shared by more than one `KSP` so its options are not reset for each `KSP`

  Collective

  Input Parameters:
+ ksp  - iterative solver obtained from `KSPCreate()`
- flag - `PETSC_TRUE` to skip calling the `PCSetFromOptions()`

  Level: developer

.seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
@*/
PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
{
  PetscFunctionBegin;
  /* consulted by KSPSetFromOptions() before it would call PCSetFromOptions() */
  ksp->skippcsetfromoptions = flag;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  KSPSetUp - Sets up the internal data structures for the
  later use `KSPSolve()` the `KSP` linear iterative solver.

  Collective

  Input Parameter:
. ksp - iterative solver, `KSP`, obtained from `KSPCreate()`

  Level: developer

  Note:
  This is called automatically by `KSPSolve()` so usually does not need to be called directly.

.seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPSetUpOnBlocks()`
@*/
PetscErrorCode KSPSetUp(KSP ksp)
{
  Mat            A, B;
  Mat            mat, pmat;
  MatNullSpace   nullsp;
  PCFailedReason pcreason;
  PC             pc;
  PetscBool      pcmpi;

  PetscFunctionBegin;
  PetscCall(KSPGetPC(ksp, &pc));
  /* PCMPI (the MPI linear solver server) requires the outer KSP to be KSPPREONLY; enforce that here */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCMPI, &pcmpi));
  if (pcmpi) {
    PetscBool ksppreonly;
    PetscCall(PetscObjectTypeCompare((PetscObject)ksp, KSPPREONLY, &ksppreonly));
    if (!ksppreonly) PetscCall(KSPSetType(ksp, KSPPREONLY));
  }
  level++;

  /* reset the convergence flag from the previous solves */
  ksp->reason = KSP_CONVERGED_ITERATING;

  /* default solver type is GMRES */
  if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES));
  PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));

  if ((ksp->dmActive & KSP_DMACTIVE_OPERATOR) && !ksp->setupstage) {
    /* first time in so build matrix and vector data structures using DM */
    if (!ksp->vec_rhs) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs));
    if (!ksp->vec_sol) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_sol));
    PetscCall(DMCreateMatrix(ksp->dm, &A));
    PetscCall(KSPSetOperators(ksp, A, A));
    /* KSPSetOperators() took a reference; drop ours so the KSP owns the matrix */
    PetscCall(PetscObjectDereference((PetscObject)A));
  }

  if (ksp->dmActive) {
    DMKSP kdm;
    PetscCall(DMGetDMKSP(ksp->dm, &kdm));

    if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS && (ksp->dmActive & KSP_DMACTIVE_INITIAL_GUESS)) {
      /* only computes initial guess the first time through */
      PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
      PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
    }
    if (kdm->ops->computerhs && (ksp->dmActive & KSP_DMACTIVE_RHS)) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));
    if ((ksp->setupstage != KSP_SETUP_NEWRHS) && (ksp->dmActive & KSP_DMACTIVE_OPERATOR)) {
      PetscCheck(kdm->ops->computeoperators, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp, KSP_DMACTIVE_ALL, PETSC_FALSE);");
      PetscCall(KSPGetOperators(ksp, &A, &B));
      PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
    }
  }

  /* only a new right-hand side: the expensive setup below was already done for this operator */
  if (ksp->setupstage == KSP_SETUP_NEWRHS) {
    level--;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));

  switch (ksp->setupstage) {
  case KSP_SETUP_NEW:
    PetscUseTypeMethod(ksp, setup);
    break;
  case KSP_SETUP_NEWMATRIX: /* This should be replaced with a more general mechanism */
    if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
    break;
  default:
    break;
  }

  if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  /* scale the matrix if requested */
  if (ksp->dscale) {
    PetscScalar *xx;
    PetscInt     i, n;
    PetscBool    zeroflag = PETSC_FALSE;

    if (!ksp->diagonal) { /* allocate vector to hold diagonal */
      PetscCall(MatCreateVecs(pmat, &ksp->diagonal, NULL));
    }
    PetscCall(MatGetDiagonal(pmat, ksp->diagonal));
    PetscCall(VecGetLocalSize(ksp->diagonal, &n));
    PetscCall(VecGetArray(ksp->diagonal, &xx));
    /* build the symmetric diagonal scaling 1/sqrt(|d_i|); zero diagonal entries are left unscaled */
    for (i = 0; i < n; i++) {
      if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
      else {
        xx[i]    = 1.0;
        zeroflag = PETSC_TRUE;
      }
    }
    PetscCall(VecRestoreArray(ksp->diagonal, &xx));
    if (zeroflag) PetscCall(PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n"));
    PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
    if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
    ksp->dscalefix2 = PETSC_FALSE;
  }
  PetscCall(PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
  PetscCall(PCSetUp(ksp->pc));
  PetscCall(PCGetFailedReason(ksp->pc, &pcreason));
  /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; there is no code to handle a ksp->reason on only some ranks */
  if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;

  PetscCall(MatGetNullSpace(mat, &nullsp));
  if (nullsp) {
    PetscBool test = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL));
    if (test) PetscCall(MatNullSpaceTest(nullsp, mat, NULL));
  }
  ksp->setupstage = KSP_SETUP_NEWRHS;
  level--;
  PetscFunctionReturn(PETSC_SUCCESS);
}

444: /*@
445:   KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged, `KSPConvergedReason` to a `PetscViewer`

447:   Collective

449:   Input Parameters:
450: + ksp    - iterative solver obtained from `KSPCreate()`
451: - viewer - the `PetscViewer` on which to display the reason

453:   Options Database Keys:
454: + -ksp_converged_reason          - print reason for converged or diverged, also prints number of iterations
455: - -ksp_converged_reason ::failed - only print reason and number of iterations when diverged

457:   Level: beginner

459:   Note:
460:   Use `KSPConvergedReasonViewFromOptions()` to display the reason based on values in the PETSc options database.

462:   To change the format of the output call `PetscViewerPushFormat`(`viewer`,`format`) before this call. Use `PETSC_VIEWER_DEFAULT` for the default,
463:   use `PETSC_VIEWER_FAILED` to only display a reason if it fails.

465: .seealso: [](ch_ksp), `KSPConvergedReasonViewFromOptions()`, `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
466:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
467: @*/
468: PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
469: {
470:   PetscBool         isAscii;
471:   PetscViewerFormat format;

473:   PetscFunctionBegin;
474:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
475:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
476:   if (isAscii) {
477:     PetscCall(PetscViewerGetFormat(viewer, &format));
478:     PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel + 1));
479:     if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
480:       if (((PetscObject)ksp)->prefix) {
481:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
482:       } else {
483:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
484:       }
485:     } else if (ksp->reason <= 0) {
486:       if (((PetscObject)ksp)->prefix) {
487:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
488:       } else {
489:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
490:       }
491:       if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
492:         PCFailedReason reason;
493:         PetscCall(PCGetFailedReason(ksp->pc, &reason));
494:         PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s\n", PCFailedReasons[reason]));
495:       }
496:     }
497:     PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel + 1));
498:   }
499:   PetscFunctionReturn(PETSC_SUCCESS);
500: }

/*@C
  KSPConvergedReasonViewSet - Sets an ADDITIONAL function that is to be used at the
  end of the linear solver to display the convergence reason of the linear solver.

  Logically Collective

  Input Parameters:
+ ksp               - the `KSP` context
. f                 - the `ksp` converged reason view function, see `KSPConvergedReasonViewFn`
. ctx               - [optional] context for private data for the `KSPConvergedReason` view routine (use `NULL` if context is not needed)
- reasonviewdestroy - [optional] routine that frees `ctx` (may be `NULL`), see `PetscCtxDestroyFn` for the calling sequence

  Options Database Keys:
+ -ksp_converged_reason             - sets a default `KSPConvergedReasonView()`
- -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have been hardwired into a code by
                                      calls to `KSPConvergedReasonViewSet()`, but does not cancel those set via the options database.

  Level: intermediate

  Note:
  Several different converged reason view routines may be set by calling
  `KSPConvergedReasonViewSet()` multiple times; all will be called in the
  order in which they were set.

  Developer Note:
  Should be named KSPConvergedReasonViewAdd().

.seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewFn`, `KSPConvergedReasonViewCancel()`, `PetscCtxDestroyFn`
@*/
PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, KSPConvergedReasonViewFn *f, PetscCtx ctx, PetscCtxDestroyFn *reasonviewdestroy)
{
  PetscFunctionBegin;
  /* refuse duplicates: if an identical (function, context, destroy) triple is already registered, do nothing */
  for (PetscInt i = 0; i < ksp->numberreasonviews; i++) {
    PetscBool identical;

    PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)f, ctx, reasonviewdestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical));
    if (identical) PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(ksp->numberreasonviews < MAXKSPREASONVIEWS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP reasonview set");
  /* append to the fixed-size registration arrays */
  ksp->reasonview[ksp->numberreasonviews]          = f;
  ksp->reasonviewdestroy[ksp->numberreasonviews]   = reasonviewdestroy;
  ksp->reasonviewcontext[ksp->numberreasonviews++] = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

548: /*@
549:   KSPConvergedReasonViewCancel - Clears all the `KSPConvergedReason` view functions for a `KSP` object set with `KSPConvergedReasonViewSet()`
550:   as well as the default viewer.

552:   Collective

554:   Input Parameter:
555: . ksp - iterative solver obtained from `KSPCreate()`

557:   Level: intermediate

559: .seealso: [](ch_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`, `KSPConvergedReasonViewSet()`
560: @*/
561: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
562: {
563:   PetscInt i;

565:   PetscFunctionBegin;
567:   for (i = 0; i < ksp->numberreasonviews; i++) {
568:     if (ksp->reasonviewdestroy[i]) PetscCall((*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]));
569:   }
570:   ksp->numberreasonviews = 0;
571:   PetscCall(PetscViewerDestroy(&ksp->convergedreasonviewer));
572:   PetscFunctionReturn(PETSC_SUCCESS);
573: }

/*@
  KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how a `KSPConvergedReason` is to be viewed.

  Collective

  Input Parameter:
. ksp - the `KSP` object

  Level: intermediate

  Note:
  This is called automatically at the conclusion of `KSPSolve()` so is rarely called directly by user code.

.seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewSet()`
@*/
PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
{
  PetscFunctionBegin;
  /* Call all user-provided reason review routines */
  for (PetscInt i = 0; i < ksp->numberreasonviews; i++) PetscCall((*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]));

  /* Call the default PETSc routine, with the format requested in the options database */
  if (ksp->convergedreasonviewer) {
    PetscCall(PetscViewerPushFormat(ksp->convergedreasonviewer, ksp->convergedreasonformat));
    PetscCall(KSPConvergedReasonView(ksp, ksp->convergedreasonviewer));
    PetscCall(PetscViewerPopFormat(ksp->convergedreasonviewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  KSPConvergedRateView - Displays the convergence rate <https://en.wikipedia.org/wiki/Coefficient_of_determination> of `KSPSolve()` to a viewer

  Collective

  Input Parameters:
+ ksp    - iterative solver obtained from `KSPCreate()`
- viewer - the `PetscViewer` to display the reason

  Options Database Key:
. -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)

  Level: intermediate

  Notes:
  To change the format of the output, call `PetscViewerPushFormat`(`viewer`,`format`) before this call.

  Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
  the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)}$,

.seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
@*/
PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscBool         isAscii;
  PetscReal         rrate, rRsq, erate = 0.0, eRsq = 0.0;
  PetscInt          its;
  const char       *prefix, *reason = KSPConvergedReasons[ksp->reason];

  PetscFunctionBegin;
  PetscCall(KSPGetIterationNumber(ksp, &its));
  /* rrate/rRsq: residual rate and its R^2; erate/eRsq: error rate and its R^2 (negative R^2 means "not available") */
  PetscCall(KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq));
  if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
  if (isAscii) {
    PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
    PetscCall(PetscViewerGetFormat(viewer, &format));
    PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
    if (ksp->reason > 0) {
      if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its));
      /* tabs disabled so the rate fragments continue the same output line */
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
      if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else if (ksp->reason <= 0) {
      if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
      if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
        PCFailedReason reason;
        PetscCall(PCGetFailedReason(ksp->pc, &reason));
        PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s\n", PCFailedReasons[reason]));
      }
    }
    PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

671: #include <petscdraw.h>

/* View the eigenvalues of the preconditioned operator on an ASCII or draw viewer.
   isExplicit selects dense explicit computation (size of the whole system) versus the
   cheap estimates accumulated during the Krylov iteration (KSPComputeEigenvalues()). */
static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
{
  PetscReal  *r, *c;
  PetscInt    n, i, neig;
  PetscBool   isascii, isdraw;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isExplicit) {
    /* explicit path: compute all n eigenvalues of the (preconditioned) operator */
    PetscCall(VecGetSize(ksp->vec_sol, &n));
    PetscCall(PetscMalloc2(n, &r, n, &c));
    PetscCall(KSPComputeEigenvaluesExplicitly(ksp, n, r, c));
    neig = n;
  } else {
    PetscInt nits;

    PetscCall(KSPGetIterationNumber(ksp, &nits));
    n = nits + 2;
    if (!nits) {
      /* NOTE(review): this message is printed even when the viewer is not ASCII — presumably harmless; confirm */
      PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n"));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscMalloc2(n, &r, n, &c));
    PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
  }
  if (isascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively"));
    for (i = 0; i < neig; ++i) {
      /* print a + bi with the sign folded into the formatting */
      if (c[i] >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]));
    }
  } else if (isdraw && rank == 0) { /* drawing is done only on rank 0 */
    PetscDraw   draw;
    PetscDrawSP drawsp;

    if (format == PETSC_VIEWER_DRAW_CONTOUR) {
      PetscCall(KSPPlotEigenContours_Private(ksp, neig, r, c));
    } else {
      /* scatter plot of the eigenvalues in the complex plane */
      PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
      PetscCall(PetscDrawSPCreate(draw, 1, &drawsp));
      PetscCall(PetscDrawSPReset(drawsp));
      for (i = 0; i < neig; ++i) PetscCall(PetscDrawSPAddPoint(drawsp, r + i, c + i));
      PetscCall(PetscDrawSPDraw(drawsp, PETSC_TRUE));
      PetscCall(PetscDrawSPSave(drawsp));
      PetscCall(PetscDrawSPDestroy(&drawsp));
    }
  }
  PetscCall(PetscFree2(r, c));
  PetscFunctionReturn(PETSC_SUCCESS);
}

727: static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
728: {
729:   PetscReal smax, smin;
730:   PetscInt  nits;
731:   PetscBool isascii;

733:   PetscFunctionBegin;
734:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
735:   PetscCall(KSPGetIterationNumber(ksp, &nits));
736:   if (!nits) {
737:     PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n"));
738:     PetscFunctionReturn(PETSC_SUCCESS);
739:   }
740:   PetscCall(KSPComputeExtremeSingularValues(ksp, &smax, &smin));
741:   if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme %svalues: max %g min %g max/min %g\n", smin < 0 ? "eigen" : "singular ", (double)smax, (double)smin, (double)(smax / smin)));
742:   PetscFunctionReturn(PETSC_SUCCESS);
743: }

745: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
746: {
747:   PetscBool isascii;

749:   PetscFunctionBegin;
750:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
751:   PetscCheck(!ksp->dscale || ksp->dscalefix, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Cannot compute final scale with -ksp_diagonal_scale except also with -ksp_diagonal_scale_fix");
752:   if (isascii) {
753:     Mat       A;
754:     Vec       t;
755:     PetscReal norm;

757:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
758:     PetscCall(VecDuplicate(ksp->vec_rhs, &t));
759:     PetscCall(KSP_MatMult(ksp, A, ksp->vec_sol, t));
760:     PetscCall(VecAYPX(t, -1.0, ksp->vec_rhs));
761:     PetscCall(PetscOptionsPushCreateViewerOff(PETSC_FALSE));
762:     PetscCall(VecViewFromOptions(t, (PetscObject)ksp, "-ksp_view_final_residual_vec"));
763:     PetscCall(PetscOptionsPopCreateViewerOff());
764:     PetscCall(VecNorm(t, NORM_2, &norm));
765:     PetscCall(VecDestroy(&t));
766:     PetscCall(PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm));
767:   }
768:   PetscFunctionReturn(PETSC_SUCCESS);
769: }

771: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode PetscMonitorPauseFinal_Internal(PetscInt n, PetscCtx ctx[])
772: {
773:   PetscFunctionBegin;
774:   for (PetscInt i = 0; i < n; ++i) {
775:     PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ctx[i];
776:     PetscDraw             draw;
777:     PetscReal             lpause;
778:     PetscBool             isdraw;

780:     if (!vf) continue;
781:     if (!PetscCheckPointer(vf->viewer, PETSC_OBJECT)) continue;
782:     if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
783:     PetscCall(PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw));
784:     if (!isdraw) continue;

786:     PetscCall(PetscViewerDrawGetDraw(vf->viewer, 0, &draw));
787:     PetscCall(PetscDrawGetPause(draw, &lpause));
788:     PetscCall(PetscDrawSetPause(draw, -1.0));
789:     PetscCall(PetscDrawPause(draw));
790:     PetscCall(PetscDrawSetPause(draw, lpause));
791:   }
792:   PetscFunctionReturn(PETSC_SUCCESS);
793: }

795: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
796: {
797:   PetscFunctionBegin;
798:   if (!ksp->pauseFinal) PetscFunctionReturn(PETSC_SUCCESS);
799:   PetscCall(PetscMonitorPauseFinal_Internal(ksp->numbermonitors, ksp->monitorcontext));
800:   PetscFunctionReturn(PETSC_SUCCESS);
801: }

/*
  KSPSolve_Private - Common driver for KSPSolve() and KSPSolveTranspose() (the caller sets
  ksp->transpose_solve beforehand).

  Handles: aliased x == b, attaching b/x to the KSP, pre/post-solve hooks, setup, initial-guess
  generation, diagonal scaling and its "fix" (unscale) pass, transpose-nullspace removal from the
  right-hand side, all the -ksp_view_* diagnostics, and error-on-divergence policy.

  The static `level` counter tracks nesting of solves so that KSP_DIVERGED_ITS from an *inner*
  (nested) solver is not treated as a fatal error.
*/
static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
{
  PetscBool    flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
  Mat          mat, pmat;
  MPI_Comm     comm;
  MatNullSpace nullsp;
  Vec          btmp, vec_rhs = NULL;

  PetscFunctionBegin;
  level++;
  comm = PetscObjectComm((PetscObject)ksp);
  if (x && x == b) {
    /* user passed the same vector for solution and rhs: solve into a private copy of x and
       copy back into b at the end (only legal with a zero initial guess) */
    PetscCheck(ksp->guess_zero, comm, PETSC_ERR_ARG_INCOMP, "Cannot use x == b with nonzero initial guess");
    PetscCall(VecDuplicate(b, &x));
    inXisinB = PETSC_TRUE;
  }
  /* take references so the KSP owns its rhs/solution for the duration of the solve */
  if (b) {
    PetscCall(PetscObjectReference((PetscObject)b));
    PetscCall(VecDestroy(&ksp->vec_rhs));
    ksp->vec_rhs = b;
  }
  if (x) {
    PetscCall(PetscObjectReference((PetscObject)x));
    PetscCall(VecDestroy(&ksp->vec_sol));
    ksp->vec_sol = x;
  }

  if (ksp->viewPre) PetscCall(ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre));

  /* user pre-solve callback, see KSPSetPreSolve() */
  if (ksp->presolve) PetscCall((*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx));

  /* reset the residual history list if requested */
  if (ksp->res_hist_reset) ksp->res_hist_len = 0;
  if (ksp->err_hist_reset) ksp->err_hist_len = 0;

  /* KSPSetUp() scales the matrix if needed */
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));

  if (ksp->guess) {
    PetscObjectState ostate, state;

    /* let the KSPGuess object produce an initial guess; detect via the object state
       whether it actually modified the solution vector */
    PetscCall(KSPGuessSetUp(ksp->guess));
    PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate));
    PetscCall(KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
    PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &state));
    if (state != ostate) {
      ksp->guess_zero = PETSC_FALSE;
    } else {
      PetscCall(PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n"));
      ksp->guess_zero = PETSC_TRUE;
    }
  }

  /* the solver is about to write into vec_sol; error out early if someone holds a read lock */
  PetscCall(VecSetErrorIfLocked(ksp->vec_sol, 3));

  PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  /* diagonal scale RHS if called for */
  if (ksp->dscale) {
    PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
    /* second time in, but matrix was scaled back to original */
    if (ksp->dscalefix && ksp->dscalefix2) {
      Mat mat, pmat;

      PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
      PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
      if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
    }

    /* scale initial guess */
    if (!ksp->guess_zero) {
      if (!ksp->truediagonal) {
        /* cache the reciprocal of the scaling for reuse on later solves */
        PetscCall(VecDuplicate(ksp->diagonal, &ksp->truediagonal));
        PetscCall(VecCopy(ksp->diagonal, ksp->truediagonal));
        PetscCall(VecReciprocal(ksp->truediagonal));
      }
      PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal));
    }
  }
  PetscCall(PCPreSolve(ksp->pc, ksp));

  /* NOTE(review): guess_not_read presumably means the guess was loaded externally
     (e.g. from a file) and must not be zeroed — confirm against KSPSetInitialGuessNonzero()/guess docs */
  if (ksp->guess_zero && !ksp->guess_not_read) PetscCall(VecSet(ksp->vec_sol, 0.0));
  if (ksp->guess_knoll) { /* The Knoll trick is independent on the KSPGuess specified */
    PetscCall(PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol));
    PetscCall(KSP_RemoveNullSpace(ksp, ksp->vec_sol));
    ksp->guess_zero = PETSC_FALSE;
  }

  /* can we mark the initial guess as zero for this solve? */
  guess_zero = ksp->guess_zero;
  if (!ksp->guess_zero) {
    PetscReal norm;

    /* only use a cached norm — never compute one just for this shortcut */
    PetscCall(VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm));
    if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
  }
  /* a consistent right-hand side must be orthogonal to nullspace(op(A)^T); for the
     transpose solve that is nullspace(A), otherwise nullspace(A^T) */
  if (ksp->transpose_solve) {
    PetscCall(MatGetNullSpace(mat, &nullsp));
  } else {
    PetscCall(MatGetTransposeNullSpace(mat, &nullsp));
  }
  if (nullsp) {
    /* project the rhs onto the range of op(A), working on a copy so the user's b is untouched */
    PetscCall(VecDuplicate(ksp->vec_rhs, &btmp));
    PetscCall(VecCopy(ksp->vec_rhs, btmp));
    PetscCall(MatNullSpaceRemove(nullsp, btmp));
    vec_rhs      = ksp->vec_rhs;
    ksp->vec_rhs = btmp;
  }
  /* the rhs must not change during the actual solve */
  PetscCall(VecLockReadPush(ksp->vec_rhs));
  PetscUseTypeMethod(ksp, solve);
  PetscCall(KSPMonitorPauseFinal_Internal(ksp));

  PetscCall(VecLockReadPop(ksp->vec_rhs));
  if (nullsp) {
    /* restore the user's original rhs */
    ksp->vec_rhs = vec_rhs;
    PetscCall(VecDestroy(&btmp));
  }

  /* restore the user's guess_zero setting, which may have been overridden above */
  ksp->guess_zero = guess_zero;

  PetscCheck(ksp->reason, comm, PETSC_ERR_PLIB, "Internal error, solver returned without setting converged reason");
  ksp->totalits += ksp->its;

  PetscCall(KSPConvergedReasonViewFromOptions(ksp));

  if (ksp->viewRate) {
    PetscCall(PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate));
    PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
    PetscCall(PetscViewerPopFormat(ksp->viewerRate));
  }
  PetscCall(PCPostSolve(ksp->pc, ksp));

  /* diagonal scale solution if called for */
  if (ksp->dscale) {
    PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal));
    /* unscale right-hand side and matrix */
    if (ksp->dscalefix) {
      Mat mat, pmat;

      PetscCall(VecReciprocal(ksp->diagonal));
      PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
      PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
      PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
      if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
      PetscCall(VecReciprocal(ksp->diagonal));
      /* remember the matrix was unscaled so the next solve re-scales it (see above) */
      ksp->dscalefix2 = PETSC_TRUE;
    }
  }
  PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  /* let the KSPGuess object learn from this solve, and run the user post-solve hook */
  if (ksp->guess) PetscCall(KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
  if (ksp->postsolve) PetscCall((*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx));

  /* all the optional -ksp_view_* diagnostics */
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  if (ksp->viewEV) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV));
  if (ksp->viewEVExp) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp));
  if (ksp->viewSV) PetscCall(KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV));
  if (ksp->viewFinalRes) PetscCall(KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes));
  if (ksp->viewMat) PetscCall(ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat));
  if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat));
  if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs));
  if (ksp->viewSol) PetscCall(ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol));
  if (ksp->view) PetscCall(ObjectView((PetscObject)ksp, ksp->viewer, ksp->format));
  if (ksp->viewDScale) PetscCall(ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale));
  if (ksp->viewMatExp) {
    Mat A, B;

    /* assemble the (possibly matrix-free) operator explicitly for viewing */
    PetscCall(PCGetOperators(ksp->pc, &A, NULL));
    if (ksp->transpose_solve) {
      Mat AT;

      PetscCall(MatCreateTranspose(A, &AT));
      PetscCall(MatComputeOperator(AT, MATAIJ, &B));
      PetscCall(MatDestroy(&AT));
    } else {
      PetscCall(MatComputeOperator(A, MATAIJ, &B));
    }
    PetscCall(ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp));
    PetscCall(MatDestroy(&B));
  }
  if (ksp->viewPOpExp) {
    Mat B;

    /* assemble the preconditioned operator explicitly for viewing */
    PetscCall(KSPComputeOperator(ksp, MATAIJ, &B));
    PetscCall(ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp));
    PetscCall(MatDestroy(&B));
  }

  if (inXisinB) {
    /* x was aliased to b on entry: copy the solution back and drop our private vector */
    PetscCall(VecCopy(x, b));
    PetscCall(VecDestroy(&x));
  }
  PetscCall(PetscObjectSAWsBlock((PetscObject)ksp));
  /* KSP_DIVERGED_ITS is tolerated in nested (level > 1) solves; everything else is fatal
     when -ksp_error_if_not_converged is set */
  if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
    PCFailedReason reason;

    PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
    PetscCall(PCGetFailedReason(ksp->pc, &reason));
    SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
  }
  level--;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1007: /*@
1008:   KSPSolve - Solves a linear system associated with `KSP` object

1010:   Collective

1012:   Input Parameters:
1013: + ksp - iterative solver obtained from `KSPCreate()`
1014: . b   - the right-hand side vector
1015: - x   - the solution (this may be the same vector as `b`, then `b` will be overwritten with the answer)

1017:   Options Database Keys:
1018: + -ksp_view_eigenvalues                      - compute preconditioned operators eigenvalues
1019: . -ksp_view_eigenvalues_explicit             - compute the eigenvalues by forming the dense operator and using LAPACK
1020: . -ksp_view_mat binary                       - save matrix to the default binary viewer
1021: . -ksp_view_pmat binary                      - save matrix used to build preconditioner to the default binary viewer
1022: . -ksp_view_rhs binary                       - save right-hand side vector to the default binary viewer
1023: . -ksp_view_solution binary                  - save computed solution vector to the default binary viewer
1024:                                                (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
1025: . -ksp_view_mat_explicit                     - for matrix-free operators, computes the matrix entries and views them
1026: . -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
1027: . -ksp_converged_reason                      - print reason for converged or diverged, also prints number of iterations
1028: . -ksp_view_final_residual                   - print 2-norm of true linear system residual at the end of the solution process
1029: . -ksp_view_final_residual_vec               - print true linear system residual vector at the end of the solution process;
                                               `-ksp_view_final_residual` must be used first to enable this option
1031: . -ksp_error_if_not_converged                - stop the program as soon as an error is detected in a `KSPSolve()`
1032: . -ksp_view_pre                              - print the ksp data structure before the system solution
1033: - -ksp_view                                  - print the ksp data structure at the end of the system solution

1035:   Level: beginner

1037:   Notes:
1038:   See `KSPSetFromOptions()` for additional options database keys that affect `KSPSolve()`

1040:   If one uses `KSPSetDM()` then `x` or `b` need not be passed. Use `KSPGetSolution()` to access the solution in this case.

1042:   The operator is specified with `KSPSetOperators()`.

1044:   `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or if constructing the preconditioner failed.
1045:   Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or function `KSPSetErrorIfNotConverged()`
1046:   will cause `KSPSolve()` to error as soon as an error occurs in the linear solver.  In inner `KSPSolve()` `KSP_DIVERGED_ITS` is not treated as an error because when using nested solvers
1047:   it may be fine that inner solvers in the preconditioner do not converge during the solution process.

1049:   The number of iterations can be obtained from `KSPGetIterationNumber()`.

1051:   If you provide a matrix that has a `MatSetNullSpace()` and `MatSetTransposeNullSpace()` this will use that information to solve singular systems
1052:   in the least squares sense with a norm minimizing solution.

1054:   $A x = b $  where $b = b_p + b_t$ where $b_t$ is not in the range of $A$ (and hence by the fundamental theorem of linear algebra is in the nullspace(A'), see `MatSetNullSpace()`).

1056:   `KSP` first removes $b_t$ producing the linear system $A x = b_p$ (which has multiple solutions) and solves this to find the $\|x\|$ minimizing solution (and hence
  it finds the solution $x$ orthogonal to the nullspace(A)). The algorithm is simply: in each iteration of the Krylov method we remove the nullspace(A) from the search
1058:   direction thus the solution which is a linear combination of the search directions has no component in the nullspace(A).

1060:   We recommend always using `KSPGMRES` for such singular systems.
1061:   If $ nullspace(A) = nullspace(A^T)$ (note symmetric matrices always satisfy this property) then both left and right preconditioning will work
1062:   If $nullspace(A) \neq nullspace(A^T)$ then left preconditioning will work but right preconditioning may not work (or it may).

1064:   Developer Notes:
1065:   The reason we cannot always solve  $nullspace(A) \neq nullspace(A^T)$ systems with right preconditioning is because we need to remove at each iteration
1066:   $ nullspace(AB) $ from the search direction. While we know the $nullspace(A)$, $nullspace(AB)$ equals $B^{-1}$ times $nullspace(A)$ but except for trivial preconditioners
1067:   such as diagonal scaling we cannot apply the inverse of the preconditioner to a vector and thus cannot compute $nullspace(AB)$.

1069:   If using a direct method (e.g., via the `KSP` solver
  `KSPPREONLY` and a preconditioner such as `PCLU` or `PCCHOLESKY`) then usually one iteration of the `KSP` method will be needed for convergence.

1072:   To solve a linear system with the transpose of the matrix use `KSPSolveTranspose()`.

1074:   Understanding Convergence\:
1075:   The manual pages `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1076:   `KSPComputeEigenvaluesExplicitly()` provide information on additional
1077:   options to monitor convergence and print eigenvalue information.

1079: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1080:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1081:           `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1082: @*/
1083: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1084: {
1085:   PetscBool isPCMPI;

1087:   PetscFunctionBegin;
1091:   ksp->transpose_solve = PETSC_FALSE;
1092:   PetscCall(KSPSolve_Private(ksp, b, x));
1093:   PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
1094:   if (PCMPIServerActive && isPCMPI) {
1095:     KSP subksp;

1097:     PetscCall(PCMPIGetKSP(ksp->pc, &subksp));
1098:     ksp->its    = subksp->its;
1099:     ksp->reason = subksp->reason;
1100:   }
1101:   PetscFunctionReturn(PETSC_SUCCESS);
1102: }

/*
  KSPUseExplicitTranspose_Private - Replaces the operators of the KSP with explicitly assembled
  transposes so that a transpose system can be solved as a regular (forward) solve.

  The transposed matrices are cached in ksp->transpose and refreshed in place
  (MAT_REUSE_MATRIX) on subsequent calls, as tracked by ksp->transpose.reuse_transpose.
*/
static PetscErrorCode KSPUseExplicitTranspose_Private(KSP ksp)
{
  Mat J, Jpre;

  PetscFunctionBegin;
  PetscCall(KSPGetOperators(ksp, &J, &Jpre));
  if (!ksp->transpose.reuse_transpose) {
    /* first call: create the transposed operator (and preconditioning matrix, if distinct) */
    PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
    if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
    ksp->transpose.reuse_transpose = PETSC_TRUE;
  } else {
    /* later calls: recompute into the cached transposes without reallocating */
    PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
    if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
  }
  if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
    /* single operator case: share AT as the preconditioning matrix (extra reference so
       both slots can be destroyed independently) */
    PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
    ksp->transpose.BT = ksp->transpose.AT;
  }
  PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1126: /*@
1127:   KSPSolveTranspose - Solves a linear system with the transpose of the matrix associated with the `KSP` object, $A^T x = b$.

1129:   Collective

1131:   Input Parameters:
1132: + ksp - iterative solver obtained from `KSPCreate()`
1133: . b   - right-hand side vector
1134: - x   - solution vector

1136:   Level: developer

1138:   Note:
  For complex numbers, this solves the non-Hermitian transpose system.

1141:   Developer Note:
1142:   We need to implement a `KSPSolveHermitianTranspose()`

1144: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1145:           `KSPSolve()`, `KSP`, `KSPSetOperators()`
1146: @*/
1147: PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
1148: {
1149:   PetscFunctionBegin;
1153:   if (ksp->transpose.use_explicittranspose) {
1154:     Mat J, Jpre;
1155:     PetscCall(KSPGetOperators(ksp, &J, &Jpre));
1156:     if (!ksp->transpose.reuse_transpose) {
1157:       PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
1158:       if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
1159:       ksp->transpose.reuse_transpose = PETSC_TRUE;
1160:     } else {
1161:       PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
1162:       if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
1163:     }
1164:     if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1165:       PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
1166:       ksp->transpose.BT = ksp->transpose.AT;
1167:     }
1168:     PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
1169:   } else {
1170:     ksp->transpose_solve = PETSC_TRUE;
1171:   }
1172:   PetscCall(KSPSolve_Private(ksp, b, x));
1173:   PetscFunctionReturn(PETSC_SUCCESS);
1174: }

1176: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1177: {
1178:   Mat        A, R;
1179:   PetscReal *norms;
1180:   PetscInt   i, N;
1181:   PetscBool  flg;

1183:   PetscFunctionBegin;
1184:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg));
1185:   if (flg) {
1186:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
1187:     if (!ksp->transpose_solve) PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1188:     else PetscCall(MatTransposeMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1189:     PetscCall(MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN));
1190:     PetscCall(MatGetSize(R, NULL, &N));
1191:     PetscCall(PetscMalloc1(N, &norms));
1192:     PetscCall(MatGetColumnNorms(R, NORM_2, norms));
1193:     PetscCall(MatDestroy(&R));
1194:     for (i = 0; i < N; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : "                          ", shift + i, (double)norms[i]));
1195:     PetscCall(PetscFree(norms));
1196:   }
1197:   PetscFunctionReturn(PETSC_SUCCESS);
1198: }

/*
  KSPMatSolve_Private - Common driver for KSPMatSolve()/KSPMatSolveTranspose(): solves for a
  block of right-hand sides B (dense), placing the solutions in X (dense).

  If the KSP implementation supplies a matsolve method, the columns are solved in batches of at
  most the KSPSetMatSolveBatchSize() width; otherwise the columns are solved one at a time with
  KSPSolve_Private().
*/
static PetscErrorCode KSPMatSolve_Private(KSP ksp, Mat B, Mat X)
{
  Mat       A, P, vB, vX;
  Vec       cb, cx;
  PetscInt  n1, N1, n2, N2, Bbn = PETSC_DECIDE;
  PetscBool match;

  PetscFunctionBegin;
  PetscCheckSameComm(ksp, 1, B, 2);
  PetscCheckSameComm(ksp, 1, X, 3);
  PetscCheckSameType(B, 2, X, 3);
  PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  MatCheckPreallocated(X, 3);
  if (!X->assembled) {
    /* assemble X in place so its columns can be written below */
    PetscCall(MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY));
  }
  PetscCheck(B != X, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_IDN, "B and X must be different matrices");
  PetscCall(KSPGetOperators(ksp, &A, &P));
  /* B and X must have the same local and global number of columns, and both must be dense */
  PetscCall(MatGetLocalSize(B, NULL, &n2));
  PetscCall(MatGetLocalSize(X, NULL, &n1));
  PetscCall(MatGetSize(B, NULL, &N2));
  PetscCall(MatGetSize(X, NULL, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of right-hand sides (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of solutions (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of right-hand sides not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of solutions not stored in a dense Mat");
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));
  if (ksp->ops->matsolve) {
    /* `level` tracks solver nesting so KSP_DIVERGED_ITS is only fatal at the outermost level */
    level++;
    if (ksp->guess_zero) PetscCall(MatZeroEntries(X));
    PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
    PetscCall(KSPGetMatSolveBatchSize(ksp, &Bbn));
    /* by default, do a single solve with all columns */
    if (Bbn == PETSC_DECIDE) Bbn = N2;
    else PetscCheck(Bbn >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "KSPMatSolve() batch size %" PetscInt_FMT " must be positive", Bbn);
    PetscCall(PetscInfo(ksp, "KSP type %s%s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, ksp->transpose_solve ? " transpose" : "", Bbn));
    /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
    if (Bbn >= N2) {
      PetscUseTypeMethod(ksp, matsolve, B, X);
      if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0));

      PetscCall(KSPConvergedReasonViewFromOptions(ksp));

      if (ksp->viewRate) {
        PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
        PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
        PetscCall(PetscViewerPopFormat(ksp->viewerRate));
      }
    } else {
      /* batched path: solve Bbn columns at a time through dense sub-matrix views
         (n2 is reused here as the starting column of the current batch) */
      for (n2 = 0; n2 < N2; n2 += Bbn) {
        PetscCall(MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB));
        PetscCall(MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX));
        PetscUseTypeMethod(ksp, matsolve, vB, vX);
        if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2));

        PetscCall(KSPConvergedReasonViewFromOptions(ksp));

        if (ksp->viewRate) {
          PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
          PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
          PetscCall(PetscViewerPopFormat(ksp->viewerRate));
        }
        PetscCall(MatDenseRestoreSubMatrix(B, &vB));
        PetscCall(MatDenseRestoreSubMatrix(X, &vX));
      }
    }
    /* optional -ksp_view_* diagnostics */
    if (ksp->viewMat) PetscCall(ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat));
    if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat));
    if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs));
    if (ksp->viewSol) PetscCall(ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol));
    if (ksp->view) PetscCall(KSPView(ksp, ksp->viewer));
    PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
    /* same error-on-divergence policy as KSPSolve_Private() */
    if (ksp->errorifnotconverged && ksp->reason < 0 && (level == 1 || ksp->reason != KSP_DIVERGED_ITS)) {
      PCFailedReason reason;

      PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
      PetscCall(PCGetFailedReason(ksp->pc, &reason));
      SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
    }
    level--;
  } else {
    /* no native multi-RHS support: fall back to one KSPSolve per column */
    PetscCall(PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name));
    for (n2 = 0; n2 < N2; ++n2) {
      PetscCall(MatDenseGetColumnVecRead(B, n2, &cb));
      PetscCall(MatDenseGetColumnVecWrite(X, n2, &cx));
      PetscCall(KSPSolve_Private(ksp, cb, cx));
      PetscCall(MatDenseRestoreColumnVecWrite(X, n2, &cx));
      PetscCall(MatDenseRestoreColumnVecRead(B, n2, &cb));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

1300: /*@
1301:   KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a `MATDENSE`.

1303:   Input Parameters:
1304: + ksp - iterative solver
1305: - B   - block of right-hand sides

1307:   Output Parameter:
1308: . X - block of solutions

1310:   Level: intermediate

1312:   Notes:
1313:   This is a stripped-down version of `KSPSolve()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.

1315:   Unlike with `KSPSolve()`, `B` and `X` must be different matrices.

1317: .seealso: [](ch_ksp), `KSPSolve()`, `MatMatSolve()`, `KSPMatSolveTranspose()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`, `KSPSetMatSolveBatchSize()`
1318: @*/
PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
{
  PetscFunctionBegin;
  /* forward (non-transpose) multi right-hand side solve via the shared driver */
  ksp->transpose_solve = PETSC_FALSE;
  PetscCall(KSPMatSolve_Private(ksp, B, X));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1327: /*@
1328:   KSPMatSolveTranspose - Solves a linear system with the transposed matrix with multiple right-hand sides stored as a `MATDENSE`.

1330:   Input Parameters:
1331: + ksp - iterative solver
1332: - B   - block of right-hand sides

1334:   Output Parameter:
1335: . X - block of solutions

1337:   Level: intermediate

1339:   Notes:
1340:   This is a stripped-down version of `KSPSolveTranspose()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.

1342:   Unlike `KSPSolveTranspose()`,
1343:   `B` and `X` must be different matrices and the transposed matrix cannot be assembled explicitly for the user.

1345: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `MatMatTransposeSolve()`, `KSPMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1346: @*/
1347: PetscErrorCode KSPMatSolveTranspose(KSP ksp, Mat B, Mat X)
1348: {
1349:   PetscFunctionBegin;
1350:   if (ksp->transpose.use_explicittranspose) PetscCall(KSPUseExplicitTranspose_Private(ksp));
1351:   else ksp->transpose_solve = PETSC_TRUE;
1352:   PetscCall(KSPMatSolve_Private(ksp, B, X));
1353:   PetscFunctionReturn(PETSC_SUCCESS);
1354: }

1356: /*@
1357:   KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1359:   Logically Collective

1361:   Input Parameters:
1362: + ksp - the `KSP` iterative solver
1363: - bs  - batch size

1365:   Level: advanced

1367:   Note:
1368:   Using a larger block size can improve the efficiency of the solver.

1370: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matproduct_batch_size`
1371: @*/
1372: PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
1373: {
1374:   PetscFunctionBegin;
1377:   ksp->nmax = bs;
1378:   PetscFunctionReturn(PETSC_SUCCESS);
1379: }

1381: /*@
1382:   KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1384:   Input Parameter:
1385: . ksp - iterative solver context

1387:   Output Parameter:
1388: . bs - batch size

1390:   Level: advanced

1392: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matproduct_batch_size`
1393: @*/
1394: PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
1395: {
1396:   PetscFunctionBegin;
1398:   PetscAssertPointer(bs, 2);
1399:   *bs = ksp->nmax;
1400:   PetscFunctionReturn(PETSC_SUCCESS);
1401: }

1403: /*@
1404:   KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`

1406:   Collective

1408:   Input Parameter:
1409: . ksp - the `KSP` iterative solver context obtained from `KSPCreate()`

1411:   Level: beginner

1413: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1414: @*/
1415: PetscErrorCode KSPResetViewers(KSP ksp)
1416: {
1417:   PetscFunctionBegin;
1419:   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1420:   PetscCall(PetscViewerDestroy(&ksp->viewer));
1421:   PetscCall(PetscViewerDestroy(&ksp->viewerPre));
1422:   PetscCall(PetscViewerDestroy(&ksp->viewerRate));
1423:   PetscCall(PetscViewerDestroy(&ksp->viewerMat));
1424:   PetscCall(PetscViewerDestroy(&ksp->viewerPMat));
1425:   PetscCall(PetscViewerDestroy(&ksp->viewerRhs));
1426:   PetscCall(PetscViewerDestroy(&ksp->viewerSol));
1427:   PetscCall(PetscViewerDestroy(&ksp->viewerMatExp));
1428:   PetscCall(PetscViewerDestroy(&ksp->viewerEV));
1429:   PetscCall(PetscViewerDestroy(&ksp->viewerSV));
1430:   PetscCall(PetscViewerDestroy(&ksp->viewerEVExp));
1431:   PetscCall(PetscViewerDestroy(&ksp->viewerFinalRes));
1432:   PetscCall(PetscViewerDestroy(&ksp->viewerPOpExp));
1433:   PetscCall(PetscViewerDestroy(&ksp->viewerDScale));
1434:   ksp->view         = PETSC_FALSE;
1435:   ksp->viewPre      = PETSC_FALSE;
1436:   ksp->viewMat      = PETSC_FALSE;
1437:   ksp->viewPMat     = PETSC_FALSE;
1438:   ksp->viewRhs      = PETSC_FALSE;
1439:   ksp->viewSol      = PETSC_FALSE;
1440:   ksp->viewMatExp   = PETSC_FALSE;
1441:   ksp->viewEV       = PETSC_FALSE;
1442:   ksp->viewSV       = PETSC_FALSE;
1443:   ksp->viewEVExp    = PETSC_FALSE;
1444:   ksp->viewFinalRes = PETSC_FALSE;
1445:   ksp->viewPOpExp   = PETSC_FALSE;
1446:   ksp->viewDScale   = PETSC_FALSE;
1447:   PetscFunctionReturn(PETSC_SUCCESS);
1448: }

1450: /*@
1451:   KSPReset - Removes any allocated `Vec` and `Mat` from the `KSP` data structures.

1453:   Collective

1455:   Input Parameter:
1456: . ksp - iterative solver obtained from `KSPCreate()`

1458:   Level: intermediate

1460:   Notes:
1461:   Any options set in the `KSP`, including those set with `KSPSetFromOptions()` remain.

1463:   Call `KSPReset()` only before you call `KSPSetOperators()` with a different sized matrix than the previous matrix used with the `KSP`.

1465: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1466: @*/
1467: PetscErrorCode KSPReset(KSP ksp)
1468: {
1469:   PetscFunctionBegin;
1471:   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1472:   PetscTryTypeMethod(ksp, reset);
1473:   if (ksp->pc) PetscCall(PCReset(ksp->pc));
1474:   if (ksp->guess) {
1475:     KSPGuess guess = ksp->guess;
1476:     PetscTryTypeMethod(guess, reset);
1477:   }
1478:   PetscCall(VecDestroyVecs(ksp->nwork, &ksp->work));
1479:   PetscCall(VecDestroy(&ksp->vec_rhs));
1480:   PetscCall(VecDestroy(&ksp->vec_sol));
1481:   PetscCall(VecDestroy(&ksp->diagonal));
1482:   PetscCall(VecDestroy(&ksp->truediagonal));

1484:   ksp->setupstage = KSP_SETUP_NEW;
1485:   ksp->nmax       = PETSC_DECIDE;
1486:   PetscFunctionReturn(PETSC_SUCCESS);
1487: }

/*@
  KSPDestroy - Destroys a `KSP` context.

  Collective

  Input Parameter:
. ksp - iterative solver obtained from `KSPCreate()`

  Level: beginner

.seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
@*/
PetscErrorCode KSPDestroy(KSP *ksp)
{
  PC pc;

  PetscFunctionBegin;
  if (!*ksp) PetscFunctionReturn(PETSC_SUCCESS);
  /* if other references remain, just drop this one and clear the caller's pointer */
  if (--((PetscObject)*ksp)->refct > 0) {
    *ksp = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectSAWsViewOff((PetscObject)*ksp));

  /*
   Avoid a cascading call to PCReset(ksp->pc) from the following call:
   PCReset() shouldn't be called from KSPDestroy() as it is unprotected by pc's
   refcount (and may be shared, e.g., by other ksps).
   */
  pc         = (*ksp)->pc;
  (*ksp)->pc = NULL;
  PetscCall(KSPReset(*ksp));
  PetscCall(KSPResetViewers(*ksp));
  (*ksp)->pc = pc; /* restore the stashed PC so it is destroyed (once) below */
  PetscTryTypeMethod(*ksp, destroy);

  /* explicit-transpose matrices are owned by the KSP and freed here */
  if ((*ksp)->transpose.use_explicittranspose) {
    PetscCall(MatDestroy(&(*ksp)->transpose.AT));
    PetscCall(MatDestroy(&(*ksp)->transpose.BT));
    (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
  }

  PetscCall(KSPGuessDestroy(&(*ksp)->guess));
  PetscCall(DMDestroy(&(*ksp)->dm));
  PetscCall(PCDestroy(&(*ksp)->pc));
  PetscCall(PetscFree((*ksp)->res_hist_alloc));
  PetscCall(PetscFree((*ksp)->err_hist_alloc));
  if ((*ksp)->convergeddestroy) PetscCall((*(*ksp)->convergeddestroy)(&(*ksp)->cnvP));
  PetscCall(KSPMonitorCancel(*ksp));
  PetscCall(KSPConvergedReasonViewCancel(*ksp));
  PetscCall(PetscHeaderDestroy(ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1545: /*@
1546:   KSPSetPCSide - Sets the preconditioning side.

1548:   Logically Collective

1550:   Input Parameter:
1551: . ksp - iterative solver obtained from `KSPCreate()`

1553:   Output Parameter:
1554: . side - the preconditioning side, where side is one of
1555: .vb
1556:   PC_LEFT      - left preconditioning (default)
1557:   PC_RIGHT     - right preconditioning
1558:   PC_SYMMETRIC - symmetric preconditioning
1559: .ve

1561:   Options Database Key:
1562: . -ksp_pc_side (right|left|symmetric) - `KSP` preconditioner side

1564:   Level: intermediate

1566:   Notes:
1567:   Left preconditioning is used by default for most Krylov methods except `KSPFGMRES` which only supports right preconditioning.

1569:   For methods changing the side of the preconditioner changes the norm type that is used, see `KSPSetNormType()`.

1571:   Symmetric preconditioning is currently available only for the `KSPQCG` method. However, note that
1572:   symmetric preconditioning can be emulated by using either right or left
1573:   preconditioning, modifying the application of the matrix (with a custom `Mat` argument to `KSPSetOperators()`,
1574:   and using a pre 'KSPSetPreSolve()` or post processing `KSPSetPostSolve()` step).

1576:   Setting the `PCSide` often affects the default norm type. See `KSPSetNormType()` for details.

1578: .seealso: [](ch_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`, `KSPSetPreSolve()`, `KSPSetPostSolve()`
1579: @*/
1580: PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
1581: {
1582:   PetscFunctionBegin;
1585:   ksp->pc_side = ksp->pc_side_set = side;
1586:   PetscFunctionReturn(PETSC_SUCCESS);
1587: }

1589: /*@
1590:   KSPGetPCSide - Gets the preconditioning side.

1592:   Not Collective

1594:   Input Parameter:
1595: . ksp - iterative solver obtained from `KSPCreate()`

1597:   Output Parameter:
1598: . side - the preconditioning side, where side is one of
1599: .vb
1600:   PC_LEFT      - left preconditioning (default)
1601:   PC_RIGHT     - right preconditioning
1602:   PC_SYMMETRIC - symmetric preconditioning
1603: .ve

1605:   Level: intermediate

1607: .seealso: [](ch_ksp), `KSPSetPCSide()`, `KSP`
1608: @*/
1609: PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
1610: {
1611:   PetscFunctionBegin;
1613:   PetscAssertPointer(side, 2);
1614:   PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
1615:   *side = ksp->pc_side;
1616:   PetscFunctionReturn(PETSC_SUCCESS);
1617: }

1619: /*@
1620:   KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1621:   iteration tolerances used by the default `KSP` convergence tests.

1623:   Not Collective

1625:   Input Parameter:
1626: . ksp - the Krylov subspace context

1628:   Output Parameters:
1629: + rtol   - the relative convergence tolerance
1630: . abstol - the absolute convergence tolerance
1631: . dtol   - the divergence tolerance
1632: - maxits - maximum number of iterations

1634:   Level: intermediate

1636:   Note:
1637:   The user can specify `NULL` for any parameter that is not needed.

1639: .seealso: [](ch_ksp), `KSPSetTolerances()`, `KSP`, `KSPSetMinimumIterations()`, `KSPGetMinimumIterations()`
1640: @*/
1641: PetscErrorCode KSPGetTolerances(KSP ksp, PeOp PetscReal *rtol, PeOp PetscReal *abstol, PeOp PetscReal *dtol, PeOp PetscInt *maxits)
1642: {
1643:   PetscFunctionBegin;
1645:   if (abstol) *abstol = ksp->abstol;
1646:   if (rtol) *rtol = ksp->rtol;
1647:   if (dtol) *dtol = ksp->divtol;
1648:   if (maxits) *maxits = ksp->max_it;
1649:   PetscFunctionReturn(PETSC_SUCCESS);
1650: }

1652: /*@
1653:   KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1654:   iteration tolerances used by the default `KSP` convergence testers.

1656:   Logically Collective

1658:   Input Parameters:
1659: + ksp    - the Krylov subspace context
1660: . rtol   - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1661: . abstol - the absolute convergence tolerance   absolute size of the (possibly preconditioned) residual norm
1662: . dtol   - the divergence tolerance,   amount (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1663: - maxits - maximum number of iterations to use

1665:   Options Database Keys:
1666: + -ksp_atol abstol   - Sets `abstol`
1667: . -ksp_rtol rtol     - Sets `rtol`
1668: . -ksp_divtol dtol   - Sets `dtol`
1669: - -ksp_max_it maxits - Sets `maxits`

1671:   Level: intermediate

1673:   Notes:
1674:   The tolerances are with respect to a norm of the residual of the equation $ \| b - A x^n \|$, they do not directly use the error of the equation.
1675:   The norm used depends on the `KSPNormType` that has been set with `KSPSetNormType()`, the default depends on the `KSPType` used.

1677:   All parameters must be non-negative.

1679:   Use `PETSC_CURRENT` to retain the current value of any of the parameters. The deprecated `PETSC_DEFAULT` also retains the current value (though the name is confusing).

1681:   Use `PETSC_DETERMINE` to use the default value for the given `KSP`. The default value is the value when the object's type is set.

1683:   For `dtol` and `maxits` use `PETSC_UNLIMITED` to indicate there is no upper bound on these values

1685:   See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test.  See also `KSPSetConvergenceTest()`
1686:   for setting user-defined stopping criteria.

1688:   Fortran Note:
1689:   Use `PETSC_CURRENT_INTEGER`, `PETSC_CURRENT_REAL`, `PETSC_DETERMINE_INTEGER`, or `PETSC_DETERMINE_REAL`

1691: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetMinimumIterations()`
1692: @*/
1693: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1694: {
1695:   PetscFunctionBegin;

1702:   if (rtol == (PetscReal)PETSC_DETERMINE) {
1703:     ksp->rtol = ksp->default_rtol;
1704:   } else if (rtol != (PetscReal)PETSC_CURRENT) {
1705:     PetscCheck(rtol >= 0.0 && rtol < 1.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Relative tolerance %g must be non-negative and less than 1.0", (double)rtol);
1706:     ksp->rtol = rtol;
1707:   }
1708:   if (abstol == (PetscReal)PETSC_DETERMINE) {
1709:     ksp->abstol = ksp->default_abstol;
1710:   } else if (abstol != (PetscReal)PETSC_CURRENT) {
1711:     PetscCheck(abstol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Absolute tolerance %g must be non-negative", (double)abstol);
1712:     ksp->abstol = abstol;
1713:   }
1714:   if (dtol == (PetscReal)PETSC_DETERMINE) {
1715:     ksp->divtol = ksp->default_divtol;
1716:   } else if (dtol == (PetscReal)PETSC_UNLIMITED) {
1717:     ksp->divtol = PETSC_MAX_REAL;
1718:   } else if (dtol != (PetscReal)PETSC_CURRENT) {
1719:     PetscCheck(dtol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Divergence tolerance %g must be larger than 1.0", (double)dtol);
1720:     ksp->divtol = dtol;
1721:   }
1722:   if (maxits == PETSC_DETERMINE) {
1723:     ksp->max_it = ksp->default_max_it;
1724:   } else if (maxits == PETSC_UNLIMITED) {
1725:     ksp->max_it = PETSC_INT_MAX;
1726:   } else if (maxits != PETSC_CURRENT) {
1727:     PetscCheck(maxits >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Maximum number of iterations %" PetscInt_FMT " must be non-negative", maxits);
1728:     ksp->max_it = maxits;
1729:   }
1730:   PetscFunctionReturn(PETSC_SUCCESS);
1731: }

1733: /*@
1734:   KSPSetMinimumIterations - Sets the minimum number of iterations to use, regardless of the tolerances

1736:   Logically Collective

1738:   Input Parameters:
1739: + ksp   - the Krylov subspace context
1740: - minit - minimum number of iterations to use

1742:   Options Database Key:
1743: . -ksp_min_it minit - Sets `minit`

1745:   Level: intermediate

1747:   Notes:
1748:   Use `KSPSetTolerances()` to set a variety of other tolerances

1750:   See `KSPConvergedDefault()` for details on how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
1751:   for setting user-defined stopping criteria.

1753:   If the initial residual norm is small enough solvers may return immediately without computing any improvement to the solution. Using this routine
1754:   prevents that which usually ensures the solution is changed (often minimally) from the previous solution. This option may be used with ODE integrators
1755:   to ensure the integrator does not fall into a false steady-state solution of the ODE.

1757: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPGetMinimumIterations()`
1758: @*/
1759: PetscErrorCode KSPSetMinimumIterations(KSP ksp, PetscInt minit)
1760: {
1761:   PetscFunctionBegin;

1765:   PetscCheck(minit >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Minimum number of iterations %" PetscInt_FMT " must be non-negative", minit);
1766:   ksp->min_it = minit;
1767:   PetscFunctionReturn(PETSC_SUCCESS);
1768: }

1770: /*@
1771:   KSPGetMinimumIterations - Gets the minimum number of iterations to use, regardless of the tolerances, that was set with `KSPSetMinimumIterations()` or `-ksp_min_it`

1773:   Not Collective

1775:   Input Parameter:
1776: . ksp - the Krylov subspace context

1778:   Output Parameter:
1779: . minit - minimum number of iterations to use

1781:   Level: intermediate

1783: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPSetMinimumIterations()`
1784: @*/
1785: PetscErrorCode KSPGetMinimumIterations(KSP ksp, PetscInt *minit)
1786: {
1787:   PetscFunctionBegin;
1789:   PetscAssertPointer(minit, 2);

1791:   *minit = ksp->min_it;
1792:   PetscFunctionReturn(PETSC_SUCCESS);
1793: }

1795: /*@
1796:   KSPSetInitialGuessNonzero - Tells the iterative solver that the
1797:   initial guess is nonzero; otherwise `KSP` assumes the initial guess
1798:   is to be zero (and thus zeros it out before solving).

1800:   Logically Collective

1802:   Input Parameters:
1803: + ksp - iterative solver obtained from `KSPCreate()`
1804: - flg - ``PETSC_TRUE`` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero

1806:   Options Database Key:
1807: . -ksp_initial_guess_nonzero (true|false) - use nonzero initial guess

1809:   Level: beginner

1811: .seealso: [](ch_ksp), `KSPGetInitialGuessNonzero()`, `KSPGuessSetType()`, `KSPGuessType`, `KSP`
1812: @*/
1813: PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
1814: {
1815:   PetscFunctionBegin;
1818:   ksp->guess_zero = (PetscBool)!flg;
1819:   PetscFunctionReturn(PETSC_SUCCESS);
1820: }

1822: /*@
1823:   KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1824:   a zero initial guess.

1826:   Not Collective

1828:   Input Parameter:
1829: . ksp - iterative solver obtained from `KSPCreate()`

1831:   Output Parameter:
1832: . flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`

1834:   Level: intermediate

1836: .seealso: [](ch_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1837: @*/
1838: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1839: {
1840:   PetscFunctionBegin;
1842:   PetscAssertPointer(flag, 2);
1843:   if (ksp->guess_zero) *flag = PETSC_FALSE;
1844:   else *flag = PETSC_TRUE;
1845:   PetscFunctionReturn(PETSC_SUCCESS);
1846: }

1848: /*@
1849:   KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error if the solver has not converged as soon as the error is detected.

1851:   Logically Collective

1853:   Input Parameters:
1854: + ksp - iterative solver obtained from `KSPCreate()`
1855: - flg - `PETSC_TRUE` indicates you want the error generated

1857:   Options Database Key:
1858: . -ksp_error_if_not_converged (true|false) - generate an error and stop the program

1860:   Level: intermediate

1862:   Notes:
1863:   Normally PETSc continues if a linear solver fails to converge, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
1864:   to determine if it has converged. This functionality is mostly helpful while running in a debugger (`-start_in_debugger`) to determine exactly where
1865:   the failure occurs and why.

1867:   A `KSP_DIVERGED_ITS` will not generate an error in a `KSPSolve()` inside a nested linear solver

1869: .seealso: [](ch_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
1870: @*/
1871: PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
1872: {
1873:   PC pc;

1875:   PetscFunctionBegin;
1878:   ksp->errorifnotconverged = flg;
1879:   PetscCall(KSPGetPC(ksp, &pc));
1880:   PetscCall(PCSetErrorIfFailure(pc, flg));
1881:   PetscFunctionReturn(PETSC_SUCCESS);
1882: }

1884: /*@
1885:   KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?

1887:   Not Collective

1889:   Input Parameter:
1890: . ksp - iterative solver obtained from KSPCreate()

1892:   Output Parameter:
1893: . flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`

1895:   Level: intermediate

1897: .seealso: [](ch_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
1898: @*/
1899: PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
1900: {
1901:   PetscFunctionBegin;
1903:   PetscAssertPointer(flag, 2);
1904:   *flag = ksp->errorifnotconverged;
1905:   PetscFunctionReturn(PETSC_SUCCESS);
1906: }

1908: /*@
1909:   KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` on the right hand side vector to compute the initial guess (The Knoll trick)

1911:   Logically Collective

1913:   Input Parameters:
1914: + ksp - iterative solver obtained from `KSPCreate()`
1915: - flg - `PETSC_TRUE` or `PETSC_FALSE`

1917:   Level: advanced

1919:   Developer Note:
1920:   The Knoll trick is not currently implemented using the `KSPGuess` class which provides a variety of ways of computing
1921:   an initial guess based on previous solves.

1923: .seealso: [](ch_ksp), `KSPGetInitialGuessKnoll()`, `KSPGuess`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1924: @*/
1925: PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
1926: {
1927:   PetscFunctionBegin;
1930:   ksp->guess_knoll = flg;
1931:   PetscFunctionReturn(PETSC_SUCCESS);
1932: }

1934: /*@
1935:   KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using PCApply(pc,b,...) to compute
1936:   the initial guess

1938:   Not Collective

1940:   Input Parameter:
1941: . ksp - iterative solver obtained from `KSPCreate()`

1943:   Output Parameter:
1944: . flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`

1946:   Level: advanced

1948: .seealso: [](ch_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1949: @*/
1950: PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
1951: {
1952:   PetscFunctionBegin;
1954:   PetscAssertPointer(flag, 2);
1955:   *flag = ksp->guess_knoll;
1956:   PetscFunctionReturn(PETSC_SUCCESS);
1957: }

1959: /*@
1960:   KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
1961:   values will be calculated via a Lanczos or Arnoldi process as the linear
1962:   system is solved.

1964:   Not Collective

1966:   Input Parameter:
1967: . ksp - iterative solver obtained from `KSPCreate()`

1969:   Output Parameter:
1970: . flg - `PETSC_TRUE` or `PETSC_FALSE`

1972:   Options Database Key:
1973: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1975:   Level: advanced

1977:   Notes:
1978:   This option is not valid for `KSPType`.

1980:   Many users may just want to use the monitoring routine
1981:   `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
1982:   to print the singular values at each iteration of the linear solve.

1984: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1985: @*/
1986: PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
1987: {
1988:   PetscFunctionBegin;
1990:   PetscAssertPointer(flg, 2);
1991:   *flg = ksp->calc_sings;
1992:   PetscFunctionReturn(PETSC_SUCCESS);
1993: }

1995: /*@
1996:   KSPSetComputeSingularValues - Sets a flag so that the extreme singular
1997:   values will be calculated via a Lanczos or Arnoldi process as the linear
1998:   system is solved.

2000:   Logically Collective

2002:   Input Parameters:
2003: + ksp - iterative solver obtained from `KSPCreate()`
2004: - flg - `PETSC_TRUE` or `PETSC_FALSE`

2006:   Options Database Key:
2007: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

2009:   Level: advanced

2011:   Notes:
2012:   This option is not valid for all iterative methods.

2014:   Many users may just want to use the monitoring routine
2015:   `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
2016:   to print the singular values at each iteration of the linear solve.

2018:   Consider using the excellent package SLEPc for accurate efficient computations of singular or eigenvalues.

2020: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`, `KSPSetComputeRitz()`
2021: @*/
2022: PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
2023: {
2024:   PetscFunctionBegin;
2027:   ksp->calc_sings = flg;
2028:   PetscFunctionReturn(PETSC_SUCCESS);
2029: }

2031: /*@
2032:   KSPGetComputeEigenvalues - Gets the flag indicating that the extreme eigenvalues
2033:   values will be calculated via a Lanczos or Arnoldi process as the linear
2034:   system is solved.

2036:   Not Collective

2038:   Input Parameter:
2039: . ksp - iterative solver obtained from `KSPCreate()`

2041:   Output Parameter:
2042: . flg - `PETSC_TRUE` or `PETSC_FALSE`

2044:   Level: advanced

2046:   Note:
2047:   Currently this option is not valid for all iterative methods.

2049: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
2050: @*/
2051: PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
2052: {
2053:   PetscFunctionBegin;
2055:   PetscAssertPointer(flg, 2);
2056:   *flg = ksp->calc_sings;
2057:   PetscFunctionReturn(PETSC_SUCCESS);
2058: }

2060: /*@
2061:   KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
2062:   values will be calculated via a Lanczos or Arnoldi process as the linear
2063:   system is solved.

2065:   Logically Collective

2067:   Input Parameters:
2068: + ksp - iterative solver obtained from `KSPCreate()`
2069: - flg - `PETSC_TRUE` or `PETSC_FALSE`

2071:   Level: advanced

2073:   Note:
2074:   Currently this option is not valid for all iterative methods.

2076:   Consider using the excellent package SLEPc for accurate efficient computations of singular or eigenvalues.

2078: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
2079: @*/
2080: PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
2081: {
2082:   PetscFunctionBegin;
2085:   ksp->calc_sings = flg;
2086:   PetscFunctionReturn(PETSC_SUCCESS);
2087: }

2089: /*@
2090:   KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
2091:   will be calculated via a Lanczos or Arnoldi process as the linear
2092:   system is solved.

2094:   Logically Collective

2096:   Input Parameters:
2097: + ksp - iterative solver obtained from `KSPCreate()`
2098: - flg - `PETSC_TRUE` or `PETSC_FALSE`

2100:   Level: advanced

2102:   Note:
2103:   Currently this option is only valid for the `KSPGMRES` method.

2105: .seealso: [](ch_ksp), `KSPComputeRitz()`, `KSP`, `KSPComputeEigenvalues()`, `KSPComputeExtremeSingularValues()`
2106: @*/
2107: PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
2108: {
2109:   PetscFunctionBegin;
2112:   ksp->calc_ritz = flg;
2113:   PetscFunctionReturn(PETSC_SUCCESS);
2114: }

2116: /*@
2117:   KSPGetRhs - Gets the right-hand-side vector for the linear system to
2118:   be solved.

2120:   Not Collective

2122:   Input Parameter:
2123: . ksp - iterative solver obtained from `KSPCreate()`

2125:   Output Parameter:
2126: . r - right-hand-side vector

2128:   Level: developer

2130: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
2131: @*/
2132: PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
2133: {
2134:   PetscFunctionBegin;
2136:   PetscAssertPointer(r, 2);
2137:   *r = ksp->vec_rhs;
2138:   PetscFunctionReturn(PETSC_SUCCESS);
2139: }

2141: /*@
2142:   KSPGetSolution - Gets the location of the solution for the
2143:   linear system to be solved.

2145:   Not Collective

2147:   Input Parameter:
2148: . ksp - iterative solver obtained from `KSPCreate()`

2150:   Output Parameter:
2151: . v - solution vector

2153:   Level: developer

2155:   Note:
2156:   If this is called during a `KSPSolve()` the vector's values may not represent the solution
2157:   to the linear system.

2159: .seealso: [](ch_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
2160: @*/
2161: PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
2162: {
2163:   PetscFunctionBegin;
2165:   PetscAssertPointer(v, 2);
2166:   *v = ksp->vec_sol;
2167:   PetscFunctionReturn(PETSC_SUCCESS);
2168: }

2170: /*@
2171:   KSPSetPC - Sets the preconditioner to be used to calculate the
2172:   application of the preconditioner on a vector into a `KSP`.

2174:   Collective

2176:   Input Parameters:
2177: + ksp - the `KSP` iterative solver obtained from `KSPCreate()`
2178: - pc  - the preconditioner object (if `NULL` it returns the `PC` currently held by the `KSP`)

2180:   Level: developer

2182:   Note:
2183:   This routine is almost never used since `KSP` creates its own `PC` when needed.
2184:   Use `KSPGetPC()` to retrieve the preconditioner context instead of creating a new one.

2186: .seealso: [](ch_ksp), `KSPGetPC()`, `KSP`
2187: @*/
2188: PetscErrorCode KSPSetPC(KSP ksp, PC pc)
2189: {
2190:   PetscFunctionBegin;
2192:   if (pc) {
2194:     PetscCheckSameComm(ksp, 1, pc, 2);
2195:   }
2196:   if (ksp->pc != pc && ksp->setupstage) ksp->setupstage = KSP_SETUP_NEWMATRIX;
2197:   PetscCall(PetscObjectReference((PetscObject)pc));
2198:   PetscCall(PCDestroy(&ksp->pc));
2199:   ksp->pc = pc;
2200:   PetscFunctionReturn(PETSC_SUCCESS);
2201: }

2203: PETSC_INTERN PetscErrorCode PCCreate_MPI(PC);

2205: // PetscClangLinter pragma disable: -fdoc-internal-linkage
2206: /*@C
2207:    KSPCheckPCMPI - Checks if `-mpi_linear_solver_server` is active and the `PC` should be changed to `PCMPI`

2209:    Collective, No Fortran Support

2211:    Input Parameter:
2212: .  ksp - iterative solver obtained from `KSPCreate()`

2214:    Level: developer

2216: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PCMPIServerBegin()`, `PCMPIServerEnd()`
2217: @*/
2218: PETSC_INTERN PetscErrorCode KSPCheckPCMPI(KSP ksp)
2219: {
2220:   PetscBool isPCMPI;

2222:   PetscFunctionBegin;
2224:   PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
2225:   if (PCMPIServerActive && ksp->nestlevel == 0 && !isPCMPI) {
2226:     const char *prefix;
2227:     char       *found = NULL;

2229:     PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
2230:     if (prefix) PetscCall(PetscStrstr(prefix, "mpi_linear_solver_server_", &found));
2231:     if (!found) PetscCall(KSPAppendOptionsPrefix(ksp, "mpi_linear_solver_server_"));
2232:     PetscCall(PetscInfo(NULL, "In MPI Linear Solver Server and detected (root) PC that must be changed to PCMPI\n"));
2233:     PetscCall(PCSetType(ksp->pc, PCMPI));
2234:   }
2235:   PetscFunctionReturn(PETSC_SUCCESS);
2236: }

2238: /*@
2239:   KSPGetPC - Returns a pointer to the preconditioner context with the `KSP`

2241:   Not Collective

2243:   Input Parameter:
2244: . ksp - iterative solver obtained from `KSPCreate()`

2246:   Output Parameter:
2247: . pc - preconditioner context

2249:   Level: beginner

2251:   Note:
2252:   The `PC` is created if it does not already exist.

2254:   Developer Note:
2255:   Calls `KSPCheckPCMPI()` to check if the `KSP` is effected by `-mpi_linear_solver_server`

2257: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PC`
2258: @*/
PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
{
  PetscFunctionBegin;
  PetscAssertPointer(pc, 2);
  if (!ksp->pc) {
    /* lazily create the PC on first request, inheriting the KSP's communicator,
       options database, nesting level, error-handling mode, and (if set) DM */
    PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0));
    PetscCall(PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options));
    PetscCall(PCSetKSPNestLevel(ksp->pc, ksp->nestlevel));
    PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
    if (ksp->dm) PetscCall(PCSetDM(ksp->pc, ksp->dm));
  }
  /* may switch the PC type to PCMPI when running under -mpi_linear_solver_server */
  PetscCall(KSPCheckPCMPI(ksp));
  *pc = ksp->pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2277: /*@
2278:   KSPMonitor - runs the user provided monitor routines, if they exist

2280:   Collective

2282:   Input Parameters:
2283: + ksp   - iterative solver obtained from `KSPCreate()`
2284: . it    - iteration number
2285: - rnorm - relative norm of the residual

2287:   Level: developer

2289:   Notes:
2290:   This routine is called by the `KSP` implementations.
2291:   It does not typically need to be called by the user.

2293:   For Krylov methods that do not keep a running value of the current solution (such as `KSPGMRES`) this
2294:   cannot be called after the `KSPConvergedReason` has been set but before the final solution has been computed.

2296: .seealso: [](ch_ksp), `KSPMonitorSet()`
2297: @*/
2298: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2299: {
2300:   PetscInt i, n = ksp->numbermonitors;

2302:   PetscFunctionBegin;
2303:   for (i = 0; i < n; i++) PetscCall((*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]));
2304:   PetscFunctionReturn(PETSC_SUCCESS);
2305: }

2307: /*@C
2308:   KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor, i.e. display in some way, perhaps by printing in the terminal,
2309:   the residual norm computed in a `KSPSolve()`

2311:   Logically Collective

2313:   Input Parameters:
2314: + ksp            - iterative solver obtained from `KSPCreate()`
2315: . monitor        - pointer to function (if this is `NULL`, it turns off monitoring), see `KSPMonitorFn`
2316: . ctx            - [optional] context for private data for the monitor routine (use `NULL` if no context is needed)
2317: - monitordestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for the calling sequence

2319:   Options Database Keys:
2320: + -ksp_monitor                             - sets `KSPMonitorResidual()`
2321: . -ksp_monitor hdf5:filename               - sets `KSPMonitorResidualView()` and saves residual
2322: . -ksp_monitor draw                        - sets `KSPMonitorResidualView()` and plots residual
2323: . -ksp_monitor draw::draw_lg               - sets `KSPMonitorResidualDrawLG()` and plots residual
2324: . -ksp_monitor_pause_final                 - Pauses any graphics when the solve finishes (only works for internal monitors)
2325: . -ksp_monitor_true_residual               - sets `KSPMonitorTrueResidual()`
2326: . -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2327: . -ksp_monitor_max                         - sets `KSPMonitorTrueResidualMax()`
2328: . -ksp_monitor_singular_value              - sets `KSPMonitorSingularValue()`
2329: - -ksp_monitor_cancel                      - cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but
2330:                                              does not cancel those set via the options database.

2332:   Level: beginner

2334:   Notes:
2335:   The options database option `-ksp_monitor` and related options are the easiest way to turn on `KSP` iteration monitoring

2337:   `KSPMonitorRegister()` provides a way to associate an options database key with `KSP` monitor function.

2339:   The default is to do no monitoring.  To print the residual, or preconditioned
2340:   residual if `KSPSetNormType`(ksp,`KSP_NORM_PRECONDITIONED`) was called, use
2341:   `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2342:   context.

2344:   Several different monitoring routines may be set by calling
2345:   `KSPMonitorSet()` multiple times; they will be called in the
2346:   order in which they were set.

2348:   Fortran Note:
2349:   Only a single monitor function can be set for each `KSP` object

2351: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorRegister()`, `KSPMonitorCancel()`, `KSP`, `PetscCtxDestroyFn`
2352: @*/
PetscErrorCode KSPMonitorSet(KSP ksp, KSPMonitorFn *monitor, PetscCtx ctx, PetscCtxDestroyFn *monitordestroy)
{
  PetscFunctionBegin;
  /* silently ignore a monitor identical (same function, context, and destroy routine) to one
     already registered, so repeated registration does not produce duplicate output */
  for (PetscInt i = 0; i < ksp->numbermonitors; i++) {
    PetscBool identical;

    PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)monitor, ctx, monitordestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical));
    if (identical) PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(ksp->numbermonitors < MAXKSPMONITORS, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP monitors set");
  /* append to the fixed-size parallel arrays of monitors/destroys/contexts */
  ksp->monitor[ksp->numbermonitors]          = monitor;
  ksp->monitordestroy[ksp->numbermonitors]   = monitordestroy;
  ksp->monitorcontext[ksp->numbermonitors++] = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2370: /*@
2371:   KSPMonitorCancel - Clears all monitors for a `KSP` object.

2373:   Logically Collective

2375:   Input Parameter:
2376: . ksp - iterative solver obtained from `KSPCreate()`

2378:   Options Database Key:
2379: . -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.

2381:   Level: intermediate

2383: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2384: @*/
2385: PetscErrorCode KSPMonitorCancel(KSP ksp)
2386: {
2387:   PetscInt i;

2389:   PetscFunctionBegin;
2391:   for (i = 0; i < ksp->numbermonitors; i++) {
2392:     if (ksp->monitordestroy[i]) PetscCall((*ksp->monitordestroy[i])(&ksp->monitorcontext[i]));
2393:   }
2394:   ksp->numbermonitors = 0;
2395:   PetscFunctionReturn(PETSC_SUCCESS);
2396: }

2398: /*@C
2399:   KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.

2401:   Not Collective

2403:   Input Parameter:
2404: . ksp - iterative solver obtained from `KSPCreate()`

2406:   Output Parameter:
2407: . ctx - monitoring context

2409:   Level: intermediate

2411:   Fortran Notes:
2412:   This only works when the context is a Fortran derived type or a `PetscObject`. Declare `ctx` with
2413: .vb
2414:   type(tUsertype), pointer :: ctx
2415: .ve

2417: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSP`
2418: @*/
PetscErrorCode KSPGetMonitorContext(KSP ksp, PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* returns only the FIRST monitor's context; NOTE(review): monitorcontext[0] is read even when
     no monitor has been set — presumably NULL in that case, verify against KSPCreate() */
  *(void **)ctx = ksp->monitorcontext[0];
  PetscFunctionReturn(PETSC_SUCCESS);
}

2427: /*@
2428:   KSPSetResidualHistory - Sets the array used to hold the residual history.
2429:   If set, this array will contain the residual norms computed at each
2430:   iteration of the solver.

2432:   Not Collective

2434:   Input Parameters:
2435: + ksp   - iterative solver obtained from `KSPCreate()`
2436: . a     - array to hold history
2437: . na    - size of `a`
2438: - reset - `PETSC_TRUE` indicates the history counter is reset to zero
2439:            for each new linear solve

2441:   Level: advanced

2443:   Notes:
2444:   If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2445:   If 'a' is `NULL` then space is allocated for the history. If 'na' is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a
2446:   default array of length 10,000 is allocated.

2448:   If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history

2450: .seealso: [](ch_ksp), `KSPGetResidualHistory()`, `KSP`
2451: @*/
PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
{
  PetscFunctionBegin;
  /* drop any history array previously allocated by PETSc; a user-provided array is never freed here */
  PetscCall(PetscFree(ksp->res_hist_alloc));
  if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
    /* caller supplied both the array and its length: borrow it, caller retains ownership */
    ksp->res_hist     = a;
    ksp->res_hist_max = na;
  } else {
    /* allocate the storage ourselves, of the requested or default length */
    if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
    else ksp->res_hist_max = 10000; /* like default ksp->max_it */
    PetscCall(PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc));

    ksp->res_hist = ksp->res_hist_alloc;
  }
  /* start recording from scratch */
  ksp->res_hist_len   = 0;
  ksp->res_hist_reset = reset;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2473: /*@C
2474:   KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.

2476:   Not Collective

2478:   Input Parameter:
2479: . ksp - iterative solver obtained from `KSPCreate()`

2481:   Output Parameters:
2482: + a  - pointer to array to hold history (or `NULL`)
2483: - na - number of used entries in a (or `NULL`). Note this has different meanings depending on the `reset` argument to `KSPSetResidualHistory()`

2485:   Level: advanced

2487:   Note:
2488:   This array is borrowed and should not be freed by the caller.

2490:   Can only be called after a `KSPSetResidualHistory()` otherwise `a` and `na` are set to `NULL` and zero

2492:   When `reset` was `PETSC_TRUE` since a residual is computed before the first iteration, the value of `na` is generally one more than the value
2493:   returned with `KSPGetIterationNumber()`.

2495:   Some Krylov methods may not compute the final residual norm when convergence is declared because the maximum number of iterations allowed has been reached.
2496:   In this situation, when `reset` was `PETSC_TRUE`, `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`

2498:   Some Krylov methods (such as `KSPSTCG`), under certain circumstances, do not compute the final residual norm. In this situation, when `reset` was `PETSC_TRUE`,
2499:   `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`

2501:   `KSPBCGSL` does not record the residual norms for the "subiterations" hence the results from `KSPGetResidualHistory()` and `KSPGetIterationNumber()` will be different

2503:   Fortran Note:
2504:   Call `KSPRestoreResidualHistory()` when access to the history is no longer needed.

2506: .seealso: [](ch_ksp), `KSPSetResidualHistory()`, `KSP`, `KSPGetIterationNumber()`, `KSPSTCG`, `KSPBCGSL`
2507: @*/
PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
{
  PetscFunctionBegin;
  /* both outputs are optional; the array is borrowed from the KSP — do not free it */
  if (a) *a = ksp->res_hist;
  /* res_hist_len is a size_t; the checked cast errors out if it overflows PetscInt */
  if (na) PetscCall(PetscIntCast(ksp->res_hist_len, na));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2517: /*@
2518:   KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.

2520:   Not Collective

2522:   Input Parameters:
2523: + ksp   - iterative solver obtained from `KSPCreate()`
2524: . a     - array to hold history
2525: . na    - size of `a`
2526: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve

2528:   Level: advanced

2530:   Notes:
2531:   If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2532:   If 'a' is `NULL` then space is allocated for the history. If 'na' is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a default array of length 10,000 is allocated.

2534:   If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history

2536: .seealso: [](ch_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2537: @*/
PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
{
  PetscFunctionBegin;
  /* drop any history array previously allocated by PETSc; a user-provided array is never freed here */
  PetscCall(PetscFree(ksp->err_hist_alloc));
  if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
    /* caller supplied both the array and its length: borrow it, caller retains ownership */
    ksp->err_hist     = a;
    ksp->err_hist_max = na;
  } else {
    /* allocate the storage ourselves, of the requested or default length */
    if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
    else ksp->err_hist_max = 10000; /* like default ksp->max_it */
    PetscCall(PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc));
    ksp->err_hist = ksp->err_hist_alloc;
  }
  /* start recording from scratch */
  ksp->err_hist_len   = 0;
  ksp->err_hist_reset = reset;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2558: /*@C
2559:   KSPGetErrorHistory - Gets the array used to hold the error history and the number of residuals it contains.

2561:   Not Collective

2563:   Input Parameter:
2564: . ksp - iterative solver obtained from `KSPCreate()`

2566:   Output Parameters:
2567: + a  - pointer to array to hold history (or `NULL`)
2568: - na - number of used entries in a (or `NULL`)

2570:   Level: advanced

2572:   Note:
2573:   This array is borrowed and should not be freed by the caller.
2574:   Can only be called after a `KSPSetErrorHistory()` otherwise `a` and `na` are set to `NULL` and zero

2576:   Fortran Note:
2577: .vb
2578:   PetscReal, pointer :: a(:)
2579: .ve

2581: .seealso: [](ch_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2582: @*/
PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
{
  PetscFunctionBegin;
  /* both outputs are optional; the array is borrowed from the KSP — do not free it */
  if (a) *a = ksp->err_hist;
  /* err_hist_len is a size_t; the checked cast errors out if it overflows PetscInt */
  if (na) PetscCall(PetscIntCast(ksp->err_hist_len, na));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2592: /*@
2593:   KSPComputeConvergenceRate - Compute the convergence rate for the iteration <https://en.wikipedia.org/wiki/Coefficient_of_determination>

2595:   Not Collective

2597:   Input Parameter:
2598: . ksp - The `KSP`

2600:   Output Parameters:
2601: + cr   - The residual contraction rate
2602: . rRsq - The coefficient of determination, $R^2$, indicating the linearity of the data
2603: . ce   - The error contraction rate
2604: - eRsq - The coefficient of determination, $R^2$, indicating the linearity of the data

2606:   Level: advanced

2608:   Note:
2609:   Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
2610:   the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$,

2612: .seealso: [](ch_ksp), `KSP`, `KSPConvergedRateView()`
2613: @*/
2614: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2615: {
2616:   PetscReal const *hist;
2617:   PetscReal       *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2618:   PetscInt         n, k;

2620:   PetscFunctionBegin;
2621:   if (cr || rRsq) {
2622:     PetscCall(KSPGetResidualHistory(ksp, &hist, &n));
2623:     if (!n) {
2624:       if (cr) *cr = 0.0;
2625:       if (rRsq) *rRsq = -1.0;
2626:     } else {
2627:       PetscCall(PetscMalloc2(n, &x, n, &y));
2628:       for (k = 0; k < n; ++k) {
2629:         x[k] = k;
2630:         y[k] = PetscLogReal(hist[k]);
2631:         mean += y[k];
2632:       }
2633:       mean /= n;
2634:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2635:       for (k = 0; k < n; ++k) {
2636:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2637:         var += PetscSqr(y[k] - mean);
2638:       }
2639:       PetscCall(PetscFree2(x, y));
2640:       if (cr) *cr = PetscExpReal(slope);
2641:       if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2642:     }
2643:   }
2644:   if (ce || eRsq) {
2645:     PetscCall(KSPGetErrorHistory(ksp, &hist, &n));
2646:     if (!n) {
2647:       if (ce) *ce = 0.0;
2648:       if (eRsq) *eRsq = -1.0;
2649:     } else {
2650:       PetscCall(PetscMalloc2(n, &x, n, &y));
2651:       for (k = 0; k < n; ++k) {
2652:         x[k] = k;
2653:         y[k] = PetscLogReal(hist[k]);
2654:         mean += y[k];
2655:       }
2656:       mean /= n;
2657:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2658:       for (k = 0; k < n; ++k) {
2659:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2660:         var += PetscSqr(y[k] - mean);
2661:       }
2662:       PetscCall(PetscFree2(x, y));
2663:       if (ce) *ce = PetscExpReal(slope);
2664:       if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2665:     }
2666:   }
2667:   PetscFunctionReturn(PETSC_SUCCESS);
2668: }

2670: /*@C
2671:   KSPSetConvergenceTest - Sets the function to be used to determine convergence of `KSPSolve()`

2673:   Logically Collective

2675:   Input Parameters:
2676: + ksp      - iterative solver obtained from `KSPCreate()`
2677: . converge - pointer to the function, see `KSPConvergenceTestFn`
2678: . ctx      - context for private data for the convergence routine (may be `NULL`)
2679: - destroy  - a routine for destroying the context (may be `NULL`)

2681:   Level: advanced

2683:   Notes:
2684:   Must be called after the `KSP` type has been set so put this after
2685:   a call to `KSPSetType()`, or `KSPSetFromOptions()`.

2687:   The default convergence test, `KSPConvergedDefault()`, aborts if the
2688:   residual grows to more than 10000 times the initial residual.

2690:   The default is a combination of relative and absolute tolerances.
2691:   The residual value that is tested may be an approximation; routines
2692:   that need exact values should compute them.

2694:   In the default PETSc convergence test, the precise values of reason
2695:   are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.

2697: .seealso: [](ch_ksp), `KSP`, `KSPConvergenceTestFn`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2698: @*/
PetscErrorCode KSPSetConvergenceTest(KSP ksp, KSPConvergenceTestFn *converge, PetscCtx ctx, PetscCtxDestroyFn *destroy)
{
  PetscFunctionBegin;
  /* destroy the previous test's context (if it registered a destroy routine) before installing the new test */
  if (ksp->convergeddestroy) PetscCall((*ksp->convergeddestroy)(&ksp->cnvP));
  ksp->converged        = converge;
  ksp->convergeddestroy = destroy;
  ksp->cnvP             = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2710: /*@C
2711:   KSPGetConvergenceTest - Gets the function to be used to determine convergence.

2713:   Logically Collective

2715:   Input Parameter:
2716: . ksp - iterative solver obtained from `KSPCreate()`

2718:   Output Parameters:
2719: + converge - pointer to convergence test function, see `KSPConvergenceTestFn`
2720: . ctx      - context for private data for the convergence routine (may be `NULL`)
2721: - destroy  - a routine for destroying the context (may be `NULL`)

2723:   Level: advanced

2725: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2726: @*/
2727: PetscErrorCode KSPGetConvergenceTest(KSP ksp, KSPConvergenceTestFn **converge, PetscCtxRt ctx, PetscCtxDestroyFn **destroy)
2728: {
2729:   PetscFunctionBegin;
2731:   if (converge) *converge = ksp->converged;
2732:   if (destroy) *destroy = ksp->convergeddestroy;
2733:   if (ctx) *(void **)ctx = ksp->cnvP;
2734:   PetscFunctionReturn(PETSC_SUCCESS);
2735: }

2737: /*@C
2738:   KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context

2740:   Logically Collective

2742:   Input Parameter:
2743: . ksp - iterative solver obtained from `KSPCreate()`

2745:   Output Parameters:
2746: + converge - pointer to convergence test function, see `KSPConvergenceTestFn`
2747: . ctx      - context for private data for the convergence routine
2748: - destroy  - a routine for destroying the context

2750:   Level: advanced

2752:   Note:
2753:   This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`)
2754:   and then calling `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed
2755:   by `KSPSetConvergenceTest()` the original context information
2756:   would be destroyed and hence the transferred context would be invalid and trigger a crash on use

2758: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2759: @*/
2760: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, KSPConvergenceTestFn **converge, PetscCtxRt ctx, PetscCtxDestroyFn **destroy)
2761: {
2762:   PetscFunctionBegin;
2764:   *converge             = ksp->converged;
2765:   *destroy              = ksp->convergeddestroy;
2766:   *(void **)ctx         = ksp->cnvP;
2767:   ksp->converged        = NULL;
2768:   ksp->cnvP             = NULL;
2769:   ksp->convergeddestroy = NULL;
2770:   PetscFunctionReturn(PETSC_SUCCESS);
2771: }

2773: /*@C
2774:   KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.

2776:   Not Collective

2778:   Input Parameter:
2779: . ksp - iterative solver obtained from `KSPCreate()`

2781:   Output Parameter:
2782: . ctx - monitoring context

2784:   Level: advanced

2786:   Fortran Note:
2787:   This only works when the context is a Fortran derived type or a `PetscObject`. Declare `ctx` with
2788: .vb
2789:   type(tUsertype), pointer :: ctx
2790: .ve

2792: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2793: @*/
PetscErrorCode KSPGetConvergenceContext(KSP ksp, PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* hand back the raw context pointer stored by KSPSetConvergenceTest(); no ownership transfer */
  *(void **)ctx = ksp->cnvP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2802: /*@
2803:   KSPBuildSolution - Builds the approximate solution in a vector provided.

2805:   Collective

2807:   Input Parameter:
2808: . ksp - iterative solver obtained from `KSPCreate()`

2810:   Output Parameter:
2811:    Provide exactly one of
2812: + v - location to stash solution, optional, otherwise pass `NULL`
2813: - V - the solution is returned in this location. This vector is created internally. This vector should NOT be destroyed by the user with `VecDestroy()`.

2815:   Level: developer

2817:   Notes:
2818:   This routine can be used in one of two ways
2819: .vb
2820:       KSPBuildSolution(ksp,NULL,&V);
2821:    or
2822:       KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2823: .ve
2824:   In the first case an internal vector is allocated to store the solution
2825:   (the user cannot destroy this vector). In the second case the solution
2826:   is generated in the vector that the user provides. Note that for certain
2827:   methods, such as `KSPCG`, the second case requires a copy of the solution,
2828:   while in the first case the call is essentially free since it simply
2829:   returns the vector where the solution already is stored. For some methods
2830:   like `KSPGMRES` during the solve this is a reasonably expensive operation and should only be
2831:   used if truly needed.

2833: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2834: @*/
PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
{
  PetscFunctionBegin;
  PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
  if (!V) V = &v; /* only v was given: return the result through v itself */
  if (ksp->reason != KSP_CONVERGED_ITERATING) {
    /* the solve has finished, so the solution already lives in ksp->vec_sol */
    if (!v) PetscCall(KSPGetSolution(ksp, V));
    else PetscCall(VecCopy(ksp->vec_sol, v));
  } else {
    /* mid-solve: let the KSP implementation assemble the current iterate */
    PetscUseTypeMethod(ksp, buildsolution, v, V);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

2850: /*@
2851:   KSPBuildResidual - Builds the residual in a vector provided.

2853:   Collective

2855:   Input Parameter:
2856: . ksp - iterative solver obtained from `KSPCreate()`

2858:   Output Parameters:
2859: + t - work vector.  If not provided then one is generated.
2860: . v - optional location to stash residual.  If `v` is not provided, then a location is generated.
2861: - V - the residual

2863:   Level: advanced

2865:   Note:
2866:   Regardless of whether or not `v` is provided, the residual is
2867:   returned in `V`.

2869: .seealso: [](ch_ksp), `KSP`, `KSPBuildSolution()`
2870: @*/
PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
{
  PetscBool flag = PETSC_FALSE; /* set when we allocated the work vector ourselves */
  Vec       w = v, tt = t;

  PetscFunctionBegin;
  /* if no result vector was supplied, create one; NOTE(review): the implementation's
     buildresidual presumably returns w through *V, so the caller receives it — verify */
  if (!w) PetscCall(VecDuplicate(ksp->vec_rhs, &w));
  if (!tt) {
    PetscCall(VecDuplicate(ksp->vec_sol, &tt));
    flag = PETSC_TRUE;
  }
  PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
  /* the work vector is only needed during the call above; destroy it only if we created it */
  if (flag) PetscCall(VecDestroy(&tt));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2888: /*@
2889:   KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2890:   before solving. This actually CHANGES the matrix (and right-hand side).

2892:   Logically Collective

2894:   Input Parameters:
2895: + ksp   - the `KSP` context
2896: - scale - `PETSC_TRUE` or `PETSC_FALSE`

2898:   Options Database Keys:
2899: + -ksp_diagonal_scale     - perform a diagonal scaling before the solve
2900: - -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve

2902:   Level: advanced

2904:   Notes:
2905:   Scales the matrix by  $D^{-1/2}  A  D^{-1/2}  [D^{1/2} x ] = D^{-1/2} b $
2906:   where $D_{ii}$ is $1/abs(A_{ii}) $ unless $A_{ii}$ is zero and then it is 1.

2908:   BE CAREFUL with this routine: it actually scales the matrix and right
2909:   hand side that define the system. After the system is solved the matrix
2910:   and right-hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`

2912:   This should NOT be used within the `SNES` solves if you are using a line
2913:   search.

2915:   If you use this with the `PCType` `PCEISENSTAT` preconditioner than you can
2916:   use the `PCEisenstatSetNoDiagonalScaling()` option, or `-pc_eisenstat_no_diagonal_scaling`
2917:   to save some unneeded, redundant flops.

2919: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2920: @*/
PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
{
  PetscFunctionBegin;
  /* only records the request; the scaling itself is applied elsewhere during the solve */
  ksp->dscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2930: /*@
2931:   KSPGetDiagonalScale - Checks if `KSP` solver scales the matrix and right-hand side, that is if `KSPSetDiagonalScale()` has been called

2933:   Not Collective

2935:   Input Parameter:
2936: . ksp - the `KSP` context

2938:   Output Parameter:
2939: . scale - `PETSC_TRUE` or `PETSC_FALSE`

2941:   Level: intermediate

2943: .seealso: [](ch_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`
2944: @*/
PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
{
  PetscFunctionBegin;
  PetscAssertPointer(scale, 2);
  /* report the flag recorded by KSPSetDiagonalScale() */
  *scale = ksp->dscale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2954: /*@
2955:   KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.

2957:   Logically Collective

2959:   Input Parameters:
2960: + ksp - the `KSP` context
2961: - fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2962:          rescale (default)

2964:   Level: intermediate

2966:   Notes:
2967:   Must be called after `KSPSetDiagonalScale()`

2969:   Using this will slow things down, because it rescales the matrix before and
2970:   after each linear solve. This is intended mainly for testing to allow one
2971:   to easily get back the original system to make sure the solution computed is
2972:   accurate enough.

2974: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2975: @*/
PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
{
  PetscFunctionBegin;
  /* only records the request; the un-scaling itself happens after the solve */
  ksp->dscalefix = fix;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2985: /*@
2986:   KSPGetDiagonalScaleFix - Determines if `KSP` diagonally scales the system back after solving. That is `KSPSetDiagonalScaleFix()` has been called

2988:   Not Collective

2990:   Input Parameter:
2991: . ksp - the `KSP` context

2993:   Output Parameter:
2994: . fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2995:          rescale (default)

2997:   Level: intermediate

2999: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
3000: @*/
PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
{
  PetscFunctionBegin;
  PetscAssertPointer(fix, 2);
  /* report the flag recorded by KSPSetDiagonalScaleFix() */
  *fix = ksp->dscalefix;
  PetscFunctionReturn(PETSC_SUCCESS);
}

3010: /*@C
3011:   KSPSetComputeOperators - set routine to compute the linear operators

3013:   Logically Collective

3015:   Input Parameters:
3016: + ksp  - the `KSP` context
3017: . func - function to compute the operators, see `KSPComputeOperatorsFn` for the calling sequence
3018: - ctx  - optional context

3020:   Level: beginner

3022:   Notes:
3023:   `func()` will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
3024:   unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()` is called. This allows the same system to be solved several times
3025:   with different right-hand side functions but is a confusing API since one might expect it to be called for each `KSPSolve()`

3027:   To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`

3029:   Developer Note:
3030:   Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes clear when new matrices are computing without requiring call this
3031:   routine to indicate when the new matrix should be computed.

3033: .seealso: [](ch_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`, `KSPComputeOperatorsFn`
3034: @*/
PetscErrorCode KSPSetComputeOperators(KSP ksp, KSPComputeOperatorsFn *func, PetscCtx ctx)
{
  DM dm;

  PetscFunctionBegin;
  /* the callback is stored on the KSP's DM, not on the KSP itself */
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMKSPSetComputeOperators(dm, func, ctx));
  /* force the operators to be recomputed at the next KSPSolve() */
  if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

3047: /*@C
3048:   KSPSetComputeRHS - set routine to compute the right-hand side of the linear system

3050:   Logically Collective

3052:   Input Parameters:
3053: + ksp  - the `KSP` context
3054: . func - function to compute the right-hand side, see `KSPComputeRHSFn` for the calling sequence
3055: - ctx  - optional context

3057:   Level: beginner

3059:   Note:
3060:   The routine you provide will be called EACH time you call `KSPSolve()` to prepare the new right-hand side for that solve

3062: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`, `KSPComputeRHSFn`
3063: @*/
PetscErrorCode KSPSetComputeRHS(KSP ksp, KSPComputeRHSFn *func, PetscCtx ctx)
{
  DM dm;

  PetscFunctionBegin;
  /* the callback is stored on the KSP's DM, not on the KSP itself */
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMKSPSetComputeRHS(dm, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

3075: /*@C
3076:   KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system

3078:   Logically Collective

3080:   Input Parameters:
3081: + ksp  - the `KSP` context
3082: . func - function to compute the initial guess, see `KSPComputeInitialGuessFn` for calling sequence
3083: - ctx  - optional context

3085:   Level: beginner

3087:   Note:
3088:   This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`, otherwise
3089:   call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver

3091: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`,
3092:           `KSPComputeInitialGuessFn`
3093: @*/
PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, KSPComputeInitialGuessFn *func, PetscCtx ctx)
{
  DM dm;

  PetscFunctionBegin;
  /* the callback is stored on the KSP's DM, not on the KSP itself */
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMKSPSetComputeInitialGuess(dm, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

3105: /*@
3106:   KSPSetUseExplicitTranspose - Determines whether the explicit transpose of the operator is formed in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
3107:   be explicitly formed since the solve is much more efficient.

3109:   Logically Collective

3111:   Input Parameter:
3112: . ksp - the `KSP` context

3114:   Output Parameter:
3115: . flg - `PETSC_TRUE` to transpose the system in `KSPSolveTranspose()`, `PETSC_FALSE` to not transpose (default)

3117:   Level: advanced

3119: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `KSP`
3120: @*/
PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
{
  PetscFunctionBegin;
  /* only records the preference; it is honored when KSPSolveTranspose() runs */
  ksp->transpose.use_explicittranspose = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}