/* itfunc.c */

  1: /*
  2:       Interface KSP routines that the user calls.
  3: */

  5: #include <petsc/private/kspimpl.h>
  6: #include <petsc/private/matimpl.h>
  7: #include <petscdm.h>

  9: /* number of nested levels of KSPSetUp/Solve(). This is used to determine if KSP_DIVERGED_ITS should be fatal. */
 10: static PetscInt level = 0;

/* View `obj` on `viewer` using `format`: push the format, view, then restore the
   viewer's previous format so the caller's viewer state is unchanged. */
static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
{
  PetscCall(PetscViewerPushFormat(viewer, format));
  PetscCall(PetscObjectView(obj, viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  return PETSC_SUCCESS;
}

 20: /*@
 21:   KSPComputeExtremeSingularValues - Computes the extreme singular values
 22:   for the preconditioned operator. Called after or during `KSPSolve()`.

 24:   Not Collective

 26:   Input Parameter:
 27: . ksp - iterative solver obtained from `KSPCreate()`

 29:   Output Parameters:
 30: + emax - maximum estimated singular value
 31: - emin - minimum estimated singular value

 33:   Options Database Key:
 34: . -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.

 36:   Level: advanced

 38:   Notes:
 39:   One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
 40:   (or use the option -ksp_view_eigenvalues) in order for this routine to work correctly.

 42:   Many users may just want to use the monitoring routine
 43:   `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
 44:   to print the extreme singular values at each iteration of the linear solve.

 46:   Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
 47:   The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
 48:   intended for eigenanalysis. Consider the excellent package `SLEPc` if accurate values are required.

 50:   Disable restarts if using KSPGMRES, otherwise this estimate will only be using those iterations after the last
 51:   restart. See `KSPGMRESSetRestart()` for more details.

 53: .seealso: [](ch_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`, `KSPComputeRitz()`
 54: @*/
 55: PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
 56: {
 57:   PetscFunctionBegin;
 59:   PetscAssertPointer(emax, 2);
 60:   PetscAssertPointer(emin, 3);
 61:   PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Singular values not requested before KSPSetUp()");

 63:   if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
 64:   else {
 65:     *emin = -1.0;
 66:     *emax = -1.0;
 67:   }
 68:   PetscFunctionReturn(PETSC_SUCCESS);
 69: }

 71: /*@
 72:   KSPComputeEigenvalues - Computes the extreme eigenvalues for the
 73:   preconditioned operator. Called after or during `KSPSolve()`.

 75:   Not Collective

 77:   Input Parameters:
 78: + ksp - iterative solver obtained from `KSPCreate()`
 79: - n   - size of arrays `r` and `c`. The number of eigenvalues computed `neig` will, in
 80:        general, be less than this.

 82:   Output Parameters:
 83: + r    - real part of computed eigenvalues, provided by user with a dimension of at least `n`
 84: . c    - complex part of computed eigenvalues, provided by user with a dimension of at least `n`
 85: - neig - actual number of eigenvalues computed (will be less than or equal to `n`)

 87:   Options Database Key:
 88: . -ksp_view_eigenvalues - Prints eigenvalues to stdout

 90:   Level: advanced

 92:   Notes:
 93:   The number of eigenvalues estimated depends on the size of the Krylov space
 94:   generated during the `KSPSolve()` ; for example, with
 95:   `KSPCG` it corresponds to the number of CG iterations, for `KSPGMRES` it is the number
 96:   of GMRES iterations SINCE the last restart. Any extra space in `r` and `c`
 97:   will be ignored.

 99:   `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
100:   intended only for assistance in understanding the convergence of iterative
101:   methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
102:   the excellent package SLEPc.

104:   One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
105:   in order for this routine to work correctly.

107:   Many users may just want to use the monitoring routine
108:   `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
109:   to print the singular values at each iteration of the linear solve.

111:   `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.

113: .seealso: [](ch_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
114: @*/
115: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
116: {
117:   PetscFunctionBegin;
119:   if (n) PetscAssertPointer(r, 3);
120:   if (n) PetscAssertPointer(c, 4);
121:   PetscCheck(n >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Requested < 0 Eigenvalues");
122:   PetscAssertPointer(neig, 5);
123:   PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Eigenvalues not requested before KSPSetUp()");

125:   if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
126:   else *neig = 0;
127:   PetscFunctionReturn(PETSC_SUCCESS);
128: }

130: /*@
131:   KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the
132:   smallest or largest in modulus, for the preconditioned operator.

134:   Not Collective

136:   Input Parameters:
137: + ksp   - iterative solver obtained from `KSPCreate()`
138: . ritz  - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
139: - small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively

141:   Output Parameters:
142: + nrit  - On input number of (harmonic) Ritz pairs to compute; on output, actual number of computed (harmonic) Ritz pairs
143: . S     - an array of the Ritz vectors, pass in an array of vectors of size `nrit`
144: . tetar - real part of the Ritz values, pass in an array of size `nrit`
145: - tetai - imaginary part of the Ritz values, pass in an array of size `nrit`

147:   Level: advanced

149:   Notes:
150:   This only works with a `KSPType` of `KSPGMRES`.

152:   One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.

154:   This routine must be called after `KSPSolve()`.

156:   In `KSPGMRES`, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
157:   the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
158:   a restart (that is a complete GMRES cycle was never achieved).

160:   The number of actual (harmonic) Ritz pairs computed is less than or equal to the restart
161:   parameter for GMRES if a complete cycle has been performed or less or equal to the number of GMRES
162:   iterations.

164:   `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).

166:   For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
167:   the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
168:   vectors `S` are equal to the real and the imaginary parts of the associated vectors.
169:   When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
170:   values are still returned in `tetar` and `tetai`, as is done in `KSPComputeEigenvalues()`, but
171:   the Ritz vectors S are complex.

173:   The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.

175:   The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator, consider the
176:   excellent package `SLEPc` if accurate values are required.

178: .seealso: [](ch_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
179: @*/
180: PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
181: {
182:   PetscFunctionBegin;
184:   PetscCheck(ksp->calc_ritz, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Ritz pairs not requested before KSPSetUp()");
185:   PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
186:   PetscFunctionReturn(PETSC_SUCCESS);
187: }

189: /*@
190:   KSPSetUpOnBlocks - Sets up the preconditioner for each block in
191:   the block Jacobi `PCJACOBI`, overlapping Schwarz `PCASM`, and fieldsplit `PCFIELDSPLIT` preconditioners

193:   Collective

195:   Input Parameter:
196: . ksp - the `KSP` context

198:   Level: advanced

200:   Notes:
201:   `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
202:   more precise profiling (via `-log_view`) of the setup phase for these
203:   block preconditioners.  If the user does not call `KSPSetUpOnBlocks()`,
204:   it will automatically be called from within `KSPSolve()`.

206:   Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
207:   on the `PC` context within the `KSP` context.

209: .seealso: [](ch_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
210: @*/
211: PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
212: {
213:   PC             pc;
214:   PCFailedReason pcreason;

216:   PetscFunctionBegin;
218:   level++;
219:   PetscCall(KSPGetPC(ksp, &pc));
220:   PetscCall(PCSetUpOnBlocks(pc));
221:   PetscCall(PCGetFailedReason(pc, &pcreason));
222:   level--;
223:   /*
224:      This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
225:      this flag and initializing an appropriate vector with VecFlag() so that the first norm computation can
226:      produce a result at KSPCheckNorm() thus communicating the known problem to all MPI ranks so they may
227:      terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual()
228:   */
229:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
230:   PetscFunctionReturn(PETSC_SUCCESS);
231: }

233: /*@
234:   KSPSetReusePreconditioner - reuse the current preconditioner for future `KSPSolve()`, do not construct a new preconditioner even if the `Mat` operator
235:   in the `KSP` has different values

237:   Collective

239:   Input Parameters:
240: + ksp  - iterative solver obtained from `KSPCreate()`
241: - flag - `PETSC_TRUE` to reuse the current preconditioner, or `PETSC_FALSE` to construct a new preconditioner

243:   Options Database Key:
244: . -ksp_reuse_preconditioner <true,false> - reuse the previously computed preconditioner

246:   Level: intermediate

248:   Note:
249:   When using `SNES` one can use `SNESSetLagPreconditioner()` to determine when preconditioners are reused.

251: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGetReusePreconditioner()`,
252:           `SNESSetLagPreconditioner()`, `SNES`
253: @*/
254: PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
255: {
256:   PC pc;

258:   PetscFunctionBegin;
260:   PetscCall(KSPGetPC(ksp, &pc));
261:   PetscCall(PCSetReusePreconditioner(pc, flag));
262:   PetscFunctionReturn(PETSC_SUCCESS);
263: }

265: /*@
266:   KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the `Mat` operator in the `KSP` has changed.

268:   Collective

270:   Input Parameter:
271: . ksp - iterative solver obtained from `KSPCreate()`

273:   Output Parameter:
274: . flag - the boolean flag indicating if the current preconditioner should be reused

276:   Level: intermediate

278: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
279: @*/
280: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
281: {
282:   PetscFunctionBegin;
284:   PetscAssertPointer(flag, 2);
285:   *flag = PETSC_FALSE;
286:   if (ksp->pc) PetscCall(PCGetReusePreconditioner(ksp->pc, flag));
287:   PetscFunctionReturn(PETSC_SUCCESS);
288: }

290: /*@
291:   KSPSetSkipPCSetFromOptions - prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`.
292:   This is used if the same `PC` is shared by more than one `KSP` so its options are not reset for each `KSP`

294:   Collective

296:   Input Parameters:
297: + ksp  - iterative solver obtained from `KSPCreate()`
298: - flag - `PETSC_TRUE` to skip calling the `PCSetFromOptions()`

300:   Level: intermediate

302: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
303: @*/
304: PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
305: {
306:   PetscFunctionBegin;
308:   ksp->skippcsetfromoptions = flag;
309:   PetscFunctionReturn(PETSC_SUCCESS);
310: }

312: /*@
313:   KSPSetUp - Sets up the internal data structures for the
314:   later use `KSPSolve()` the `KSP` linear iterative solver.

316:   Collective

318:   Input Parameter:
319: . ksp - iterative solver, `KSP`, obtained from `KSPCreate()`

321:   Level: developer

323:   Note:
324:   This is called automatically by `KSPSolve()` so usually does not need to be called directly.

326: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPSetUpOnBlocks()`
327: @*/
328: PetscErrorCode KSPSetUp(KSP ksp)
329: {
330:   Mat            A, B;
331:   Mat            mat, pmat;
332:   MatNullSpace   nullsp;
333:   PCFailedReason pcreason;
334:   PC             pc;
335:   PetscBool      pcmpi;

337:   PetscFunctionBegin;
339:   PetscCall(KSPGetPC(ksp, &pc));
340:   PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCMPI, &pcmpi));
341:   if (pcmpi) {
342:     PetscBool ksppreonly;
343:     PetscCall(PetscObjectTypeCompare((PetscObject)ksp, KSPPREONLY, &ksppreonly));
344:     if (!ksppreonly) PetscCall(KSPSetType(ksp, KSPPREONLY));
345:   }
346:   level++;

348:   /* reset the convergence flag from the previous solves */
349:   ksp->reason = KSP_CONVERGED_ITERATING;

351:   if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES));
352:   PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));

354:   if (ksp->dmActive && !ksp->setupstage) {
355:     /* first time in so build matrix and vector data structures using DM */
356:     if (!ksp->vec_rhs) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs));
357:     if (!ksp->vec_sol) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_sol));
358:     PetscCall(DMCreateMatrix(ksp->dm, &A));
359:     PetscCall(KSPSetOperators(ksp, A, A));
360:     PetscCall(PetscObjectDereference((PetscObject)A));
361:   }

363:   if (ksp->dmActive) {
364:     DMKSP kdm;
365:     PetscCall(DMGetDMKSP(ksp->dm, &kdm));

367:     if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS) {
368:       /* only computes initial guess the first time through */
369:       PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
370:       PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
371:     }
372:     if (kdm->ops->computerhs) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));

374:     if (ksp->setupstage != KSP_SETUP_NEWRHS) {
375:       PetscCheck(kdm->ops->computeoperators, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp,PETSC_FALSE);");
376:       PetscCall(KSPGetOperators(ksp, &A, &B));
377:       PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
378:     }
379:   }

381:   if (ksp->setupstage == KSP_SETUP_NEWRHS) {
382:     level--;
383:     PetscFunctionReturn(PETSC_SUCCESS);
384:   }
385:   PetscCall(PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));

387:   switch (ksp->setupstage) {
388:   case KSP_SETUP_NEW:
389:     PetscUseTypeMethod(ksp, setup);
390:     break;
391:   case KSP_SETUP_NEWMATRIX: /* This should be replaced with a more general mechanism */
392:     if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
393:     break;
394:   default:
395:     break;
396:   }

398:   if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
399:   PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
400:   /* scale the matrix if requested */
401:   if (ksp->dscale) {
402:     PetscScalar *xx;
403:     PetscInt     i, n;
404:     PetscBool    zeroflag = PETSC_FALSE;

406:     if (!ksp->diagonal) { /* allocate vector to hold diagonal */
407:       PetscCall(MatCreateVecs(pmat, &ksp->diagonal, NULL));
408:     }
409:     PetscCall(MatGetDiagonal(pmat, ksp->diagonal));
410:     PetscCall(VecGetLocalSize(ksp->diagonal, &n));
411:     PetscCall(VecGetArray(ksp->diagonal, &xx));
412:     for (i = 0; i < n; i++) {
413:       if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
414:       else {
415:         xx[i]    = 1.0;
416:         zeroflag = PETSC_TRUE;
417:       }
418:     }
419:     PetscCall(VecRestoreArray(ksp->diagonal, &xx));
420:     if (zeroflag) PetscCall(PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n"));
421:     PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
422:     if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
423:     ksp->dscalefix2 = PETSC_FALSE;
424:   }
425:   PetscCall(PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
426:   PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
427:   PetscCall(PCSetUp(ksp->pc));
428:   PetscCall(PCGetFailedReason(ksp->pc, &pcreason));
429:   /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; their is no code to handle a ksp->reason on only some ranks */
430:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;

432:   PetscCall(MatGetNullSpace(mat, &nullsp));
433:   if (nullsp) {
434:     PetscBool test = PETSC_FALSE;
435:     PetscCall(PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL));
436:     if (test) PetscCall(MatNullSpaceTest(nullsp, mat, NULL));
437:   }
438:   ksp->setupstage = KSP_SETUP_NEWRHS;
439:   level--;
440:   PetscFunctionReturn(PETSC_SUCCESS);
441: }

/*@
  KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged, `KSPConvergedReason` to a `PetscViewer`

  Collective

  Input Parameters:
+ ksp    - iterative solver obtained from `KSPCreate()`
- viewer - the `PetscViewer` on which to display the reason

  Options Database Keys:
+ -ksp_converged_reason          - print reason for converged or diverged, also prints number of iterations
- -ksp_converged_reason ::failed - only print reason and number of iterations when diverged

  Level: beginner

  Note:
  Use `KSPConvergedReasonViewFromOptions()` to display the reason based on values in the PETSc options database.

  To change the format of the output call `PetscViewerPushFormat`(`viewer`,`format`) before this call. Use `PETSC_VIEWER_DEFAULT` for the default,
  use `PETSC_VIEWER_FAILED` to only display a reason if it fails.

.seealso: [](ch_ksp), `KSPConvergedReasonViewFromOptions()`, `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
          `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
@*/
PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
{
  PetscBool         isAscii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  /* default to stdout on the KSP's communicator when no viewer is supplied */
  if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
  if (isAscii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    /* indent one level past the KSP's tab level so the message aligns under nested solver output */
    PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel + 1));
    if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
      /* positive reason = converged; suppressed when the user asked for failed-only output */
      if (((PetscObject)ksp)->prefix) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
      }
    } else if (ksp->reason <= 0) {
      /* non-positive reason = still iterating or diverged; always reported */
      if (((PetscObject)ksp)->prefix) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
      }
      if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
        /* add the preconditioner's own failure reason for PC-caused divergence */
        PCFailedReason reason;
        PetscCall(PCGetFailedReason(ksp->pc, &reason));
        PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]));
      }
    }
    PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel + 1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

501: /*@C
502:   KSPConvergedReasonViewSet - Sets an ADDITIONAL function that is to be used at the
503:   end of the linear solver to display the convergence reason of the linear solver.

505:   Logically Collective

507:   Input Parameters:
508: + ksp               - the `KSP` context
509: . f                 - the ksp converged reason view function
510: . vctx              - [optional] user-defined context for private data for the
511:                       `KSPConvergedReason` view routine (use `NULL` if no context is desired)
512: - reasonviewdestroy - [optional] routine that frees `vctx` (may be `NULL`)

514:   Options Database Keys:
515: + -ksp_converged_reason             - sets a default `KSPConvergedReasonView()`
516: - -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have been hardwired into a code by
517:                                       calls to `KSPConvergedReasonViewSet()`, but does not cancel those set via the options database.

519:   Level: intermediate

521:   Note:
522:   Several different converged reason view routines may be set by calling
523:   `KSPConvergedReasonViewSet()` multiple times; all will be called in the
524:   order in which they were set.

526:   Developer Note:
527:   Should be named KSPConvergedReasonViewAdd().

529: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewCancel()`
530: @*/
531: PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, PetscErrorCode (*f)(KSP, void *), void *vctx, PetscErrorCode (*reasonviewdestroy)(void **))
532: {
533:   PetscInt  i;
534:   PetscBool identical;

536:   PetscFunctionBegin;
538:   for (i = 0; i < ksp->numberreasonviews; i++) {
539:     PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))f, vctx, reasonviewdestroy, (PetscErrorCode (*)(void))ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical));
540:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
541:   }
542:   PetscCheck(ksp->numberreasonviews < MAXKSPREASONVIEWS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP reasonview set");
543:   ksp->reasonview[ksp->numberreasonviews]          = f;
544:   ksp->reasonviewdestroy[ksp->numberreasonviews]   = reasonviewdestroy;
545:   ksp->reasonviewcontext[ksp->numberreasonviews++] = (void *)vctx;
546:   PetscFunctionReturn(PETSC_SUCCESS);
547: }

549: /*@
550:   KSPConvergedReasonViewCancel - Clears all the reasonview functions for a `KSP` object set with `KSPConvergedReasonViewSet()`
551:   as well as the default viewer.

553:   Collective

555:   Input Parameter:
556: . ksp - iterative solver obtained from `KSPCreate()`

558:   Level: intermediate

560: .seealso: [](ch_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`, `KSPConvergedReasonViewSet()`
561: @*/
562: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
563: {
564:   PetscInt i;

566:   PetscFunctionBegin;
568:   for (i = 0; i < ksp->numberreasonviews; i++) {
569:     if (ksp->reasonviewdestroy[i]) PetscCall((*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]));
570:   }
571:   ksp->numberreasonviews = 0;
572:   PetscCall(PetscViewerDestroy(&ksp->convergedreasonviewer));
573:   PetscFunctionReturn(PETSC_SUCCESS);
574: }

/*@
  KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how a `KSPReason` is to be viewed.

  Collective

  Input Parameter:
. ksp - the `KSP` object

  Level: intermediate

  Note:
  This is called automatically at the conclusion of `KSPSolve()` so is rarely called directly by user code.

.seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewSet()`
@*/
PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
{
  PetscFunctionBegin;
  /* Call all user-provided reason review routines */
  for (PetscInt i = 0; i < ksp->numberreasonviews; i++) PetscCall((*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]));

  /* Call the default PETSc routine */
  if (ksp->convergedreasonviewer) {
    /* honor the format selected from the options database (e.g. ::failed) */
    PetscCall(PetscViewerPushFormat(ksp->convergedreasonviewer, ksp->convergedreasonformat));
    PetscCall(KSPConvergedReasonView(ksp, ksp->convergedreasonviewer));
    PetscCall(PetscViewerPopFormat(ksp->convergedreasonviewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  KSPConvergedRateView - Displays the convergence rate <https://en.wikipedia.org/wiki/Coefficient_of_determination> of `KSPSolve()` to a viewer

  Collective

  Input Parameters:
+ ksp    - iterative solver obtained from `KSPCreate()`
- viewer - the `PetscViewer` to display the reason

  Options Database Key:
. -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)

  Level: intermediate

  Notes:
  To change the format of the output, call `PetscViewerPushFormat`(`viewer`,`format`) before this call.

  Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
  the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)}$,

.seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
@*/
PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscBool         isAscii;
  PetscReal         rrate, rRsq, erate = 0.0, eRsq = 0.0; /* residual/error rates and their R^2 fit quality */
  PetscInt          its;
  const char       *prefix, *reason = KSPConvergedReasons[ksp->reason];

  PetscFunctionBegin;
  PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq));
  /* default to stdout on the KSP's communicator when no viewer is supplied */
  if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
  if (isAscii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
    if (ksp->reason > 0) {
      /* converged: report reason, iteration count, and any rates with a valid (non-negative) R^2 */
      if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its));
      /* tabs disabled so the rate fields continue on the same output line */
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
      if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    } else if (ksp->reason <= 0) {
      /* still iterating or diverged: same report with the "did not converge" wording */
      if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
      if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
      if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
      if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
        /* add the preconditioner's own failure reason for PC-caused divergence */
        PCFailedReason reason;
        PetscCall(PCGetFailedReason(ksp->pc, &reason));
        PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]));
      }
    }
    PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

672: #include <petscdraw.h>

/* View eigenvalue estimates of the preconditioned operator on an ASCII or draw viewer.

   If isExplicit, all eigenvalues are computed from the explicitly formed (dense) operator,
   which is feasible only for small problems; otherwise estimates are taken from the Krylov
   space built during the last KSPSolve(), in which case zero iterations means nothing can
   be reported. `format` is consulted only on the draw path (contour vs. scatter plot). */
static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
{
  PetscReal  *r, *c; /* real and imaginary parts of the eigenvalue estimates */
  PetscInt    n, i, neig;
  PetscBool   isascii, isdraw;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isExplicit) {
    /* explicit path: compute all eigenvalues of the (dense) preconditioned operator */
    PetscCall(VecGetSize(ksp->vec_sol, &n));
    PetscCall(PetscMalloc2(n, &r, n, &c));
    PetscCall(KSPComputeEigenvaluesExplicitly(ksp, n, r, c));
    neig = n;
  } else {
    PetscInt nits;

    PetscCall(KSPGetIterationNumber(ksp, &nits));
    n = nits + 2; /* a little slack beyond the iteration count for the estimates */
    if (!nits) {
      /* no Krylov space was built, so there is nothing to estimate; nothing allocated yet,
         so the early return leaks nothing.
         NOTE(review): this message is printed with ASCIIPrintf regardless of viewer type — confirm intended */
      PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n"));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscMalloc2(n, &r, n, &c));
    PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
  }
  if (isascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively"));
    for (i = 0; i < neig; ++i) {
      /* print a + bi with the sign folded into the format for negative imaginary parts */
      if (c[i] >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]));
      else PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]));
    }
  } else if (isdraw && rank == 0) {
    /* graphics are produced on rank 0 only */
    PetscDraw   draw;
    PetscDrawSP drawsp;

    if (format == PETSC_VIEWER_DRAW_CONTOUR) {
      PetscCall(KSPPlotEigenContours_Private(ksp, neig, r, c));
    } else {
      /* scatter plot of the eigenvalues in the complex plane */
      PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
      PetscCall(PetscDrawSPCreate(draw, 1, &drawsp));
      PetscCall(PetscDrawSPReset(drawsp));
      for (i = 0; i < neig; ++i) PetscCall(PetscDrawSPAddPoint(drawsp, r + i, c + i));
      PetscCall(PetscDrawSPDraw(drawsp, PETSC_TRUE));
      PetscCall(PetscDrawSPSave(drawsp));
      PetscCall(PetscDrawSPDestroy(&drawsp));
    }
  }
  PetscCall(PetscFree2(r, c));
  PetscFunctionReturn(PETSC_SUCCESS);
}

728: static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
729: {
730:   PetscReal smax, smin;
731:   PetscInt  nits;
732:   PetscBool isascii;

734:   PetscFunctionBegin;
735:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
736:   PetscCall(KSPGetIterationNumber(ksp, &nits));
737:   if (!nits) {
738:     PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n"));
739:     PetscFunctionReturn(PETSC_SUCCESS);
740:   }
741:   PetscCall(KSPComputeExtremeSingularValues(ksp, &smax, &smin));
742:   if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme %svalues: max %g min %g max/min %g\n", smin < 0 ? "eigen" : "singular ", (double)smax, (double)smin, (double)(smax / smin)));
743:   PetscFunctionReturn(PETSC_SUCCESS);
744: }

746: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
747: {
748:   PetscBool isascii;

750:   PetscFunctionBegin;
751:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
752:   PetscCheck(!ksp->dscale || ksp->dscalefix, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Cannot compute final scale with -ksp_diagonal_scale except also with -ksp_diagonal_scale_fix");
753:   if (isascii) {
754:     Mat       A;
755:     Vec       t;
756:     PetscReal norm;

758:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
759:     PetscCall(VecDuplicate(ksp->vec_rhs, &t));
760:     PetscCall(KSP_MatMult(ksp, A, ksp->vec_sol, t));
761:     PetscCall(VecAYPX(t, -1.0, ksp->vec_rhs));
762:     PetscCall(VecViewFromOptions(t, (PetscObject)ksp, "-ksp_view_final_residual_vec"));
763:     PetscCall(VecNorm(t, NORM_2, &norm));
764:     PetscCall(VecDestroy(&t));
765:     PetscCall(PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm));
766:   }
767:   PetscFunctionReturn(PETSC_SUCCESS);
768: }

770: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
771: {
772:   PetscInt i;

774:   PetscFunctionBegin;
775:   if (!ksp->pauseFinal) PetscFunctionReturn(PETSC_SUCCESS);
776:   for (i = 0; i < ksp->numbermonitors; ++i) {
777:     PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ksp->monitorcontext[i];
778:     PetscDraw             draw;
779:     PetscReal             lpause;

781:     if (!vf) continue;
782:     if (vf->lg) {
783:       if (!PetscCheckPointer(vf->lg, PETSC_OBJECT)) continue;
784:       if (((PetscObject)vf->lg)->classid != PETSC_DRAWLG_CLASSID) continue;
785:       PetscCall(PetscDrawLGGetDraw(vf->lg, &draw));
786:       PetscCall(PetscDrawGetPause(draw, &lpause));
787:       PetscCall(PetscDrawSetPause(draw, -1.0));
788:       PetscCall(PetscDrawPause(draw));
789:       PetscCall(PetscDrawSetPause(draw, lpause));
790:     } else {
791:       PetscBool isdraw;

793:       if (!PetscCheckPointer(vf->viewer, PETSC_OBJECT)) continue;
794:       if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
795:       PetscCall(PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw));
796:       if (!isdraw) continue;
797:       PetscCall(PetscViewerDrawGetDraw(vf->viewer, 0, &draw));
798:       PetscCall(PetscDrawGetPause(draw, &lpause));
799:       PetscCall(PetscDrawSetPause(draw, -1.0));
800:       PetscCall(PetscDrawPause(draw));
801:       PetscCall(PetscDrawSetPause(draw, lpause));
802:     }
803:   }
804:   PetscFunctionReturn(PETSC_SUCCESS);
805: }

/*
  KSPSolve_Private - Common driver for KSPSolve() and KSPSolveTranspose();
  ksp->transpose_solve selects which system is solved.

  Handles, in order: x == b aliasing, installing b/x into the KSP, pre-solve
  viewing/callbacks, setup, optional KSPGuess-based initial guess, optional
  diagonal scaling of rhs/guess, removal from the right-hand side of the
  component outside the range of the operator, the actual solve via the type's
  solve method, post-solve unscaling/callbacks, all -ksp_view_* post-solve
  viewers, and the -ksp_error_if_not_converged check.

  NOTE(review): on the error paths (PetscCheck/SETERRQ) `level` is not
  decremented; this appears benign since the error propagates out, but confirm.
*/
static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
{
  PetscBool    flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
  Mat          mat, pmat;
  MPI_Comm     comm;
  MatNullSpace nullsp;
  Vec          btmp, vec_rhs = NULL;

  PetscFunctionBegin;
  level++;
  comm = PetscObjectComm((PetscObject)ksp);
  if (x && x == b) {
    /* aliased input: solve into a private work vector and copy back into b at the end */
    PetscCheck(ksp->guess_zero, comm, PETSC_ERR_ARG_INCOMP, "Cannot use x == b with nonzero initial guess");
    PetscCall(VecDuplicate(b, &x));
    inXisinB = PETSC_TRUE;
  }
  /* take ownership references on the supplied vectors, releasing any previous ones */
  if (b) {
    PetscCall(PetscObjectReference((PetscObject)b));
    PetscCall(VecDestroy(&ksp->vec_rhs));
    ksp->vec_rhs = b;
  }
  if (x) {
    PetscCall(PetscObjectReference((PetscObject)x));
    PetscCall(VecDestroy(&ksp->vec_sol));
    ksp->vec_sol = x;
  }

  if (ksp->viewPre) PetscCall(ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre));

  /* user pre-solve callback, if registered */
  if (ksp->presolve) PetscCall((*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx));

  /* reset the residual history list if requested */
  if (ksp->res_hist_reset) ksp->res_hist_len = 0;
  if (ksp->err_hist_reset) ksp->err_hist_len = 0;

  /* KSPSetUp() scales the matrix if needed */
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));

  if (ksp->guess) {
    /* a state change of vec_sol tells us whether the KSPGuess object actually produced a guess */
    PetscObjectState ostate, state;

    PetscCall(KSPGuessSetUp(ksp->guess));
    PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate));
    PetscCall(KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
    PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &state));
    if (state != ostate) {
      ksp->guess_zero = PETSC_FALSE;
    } else {
      PetscCall(PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n"));
      ksp->guess_zero = PETSC_TRUE;
    }
  }

  PetscCall(VecSetErrorIfLocked(ksp->vec_sol, 3));

  PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  /* diagonal scale RHS if called for */
  if (ksp->dscale) {
    PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
    /* second time in, but matrix was scaled back to original */
    if (ksp->dscalefix && ksp->dscalefix2) {
      Mat mat, pmat;

      PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
      PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
      if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
    }

    /* scale initial guess */
    if (!ksp->guess_zero) {
      if (!ksp->truediagonal) {
        /* truediagonal caches the reciprocal of the scaling for reuse on later solves */
        PetscCall(VecDuplicate(ksp->diagonal, &ksp->truediagonal));
        PetscCall(VecCopy(ksp->diagonal, ksp->truediagonal));
        PetscCall(VecReciprocal(ksp->truediagonal));
      }
      PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal));
    }
  }
  PetscCall(PCPreSolve(ksp->pc, ksp));

  if (ksp->guess_zero && !ksp->guess_not_read) PetscCall(VecSet(ksp->vec_sol, 0.0));
  if (ksp->guess_knoll) { /* The Knoll trick is independent on the KSPGuess specified */
    PetscCall(PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol));
    PetscCall(KSP_RemoveNullSpace(ksp, ksp->vec_sol));
    ksp->guess_zero = PETSC_FALSE;
  }

  /* can we mark the initial guess as zero for this solve? */
  guess_zero = ksp->guess_zero;
  if (!ksp->guess_zero) {
    PetscReal norm;

    /* only a cheap check: use the norm if the Vec already has it cached */
    PetscCall(VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm));
    if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
  }
  /* the component of b outside the range of the solved operator lies in the nullspace
     of that operator's transpose; remove it so the system is consistent */
  if (ksp->transpose_solve) {
    PetscCall(MatGetNullSpace(pmat, &nullsp));
  } else {
    PetscCall(MatGetTransposeNullSpace(pmat, &nullsp));
  }
  if (nullsp) {
    /* work on a copy of the rhs; the original is restored after the solve */
    PetscCall(VecDuplicate(ksp->vec_rhs, &btmp));
    PetscCall(VecCopy(ksp->vec_rhs, btmp));
    PetscCall(MatNullSpaceRemove(nullsp, btmp));
    vec_rhs      = ksp->vec_rhs;
    ksp->vec_rhs = btmp;
  }
  /* rhs is read-only during the solve */
  PetscCall(VecLockReadPush(ksp->vec_rhs));
  PetscUseTypeMethod(ksp, solve);
  PetscCall(KSPMonitorPauseFinal_Internal(ksp));

  PetscCall(VecLockReadPop(ksp->vec_rhs));
  if (nullsp) {
    ksp->vec_rhs = vec_rhs;
    PetscCall(VecDestroy(&btmp));
  }

  /* restore the user's guess_zero setting (it may have been temporarily overridden above) */
  ksp->guess_zero = guess_zero;

  PetscCheck(ksp->reason, comm, PETSC_ERR_PLIB, "Internal error, solver returned without setting converged reason");
  ksp->totalits += ksp->its;

  PetscCall(KSPConvergedReasonViewFromOptions(ksp));

  if (ksp->viewRate) {
    PetscCall(PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate));
    PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
    PetscCall(PetscViewerPopFormat(ksp->viewerRate));
  }
  PetscCall(PCPostSolve(ksp->pc, ksp));

  /* diagonal scale solution if called for */
  if (ksp->dscale) {
    PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal));
    /* unscale right-hand side and matrix */
    if (ksp->dscalefix) {
      Mat mat, pmat;

      PetscCall(VecReciprocal(ksp->diagonal));
      PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
      PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
      PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
      if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
      PetscCall(VecReciprocal(ksp->diagonal));
      ksp->dscalefix2 = PETSC_TRUE;
    }
  }
  PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
  if (ksp->guess) PetscCall(KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
  /* user post-solve callback, if registered */
  if (ksp->postsolve) PetscCall((*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx));

  /* handle all -ksp_view_* post-solve viewing options */
  PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
  if (ksp->viewEV) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV));
  if (ksp->viewEVExp) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp));
  if (ksp->viewSV) PetscCall(KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV));
  if (ksp->viewFinalRes) PetscCall(KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes));
  if (ksp->viewMat) PetscCall(ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat));
  if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat));
  if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs));
  if (ksp->viewSol) PetscCall(ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol));
  if (ksp->view) PetscCall(ObjectView((PetscObject)ksp, ksp->viewer, ksp->format));
  if (ksp->viewDScale) PetscCall(ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale));
  if (ksp->viewMatExp) {
    /* explicitly assemble (the possibly matrix-free) operator and view it */
    Mat A, B;

    PetscCall(PCGetOperators(ksp->pc, &A, NULL));
    if (ksp->transpose_solve) {
      Mat AT;

      PetscCall(MatCreateTranspose(A, &AT));
      PetscCall(MatComputeOperator(AT, MATAIJ, &B));
      PetscCall(MatDestroy(&AT));
    } else {
      PetscCall(MatComputeOperator(A, MATAIJ, &B));
    }
    PetscCall(ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp));
    PetscCall(MatDestroy(&B));
  }
  if (ksp->viewPOpExp) {
    /* explicitly assemble the preconditioned operator and view it */
    Mat B;

    PetscCall(KSPComputeOperator(ksp, MATAIJ, &B));
    PetscCall(ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp));
    PetscCall(MatDestroy(&B));
  }

  if (inXisinB) {
    /* x was aliased to b: copy the solution back and drop the work vector */
    PetscCall(VecCopy(x, b));
    PetscCall(VecDestroy(&x));
  }
  PetscCall(PetscObjectSAWsBlock((PetscObject)ksp));
  /* KSP_DIVERGED_ITS is not fatal for nested (level > 1) solves: inner solvers may
     legitimately hit their iteration limit inside a preconditioner */
  if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
    PCFailedReason reason;

    PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
    PetscCall(PCGetFailedReason(ksp->pc, &reason));
    SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
  }
  level--;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1011: /*@
1012:   KSPSolve - Solves a linear system associated with `KSP` object

1014:   Collective

1016:   Input Parameters:
1017: + ksp - iterative solver obtained from `KSPCreate()`
1018: . b   - the right-hand side vector
1019: - x   - the solution (this may be the same vector as `b`, then `b` will be overwritten with the answer)

1021:   Options Database Keys:
1022: + -ksp_view_eigenvalues                      - compute preconditioned operators eigenvalues
1023: . -ksp_view_eigenvalues_explicit             - compute the eigenvalues by forming the dense operator and using LAPACK
1024: . -ksp_view_mat binary                       - save matrix to the default binary viewer
1025: . -ksp_view_pmat binary                      - save matrix used to build preconditioner to the default binary viewer
1026: . -ksp_view_rhs binary                       - save right-hand side vector to the default binary viewer
1027: . -ksp_view_solution binary                  - save computed solution vector to the default binary viewer
1028:                                                (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
1029: . -ksp_view_mat_explicit                     - for matrix-free operators, computes the matrix entries and views them
1030: . -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
1031: . -ksp_converged_reason                      - print reason for converged or diverged, also prints number of iterations
1032: . -ksp_view_final_residual                   - print 2-norm of true linear system residual at the end of the solution process
1033: . -ksp_error_if_not_converged                - stop the program as soon as an error is detected in a `KSPSolve()`
1034: . -ksp_view_pre                              - print the ksp data structure before the system solution
1035: - -ksp_view                                  - print the ksp data structure at the end of the system solution

1037:   Level: beginner

1039:   Notes:
1040:   See `KSPSetFromOptions()` for additional options database keys that affect `KSPSolve()`

1042:   If one uses `KSPSetDM()` then `x` or `b` need not be passed. Use `KSPGetSolution()` to access the solution in this case.

1044:   The operator is specified with `KSPSetOperators()`.

1046:   `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or if constructing the preconditioner failed.
1047:   Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or function `KSPSetErrorIfNotConverged()`
1048:   will cause `KSPSolve()` to error as soon as an error occurs in the linear solver.  In inner `KSPSolve()` `KSP_DIVERGED_ITS` is not treated as an error because when using nested solvers
1049:   it may be fine that inner solvers in the preconditioner do not converge during the solution process.

1051:   The number of iterations can be obtained from `KSPGetIterationNumber()`.

1053:   If you provide a matrix that has a `MatSetNullSpace()` and `MatSetTransposeNullSpace()` this will use that information to solve singular systems
1054:   in the least squares sense with a norm minimizing solution.

1056:   $A x = b $  where $b = b_p + b_t$ where $b_t$ is not in the range of $A$ (and hence by the fundamental theorem of linear algebra is in the nullspace(A'), see `MatSetNullSpace()`).

1058:   `KSP` first removes $b_t$ producing the linear system $ A x = b_p $ (which has multiple solutions) and solves this to find the $\|x\|$ minimizing solution (and hence
1059:   it finds the solution $x$ orthogonal to the nullspace(A). The algorithm is simply in each iteration of the Krylov method we remove the nullspace(A) from the search
1060:   direction thus the solution which is a linear combination of the search directions has no component in the nullspace(A).

1062:   We recommend always using `KSPGMRES` for such singular systems.
  If nullspace(A) = nullspace(A') (note symmetric matrices always satisfy this property) then both left and right preconditioning will work.
1064:   If nullspace(A) != nullspace(A') then left preconditioning will work but right preconditioning may not work (or it may).

1066:   Developer Notes:
1067:   The reason we cannot always solve  nullspace(A) != nullspace(A') systems with right preconditioning is because we need to remove at each iteration
1068:   the nullspace(AB) from the search direction. While we know the nullspace(A) the nullspace(AB) equals $B^-1$ times the nullspace(A) but except for trivial preconditioners
1069:   such as diagonal scaling we cannot apply the inverse of the preconditioner to a vector and thus cannot compute the nullspace(AB).

  If using a direct method (e.g., via the `KSP` solver
  `KSPPREONLY` and a preconditioner such as `PCLU` or `PCCHOLESKY`) then usually one iteration of the `KSP` method will be needed for convergence.

1074:   To solve a linear system with the transpose of the matrix use `KSPSolveTranspose()`.

1076:   Understanding Convergence\:
1077:   The manual pages `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1078:   `KSPComputeEigenvaluesExplicitly()` provide information on additional
1079:   options to monitor convergence and print eigenvalue information.

1081: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1082:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1083:           `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1084: @*/
1085: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1086: {
1087:   PetscBool isPCMPI;

1089:   PetscFunctionBegin;
1093:   ksp->transpose_solve = PETSC_FALSE;
1094:   PetscCall(KSPSolve_Private(ksp, b, x));
1095:   PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
1096:   if (PCMPIServerActive && isPCMPI) {
1097:     KSP subksp;

1099:     PetscCall(PCMPIGetKSP(ksp->pc, &subksp));
1100:     ksp->its    = subksp->its;
1101:     ksp->reason = subksp->reason;
1102:   }
1103:   PetscFunctionReturn(PETSC_SUCCESS);
1104: }

1106: /*@
1107:   KSPSolveTranspose - Solves a linear system with the transpose of the matrix associated with the `KSP` object, $ A^T x = b$.

1109:   Collective

1111:   Input Parameters:
1112: + ksp - iterative solver obtained from `KSPCreate()`
1113: . b   - right-hand side vector
1114: - x   - solution vector

1116:   Level: developer

1118:   Note:
  For complex numbers this solves the non-Hermitian transpose system.

1121:   Developer Note:
1122:   We need to implement a `KSPSolveHermitianTranspose()`

1124: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1125:           `KSPSolve()`, `KSP`, `KSPSetOperators()`
1126: @*/
/*
   When -ksp_use_explicittranspose is set, the transpose system is solved by explicitly
   forming A^T (cached in ksp->transpose.AT/.BT and reused across repeated solves);
   otherwise ksp->transpose_solve is set so KSPSolve_Private() solves with the transpose
   implicitly.
*/
PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
{
  PetscFunctionBegin;
  if (ksp->transpose.use_explicittranspose) {
    Mat J, Jpre;
    PetscCall(KSPGetOperators(ksp, &J, &Jpre));
    if (!ksp->transpose.reuse_transpose) {
      /* first transpose solve: build the explicit transposed matrices */
      PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
      if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
      ksp->transpose.reuse_transpose = PETSC_TRUE;
    } else {
      /* later solves: refresh the cached transposes in place (MAT_REUSE_MATRIX) */
      PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
      if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
    }
    if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
      /* operator and preconditioner matrix coincide: share one transpose, taking an extra reference */
      PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
      ksp->transpose.BT = ksp->transpose.AT;
    }
    PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
  } else {
    ksp->transpose_solve = PETSC_TRUE;
  }
  PetscCall(KSPSolve_Private(ksp, b, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1156: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1157: {
1158:   Mat        A, R;
1159:   PetscReal *norms;
1160:   PetscInt   i, N;
1161:   PetscBool  flg;

1163:   PetscFunctionBegin;
1164:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg));
1165:   if (flg) {
1166:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
1167:     if (!ksp->transpose_solve) PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1168:     else PetscCall(MatTransposeMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1169:     PetscCall(MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN));
1170:     PetscCall(MatGetSize(R, NULL, &N));
1171:     PetscCall(PetscMalloc1(N, &norms));
1172:     PetscCall(MatGetColumnNorms(R, NORM_2, norms));
1173:     PetscCall(MatDestroy(&R));
1174:     for (i = 0; i < N; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : "                          ", shift + i, (double)norms[i]));
1175:     PetscCall(PetscFree(norms));
1176:   }
1177:   PetscFunctionReturn(PETSC_SUCCESS);
1178: }

/*
  KSPMatSolve_Private - Common driver for KSPMatSolve() and KSPMatSolveTranspose():
  solves a block of right-hand sides B (dense) into the block of solutions X.

  If the KSP implementation provides a matsolve method, the columns are solved in
  batches of at most the batch size set with KSPSetMatSolveBatchSize(); otherwise
  it falls back to solving column by column through KSPSolve_Private().
*/
static PetscErrorCode KSPMatSolve_Private(KSP ksp, Mat B, Mat X)
{
  Mat       A, P, vB, vX;
  Vec       cb, cx;
  PetscInt  n1, N1, n2, N2, Bbn = PETSC_DECIDE;
  PetscBool match;

  PetscFunctionBegin;
  PetscCheckSameComm(ksp, 1, B, 2);
  PetscCheckSameComm(ksp, 1, X, 3);
  PetscCheckSameType(B, 2, X, 3);
  PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  MatCheckPreallocated(X, 3);
  if (!X->assembled) {
    /* X is write-only output; assemble it so it can be used below */
    PetscCall(MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY));
  }
  PetscCall(B != X, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_IDN, "B and X must be different matrices");
  PetscCheck(!ksp->transpose_solve || !ksp->transpose.use_explicittranspose, PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "KSPMatSolveTranspose() does not support -ksp_use_explicittranspose");
  PetscCall(KSPGetOperators(ksp, &A, &P));
  /* B and X must have matching local and global column counts */
  PetscCall(MatGetLocalSize(B, NULL, &n2));
  PetscCall(MatGetLocalSize(X, NULL, &n1));
  PetscCall(MatGetSize(B, NULL, &N2));
  PetscCall(MatGetSize(X, NULL, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of right-hand sides (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of solutions (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of right-hand sides not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of solutions not stored in a dense Mat");
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));
  if (ksp->ops->matsolve) {
    level++;
    if (ksp->guess_zero) PetscCall(MatZeroEntries(X));
    PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
    PetscCall(KSPGetMatSolveBatchSize(ksp, &Bbn));
    /* by default, do a single solve with all columns */
    if (Bbn == PETSC_DECIDE) Bbn = N2;
    else PetscCheck(Bbn >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "KSPMatSolve() batch size %" PetscInt_FMT " must be positive", Bbn);
    PetscCall(PetscInfo(ksp, "KSP type %s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, Bbn));
    /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
    if (Bbn >= N2) {
      PetscUseTypeMethod(ksp, matsolve, B, X);
      if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0));

      PetscCall(KSPConvergedReasonViewFromOptions(ksp));

      if (ksp->viewRate) {
        PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
        PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
        PetscCall(PetscViewerPopFormat(ksp->viewerRate));
      }
    } else {
      /* batched solves over column sub-blocks [n2, min(n2+Bbn, N2)) */
      for (n2 = 0; n2 < N2; n2 += Bbn) {
        PetscCall(MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB));
        PetscCall(MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX));
        PetscUseTypeMethod(ksp, matsolve, vB, vX);
        if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2));

        PetscCall(KSPConvergedReasonViewFromOptions(ksp));

        if (ksp->viewRate) {
          PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
          PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
          PetscCall(PetscViewerPopFormat(ksp->viewerRate));
        }
        PetscCall(MatDenseRestoreSubMatrix(B, &vB));
        PetscCall(MatDenseRestoreSubMatrix(X, &vX));
      }
    }
    if (ksp->viewMat) PetscCall(ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat));
    if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat));
    if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs));
    if (ksp->viewSol) PetscCall(ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol));
    if (ksp->view) PetscCall(KSPView(ksp, ksp->viewer));
    PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
    /* KSP_DIVERGED_ITS is not fatal for nested (level > 1) solves */
    if (ksp->errorifnotconverged && ksp->reason < 0 && (level == 1 || ksp->reason != KSP_DIVERGED_ITS)) {
      PCFailedReason reason;

      PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
      PetscCall(PCGetFailedReason(ksp->pc, &reason));
      SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
    }
    level--;
  } else {
    /* no native matsolve: fall back to one KSPSolve per column */
    PetscCall(PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name));
    for (n2 = 0; n2 < N2; ++n2) {
      PetscCall(MatDenseGetColumnVecRead(B, n2, &cb));
      PetscCall(MatDenseGetColumnVecWrite(X, n2, &cx));
      PetscCall(KSPSolve_Private(ksp, cb, cx));
      PetscCall(MatDenseRestoreColumnVecWrite(X, n2, &cx));
      PetscCall(MatDenseRestoreColumnVecRead(B, n2, &cb));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

1281: /*@
1282:   KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a `MATDENSE`.

1284:   Input Parameters:
1285: + ksp - iterative solver
1286: - B   - block of right-hand sides

1288:   Output Parameter:
1289: . X - block of solutions

1291:   Level: intermediate

1293:   Notes:
1294:   This is a stripped-down version of `KSPSolve()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.

1296:   Unlike with `KSPSolve()`, `B` and `X` must be different matrices.

1298: .seealso: [](ch_ksp), `KSPSolve()`, `MatMatSolve()`, `KSPMatSolveTranspose()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`, `KSPSetMatSolveBatchSize()`
1299: @*/
PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
{
  PetscFunctionBegin;
  /* forward (non-transpose) multi-right-hand-side solve; shares KSPMatSolve_Private() with the transpose variant */
  ksp->transpose_solve = PETSC_FALSE;
  PetscCall(KSPMatSolve_Private(ksp, B, X));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1308: /*@
1309:   KSPMatSolveTranspose - Solves a linear system with the transposed matrix with multiple right-hand sides stored as a `MATDENSE`.

1311:   Input Parameters:
1312: + ksp - iterative solver
1313: - B   - block of right-hand sides

1315:   Output Parameter:
1316: . X - block of solutions

1318:   Level: intermediate

1320:   Notes:
1321:   This is a stripped-down version of `KSPSolveTranspose()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.

1323:   Unlike `KSPSolveTranspose()`,
1324:   `B` and `X` must be different matrices and the transposed matrix cannot be assembled explicitly for the user.

1326: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `MatMatTransposeSolve()`, `KSPMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1327: @*/
PetscErrorCode KSPMatSolveTranspose(KSP ksp, Mat B, Mat X)
{
  PetscFunctionBegin;
  /* transpose multi-right-hand-side solve; shares KSPMatSolve_Private() with the forward variant */
  ksp->transpose_solve = PETSC_TRUE;
  PetscCall(KSPMatSolve_Private(ksp, B, X));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1336: /*@
1337:   KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1339:   Logically Collective

1341:   Input Parameters:
1342: + ksp - the `KSP` iterative solver
1343: - bs  - batch size

1345:   Level: advanced

1347: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1348: @*/
PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
{
  PetscFunctionBegin;
  /* consumed by KSPMatSolve_Private(); PETSC_DECIDE means solve all columns in one batch */
  ksp->nmax = bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1358: /*@
1359:   KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1361:   Input Parameter:
1362: . ksp - iterative solver context

1364:   Output Parameter:
1365: . bs - batch size

1367:   Level: advanced

1369: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1370: @*/
1371: PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
1372: {
1373:   PetscFunctionBegin;
1375:   PetscAssertPointer(bs, 2);
1376:   *bs = ksp->nmax;
1377:   PetscFunctionReturn(PETSC_SUCCESS);
1378: }

1380: /*@
1381:   KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`

1383:   Collective

1385:   Input Parameter:
1386: . ksp - the `KSP` iterative solver context obtained from `KSPCreate()`

1388:   Level: beginner

1390: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1391: @*/
1392: PetscErrorCode KSPResetViewers(KSP ksp)
1393: {
1394:   PetscFunctionBegin;
1396:   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1397:   PetscCall(PetscViewerDestroy(&ksp->viewer));
1398:   PetscCall(PetscViewerDestroy(&ksp->viewerPre));
1399:   PetscCall(PetscViewerDestroy(&ksp->viewerRate));
1400:   PetscCall(PetscViewerDestroy(&ksp->viewerMat));
1401:   PetscCall(PetscViewerDestroy(&ksp->viewerPMat));
1402:   PetscCall(PetscViewerDestroy(&ksp->viewerRhs));
1403:   PetscCall(PetscViewerDestroy(&ksp->viewerSol));
1404:   PetscCall(PetscViewerDestroy(&ksp->viewerMatExp));
1405:   PetscCall(PetscViewerDestroy(&ksp->viewerEV));
1406:   PetscCall(PetscViewerDestroy(&ksp->viewerSV));
1407:   PetscCall(PetscViewerDestroy(&ksp->viewerEVExp));
1408:   PetscCall(PetscViewerDestroy(&ksp->viewerFinalRes));
1409:   PetscCall(PetscViewerDestroy(&ksp->viewerPOpExp));
1410:   PetscCall(PetscViewerDestroy(&ksp->viewerDScale));
1411:   ksp->view         = PETSC_FALSE;
1412:   ksp->viewPre      = PETSC_FALSE;
1413:   ksp->viewMat      = PETSC_FALSE;
1414:   ksp->viewPMat     = PETSC_FALSE;
1415:   ksp->viewRhs      = PETSC_FALSE;
1416:   ksp->viewSol      = PETSC_FALSE;
1417:   ksp->viewMatExp   = PETSC_FALSE;
1418:   ksp->viewEV       = PETSC_FALSE;
1419:   ksp->viewSV       = PETSC_FALSE;
1420:   ksp->viewEVExp    = PETSC_FALSE;
1421:   ksp->viewFinalRes = PETSC_FALSE;
1422:   ksp->viewPOpExp   = PETSC_FALSE;
1423:   ksp->viewDScale   = PETSC_FALSE;
1424:   PetscFunctionReturn(PETSC_SUCCESS);
1425: }

1427: /*@
1428:   KSPReset - Removes any allocated `Vec` and `Mat` from the `KSP` data structures.

1430:   Collective

1432:   Input Parameter:
1433: . ksp - iterative solver obtained from `KSPCreate()`

1435:   Level: beginner

1437:   Notes:
1438:   Any options set in the `KSP`, including those set with `KSPSetFromOptions()` remain.

1440:   Call `KSPReset()` only before you call `KSPSetOperators()` with a different sized matrix than the previous matrix used with the `KSP`.

1442: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1443: @*/
PetscErrorCode KSPReset(KSP ksp)
{
  PetscFunctionBegin;
  if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
  /* Give the KSP implementation a chance to release its own data first */
  PetscTryTypeMethod(ksp, reset);
  /* Cascade the reset into the preconditioner and the initial-guess object */
  if (ksp->pc) PetscCall(PCReset(ksp->pc));
  if (ksp->guess) {
    KSPGuess guess = ksp->guess;
    PetscTryTypeMethod(guess, reset);
  }
  /* Release all work vectors and cached rhs/solution/diagonal vectors */
  PetscCall(VecDestroyVecs(ksp->nwork, &ksp->work));
  PetscCall(VecDestroy(&ksp->vec_rhs));
  PetscCall(VecDestroy(&ksp->vec_sol));
  PetscCall(VecDestroy(&ksp->diagonal));
  PetscCall(VecDestroy(&ksp->truediagonal));

  /* Force a full KSPSetUp() on the next solve and reset the KSPMatSolve() batch size */
  ksp->setupstage = KSP_SETUP_NEW;
  ksp->nmax       = PETSC_DECIDE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1466: /*@
1467:   KSPDestroy - Destroys a `KSP` context.

1469:   Collective

1471:   Input Parameter:
1472: . ksp - iterative solver obtained from `KSPCreate()`

1474:   Level: beginner

1476: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1477: @*/
PetscErrorCode KSPDestroy(KSP *ksp)
{
  PC pc;

  PetscFunctionBegin;
  if (!*ksp) PetscFunctionReturn(PETSC_SUCCESS);
  /* Tear the object down only when the last reference is dropped */
  if (--((PetscObject)*ksp)->refct > 0) {
    *ksp = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectSAWsViewOff((PetscObject)*ksp));

  /*
   Avoid a cascading call to PCReset(ksp->pc) from the following call:
   PCReset() shouldn't be called from KSPDestroy() as it is unprotected by pc's
   refcount (and may be shared, e.g., by other ksps).
   */
  pc         = (*ksp)->pc;
  (*ksp)->pc = NULL; /* temporarily hide the PC so KSPReset() cannot touch it */
  PetscCall(KSPReset(*ksp));
  PetscCall(KSPResetViewers(*ksp));
  (*ksp)->pc = pc; /* restore; the PC is destroyed below through its own refcount */
  PetscTryTypeMethod(*ksp, destroy);

  /* Explicitly assembled transpose operators are owned by this KSP */
  if ((*ksp)->transpose.use_explicittranspose) {
    PetscCall(MatDestroy(&(*ksp)->transpose.AT));
    PetscCall(MatDestroy(&(*ksp)->transpose.BT));
    (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
  }

  PetscCall(KSPGuessDestroy(&(*ksp)->guess));
  PetscCall(DMDestroy(&(*ksp)->dm));
  PetscCall(PCDestroy(&(*ksp)->pc));
  PetscCall(PetscFree((*ksp)->res_hist_alloc));
  PetscCall(PetscFree((*ksp)->err_hist_alloc));
  /* a user-supplied convergence-test context is freed with its user-supplied destructor */
  if ((*ksp)->convergeddestroy) PetscCall((*(*ksp)->convergeddestroy)((*ksp)->cnvP));
  PetscCall(KSPMonitorCancel(*ksp));
  PetscCall(KSPConvergedReasonViewCancel(*ksp));
  PetscCall(PetscHeaderDestroy(ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1522: /*@
1523:   KSPSetPCSide - Sets the preconditioning side.

1525:   Logically Collective

1527:   Input Parameters:
1528: + ksp - iterative solver obtained from `KSPCreate()`

1531: - side - the preconditioning side, where side is one of
1532: .vb
1533:   PC_LEFT      - left preconditioning (default)
1534:   PC_RIGHT     - right preconditioning
1535:   PC_SYMMETRIC - symmetric preconditioning
1536: .ve

1538:   Options Database Key:
1539: . -ksp_pc_side <right,left,symmetric> - `KSP` preconditioner side

1541:   Level: intermediate

1543:   Notes:
1544:   Left preconditioning is used by default for most Krylov methods except `KSPFGMRES` which only supports right preconditioning.

1546:   For methods changing the side of the preconditioner changes the norm type that is used, see `KSPSetNormType()`.

1548:   Symmetric preconditioning is currently available only for the `KSPQCG` method. However, note that
1549:   symmetric preconditioning can be emulated by using either right or left
1550:   preconditioning, modifying the application of the matrix (with a custom `Mat` argument to `KSPSetOperators()`,
1551:   and using a pre-processing `KSPSetPreSolve()` or post-processing `KSPSetPostSolve()` step).

1553:   Setting the `PCSide` often affects the default norm type. See `KSPSetNormType()` for details.

1555: .seealso: [](ch_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`, `KSPSetPreSolve()`, `KSPSetPostSolve()`
1556: @*/
1557: PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
1558: {
1559:   PetscFunctionBegin;
1562:   ksp->pc_side = ksp->pc_side_set = side;
1563:   PetscFunctionReturn(PETSC_SUCCESS);
1564: }

1566: /*@
1567:   KSPGetPCSide - Gets the preconditioning side.

1569:   Not Collective

1571:   Input Parameter:
1572: . ksp - iterative solver obtained from `KSPCreate()`

1574:   Output Parameter:
1575: . side - the preconditioning side, where side is one of
1576: .vb
1577:   PC_LEFT      - left preconditioning (default)
1578:   PC_RIGHT     - right preconditioning
1579:   PC_SYMMETRIC - symmetric preconditioning
1580: .ve

1582:   Level: intermediate

1584: .seealso: [](ch_ksp), `KSPSetPCSide()`, `KSP`
1585: @*/
PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
{
  PetscFunctionBegin;
  PetscAssertPointer(side, 2);
  /* Finalize the norm-type / PC-side pairing before reporting: this call may
     update ksp->normtype and ksp->pc_side to a consistent combination. */
  PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
  *side = ksp->pc_side;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1596: /*@
1597:   KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1598:   iteration tolerances used by the default `KSP` convergence tests.

1600:   Not Collective

1602:   Input Parameter:
1603: . ksp - the Krylov subspace context

1605:   Output Parameters:
1606: + rtol   - the relative convergence tolerance
1607: . abstol - the absolute convergence tolerance
1608: . dtol   - the divergence tolerance
1609: - maxits - maximum number of iterations

1611:   Level: intermediate

1613:   Note:
1614:   The user can specify `NULL` for any parameter that is not needed.

1616: .seealso: [](ch_ksp), `KSPSetTolerances()`, `KSP`, `KSPSetMinimumIterations()`, `KSPGetMinimumIterations()`
1617: @*/
1618: PetscErrorCode KSPGetTolerances(KSP ksp, PetscReal *rtol, PetscReal *abstol, PetscReal *dtol, PetscInt *maxits)
1619: {
1620:   PetscFunctionBegin;
1622:   if (abstol) *abstol = ksp->abstol;
1623:   if (rtol) *rtol = ksp->rtol;
1624:   if (dtol) *dtol = ksp->divtol;
1625:   if (maxits) *maxits = ksp->max_it;
1626:   PetscFunctionReturn(PETSC_SUCCESS);
1627: }

1629: /*@
1630:   KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1631:   iteration tolerances used by the default `KSP` convergence testers.

1633:   Logically Collective

1635:   Input Parameters:
1636: + ksp    - the Krylov subspace context
1637: . rtol   - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1638: . abstol - the absolute convergence tolerance   absolute size of the (possibly preconditioned) residual norm
1639: . dtol   - the divergence tolerance,   amount (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1640: - maxits - maximum number of iterations to use

1642:   Options Database Keys:
1643: + -ksp_atol <abstol>   - Sets `abstol`
1644: . -ksp_rtol <rtol>     - Sets `rtol`
1645: . -ksp_divtol <dtol>   - Sets `dtol`
1646: - -ksp_max_it <maxits> - Sets `maxits`

1648:   Level: intermediate

1650:   Notes:
1651:   The tolerances are with respect to a norm of the residual of the equation $ \| b - A x^n \|$, they do not directly use the error of the equation.
1652:   The norm used depends on the `KSPNormType` that has been set with `KSPSetNormType()`, the default depends on the `KSPType` used.

1654:   All parameters must be non-negative.

1656:   Use `PETSC_CURRENT` to retain the current value of any of the parameters. The deprecated `PETSC_DEFAULT` also retains the current value (though the name is confusing).

1658:   Use `PETSC_DETERMINE` to use the default value for the given `KSP`. The default value is the value when the object's type is set.

1660:   For `dtol` and `maxits` use `PETSC_UNLIMITED` to indicate there is no upper bound on these values

1662:   See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test.  See also `KSPSetConvergenceTest()`
1663:   for setting user-defined stopping criteria.

1665:   Fortran Note:
1666:   Use `PETSC_CURRENT_INTEGER`, `PETSC_CURRENT_REAL`, `PETSC_DETERMINE_INTEGER`, or `PETSC_DETERMINE_REAL`

1668: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetMinimumIterations()`
1669: @*/
1670: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1671: {
1672:   PetscFunctionBegin;

1679:   if (rtol == (PetscReal)PETSC_DETERMINE) {
1680:     ksp->rtol = ksp->default_rtol;
1681:   } else if (rtol != (PetscReal)PETSC_CURRENT) {
1682:     PetscCheck(rtol >= 0.0 && rtol < 1.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Relative tolerance %g must be non-negative and less than 1.0", (double)rtol);
1683:     ksp->rtol = rtol;
1684:   }
1685:   if (abstol == (PetscReal)PETSC_DETERMINE) {
1686:     ksp->abstol = ksp->default_abstol;
1687:   } else if (abstol != (PetscReal)PETSC_CURRENT) {
1688:     PetscCheck(abstol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Absolute tolerance %g must be non-negative", (double)abstol);
1689:     ksp->abstol = abstol;
1690:   }
1691:   if (dtol == (PetscReal)PETSC_DETERMINE) {
1692:     ksp->divtol = ksp->default_divtol;
1693:   } else if (dtol == (PetscReal)PETSC_UNLIMITED) {
1694:     ksp->divtol = PETSC_MAX_REAL;
1695:   } else if (dtol != (PetscReal)PETSC_CURRENT) {
1696:     PetscCheck(dtol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Divergence tolerance %g must be larger than 1.0", (double)dtol);
1697:     ksp->divtol = dtol;
1698:   }
1699:   if (maxits == (PetscInt)PETSC_DETERMINE) {
1700:     ksp->max_it = ksp->default_max_it;
1701:   } else if (maxits == (PetscInt)PETSC_UNLIMITED) {
1702:     ksp->max_it = PETSC_INT_MAX;
1703:   } else if (maxits != (PetscInt)PETSC_CURRENT) {
1704:     PetscCheck(maxits >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Maximum number of iterations %" PetscInt_FMT " must be non-negative", maxits);
1705:     ksp->max_it = maxits;
1706:   }
1707:   PetscFunctionReturn(PETSC_SUCCESS);
1708: }

1710: /*@
1711:   KSPSetMinimumIterations - Sets the minimum number of iterations to use, regardless of the tolerances

1713:   Logically Collective

1715:   Input Parameters:
1716: + ksp   - the Krylov subspace context
1717: - minit - minimum number of iterations to use

1719:   Options Database Key:
1720: . -ksp_min_it <minits> - Sets `minit`

1722:   Level: intermediate

1724:   Notes:
1725:   Use `KSPSetTolerances()` to set a variety of other tolerances

1727:   See `KSPConvergedDefault()` for details on how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
1728:   for setting user-defined stopping criteria.

1730: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPGetMinimumIterations()`
1731: @*/
1732: PetscErrorCode KSPSetMinimumIterations(KSP ksp, PetscInt minit)
1733: {
1734:   PetscFunctionBegin;

1738:   PetscCheck(minit >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Minimum number of iterations %" PetscInt_FMT " must be non-negative", minit);
1739:   ksp->min_it = minit;
1740:   PetscFunctionReturn(PETSC_SUCCESS);
1741: }

1743: /*@
1744:   KSPGetMinimumIterations - Gets the minimum number of iterations to use, regardless of the tolerances, that was set with `KSPSetMinimumIterations()` or `-ksp_min_it`

1746:   Not Collective

1748:   Input Parameter:
1749: . ksp - the Krylov subspace context

1751:   Output Parameter:
1752: . minit - minimum number of iterations to use

1754:   Level: intermediate

1756: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPSetMinimumIterations()`
1757: @*/
1758: PetscErrorCode KSPGetMinimumIterations(KSP ksp, PetscInt *minit)
1759: {
1760:   PetscFunctionBegin;
1762:   PetscAssertPointer(minit, 2);

1764:   *minit = ksp->min_it;
1765:   PetscFunctionReturn(PETSC_SUCCESS);
1766: }

1768: /*@
1769:   KSPSetInitialGuessNonzero - Tells the iterative solver that the
1770:   initial guess is nonzero; otherwise `KSP` assumes the initial guess
1771:   is to be zero (and thus zeros it out before solving).

1773:   Logically Collective

1775:   Input Parameters:
1776: + ksp - iterative solver obtained from `KSPCreate()`
1777: - flg - `PETSC_TRUE` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero

1779:   Options Database Key:
1780: . -ksp_initial_guess_nonzero <true,false> - use nonzero initial guess

1782:   Level: beginner

1784:   Note:
1785:   If this is not called the X vector is zeroed in the call to `KSPSolve()`.

1787: .seealso: [](ch_ksp), `KSPGetInitialGuessNonzero()`, `KSPGuessSetType()`, `KSPGuessType`, `KSP`
1788: @*/
1789: PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
1790: {
1791:   PetscFunctionBegin;
1794:   ksp->guess_zero = (PetscBool)!flg;
1795:   PetscFunctionReturn(PETSC_SUCCESS);
1796: }

1798: /*@
1799:   KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1800:   a zero initial guess.

1802:   Not Collective

1804:   Input Parameter:
1805: . ksp - iterative solver obtained from `KSPCreate()`

1807:   Output Parameter:
1808: . flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`

1810:   Level: intermediate

1812: .seealso: [](ch_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1813: @*/
1814: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1815: {
1816:   PetscFunctionBegin;
1818:   PetscAssertPointer(flag, 2);
1819:   if (ksp->guess_zero) *flag = PETSC_FALSE;
1820:   else *flag = PETSC_TRUE;
1821:   PetscFunctionReturn(PETSC_SUCCESS);
1822: }

1824: /*@
1825:   KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error if the solver has not converged as soon as the error is detected.

1827:   Logically Collective

1829:   Input Parameters:
1830: + ksp - iterative solver obtained from `KSPCreate()`
1831: - flg - `PETSC_TRUE` indicates you want the error generated

1833:   Options Database Key:
1834: . -ksp_error_if_not_converged <true,false> - generate an error and stop the program

1836:   Level: intermediate

1838:   Notes:
1839:   Normally PETSc continues if a linear solver fails to converge, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
1840:   to determine if it has converged.

1842:   A `KSP_DIVERGED_ITS` will not generate an error in a `KSPSolve()` inside a nested linear solver

1844: .seealso: [](ch_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
1845: @*/
1846: PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
1847: {
1848:   PetscFunctionBegin;
1851:   ksp->errorifnotconverged = flg;
1852:   PetscFunctionReturn(PETSC_SUCCESS);
1853: }

1855: /*@
1856:   KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?

1858:   Not Collective

1860:   Input Parameter:
1861: . ksp - iterative solver obtained from KSPCreate()

1863:   Output Parameter:
1864: . flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`

1866:   Level: intermediate

1868: .seealso: [](ch_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
1869: @*/
1870: PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
1871: {
1872:   PetscFunctionBegin;
1874:   PetscAssertPointer(flag, 2);
1875:   *flag = ksp->errorifnotconverged;
1876:   PetscFunctionReturn(PETSC_SUCCESS);
1877: }

1879: /*@
1880:   KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` on the right hand side vector to compute the initial guess (The Knoll trick)

1882:   Logically Collective

1884:   Input Parameters:
1885: + ksp - iterative solver obtained from `KSPCreate()`
1886: - flg - `PETSC_TRUE` or `PETSC_FALSE`

1888:   Level: advanced

1890:   Developer Note:
1891:   The Knoll trick is not currently implemented using the `KSPGuess` class

1893: .seealso: [](ch_ksp), `KSPGetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1894: @*/
1895: PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
1896: {
1897:   PetscFunctionBegin;
1900:   ksp->guess_knoll = flg;
1901:   PetscFunctionReturn(PETSC_SUCCESS);
1902: }

1904: /*@
1905:   KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using PCApply(pc,b,...) to compute
1906:   the initial guess

1908:   Not Collective

1910:   Input Parameter:
1911: . ksp - iterative solver obtained from `KSPCreate()`

1913:   Output Parameter:
1914: . flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`

1916:   Level: advanced

1918: .seealso: [](ch_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1919: @*/
1920: PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
1921: {
1922:   PetscFunctionBegin;
1924:   PetscAssertPointer(flag, 2);
1925:   *flag = ksp->guess_knoll;
1926:   PetscFunctionReturn(PETSC_SUCCESS);
1927: }

1929: /*@
1930:   KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
1931:   values will be calculated via a Lanczos or Arnoldi process as the linear
1932:   system is solved.

1934:   Not Collective

1936:   Input Parameter:
1937: . ksp - iterative solver obtained from `KSPCreate()`

1939:   Output Parameter:
1940: . flg - `PETSC_TRUE` or `PETSC_FALSE`

1942:   Options Database Key:
1943: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1945:   Level: advanced

1947:   Notes:
1948:   Currently this option is not valid for all iterative methods.

1950:   Many users may just want to use the monitoring routine
1951:   `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
1952:   to print the singular values at each iteration of the linear solve.

1954: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1955: @*/
1956: PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
1957: {
1958:   PetscFunctionBegin;
1960:   PetscAssertPointer(flg, 2);
1961:   *flg = ksp->calc_sings;
1962:   PetscFunctionReturn(PETSC_SUCCESS);
1963: }

1965: /*@
1966:   KSPSetComputeSingularValues - Sets a flag so that the extreme singular
1967:   values will be calculated via a Lanczos or Arnoldi process as the linear
1968:   system is solved.

1970:   Logically Collective

1972:   Input Parameters:
1973: + ksp - iterative solver obtained from `KSPCreate()`
1974: - flg - `PETSC_TRUE` or `PETSC_FALSE`

1976:   Options Database Key:
1977: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1979:   Level: advanced

1981:   Notes:
1982:   This option is not valid for all iterative methods.

1984:   Many users may just want to use the monitoring routine
1985:   `KSPMonitorSingularValue()` (which can be set with option `-ksp_monitor_singular_value`)
1986:   to print the singular values at each iteration of the linear solve.

1988: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`, `KSPSetComputeRitz()`
1989: @*/
1990: PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
1991: {
1992:   PetscFunctionBegin;
1995:   ksp->calc_sings = flg;
1996:   PetscFunctionReturn(PETSC_SUCCESS);
1997: }

1999: /*@
2000:   KSPGetComputeEigenvalues - Gets the flag indicating that the extreme eigenvalues
2001:   values will be calculated via a Lanczos or Arnoldi process as the linear
2002:   system is solved.

2004:   Not Collective

2006:   Input Parameter:
2007: . ksp - iterative solver obtained from `KSPCreate()`

2009:   Output Parameter:
2010: . flg - `PETSC_TRUE` or `PETSC_FALSE`

2012:   Level: advanced

2014:   Note:
2015:   Currently this option is not valid for all iterative methods.

2017: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
2018: @*/
PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 2);
  /* NOTE(review): this reads calc_sings, the same flag used for singular values —
     eigenvalue and singular-value computation appear to share one flag (the Set
     routine below also writes calc_sings); confirm this is intentional. */
  *flg = ksp->calc_sings;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2028: /*@
2029:   KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
2030:   values will be calculated via a Lanczos or Arnoldi process as the linear
2031:   system is solved.

2033:   Logically Collective

2035:   Input Parameters:
2036: + ksp - iterative solver obtained from `KSPCreate()`
2037: - flg - `PETSC_TRUE` or `PETSC_FALSE`

2039:   Level: advanced

2041:   Note:
2042:   Currently this option is not valid for all iterative methods.

2044: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
2045: @*/
2046: PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
2047: {
2048:   PetscFunctionBegin;
2051:   ksp->calc_sings = flg;
2052:   PetscFunctionReturn(PETSC_SUCCESS);
2053: }

2055: /*@
2056:   KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
2057:   will be calculated via a Lanczos or Arnoldi process as the linear
2058:   system is solved.

2060:   Logically Collective

2062:   Input Parameters:
2063: + ksp - iterative solver obtained from `KSPCreate()`
2064: - flg - `PETSC_TRUE` or `PETSC_FALSE`

2066:   Level: advanced

2068:   Note:
2069:   Currently this option is only valid for the `KSPGMRES` method.

2071: .seealso: [](ch_ksp), `KSPComputeRitz()`, `KSP`, `KSPComputeEigenvalues()`, `KSPComputeExtremeSingularValues()`
2072: @*/
2073: PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
2074: {
2075:   PetscFunctionBegin;
2078:   ksp->calc_ritz = flg;
2079:   PetscFunctionReturn(PETSC_SUCCESS);
2080: }

2082: /*@
2083:   KSPGetRhs - Gets the right-hand-side vector for the linear system to
2084:   be solved.

2086:   Not Collective

2088:   Input Parameter:
2089: . ksp - iterative solver obtained from `KSPCreate()`

2091:   Output Parameter:
2092: . r - right-hand-side vector

2094:   Level: developer

2096: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
2097: @*/
2098: PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
2099: {
2100:   PetscFunctionBegin;
2102:   PetscAssertPointer(r, 2);
2103:   *r = ksp->vec_rhs;
2104:   PetscFunctionReturn(PETSC_SUCCESS);
2105: }

2107: /*@
2108:   KSPGetSolution - Gets the location of the solution for the
2109:   linear system to be solved.

2111:   Not Collective

2113:   Input Parameter:
2114: . ksp - iterative solver obtained from `KSPCreate()`

2116:   Output Parameter:
2117: . v - solution vector

2119:   Level: developer

2121:   Note:
2122:   If this is called during a `KSPSolve()` the vector's values may not represent the solution
2123:   to the linear system.

2125: .seealso: [](ch_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
2126: @*/
2127: PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
2128: {
2129:   PetscFunctionBegin;
2131:   PetscAssertPointer(v, 2);
2132:   *v = ksp->vec_sol;
2133:   PetscFunctionReturn(PETSC_SUCCESS);
2134: }

2136: /*@
2137:   KSPSetPC - Sets the preconditioner to be used to calculate the
2138:   application of the preconditioner on a vector into a `KSP`.

2140:   Collective

2142:   Input Parameters:
2143: + ksp - the `KSP` iterative solver obtained from `KSPCreate()`
2144: - pc  - the preconditioner object (if `NULL` it returns the `PC` currently held by the `KSP`)

2146:   Level: developer

2148:   Note:
2149:   This routine is almost never used since `KSP` creates its own `PC` when needed.
2150:   Use `KSPGetPC()` to retrieve the preconditioner context instead of creating a new one.

2152: .seealso: [](ch_ksp), `KSPGetPC()`, `KSP`
2153: @*/
PetscErrorCode KSPSetPC(KSP ksp, PC pc)
{
  PetscFunctionBegin;
  if (pc) {
    PetscCheckSameComm(ksp, 1, pc, 2);
  }
  /* A different PC on an already-set-up KSP only needs the new-matrix portion of setup */
  if (ksp->pc != pc && ksp->setupstage) ksp->setupstage = KSP_SETUP_NEWMATRIX;
  /* Reference the incoming PC BEFORE destroying the old one so that
     KSPSetPC(ksp, ksp->pc) does not free the object out from under us */
  PetscCall(PetscObjectReference((PetscObject)pc));
  PetscCall(PCDestroy(&ksp->pc));
  ksp->pc = pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2169: PETSC_INTERN PetscErrorCode PCCreate_MPI(PC);

2171: // PetscClangLinter pragma disable: -fdoc-internal-linkage
2172: /*@C
2173:    KSPCheckPCMPI - Checks if `-mpi_linear_solver_server` is active and the `PC` should be changed to `PCMPI`

2175:    Collective, No Fortran Support

2177:    Input Parameter:
2178: .  ksp - iterative solver obtained from `KSPCreate()`

2180:    Level: developer

2182: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PCMPIServerBegin()`, `PCMPIServerEnd()`
2183: @*/
PETSC_INTERN PetscErrorCode KSPCheckPCMPI(KSP ksp)
{
  PetscBool isPCMPI;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
  /* Only the outermost (nestlevel 0) KSP is redirected to the MPI linear solver
     server, and only when the server is active and the PC is not already PCMPI */
  if (PCMPIServerActive && ksp->nestlevel == 0 && !isPCMPI) {
    const char *prefix;
    char       *found = NULL;

    /* Append the server prefix exactly once, so server-specific options apply */
    PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
    if (prefix) PetscCall(PetscStrstr(prefix, "mpi_linear_solver_server_", &found));
    if (!found) PetscCall(KSPAppendOptionsPrefix(ksp, "mpi_linear_solver_server_"));
    PetscCall(PetscInfo(NULL, "In MPI Linear Solver Server and detected (root) PC that must be changed to PCMPI\n"));
    PetscCall(PCSetType(ksp->pc, PCMPI));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

2204: /*@
2205:   KSPGetPC - Returns a pointer to the preconditioner context with the `KSP`

2207:   Not Collective

2209:   Input Parameter:
2210: . ksp - iterative solver obtained from `KSPCreate()`

2212:   Output Parameter:
2213: . pc - preconditioner context

2215:   Level: beginner

2217:   Note:
2218:   The `PC` is created if it does not already exist.

2220:   Developer Note:
2221:   Calls `KSPCheckPCMPI()` to check if the `KSP` is effected by `-mpi_linear_solver_server`

2223: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PC`
2224: @*/
PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
{
  PetscFunctionBegin;
  PetscAssertPointer(pc, 2);
  /* Lazily create the PC on first request, inheriting the KSP's communicator,
     tab level, options database, and nesting level */
  if (!ksp->pc) {
    PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0));
    PetscCall(PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options));
    PetscCall(PCSetKSPNestLevel(ksp->pc, ksp->nestlevel));
  }
  /* May switch the PC type to PCMPI when the MPI linear solver server is active */
  PetscCall(KSPCheckPCMPI(ksp));
  *pc = ksp->pc; /* borrowed reference; do not destroy */
  PetscFunctionReturn(PETSC_SUCCESS);
}

2241: /*@
2242:   KSPMonitor - runs the user provided monitor routines, if they exist

2244:   Collective

2246:   Input Parameters:
2247: + ksp   - iterative solver obtained from `KSPCreate()`
2248: . it    - iteration number
2249: - rnorm - relative norm of the residual

2251:   Level: developer

2253:   Notes:
2254:   This routine is called by the `KSP` implementations.
2255:   It does not typically need to be called by the user.

2257:   For Krylov methods that do not keep a running value of the current solution (such as `KSPGMRES`) this
2258:   cannot be called after the `KSPConvergedReason` has been set but before the final solution has been computed.

2260: .seealso: [](ch_ksp), `KSPMonitorSet()`
2261: @*/
2262: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2263: {
2264:   PetscInt i, n = ksp->numbermonitors;

2266:   PetscFunctionBegin;
2267:   for (i = 0; i < n; i++) PetscCall((*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]));
2268:   PetscFunctionReturn(PETSC_SUCCESS);
2269: }

2271: /*@C
2272:   KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor, i.e. display in some way, perhaps by printing in the terminal,
2273:   the residual norm computed in a `KSPSolve()`

2275:   Logically Collective

2277:   Input Parameters:
2278: + ksp            - iterative solver obtained from `KSPCreate()`
2279: . monitor        - pointer to function (if this is `NULL`, it turns off monitoring
2280: . ctx            - [optional] context for private data for the monitor routine (use `NULL` if no context is needed)
2281: - monitordestroy - [optional] routine that frees monitor context (may be `NULL`)

2283:   Calling sequence of `monitor`:
2284: + ksp   - iterative solver obtained from `KSPCreate()`
2285: . it    - iteration number
2286: . rnorm - (estimated) 2-norm of (preconditioned) residual
2287: - ctx   - optional monitoring context, as set by `KSPMonitorSet()`

2289:   Calling sequence of `monitordestroy`:
2290: . ctx - optional monitoring context, as set by `KSPMonitorSet()`

2292:   Options Database Keys:
2293: + -ksp_monitor                             - sets `KSPMonitorResidual()`
2294: . -ksp_monitor draw                        - sets `KSPMonitorResidualDraw()` and plots residual
2295: . -ksp_monitor draw::draw_lg               - sets `KSPMonitorResidualDrawLG()` and plots residual
2296: . -ksp_monitor_pause_final                 - Pauses any graphics when the solve finishes (only works for internal monitors)
2297: . -ksp_monitor_true_residual               - sets `KSPMonitorTrueResidual()`
2298: . -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2299: . -ksp_monitor_max                         - sets `KSPMonitorTrueResidualMax()`
2300: . -ksp_monitor_singular_value              - sets `KSPMonitorSingularValue()`
2301: - -ksp_monitor_cancel                      - cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but
2302:                                              does not cancel those set via the options database.

2304:   Level: beginner

2306:   Notes:
2307:   The options database option `-ksp_monitor` and related options are the easiest way to turn on `KSP` iteration monitoring

2309:   The default is to do no monitoring.  To print the residual, or preconditioned
2310:   residual if `KSPSetNormType`(ksp,`KSP_NORM_PRECONDITIONED`) was called, use
2311:   `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2312:   context.

2314:   Several different monitoring routines may be set by calling
2315:   `KSPMonitorSet()` multiple times; all will be called in the
2316:   order in which they were set.

2318:   Fortran Note:
2319:   Only a single monitor function can be set for each `KSP` object

2321: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorCancel()`, `KSP`
2322: @*/
2323: PetscErrorCode KSPMonitorSet(KSP ksp, PetscErrorCode (*monitor)(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx), void *ctx, PetscErrorCode (*monitordestroy)(void **ctx))
2324: {
2325:   PetscInt  i;
2326:   PetscBool identical;

2328:   PetscFunctionBegin;
2330:   for (i = 0; i < ksp->numbermonitors; i++) {
2331:     PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))monitor, ctx, monitordestroy, (PetscErrorCode (*)(void))ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical));
2332:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
2333:   }
2334:   PetscCheck(ksp->numbermonitors < MAXKSPMONITORS, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP monitors set");
2335:   ksp->monitor[ksp->numbermonitors]          = monitor;
2336:   ksp->monitordestroy[ksp->numbermonitors]   = monitordestroy;
2337:   ksp->monitorcontext[ksp->numbermonitors++] = (void *)ctx;
2338:   PetscFunctionReturn(PETSC_SUCCESS);
2339: }

2341: /*@
2342:   KSPMonitorCancel - Clears all monitors for a `KSP` object.

2344:   Logically Collective

2346:   Input Parameter:
2347: . ksp - iterative solver obtained from `KSPCreate()`

2349:   Options Database Key:
2350: . -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.

2352:   Level: intermediate

2354: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2355: @*/
2356: PetscErrorCode KSPMonitorCancel(KSP ksp)
2357: {
2358:   PetscInt i;

2360:   PetscFunctionBegin;
2362:   for (i = 0; i < ksp->numbermonitors; i++) {
2363:     if (ksp->monitordestroy[i]) PetscCall((*ksp->monitordestroy[i])(&ksp->monitorcontext[i]));
2364:   }
2365:   ksp->numbermonitors = 0;
2366:   PetscFunctionReturn(PETSC_SUCCESS);
2367: }

2369: /*@C
2370:   KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.

2372:   Not Collective

2374:   Input Parameter:
2375: . ksp - iterative solver obtained from `KSPCreate()`

2377:   Output Parameter:
2378: . ctx - monitoring context

2380:   Level: intermediate

2382: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSP`
2383: @*/
PetscErrorCode KSPGetMonitorContext(KSP ksp, void *ctx)
{
  PetscFunctionBegin;
  /* ctx is the address of a user pointer; store the context of the FIRST registered monitor there */
  *(void **)ctx = ksp->monitorcontext[0];
  PetscFunctionReturn(PETSC_SUCCESS);
}

2392: /*@
2393:   KSPSetResidualHistory - Sets the array used to hold the residual history.
2394:   If set, this array will contain the residual norms computed at each
2395:   iteration of the solver.

2397:   Not Collective

2399:   Input Parameters:
2400: + ksp   - iterative solver obtained from `KSPCreate()`
2401: . a     - array to hold history
2402: . na    - size of `a`
2403: - reset - `PETSC_TRUE` indicates the history counter is reset to zero
2404:            for each new linear solve

2406:   Level: advanced

2408:   Notes:
2409:   If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2410:   If 'a' is `NULL` then space is allocated for the history. If 'na' is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a
2411:   default array of length 10,000 is allocated.

2413:   If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history

2415: .seealso: [](ch_ksp), `KSPGetResidualHistory()`, `KSP`
2416: @*/
2417: PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
2418: {
2419:   PetscFunctionBegin;

2422:   PetscCall(PetscFree(ksp->res_hist_alloc));
2423:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2424:     ksp->res_hist     = a;
2425:     ksp->res_hist_max = na;
2426:   } else {
2427:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
2428:     else ksp->res_hist_max = 10000; /* like default ksp->max_it */
2429:     PetscCall(PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc));

2431:     ksp->res_hist = ksp->res_hist_alloc;
2432:   }
2433:   ksp->res_hist_len   = 0;
2434:   ksp->res_hist_reset = reset;
2435:   PetscFunctionReturn(PETSC_SUCCESS);
2436: }

2438: /*@C
2439:   KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.

2441:   Not Collective

2443:   Input Parameter:
2444: . ksp - iterative solver obtained from `KSPCreate()`

2446:   Output Parameters:
2447: + a  - pointer to array to hold history (or `NULL`)
2448: - na - number of used entries in a (or `NULL`). Note this has different meanings depending on the `reset` argument to `KSPSetResidualHistory()`

2450:   Level: advanced

2452:   Note:
2453:   This array is borrowed and should not be freed by the caller.

2455:   Can only be called after a `KSPSetResidualHistory()` otherwise `a` and `na` are set to `NULL` and zero

2457:   When `reset` was `PETSC_TRUE` since a residual is computed before the first iteration, the value of `na` is generally one more than the value
2458:   returned with `KSPGetIterationNumber()`.

2460:   Some Krylov methods may not compute the final residual norm when convergence is declared because the maximum number of iterations allowed has been reached.
2461:   In this situation, when `reset` was `PETSC_TRUE`, `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`

2463:   Some Krylov methods (such as `KSPSTCG`), under certain circumstances, do not compute the final residual norm. In this situation, when `reset` was `PETSC_TRUE`,
2464:   `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`

2466:   `KSPBCGSL` does not record the residual norms for the "subiterations" hence the results from `KSPGetResidualHistory()` and `KSPGetIterationNumber()` will be different

2468:   Fortran Note:
2469:   The Fortran version of this routine has a calling sequence
2470: .vb
2471:   call KSPGetResidualHistory(KSP ksp, integer na, integer ierr)
2472: .ve
2473:   note that you have passed a Fortran array into `KSPSetResidualHistory()` and you need
2474:   to access the residual values from this Fortran array you provided. Only the `na` (number of
2475:   residual norms currently held) is set.

2477: .seealso: [](ch_ksp), `KSPSetResidualHistory()`, `KSP`, `KSPGetIterationNumber()`, `KSPSTCG`, `KSPBCGSL`
2478: @*/
PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
{
  PetscFunctionBegin;
  /* Both outputs are optional; the returned array is borrowed, not a copy */
  if (a) *a = ksp->res_hist;
  /* res_hist_len is a size_t; checked cast into the caller's PetscInt */
  if (na) PetscCall(PetscIntCast(ksp->res_hist_len, na));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2488: /*@
2489:   KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.

2491:   Not Collective

2493:   Input Parameters:
2494: + ksp   - iterative solver obtained from `KSPCreate()`
2495: . a     - array to hold history
2496: . na    - size of `a`
2497: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve

2499:   Level: advanced

2501:   Notes:
2502:   If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2503:   If 'a' is `NULL` then space is allocated for the history. If 'na' is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a default array of length 10,000 is allocated.

2505:   If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history

2507: .seealso: [](ch_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2508: @*/
2509: PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
2510: {
2511:   PetscFunctionBegin;

2514:   PetscCall(PetscFree(ksp->err_hist_alloc));
2515:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2516:     ksp->err_hist     = a;
2517:     ksp->err_hist_max = na;
2518:   } else {
2519:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
2520:     else ksp->err_hist_max = 10000; /* like default ksp->max_it */
2521:     PetscCall(PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc));
2522:     ksp->err_hist = ksp->err_hist_alloc;
2523:   }
2524:   ksp->err_hist_len   = 0;
2525:   ksp->err_hist_reset = reset;
2526:   PetscFunctionReturn(PETSC_SUCCESS);
2527: }

2529: /*@C
2530:   KSPGetErrorHistory - Gets the array used to hold the error history and the number of residuals it contains.

2532:   Not Collective

2534:   Input Parameter:
2535: . ksp - iterative solver obtained from `KSPCreate()`

2537:   Output Parameters:
2538: + a  - pointer to array to hold history (or `NULL`)
2539: - na - number of used entries in a (or `NULL`)

2541:   Level: advanced

2543:   Note:
2544:   This array is borrowed and should not be freed by the caller.
2545:   Can only be called after a `KSPSetErrorHistory()` otherwise `a` and `na` are set to `NULL` and zero

2547:   Fortran Note:
2548:   The Fortran version of this routine has a calling sequence
2549: .vb
2550:   call KSPGetErrorHistory(KSP ksp, integer na, integer ierr)
2551: .ve
2552:   note that you have passed a Fortran array into `KSPSetErrorHistory()` and you need
2553:   to access the residual values from this Fortran array you provided. Only the `na` (number of
2554:   residual norms currently held) is set.

2556: .seealso: [](ch_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2557: @*/
PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
{
  PetscFunctionBegin;
  /* Both outputs are optional; the returned array is borrowed, not a copy */
  if (a) *a = ksp->err_hist;
  /* err_hist_len is a size_t; checked cast into the caller's PetscInt */
  if (na) PetscCall(PetscIntCast(ksp->err_hist_len, na));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2567: /*@
2568:   KSPComputeConvergenceRate - Compute the convergence rate for the iteration <https://en.wikipedia.org/wiki/Coefficient_of_determination>

2570:   Not Collective

2572:   Input Parameter:
2573: . ksp - The `KSP`

2575:   Output Parameters:
2576: + cr   - The residual contraction rate
2577: . rRsq - The coefficient of determination, $R^2$, indicating the linearity of the data
2578: . ce   - The error contraction rate
2579: - eRsq - The coefficient of determination, $R^2$, indicating the linearity of the data

2581:   Level: advanced

2583:   Note:
2584:   Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $log r_k = log r_0 + k log c$. After linear regression,
2585:   the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$,

2587: .seealso: [](ch_ksp), `KSP`, `KSPConvergedRateView()`
2588: @*/
2589: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2590: {
2591:   PetscReal const *hist;
2592:   PetscReal       *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2593:   PetscInt         n, k;

2595:   PetscFunctionBegin;
2596:   if (cr || rRsq) {
2597:     PetscCall(KSPGetResidualHistory(ksp, &hist, &n));
2598:     if (!n) {
2599:       if (cr) *cr = 0.0;
2600:       if (rRsq) *rRsq = -1.0;
2601:     } else {
2602:       PetscCall(PetscMalloc2(n, &x, n, &y));
2603:       for (k = 0; k < n; ++k) {
2604:         x[k] = k;
2605:         y[k] = PetscLogReal(hist[k]);
2606:         mean += y[k];
2607:       }
2608:       mean /= n;
2609:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2610:       for (k = 0; k < n; ++k) {
2611:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2612:         var += PetscSqr(y[k] - mean);
2613:       }
2614:       PetscCall(PetscFree2(x, y));
2615:       if (cr) *cr = PetscExpReal(slope);
2616:       if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2617:     }
2618:   }
2619:   if (ce || eRsq) {
2620:     PetscCall(KSPGetErrorHistory(ksp, &hist, &n));
2621:     if (!n) {
2622:       if (ce) *ce = 0.0;
2623:       if (eRsq) *eRsq = -1.0;
2624:     } else {
2625:       PetscCall(PetscMalloc2(n, &x, n, &y));
2626:       for (k = 0; k < n; ++k) {
2627:         x[k] = k;
2628:         y[k] = PetscLogReal(hist[k]);
2629:         mean += y[k];
2630:       }
2631:       mean /= n;
2632:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2633:       for (k = 0; k < n; ++k) {
2634:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2635:         var += PetscSqr(y[k] - mean);
2636:       }
2637:       PetscCall(PetscFree2(x, y));
2638:       if (ce) *ce = PetscExpReal(slope);
2639:       if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2640:     }
2641:   }
2642:   PetscFunctionReturn(PETSC_SUCCESS);
2643: }

2645: /*@C
2646:   KSPSetConvergenceTest - Sets the function to be used to determine convergence of `KSPSolve()`

2648:   Logically Collective

2650:   Input Parameters:
2651: + ksp      - iterative solver obtained from `KSPCreate()`
2652: . converge - pointer to the function
2653: . ctx      - context for private data for the convergence routine (may be `NULL`)
2654: - destroy  - a routine for destroying the context (may be `NULL`)

2656:   Calling sequence of `converge`:
2657: + ksp    - iterative solver obtained from `KSPCreate()`
2658: . it     - iteration number
2659: . rnorm  - (estimated) 2-norm of (preconditioned) residual
2660: . reason - the reason why it has converged or diverged
2661: - ctx    - optional convergence context, as set by `KSPSetConvergenceTest()`

2663:   Calling sequence of `destroy`:
2664: . ctx - the context

2666:   Level: advanced

2668:   Notes:
2669:   Must be called after the `KSP` type has been set so put this after
2670:   a call to `KSPSetType()`, or `KSPSetFromOptions()`.

2672:   The default convergence test, `KSPConvergedDefault()`, aborts if the
2673:   residual grows to more than 10000 times the initial residual.

2675:   The default is a combination of relative and absolute tolerances.
2676:   The residual value that is tested may be an approximation; routines
2677:   that need exact values should compute them.

2679:   In the default PETSc convergence test, the precise values of reason
2680:   are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.

2682: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2683: @*/
2684: PetscErrorCode KSPSetConvergenceTest(KSP ksp, PetscErrorCode (*converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void *ctx, PetscErrorCode (*destroy)(void *ctx))
2685: {
2686:   PetscFunctionBegin;
2688:   if (ksp->convergeddestroy) PetscCall((*ksp->convergeddestroy)(ksp->cnvP));
2689:   ksp->converged        = converge;
2690:   ksp->convergeddestroy = destroy;
2691:   ksp->cnvP             = (void *)ctx;
2692:   PetscFunctionReturn(PETSC_SUCCESS);
2693: }

2695: /*@C
2696:   KSPGetConvergenceTest - Gets the function to be used to determine convergence.

2698:   Logically Collective

2700:   Input Parameter:
2701: . ksp - iterative solver obtained from `KSPCreate()`

2703:   Output Parameters:
2704: + converge - pointer to convergence test function
2705: . ctx      - context for private data for the convergence routine (may be `NULL`)
2706: - destroy  - a routine for destroying the context (may be `NULL`)

2708:   Calling sequence of `converge`:
2709: + ksp    - iterative solver obtained from `KSPCreate()`
2710: . it     - iteration number
2711: . rnorm  - (estimated) 2-norm of (preconditioned) residual
2712: . reason - the reason why it has converged or diverged
2713: - ctx    - optional convergence context, as set by `KSPSetConvergenceTest()`

2715:   Calling sequence of `destroy`:
2716: . ctx - the convergence test context

2718:   Level: advanced

2720: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2721: @*/
2722: PetscErrorCode KSPGetConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void **ctx, PetscErrorCode (**destroy)(void *ctx))
2723: {
2724:   PetscFunctionBegin;
2726:   if (converge) *converge = ksp->converged;
2727:   if (destroy) *destroy = ksp->convergeddestroy;
2728:   if (ctx) *ctx = ksp->cnvP;
2729:   PetscFunctionReturn(PETSC_SUCCESS);
2730: }

2732: /*@C
2733:   KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context

2735:   Logically Collective

2737:   Input Parameter:
2738: . ksp - iterative solver obtained from `KSPCreate()`

2740:   Output Parameters:
2741: + converge - pointer to convergence test function
2742: . ctx      - context for private data for the convergence routine
2743: - destroy  - a routine for destroying the context

2745:   Calling sequence of `converge`:
2746: + ksp    - iterative solver obtained from `KSPCreate()`
2747: . it     - iteration number
2748: . rnorm  - (estimated) 2-norm of (preconditioned) residual
2749: . reason - the reason why it has converged or diverged
2750: - ctx    - optional convergence context, as set by `KSPSetConvergenceTest()`

2752:   Calling sequence of `destroy`:
2753: . ctx - the convergence test context

2755:   Level: advanced

2757:   Note:
2758:   This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`)
2759:   and then calling `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed
2760:   by `KSPSetConvergenceTest()` the original context information
2761:   would be destroyed and hence the transferred context would be invalid and trigger a crash on use

2763: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2764: @*/
2765: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void **ctx, PetscErrorCode (**destroy)(void *ctx))
2766: {
2767:   PetscFunctionBegin;
2769:   *converge             = ksp->converged;
2770:   *destroy              = ksp->convergeddestroy;
2771:   *ctx                  = ksp->cnvP;
2772:   ksp->converged        = NULL;
2773:   ksp->cnvP             = NULL;
2774:   ksp->convergeddestroy = NULL;
2775:   PetscFunctionReturn(PETSC_SUCCESS);
2776: }

2778: /*@C
2779:   KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.

2781:   Not Collective

2783:   Input Parameter:
2784: . ksp - iterative solver obtained from `KSPCreate()`

2786:   Output Parameter:
2787: . ctx - monitoring context

2789:   Level: advanced

2791: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2792: @*/
PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
{
  PetscFunctionBegin;
  /* ctx is the address of a user pointer; store the convergence-test context there */
  *(void **)ctx = ksp->cnvP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2801: /*@
2802:   KSPBuildSolution - Builds the approximate solution in a vector provided.

2804:   Collective

2806:   Input Parameter:
2807: . ksp - iterative solver obtained from `KSPCreate()`

2809:   Output Parameter:
2810:    Provide exactly one of
2811: + v - location to stash solution, optional, otherwise pass `NULL`
2812: - V - the solution is returned in this location. This vector is created internally. This vector should NOT be destroyed by the user with `VecDestroy()`.

2814:   Level: developer

2816:   Notes:
2817:   This routine can be used in one of two ways
2818: .vb
2819:       KSPBuildSolution(ksp,NULL,&V);
2820:    or
2821:       KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2822: .ve
2823:   In the first case an internal vector is allocated to store the solution
2824:   (the user cannot destroy this vector). In the second case the solution
2825:   is generated in the vector that the user provides. Note that for certain
2826:   methods, such as `KSPCG`, the second case requires a copy of the solution,
2827:   while in the first case the call is essentially free since it simply
2828:   returns the vector where the solution already is stored. For some methods
2829:   like `KSPGMRES` during the solve this is a reasonably expensive operation and should only be
2830:   used if truly needed.

2832: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2833: @*/
2834: PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
2835: {
2836:   PetscFunctionBegin;
2838:   PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
2839:   if (!V) V = &v;
2840:   if (ksp->reason != KSP_CONVERGED_ITERATING) {
2841:     if (!v) PetscCall(KSPGetSolution(ksp, V));
2842:     else PetscCall(VecCopy(ksp->vec_sol, v));
2843:   } else {
2844:     PetscUseTypeMethod(ksp, buildsolution, v, V);
2845:   }
2846:   PetscFunctionReturn(PETSC_SUCCESS);
2847: }

2849: /*@
2850:   KSPBuildResidual - Builds the residual in a vector provided.

2852:   Collective

2854:   Input Parameter:
2855: . ksp - iterative solver obtained from `KSPCreate()`

2857:   Output Parameters:
2858: + t - work vector.  If not provided then one is generated.
2859: . v - optional location to stash residual.  If `v` is not provided, then a location is generated.
2860: - V - the residual

2862:   Level: advanced

2864:   Note:
2865:   Regardless of whether or not `v` is provided, the residual is
2866:   returned in `V`.

2868: .seealso: [](ch_ksp), `KSP`, `KSPBuildSolution()`
2869: @*/
2870: PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
2871: {
2872:   PetscBool flag = PETSC_FALSE;
2873:   Vec       w = v, tt = t;

2875:   PetscFunctionBegin;
2877:   if (!w) PetscCall(VecDuplicate(ksp->vec_rhs, &w));
2878:   if (!tt) {
2879:     PetscCall(VecDuplicate(ksp->vec_sol, &tt));
2880:     flag = PETSC_TRUE;
2881:   }
2882:   PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
2883:   if (flag) PetscCall(VecDestroy(&tt));
2884:   PetscFunctionReturn(PETSC_SUCCESS);
2885: }

2887: /*@
2888:   KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2889:   before solving. This actually CHANGES the matrix (and right-hand side).

2891:   Logically Collective

2893:   Input Parameters:
2894: + ksp   - the `KSP` context
2895: - scale - `PETSC_TRUE` or `PETSC_FALSE`

2897:   Options Database Keys:
2898: + -ksp_diagonal_scale     - perform a diagonal scaling before the solve
2899: - -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve

2901:   Level: advanced

2903:   Notes:
2904:   Scales the matrix by  $D^{-1/2}  A  D^{-1/2}  [D^{1/2} x ] = D^{-1/2} b $
2905:   where $D_{ii}$ is $1/abs(A_{ii}) $ unless $A_{ii}$ is zero and then it is 1.

2907:   BE CAREFUL with this routine: it actually scales the matrix and right
2908:   hand side that define the system. After the system is solved the matrix
2909:   and right-hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`

2911:   This should NOT be used within the `SNES` solves if you are using a line
2912:   search.

2914:   If you use this with the `PCType` `PCEISENSTAT` preconditioner than you can
2915:   use the `PCEisenstatSetNoDiagonalScaling()` option, or `-pc_eisenstat_no_diagonal_scaling`
2916:   to save some unneeded, redundant flops.

2918: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2919: @*/
PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
{
  PetscFunctionBegin;
  /* Request (or cancel) symmetric diagonal scaling of the system before the solve */
  ksp->dscale = scale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2929: /*@
2930:   KSPGetDiagonalScale - Checks if `KSP` solver scales the matrix and right-hand side, that is if `KSPSetDiagonalScale()` has been called

2932:   Not Collective

2934:   Input Parameter:
2935: . ksp - the `KSP` context

2937:   Output Parameter:
2938: . scale - `PETSC_TRUE` or `PETSC_FALSE`

2940:   Level: intermediate

2942: .seealso: [](ch_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`
2943: @*/
PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
{
  PetscFunctionBegin;
  PetscAssertPointer(scale, 2);
  /* Report whether diagonal scaling was requested with KSPSetDiagonalScale() */
  *scale = ksp->dscale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2953: /*@
2954:   KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.

2956:   Logically Collective

2958:   Input Parameters:
2959: + ksp - the `KSP` context
2960: - fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2961:          rescale (default)

2963:   Level: intermediate

2965:   Notes:
2966:   Must be called after `KSPSetDiagonalScale()`

2968:   Using this will slow things down, because it rescales the matrix before and
2969:   after each linear solve. This is intended mainly for testing to allow one
2970:   to easily get back the original system to make sure the solution computed is
2971:   accurate enough.

2973: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2974: @*/
PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
{
  PetscFunctionBegin;
  /* Request that the matrix and right-hand side be scaled back after the solve */
  ksp->dscalefix = fix;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2984: /*@
2985:   KSPGetDiagonalScaleFix - Determines if `KSP` diagonally scales the system back after solving. That is `KSPSetDiagonalScaleFix()` has been called

2987:   Not Collective

2989:   Input Parameter:
2990: . ksp - the `KSP` context

2992:   Output Parameter:
2993: . fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2994:          rescale (default)

2996:   Level: intermediate

2998: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2999: @*/
PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
{
  PetscFunctionBegin;
  PetscAssertPointer(fix, 2);
  /* Report whether post-solve rescaling was requested with KSPSetDiagonalScaleFix() */
  *fix = ksp->dscalefix;
  PetscFunctionReturn(PETSC_SUCCESS);
}

3009: /*@C
3010:   KSPSetComputeOperators - set routine to compute the linear operators

3012:   Logically Collective

3014:   Input Parameters:
3015: + ksp  - the `KSP` context
3016: . func - function to compute the operators, see `KSPComputeOperatorsFn` for the calling sequence
3017: - ctx  - optional context

3019:   Level: beginner

3021:   Notes:
3022:   `func()` will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
3023:   unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()` is called. This allows the same system to be solved several times
3024:   with different right-hand side functions but is a confusing API since one might expect it to be called for each `KSPSolve()`

3026:   To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`

3028:   Developer Note:
3029:   Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes clear when new matrices are computing without requiring call this
3030:   routine to indicate when the new matrix should be computed.

3032: .seealso: [](ch_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`, `KSPComputeOperatorsFn`
3033: @*/
PetscErrorCode KSPSetComputeOperators(KSP ksp, KSPComputeOperatorsFn *func, void *ctx)
{
  DM dm;

  PetscFunctionBegin;
  /* The callback is stored on the KSP's DM, not on the KSP itself */
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMKSPSetComputeOperators(dm, func, ctx));
  /* Ensure the next KSPSolve() rebuilds the matrix with the new callback */
  if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

3046: /*@C
3047:   KSPSetComputeRHS - set routine to compute the right-hand side of the linear system

3049:   Logically Collective

3051:   Input Parameters:
3052: + ksp  - the `KSP` context
3053: . func - function to compute the right-hand side, see `KSPComputeRHSFn` for the calling sequence
3054: - ctx  - optional context

3056:   Level: beginner

3058:   Note:
3059:   The routine you provide will be called EACH time you call `KSPSolve()` to prepare the new right-hand side for that solve

3061: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`, `KSPComputeRHSFn`
3062: @*/
PetscErrorCode KSPSetComputeRHS(KSP ksp, KSPComputeRHSFn *func, void *ctx)
{
  DM dm;

  PetscFunctionBegin;
  /* The callback is stored on the KSP's DM, not on the KSP itself */
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMKSPSetComputeRHS(dm, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

3074: /*@C
3075:   KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system

3077:   Logically Collective

3079:   Input Parameters:
3080: + ksp  - the `KSP` context
3081: . func - function to compute the initial guess, see `KSPComputeInitialGuessFn` for calling sequence
3082: - ctx  - optional context

3084:   Level: beginner

3086:   Note:
3087:   This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`, otherwise
3088:   call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver

3090: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`,
3091:           `KSPComputeInitialGuessFn`
3092: @*/
PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, KSPComputeInitialGuessFn *func, void *ctx)
{
  DM dm;

  PetscFunctionBegin;
  /* The callback is stored on the KSP's DM, not on the KSP itself */
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMKSPSetComputeInitialGuess(dm, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

3104: /*@
3105:   KSPSetUseExplicitTranspose - Determines whether the explicit transpose of the operator is formed in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
3106:   be explicitly formed since the solve is much more efficient.

3108:   Logically Collective

3110:   Input Parameter:
3111: . ksp - the `KSP` context

3113:   Input Parameter:
3114: . flg - `PETSC_TRUE` to transpose the system in `KSPSolveTranspose()`, `PETSC_FALSE` to not transpose (default)

3116:   Level: advanced

3118: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `KSP`
3119: @*/
PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
{
  PetscFunctionBegin;
  /* Request that KSPSolveTranspose() form and use an explicit transpose of the operator */
  ksp->transpose.use_explicittranspose = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}