Actual source code: itfunc.c

  1: /*
  2:       Interface KSP routines that the user calls.
  3: */

  5: #include <petsc/private/kspimpl.h>
  6: #include <petsc/private/matimpl.h>
  7: #include <petscdm.h>

  9: /* number of nested levels of KSPSetUp/Solve(). This is used to determine if KSP_DIVERGED_ITS should be fatal. */
 10: static PetscInt level = 0;

 12: static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
 13: {
 14:   PetscCall(PetscViewerPushFormat(viewer, format));
 15:   PetscCall(PetscObjectView(obj, viewer));
 16:   PetscCall(PetscViewerPopFormat(viewer));
 17:   return PETSC_SUCCESS;
 18: }

 20: /*@
 21:    KSPComputeExtremeSingularValues - Computes the extreme singular values
 22:    for the preconditioned operator. Called after or during `KSPSolve()`.

 24:    Not Collective

 26:    Input Parameter:
 27: .  ksp - iterative context obtained from `KSPCreate()`

 29:    Output Parameters:
 30: +  emax - maximum estimated singular value
 31: -  emin - minimum estimated singular value

 33:    Options Database Key:
 34: .  -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.

 36:    Notes:
 37:    One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
 38:    (or use the option -ksp_view_eigenvalues) in order for this routine to work correctly.

 40:    Many users may just want to use the monitoring routine
 41:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
 42:    to print the extreme singular values at each iteration of the linear solve.

 44:    Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
 45:    The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
 46:    intended for eigenanalysis. Consider the excellent package `SLEPc` if accurate values are required.

 48:    Disable restarts if using KSPGMRES, otherwise this estimate will only be using those iterations after the last
 49:    restart. See `KSPGMRESSetRestart()` for more details.

 51:    Level: advanced

 53: .seealso: [](chapter_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`
 54: @*/
 55: PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
 56: {
 57:   PetscFunctionBegin;
 61:   PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Singular values not requested before KSPSetUp()");

 63:   if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
 64:   else {
 65:     *emin = -1.0;
 66:     *emax = -1.0;
 67:   }
 68:   PetscFunctionReturn(PETSC_SUCCESS);
 69: }

 71: /*@
 72:    KSPComputeEigenvalues - Computes the extreme eigenvalues for the
 73:    preconditioned operator. Called after or during `KSPSolve()`.

 75:    Not Collective

 77:    Input Parameters:
 78: +  ksp - iterative context obtained from `KSPCreate()`
 79: -  n - size of arrays r and c. The number of eigenvalues computed (neig) will, in
 80:        general, be less than this.

 82:    Output Parameters:
 83: +  r - real part of computed eigenvalues, provided by user with a dimension of at least n
 84: .  c - complex part of computed eigenvalues, provided by user with a dimension of at least n
 85: -  neig - actual number of eigenvalues computed (will be less than or equal to n)

 87:    Options Database Keys:
 88: .  -ksp_view_eigenvalues - Prints eigenvalues to stdout

 90:    Notes:
 91:    The number of eigenvalues estimated depends on the size of the Krylov space
 92:    generated during the `KSPSolve()` ; for example, with
 93:    CG it corresponds to the number of CG iterations, for GMRES it is the number
 94:    of GMRES iterations SINCE the last restart. Any extra space in r[] and c[]
 95:    will be ignored.

 97:    `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
 98:    intended only for assistance in understanding the convergence of iterative
 99:    methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
100:    the excellent package SLEPc.

102:    One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
103:    in order for this routine to work correctly.

105:    Many users may just want to use the monitoring routine
106:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
107:    to print the singular values at each iteration of the linear solve.

109:    `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.

111:    Level: advanced

113: .seealso: [](chapter_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
114: @*/
115: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
116: {
117:   PetscFunctionBegin;
121:   PetscCheck(n >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Requested < 0 Eigenvalues");
123:   PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Eigenvalues not requested before KSPSetUp()");

125:   if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
126:   else *neig = 0;
127:   PetscFunctionReturn(PETSC_SUCCESS);
128: }

130: /*@
131:    KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the
132:    smallest or largest in modulus, for the preconditioned operator.

134:    Not Collective

136:    Input Parameters:
137: +  ksp   - iterative context obtained from `KSPCreate()`
138: .  ritz  - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
139: -  small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively

141:    Output Parameters:
142: +  nrit  - On input number of (harmonic) Ritz pairs to compute; on output, actual number of computed (harmonic) Ritz pairs
143: .  S     - an array of the Ritz vectors, pass in an array of vectors of size nrit
144: .  tetar - real part of the Ritz values, pass in an array of size nrit
145: -  tetai - imaginary part of the Ritz values, pass in an array of size nrit

147:    Notes:
148:    This only works with a `KSPType` of `KSPGMRES`.

150:    One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.

152:    This routine must be called after `KSPSolve()`.

154:    In GMRES, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
155:    the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
156:    a restart (that is a complete GMRES cycle was never achieved).

158:    The number of actual (harmonic) Ritz pairs computed is less than or equal to the restart
159:    parameter for GMRES if a complete cycle has been performed or less or equal to the number of GMRES
160:    iterations.

162:    `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).

164:    For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
165:    the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
166:    vectors S are equal to the real and the imaginary parts of the associated vectors.
167:    When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
168:    values are still returned in tetar and tetai, as is done in `KSPComputeEigenvalues()`, but
169:    the Ritz vectors S are complex.

171:    The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.

173:    The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator, consider the
174:    excellent package `SLEPc` if accurate values are required.

176:    Level: advanced

178: .seealso: [](chapter_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
179: @*/
180: PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
181: {
182:   PetscFunctionBegin;
184:   PetscCheck(ksp->calc_ritz, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Ritz pairs not requested before KSPSetUp()");
185:   PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
186:   PetscFunctionReturn(PETSC_SUCCESS);
187: }
188: /*@
189:    KSPSetUpOnBlocks - Sets up the preconditioner for each block in
190:    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
191:    methods.

193:    Collective

195:    Input Parameter:
196: .  ksp - the `KSP` context

198:    Notes:
199:    `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
200:    more precise profiling (via -log_view) of the setup phase for these
201:    block preconditioners.  If the user does not call `KSPSetUpOnBlocks()`,
202:    it will automatically be called from within `KSPSolve()`.

204:    Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
205:    on the PC context within the `KSP` context.

207:    Level: advanced

209: .seealso: [](chapter_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
210: @*/
211: PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
212: {
213:   PC             pc;
214:   PCFailedReason pcreason;

216:   PetscFunctionBegin;
218:   level++;
219:   PetscCall(KSPGetPC(ksp, &pc));
220:   PetscCall(PCSetUpOnBlocks(pc));
221:   PetscCall(PCGetFailedReasonRank(pc, &pcreason));
222:   level--;
223:   /*
224:      This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
225:      this flag and initializing an appropriate vector with VecSetInf() so that the first norm computation can
226:      produce a result at KSPCheckNorm() thus communicating the known problem to all MPI ranks so they may
227:      terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual()
228:   */
229:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
230:   PetscFunctionReturn(PETSC_SUCCESS);
231: }

233: /*@
234:    KSPSetReusePreconditioner - reuse the current preconditioner, do not construct a new one even if the operator changes

236:    Collective

238:    Input Parameters:
239: +  ksp   - iterative context obtained from `KSPCreate()`
240: -  flag - `PETSC_TRUE` to reuse the current preconditioner

242:    Level: intermediate

244: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
245: @*/
246: PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
247: {
248:   PC pc;

250:   PetscFunctionBegin;
252:   PetscCall(KSPGetPC(ksp, &pc));
253:   PetscCall(PCSetReusePreconditioner(pc, flag));
254:   PetscFunctionReturn(PETSC_SUCCESS);
255: }

257: /*@
258:    KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the operator in the preconditioner has changed.

260:    Collective

262:    Input Parameter:
263: .  ksp   - iterative context obtained from `KSPCreate()`

265:    Output Parameter:
266: .  flag - the boolean flag

268:    Level: intermediate

270: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
271: @*/
272: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
273: {
274:   PetscFunctionBegin;
277:   *flag = PETSC_FALSE;
278:   if (ksp->pc) PetscCall(PCGetReusePreconditioner(ksp->pc, flag));
279:   PetscFunctionReturn(PETSC_SUCCESS);
280: }

282: /*@
283:    KSPSetSkipPCSetFromOptions - prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`. This is used if the same `PC` is shared by more than one `KSP` so its options are not resettable for each `KSP`

285:    Collective

287:    Input Parameters:
288: +  ksp   - iterative context obtained from `KSPCreate()`
289: -  flag - `PETSC_TRUE` to skip calling the `PCSetFromOptions()`

291:    Level: intermediate

293: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
294: @*/
295: PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
296: {
297:   PetscFunctionBegin;
299:   ksp->skippcsetfromoptions = flag;
300:   PetscFunctionReturn(PETSC_SUCCESS);
301: }

303: /*@
304:    KSPSetUp - Sets up the internal data structures for the
305:    later use of an iterative solver.

307:    Collective

309:    Input Parameter:
310: .  ksp   - iterative context obtained from `KSPCreate()`

312:    Level: developer

314: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`
315: @*/
316: PetscErrorCode KSPSetUp(KSP ksp)
317: {
318:   Mat            A, B;
319:   Mat            mat, pmat;
320:   MatNullSpace   nullsp;
321:   PCFailedReason pcreason;

323:   PetscFunctionBegin;
325:   level++;

327:   /* reset the convergence flag from the previous solves */
328:   ksp->reason = KSP_CONVERGED_ITERATING;

330:   if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES));
331:   PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));

333:   if (ksp->dmActive && !ksp->setupstage) {
334:     /* first time in so build matrix and vector data structures using DM */
335:     if (!ksp->vec_rhs) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs));
336:     if (!ksp->vec_sol) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_sol));
337:     PetscCall(DMCreateMatrix(ksp->dm, &A));
338:     PetscCall(KSPSetOperators(ksp, A, A));
339:     PetscCall(PetscObjectDereference((PetscObject)A));
340:   }

342:   if (ksp->dmActive) {
343:     DMKSP kdm;
344:     PetscCall(DMGetDMKSP(ksp->dm, &kdm));

346:     if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS) {
347:       /* only computes initial guess the first time through */
348:       PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
349:       PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
350:     }
351:     if (kdm->ops->computerhs) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));

353:     if (ksp->setupstage != KSP_SETUP_NEWRHS) {
354:       PetscCheck(kdm->ops->computeoperators, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp,PETSC_FALSE);");
355:       PetscCall(KSPGetOperators(ksp, &A, &B));
356:       PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
357:     }
358:   }

360:   if (ksp->setupstage == KSP_SETUP_NEWRHS) {
361:     level--;
362:     PetscFunctionReturn(PETSC_SUCCESS);
363:   }
364:   PetscCall(PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));

366:   switch (ksp->setupstage) {
367:   case KSP_SETUP_NEW:
368:     PetscUseTypeMethod(ksp, setup);
369:     break;
370:   case KSP_SETUP_NEWMATRIX: { /* This should be replaced with a more general mechanism */
371:     if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
372:   } break;
373:   default:
374:     break;
375:   }

377:   if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
378:   PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
379:   /* scale the matrix if requested */
380:   if (ksp->dscale) {
381:     PetscScalar *xx;
382:     PetscInt     i, n;
383:     PetscBool    zeroflag = PETSC_FALSE;
384:     if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
385:     if (!ksp->diagonal) { /* allocate vector to hold diagonal */
386:       PetscCall(MatCreateVecs(pmat, &ksp->diagonal, NULL));
387:     }
388:     PetscCall(MatGetDiagonal(pmat, ksp->diagonal));
389:     PetscCall(VecGetLocalSize(ksp->diagonal, &n));
390:     PetscCall(VecGetArray(ksp->diagonal, &xx));
391:     for (i = 0; i < n; i++) {
392:       if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
393:       else {
394:         xx[i]    = 1.0;
395:         zeroflag = PETSC_TRUE;
396:       }
397:     }
398:     PetscCall(VecRestoreArray(ksp->diagonal, &xx));
399:     if (zeroflag) PetscCall(PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n"));
400:     PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
401:     if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
402:     ksp->dscalefix2 = PETSC_FALSE;
403:   }
404:   PetscCall(PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
405:   PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
406:   PetscCall(PCSetUp(ksp->pc));
407:   PetscCall(PCGetFailedReasonRank(ksp->pc, &pcreason));
408:   /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; their is no code to handle a ksp->reason on only some ranks */
409:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;

411:   PetscCall(MatGetNullSpace(mat, &nullsp));
412:   if (nullsp) {
413:     PetscBool test = PETSC_FALSE;
414:     PetscCall(PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL));
415:     if (test) PetscCall(MatNullSpaceTest(nullsp, mat, NULL));
416:   }
417:   ksp->setupstage = KSP_SETUP_NEWRHS;
418:   level--;
419:   PetscFunctionReturn(PETSC_SUCCESS);
420: }

422: /*@C
423:    KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged to a viewer

425:    Collective

427:    Parameter:
428: +  ksp - iterative context obtained from `KSPCreate()`
429: -  viewer - the viewer to display the reason

431:    Options Database Keys:
432: +  -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
433: -  -ksp_converged_reason ::failed - only print reason and number of iterations when diverged

435:    Notes:
436:      To change the format of the output call PetscViewerPushFormat(viewer,format) before this call. Use PETSC_VIEWER_DEFAULT for the default,
437:      use PETSC_VIEWER_FAILED to only display a reason if it fails.

439:    Level: beginner

441: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
442:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
443: @*/
444: PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
445: {
446:   PetscBool         isAscii;
447:   PetscViewerFormat format;

449:   PetscFunctionBegin;
450:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
451:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
452:   if (isAscii) {
453:     PetscCall(PetscViewerGetFormat(viewer, &format));
454:     PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
455:     if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
456:       if (((PetscObject)ksp)->prefix) {
457:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
458:       } else {
459:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
460:       }
461:     } else if (ksp->reason <= 0) {
462:       if (((PetscObject)ksp)->prefix) {
463:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
464:       } else {
465:         PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
466:       }
467:       if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
468:         PCFailedReason reason;
469:         PetscCall(PCGetFailedReason(ksp->pc, &reason));
470:         PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]));
471:       }
472:     }
473:     PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
474:   }
475:   PetscFunctionReturn(PETSC_SUCCESS);
476: }

478: /*@C
479:    KSPConvergedReasonViewSet - Sets an ADDITIONAL function that is to be used at the
480:     end of the linear solver to display the convergence reason of the linear solver.

482:    Logically Collective

484:    Input Parameters:
485: +  ksp - the `KSP` context
486: .  f - the ksp converged reason view function
487: .  vctx - [optional] user-defined context for private data for the
488:           ksp converged reason view routine (use `NULL` if no context is desired)
489: -  reasonviewdestroy - [optional] routine that frees reasonview context
490:           (may be `NULL`)

492:    Options Database Keys:
493: +    -ksp_converged_reason        - sets a default `KSPConvergedReasonView()`
494: -    -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have
495:                             been hardwired into a code by
496:                             calls to `KSPConvergedReasonViewSet()`, but
497:                             does not cancel those set via
498:                             the options database.

500:    Notes:
501:    Several different converged reason view routines may be set by calling
502:    `KSPConvergedReasonViewSet()` multiple times; all will be called in the
503:    order in which they were set.

505:    Level: intermediate

507: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewCancel()`
508: @*/
509: PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, PetscErrorCode (*f)(KSP, void *), void *vctx, PetscErrorCode (*reasonviewdestroy)(void **))
510: {
511:   PetscInt  i;
512:   PetscBool identical;

514:   PetscFunctionBegin;
516:   for (i = 0; i < ksp->numberreasonviews; i++) {
517:     PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))f, vctx, reasonviewdestroy, (PetscErrorCode(*)(void))ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical));
518:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
519:   }
520:   PetscCheck(ksp->numberreasonviews < MAXKSPREASONVIEWS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP reasonview set");
521:   ksp->reasonview[ksp->numberreasonviews]          = f;
522:   ksp->reasonviewdestroy[ksp->numberreasonviews]   = reasonviewdestroy;
523:   ksp->reasonviewcontext[ksp->numberreasonviews++] = (void *)vctx;
524:   PetscFunctionReturn(PETSC_SUCCESS);
525: }

527: /*@
528:    KSPConvergedReasonViewCancel - Clears all the reasonview functions for a `KSP` object.

530:    Collective

532:    Input Parameter:
533: .  ksp - iterative context obtained from `KSPCreate()`

535:    Level: intermediate

537: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`
538: @*/
539: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
540: {
541:   PetscInt i;

543:   PetscFunctionBegin;
545:   for (i = 0; i < ksp->numberreasonviews; i++) {
546:     if (ksp->reasonviewdestroy[i]) PetscCall((*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]));
547:   }
548:   ksp->numberreasonviews = 0;
549:   PetscFunctionReturn(PETSC_SUCCESS);
550: }

552: /*@
553:   KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how a KSPReason is to be viewed.

555:   Collective

557:   Input Parameter:
558: . ksp   - the `KSP` object

560:   Level: intermediate

562: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`
563: @*/
564: PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
565: {
566:   PetscViewer       viewer;
567:   PetscBool         flg;
568:   PetscViewerFormat format;
569:   PetscInt          i;

571:   PetscFunctionBegin;

573:   /* Call all user-provided reason review routines */
574:   for (i = 0; i < ksp->numberreasonviews; i++) PetscCall((*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]));

576:   /* Call the default PETSc routine */
577:   PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ksp), ((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_converged_reason", &viewer, &format, &flg));
578:   if (flg) {
579:     PetscCall(PetscViewerPushFormat(viewer, format));
580:     PetscCall(KSPConvergedReasonView(ksp, viewer));
581:     PetscCall(PetscViewerPopFormat(viewer));
582:     PetscCall(PetscViewerDestroy(&viewer));
583:   }
584:   PetscFunctionReturn(PETSC_SUCCESS);
585: }

587: /*@C
588:   KSPConvergedRateView - Displays the reason a `KSP` solve converged or diverged to a viewer

590:   Collective

592:   Input Parameters:
593: +  ksp    - iterative context obtained from `KSPCreate()`
594: -  viewer - the viewer to display the reason

596:   Options Database Key:
597: . -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)

599:   Notes:
600:   To change the format of the output, call PetscViewerPushFormat(viewer,format) before this call.

602:   Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $log r_k = log r_0 + k log c$. After linear regression,
603:   the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)}$,
604:   see also https://en.wikipedia.org/wiki/Coefficient_of_determination

606:   Level: intermediate

608: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
609: @*/
610: PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
611: {
612:   PetscViewerFormat format;
613:   PetscBool         isAscii;
614:   PetscReal         rrate, rRsq, erate = 0.0, eRsq = 0.0;
615:   PetscInt          its;
616:   const char       *prefix, *reason = KSPConvergedReasons[ksp->reason];

618:   PetscFunctionBegin;
619:   PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
620:   PetscCall(KSPGetIterationNumber(ksp, &its));
621:   PetscCall(KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq));
622:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
623:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
624:   if (isAscii) {
625:     PetscCall(PetscViewerGetFormat(viewer, &format));
626:     PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
627:     if (ksp->reason > 0) {
628:       if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its));
629:       else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its));
630:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
631:       if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
632:       if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
633:       PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
634:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
635:     } else if (ksp->reason <= 0) {
636:       if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its));
637:       else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its));
638:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
639:       if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
640:       if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
641:       PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
642:       PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
643:       if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
644:         PCFailedReason reason;
645:         PetscCall(PCGetFailedReason(ksp->pc, &reason));
646:         PetscCall(PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]));
647:       }
648:     }
649:     PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
650:   }
651:   PetscFunctionReturn(PETSC_SUCCESS);
652: }

654: #include <petscdraw.h>

656: static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
657: {
658:   PetscReal  *r, *c;
659:   PetscInt    n, i, neig;
660:   PetscBool   isascii, isdraw;
661:   PetscMPIInt rank;

663:   PetscFunctionBegin;
664:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank));
665:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
666:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
667:   if (isExplicit) {
668:     PetscCall(VecGetSize(ksp->vec_sol, &n));
669:     PetscCall(PetscMalloc2(n, &r, n, &c));
670:     PetscCall(KSPComputeEigenvaluesExplicitly(ksp, n, r, c));
671:     neig = n;
672:   } else {
673:     PetscInt nits;

675:     PetscCall(KSPGetIterationNumber(ksp, &nits));
676:     n = nits + 2;
677:     if (!nits) {
678:       PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n"));
679:       PetscFunctionReturn(PETSC_SUCCESS);
680:     }
681:     PetscCall(PetscMalloc2(n, &r, n, &c));
682:     PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
683:   }
684:   if (isascii) {
685:     PetscCall(PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively"));
686:     for (i = 0; i < neig; ++i) {
687:       if (c[i] >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]));
688:       else PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]));
689:     }
690:   } else if (isdraw && rank == 0) {
691:     PetscDraw   draw;
692:     PetscDrawSP drawsp;

694:     if (format == PETSC_VIEWER_DRAW_CONTOUR) {
695:       PetscCall(KSPPlotEigenContours_Private(ksp, neig, r, c));
696:     } else {
697:       PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
698:       PetscCall(PetscDrawSPCreate(draw, 1, &drawsp));
699:       PetscCall(PetscDrawSPReset(drawsp));
700:       for (i = 0; i < neig; ++i) PetscCall(PetscDrawSPAddPoint(drawsp, r + i, c + i));
701:       PetscCall(PetscDrawSPDraw(drawsp, PETSC_TRUE));
702:       PetscCall(PetscDrawSPSave(drawsp));
703:       PetscCall(PetscDrawSPDestroy(&drawsp));
704:     }
705:   }
706:   PetscCall(PetscFree2(r, c));
707:   PetscFunctionReturn(PETSC_SUCCESS);
708: }

710: static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
711: {
712:   PetscReal smax, smin;
713:   PetscInt  nits;
714:   PetscBool isascii;

716:   PetscFunctionBegin;
717:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
718:   PetscCall(KSPGetIterationNumber(ksp, &nits));
719:   if (!nits) {
720:     PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n"));
721:     PetscFunctionReturn(PETSC_SUCCESS);
722:   }
723:   PetscCall(KSPComputeExtremeSingularValues(ksp, &smax, &smin));
724:   if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme singular values: max %g min %g max/min %g\n", (double)smax, (double)smin, (double)(smax / smin)));
725:   PetscFunctionReturn(PETSC_SUCCESS);
726: }

728: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
729: {
730:   PetscBool isascii;

732:   PetscFunctionBegin;
733:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
734:   PetscCheck(!ksp->dscale || ksp->dscalefix, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Cannot compute final scale with -ksp_diagonal_scale except also with -ksp_diagonal_scale_fix");
735:   if (isascii) {
736:     Mat       A;
737:     Vec       t;
738:     PetscReal norm;

740:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
741:     PetscCall(VecDuplicate(ksp->vec_rhs, &t));
742:     PetscCall(KSP_MatMult(ksp, A, ksp->vec_sol, t));
743:     PetscCall(VecAYPX(t, -1.0, ksp->vec_rhs));
744:     PetscCall(VecNorm(t, NORM_2, &norm));
745:     PetscCall(VecDestroy(&t));
746:     PetscCall(PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm));
747:   }
748:   PetscFunctionReturn(PETSC_SUCCESS);
749: }

751: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
752: {
753:   PetscInt i;

755:   PetscFunctionBegin;
756:   if (!ksp->pauseFinal) PetscFunctionReturn(PETSC_SUCCESS);
757:   for (i = 0; i < ksp->numbermonitors; ++i) {
758:     PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ksp->monitorcontext[i];
759:     PetscDraw             draw;
760:     PetscReal             lpause;

762:     if (!vf) continue;
763:     if (vf->lg) {
764:       if (!PetscCheckPointer(vf->lg, PETSC_OBJECT)) continue;
765:       if (((PetscObject)vf->lg)->classid != PETSC_DRAWLG_CLASSID) continue;
766:       PetscCall(PetscDrawLGGetDraw(vf->lg, &draw));
767:       PetscCall(PetscDrawGetPause(draw, &lpause));
768:       PetscCall(PetscDrawSetPause(draw, -1.0));
769:       PetscCall(PetscDrawPause(draw));
770:       PetscCall(PetscDrawSetPause(draw, lpause));
771:     } else {
772:       PetscBool isdraw;

774:       if (!PetscCheckPointer(vf->viewer, PETSC_OBJECT)) continue;
775:       if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
776:       PetscCall(PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw));
777:       if (!isdraw) continue;
778:       PetscCall(PetscViewerDrawGetDraw(vf->viewer, 0, &draw));
779:       PetscCall(PetscDrawGetPause(draw, &lpause));
780:       PetscCall(PetscDrawSetPause(draw, -1.0));
781:       PetscCall(PetscDrawPause(draw));
782:       PetscCall(PetscDrawSetPause(draw, lpause));
783:     }
784:   }
785:   PetscFunctionReturn(PETSC_SUCCESS);
786: }

788: static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
789: {
790:   PetscBool    flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
791:   Mat          mat, pmat;
792:   MPI_Comm     comm;
793:   MatNullSpace nullsp;
794:   Vec          btmp, vec_rhs = NULL;

796:   PetscFunctionBegin;
797:   level++;
798:   comm = PetscObjectComm((PetscObject)ksp);
799:   if (x && x == b) {
800:     PetscCheck(ksp->guess_zero, comm, PETSC_ERR_ARG_INCOMP, "Cannot use x == b with nonzero initial guess");
801:     PetscCall(VecDuplicate(b, &x));
802:     inXisinB = PETSC_TRUE;
803:   }
804:   if (b) {
805:     PetscCall(PetscObjectReference((PetscObject)b));
806:     PetscCall(VecDestroy(&ksp->vec_rhs));
807:     ksp->vec_rhs = b;
808:   }
809:   if (x) {
810:     PetscCall(PetscObjectReference((PetscObject)x));
811:     PetscCall(VecDestroy(&ksp->vec_sol));
812:     ksp->vec_sol = x;
813:   }

815:   if (ksp->viewPre) PetscCall(ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre));

817:   if (ksp->presolve) PetscCall((*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx));

819:   /* reset the residual history list if requested */
820:   if (ksp->res_hist_reset) ksp->res_hist_len = 0;
821:   if (ksp->err_hist_reset) ksp->err_hist_len = 0;

823:   /* KSPSetUp() scales the matrix if needed */
824:   PetscCall(KSPSetUp(ksp));
825:   PetscCall(KSPSetUpOnBlocks(ksp));

827:   if (ksp->guess) {
828:     PetscObjectState ostate, state;

830:     PetscCall(KSPGuessSetUp(ksp->guess));
831:     PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate));
832:     PetscCall(KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
833:     PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &state));
834:     if (state != ostate) {
835:       ksp->guess_zero = PETSC_FALSE;
836:     } else {
837:       PetscCall(PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n"));
838:       ksp->guess_zero = PETSC_TRUE;
839:     }
840:   }

842:   PetscCall(VecSetErrorIfLocked(ksp->vec_sol, 3));

844:   PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
845:   PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
846:   /* diagonal scale RHS if called for */
847:   if (ksp->dscale) {
848:     PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
849:     /* second time in, but matrix was scaled back to original */
850:     if (ksp->dscalefix && ksp->dscalefix2) {
851:       Mat mat, pmat;

853:       PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
854:       PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
855:       if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
856:     }

858:     /* scale initial guess */
859:     if (!ksp->guess_zero) {
860:       if (!ksp->truediagonal) {
861:         PetscCall(VecDuplicate(ksp->diagonal, &ksp->truediagonal));
862:         PetscCall(VecCopy(ksp->diagonal, ksp->truediagonal));
863:         PetscCall(VecReciprocal(ksp->truediagonal));
864:       }
865:       PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal));
866:     }
867:   }
868:   PetscCall(PCPreSolve(ksp->pc, ksp));

870:   if (ksp->guess_zero) PetscCall(VecSet(ksp->vec_sol, 0.0));
871:   if (ksp->guess_knoll) { /* The Knoll trick is independent on the KSPGuess specified */
872:     PetscCall(PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol));
873:     PetscCall(KSP_RemoveNullSpace(ksp, ksp->vec_sol));
874:     ksp->guess_zero = PETSC_FALSE;
875:   }

877:   /* can we mark the initial guess as zero for this solve? */
878:   guess_zero = ksp->guess_zero;
879:   if (!ksp->guess_zero) {
880:     PetscReal norm;

882:     PetscCall(VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm));
883:     if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
884:   }
885:   if (ksp->transpose_solve) {
886:     PetscCall(MatGetNullSpace(pmat, &nullsp));
887:   } else {
888:     PetscCall(MatGetTransposeNullSpace(pmat, &nullsp));
889:   }
890:   if (nullsp) {
891:     PetscCall(VecDuplicate(ksp->vec_rhs, &btmp));
892:     PetscCall(VecCopy(ksp->vec_rhs, btmp));
893:     PetscCall(MatNullSpaceRemove(nullsp, btmp));
894:     vec_rhs      = ksp->vec_rhs;
895:     ksp->vec_rhs = btmp;
896:   }
897:   PetscCall(VecLockReadPush(ksp->vec_rhs));
898:   PetscUseTypeMethod(ksp, solve);
899:   PetscCall(KSPMonitorPauseFinal_Internal(ksp));

901:   PetscCall(VecLockReadPop(ksp->vec_rhs));
902:   if (nullsp) {
903:     ksp->vec_rhs = vec_rhs;
904:     PetscCall(VecDestroy(&btmp));
905:   }

907:   ksp->guess_zero = guess_zero;

909:   PetscCheck(ksp->reason, comm, PETSC_ERR_PLIB, "Internal error, solver returned without setting converged reason");
910:   ksp->totalits += ksp->its;

912:   PetscCall(KSPConvergedReasonViewFromOptions(ksp));

914:   if (ksp->viewRate) {
915:     PetscCall(PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate));
916:     PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
917:     PetscCall(PetscViewerPopFormat(ksp->viewerRate));
918:   }
919:   PetscCall(PCPostSolve(ksp->pc, ksp));

921:   /* diagonal scale solution if called for */
922:   if (ksp->dscale) {
923:     PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal));
924:     /* unscale right hand side and matrix */
925:     if (ksp->dscalefix) {
926:       Mat mat, pmat;

928:       PetscCall(VecReciprocal(ksp->diagonal));
929:       PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
930:       PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
931:       PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
932:       if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
933:       PetscCall(VecReciprocal(ksp->diagonal));
934:       ksp->dscalefix2 = PETSC_TRUE;
935:     }
936:   }
937:   PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
938:   if (ksp->guess) PetscCall(KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
939:   if (ksp->postsolve) PetscCall((*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx));

941:   PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
942:   if (ksp->viewEV) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV));
943:   if (ksp->viewEVExp) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp));
944:   if (ksp->viewSV) PetscCall(KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV));
945:   if (ksp->viewFinalRes) PetscCall(KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes));
946:   if (ksp->viewMat) PetscCall(ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat));
947:   if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat));
948:   if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs));
949:   if (ksp->viewSol) PetscCall(ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol));
950:   if (ksp->view) PetscCall(ObjectView((PetscObject)ksp, ksp->viewer, ksp->format));
951:   if (ksp->viewDScale) PetscCall(ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale));
952:   if (ksp->viewMatExp) {
953:     Mat A, B;

955:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
956:     if (ksp->transpose_solve) {
957:       Mat AT;

959:       PetscCall(MatCreateTranspose(A, &AT));
960:       PetscCall(MatComputeOperator(AT, MATAIJ, &B));
961:       PetscCall(MatDestroy(&AT));
962:     } else {
963:       PetscCall(MatComputeOperator(A, MATAIJ, &B));
964:     }
965:     PetscCall(ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp));
966:     PetscCall(MatDestroy(&B));
967:   }
968:   if (ksp->viewPOpExp) {
969:     Mat B;

971:     PetscCall(KSPComputeOperator(ksp, MATAIJ, &B));
972:     PetscCall(ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp));
973:     PetscCall(MatDestroy(&B));
974:   }

976:   if (inXisinB) {
977:     PetscCall(VecCopy(x, b));
978:     PetscCall(VecDestroy(&x));
979:   }
980:   PetscCall(PetscObjectSAWsBlock((PetscObject)ksp));
981:   if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
982:     PCFailedReason reason;

984:     PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve has not converged, reason %s", KSPConvergedReasons[ksp->reason]);
985:     PetscCall(PCGetFailedReason(ksp->pc, &reason));
986:     SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve has not converged, reason %s PC failed due to %s", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
987:   }
988:   level--;
989:   PetscFunctionReturn(PETSC_SUCCESS);
990: }

992: /*@
993:    KSPSolve - Solves linear system.

995:    Collective

997:    Parameters:
998: +  ksp - iterative context obtained from `KSPCreate()`
999: .  b - the right hand side vector
1000: -  x - the solution (this may be the same vector as b, then b will be overwritten with answer)

1002:    Options Database Keys:
1003: +  -ksp_view_eigenvalues - compute preconditioned operators eigenvalues
1004: .  -ksp_view_eigenvalues_explicit - compute the eigenvalues by forming the dense operator and using LAPACK
1005: .  -ksp_view_mat binary - save matrix to the default binary viewer
1006: .  -ksp_view_pmat binary - save matrix used to build preconditioner to the default binary viewer
1007: .  -ksp_view_rhs binary - save right hand side vector to the default binary viewer
1008: .  -ksp_view_solution binary - save computed solution vector to the default binary viewer
1009:            (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
1010: .  -ksp_view_mat_explicit - for matrix-free operators, computes the matrix entries and views them
1011: .  -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
1012: .  -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
1013: .  -ksp_view_final_residual - print 2-norm of true linear system residual at the end of the solution process
1014: .  -ksp_error_if_not_converged - stop the program as soon as an error is detected in a `KSPSolve()`
1015: -  -ksp_view - print the ksp data structure at the end of the system solution

1017:    Notes:

1019:    If one uses `KSPSetDM()` then x or b need not be passed. Use `KSPGetSolution()` to access the solution in this case.

1021:    The operator is specified with `KSPSetOperators()`.

1023:    `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or if constructing the preconditioner failed.
1024:    Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or function `KSPSetErrorIfNotConverged()`
1025:    will cause `KSPSolve()` to error as soon as an error occurs in the linear solver.  In inner KSPSolves() KSP_DIVERGED_ITS is not treated as an error because when using nested solvers
1026:    it may be fine that inner solvers in the preconditioner do not converge during the solution process.

1028:    The number of iterations can be obtained from `KSPGetIterationNumber()`.

1030:    If you provide a matrix that has a `MatSetNullSpace()` and `MatSetTransposeNullSpace()` this will use that information to solve singular systems
1031:    in the least squares sense with a norm minimizing solution.

1033:                    A x = b   where b = b_p + b_t where b_t is not in the range of A (and hence by the fundamental theorem of linear algebra is in the nullspace(A') see `MatSetNullSpace()`

1035:     `KSP` first removes b_t producing the linear system  A x = b_p (which has multiple solutions) and solves this to find the ||x|| minimizing solution (and hence
1036:     it finds the solution x orthogonal to the nullspace(A). The algorithm is simply in each iteration of the Krylov method we remove the nullspace(A) from the search
1037:     direction thus the solution which is a linear combination of the search directions has no component in the nullspace(A).

1039:     We recommend always using `KSPGMRES` for such singular systems.
1040:     If nullspace(A) = nullspace(A') (note symmetric matrices always satisfy this property) then both left and right preconditioning will work
1041:     If nullspace(A) != nullspace(A') then left preconditioning will work but right preconditioning may not work (or it may).

1043:    Developer Note: The reason we cannot always solve  nullspace(A) != nullspace(A') systems with right preconditioning is because we need to remove at each iteration
1044:        the nullspace(AB) from the search direction. While we know the nullspace(A) the nullspace(AB) equals B^-1 times the nullspace(A) but except for trivial preconditioners
1045:        such as diagonal scaling we cannot apply the inverse of the preconditioner to a vector and thus cannot compute the nullspace(AB).

1047:    If using a direct method (e.g., via the `KSP` solver
1048:    `KSPPREONLY` and a preconditioner such as `PCLU` or `PCILU`,
1049:    then its=1.  See `KSPSetTolerances()` and `KSPConvergedDefault()`
1050:    for more details.

1052:    Understanding Convergence:
1053:    The routines `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1054:    `KSPComputeEigenvaluesExplicitly()` provide information on additional
1055:    options to monitor convergence and print eigenvalue information.

1057:    Level: beginner

1059: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1060:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1061:           `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1062: @*/
1063: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1064: {
1065:   PetscFunctionBegin;
1069:   ksp->transpose_solve = PETSC_FALSE;
1070:   PetscCall(KSPSolve_Private(ksp, b, x));
1071:   PetscFunctionReturn(PETSC_SUCCESS);
1072: }

1074: /*@
1075:    KSPSolveTranspose - Solves a linear system with the transposed matrix.

1077:    Collective

1079:    Input Parameters:
1080: +  ksp - iterative context obtained from `KSPCreate()`
1081: .  b - right hand side vector
1082: -  x - solution vector

1084:    Notes:
1085:     For complex numbers this solve the non-Hermitian transpose system.

1087:    Level: developer

1089:    Developer Notes:
1090:     We need to implement a `KSPSolveHermitianTranspose()`

1092: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1093:           `KSPSolve()`, `KSP`
1094: @*/
1095: PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
1096: {
1097:   PetscFunctionBegin;
1101:   if (ksp->transpose.use_explicittranspose) {
1102:     Mat J, Jpre;
1103:     PetscCall(KSPGetOperators(ksp, &J, &Jpre));
1104:     if (!ksp->transpose.reuse_transpose) {
1105:       PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
1106:       if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
1107:       ksp->transpose.reuse_transpose = PETSC_TRUE;
1108:     } else {
1109:       PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
1110:       if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
1111:     }
1112:     if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1113:       PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
1114:       ksp->transpose.BT = ksp->transpose.AT;
1115:     }
1116:     PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
1117:   } else {
1118:     ksp->transpose_solve = PETSC_TRUE;
1119:   }
1120:   PetscCall(KSPSolve_Private(ksp, b, x));
1121:   PetscFunctionReturn(PETSC_SUCCESS);
1122: }

1124: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1125: {
1126:   Mat        A, R;
1127:   PetscReal *norms;
1128:   PetscInt   i, N;
1129:   PetscBool  flg;

1131:   PetscFunctionBegin;
1132:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg));
1133:   if (flg) {
1134:     PetscCall(PCGetOperators(ksp->pc, &A, NULL));
1135:     if (!ksp->transpose_solve) PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &R));
1136:     else PetscCall(MatTransposeMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &R));
1137:     PetscCall(MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN));
1138:     PetscCall(MatGetSize(R, NULL, &N));
1139:     PetscCall(PetscMalloc1(N, &norms));
1140:     PetscCall(MatGetColumnNorms(R, NORM_2, norms));
1141:     PetscCall(MatDestroy(&R));
1142:     for (i = 0; i < N; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : "                          ", shift + i, (double)norms[i]));
1143:     PetscCall(PetscFree(norms));
1144:   }
1145:   PetscFunctionReturn(PETSC_SUCCESS);
1146: }

1148: PetscErrorCode KSPMatSolve_Private(KSP ksp, Mat B, Mat X)
1149: {
1150:   Mat       A, P, vB, vX;
1151:   Vec       cb, cx;
1152:   PetscInt  n1, N1, n2, N2, Bbn = PETSC_DECIDE;
1153:   PetscBool match;

1155:   PetscFunctionBegin;
1159:   PetscCheckSameComm(ksp, 1, B, 2);
1160:   PetscCheckSameComm(ksp, 1, X, 3);
1161:   PetscCheckSameType(B, 2, X, 3);
1162:   PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
1163:   MatCheckPreallocated(X, 3);
1164:   if (!X->assembled) {
1165:     PetscCall(MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
1166:     PetscCall(MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY));
1167:     PetscCall(MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY));
1168:   }
1169:   PetscCheck(B != X, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_IDN, "B and X must be different matrices");
1170:   PetscCheck(!ksp->transpose_solve || !ksp->transpose.use_explicittranspose, PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "KSPMatSolveTranspose() does not support -ksp_use_explicittranspose");
1171:   PetscCall(KSPGetOperators(ksp, &A, &P));
1172:   PetscCall(MatGetLocalSize(B, NULL, &n2));
1173:   PetscCall(MatGetLocalSize(X, NULL, &n1));
1174:   PetscCall(MatGetSize(B, NULL, &N2));
1175:   PetscCall(MatGetSize(X, NULL, &N1));
1176:   PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of right-hand sides (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of solutions (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
1177:   PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, ""));
1178:   PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of right-hand sides not stored in a dense Mat");
1179:   PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
1180:   PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of solutions not stored in a dense Mat");
1181:   PetscCall(KSPSetUp(ksp));
1182:   PetscCall(KSPSetUpOnBlocks(ksp));
1183:   if (ksp->ops->matsolve) {
1184:     if (ksp->guess_zero) PetscCall(MatZeroEntries(X));
1185:     PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
1186:     PetscCall(KSPGetMatSolveBatchSize(ksp, &Bbn));
1187:     /* by default, do a single solve with all columns */
1188:     if (Bbn == PETSC_DECIDE) Bbn = N2;
1189:     else PetscCheck(Bbn >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "KSPMatSolve() batch size %" PetscInt_FMT " must be positive", Bbn);
1190:     PetscCall(PetscInfo(ksp, "KSP type %s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, Bbn));
1191:     /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
1192:     if (Bbn >= N2) {
1193:       PetscUseTypeMethod(ksp, matsolve, B, X);
1194:       if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0));

1196:       PetscCall(KSPConvergedReasonViewFromOptions(ksp));

1198:       if (ksp->viewRate) {
1199:         PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1200:         PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1201:         PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1202:       }
1203:     } else {
1204:       for (n2 = 0; n2 < N2; n2 += Bbn) {
1205:         PetscCall(MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB));
1206:         PetscCall(MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX));
1207:         PetscUseTypeMethod(ksp, matsolve, vB, vX);
1208:         if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2));

1210:         PetscCall(KSPConvergedReasonViewFromOptions(ksp));

1212:         if (ksp->viewRate) {
1213:           PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1214:           PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1215:           PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1216:         }
1217:         PetscCall(MatDenseRestoreSubMatrix(B, &vB));
1218:         PetscCall(MatDenseRestoreSubMatrix(X, &vX));
1219:       }
1220:     }
1221:     if (ksp->viewMat) PetscCall(ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat));
1222:     if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat));
1223:     if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs));
1224:     if (ksp->viewSol) PetscCall(ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol));
1225:     if (ksp->view) PetscCall(KSPView(ksp, ksp->viewer));
1226:     PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
1227:   } else {
1228:     PetscCall(PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name));
1229:     for (n2 = 0; n2 < N2; ++n2) {
1230:       PetscCall(MatDenseGetColumnVecRead(B, n2, &cb));
1231:       PetscCall(MatDenseGetColumnVecWrite(X, n2, &cx));
1232:       PetscCall(KSPSolve_Private(ksp, cb, cx));
1233:       PetscCall(MatDenseRestoreColumnVecWrite(X, n2, &cx));
1234:       PetscCall(MatDenseRestoreColumnVecRead(B, n2, &cb));
1235:     }
1236:   }
1237:   PetscFunctionReturn(PETSC_SUCCESS);
1238: }

1240: /*@
1241:      KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a `MATDENSE`. Unlike `KSPSolve()`, `B` and `X` must be different matrices.

1243:    Input Parameters:
1244: +     ksp - iterative context
1245: -     B - block of right-hand sides

1247:    Output Parameter:
1248: .     X - block of solutions

1250:    Notes:
1251:      This is a stripped-down version of `KSPSolve()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.

1253:    Level: intermediate

1255: .seealso: [](chapter_ksp), `KSPSolve()`, `MatMatSolve()`, `KSPMatSolveTranspose()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1256: @*/
1257: PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
1258: {
1259:   PetscFunctionBegin;
1260:   ksp->transpose_solve = PETSC_FALSE;
1261:   PetscCall(KSPMatSolve_Private(ksp, B, X));
1262:   PetscFunctionReturn(PETSC_SUCCESS);
1263: }

1265: /*@
1266:      KSPMatSolveTranspose - Solves a linear system with the transposed matrix with multiple right-hand sides stored as a `MATDENSE`. Unlike `KSPSolveTranspose()`, `B` and `X` must be different matrices and the transposed matrix cannot be assembled explicitly for the user.

1268:    Input Parameters:
1269: +     ksp - iterative context
1270: -     B - block of right-hand sides

1272:    Output Parameter:
1273: .     X - block of solutions

1275:    Notes:
1276:      This is a stripped-down version of `KSPSolveTranspose()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.

1278:    Level: intermediate

1280: .seealso: [](chapter_ksp), `KSPSolveTranspose()`, `MatMatTransposeSolve()`, `KSPMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1281: @*/
1282: PetscErrorCode KSPMatSolveTranspose(KSP ksp, Mat B, Mat X)
1283: {
1284:   PetscFunctionBegin;
1285:   ksp->transpose_solve = PETSC_TRUE;
1286:   PetscCall(KSPMatSolve_Private(ksp, B, X));
1287:   PetscFunctionReturn(PETSC_SUCCESS);
1288: }

1290: /*@
1291:      KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1293:     Logically Collective

1295:    Input Parameters:
1296: +     ksp - iterative context
1297: -     bs - batch size

1299:    Level: advanced

1301: .seealso: [](chapter_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1302: @*/
1303: PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
1304: {
1305:   PetscFunctionBegin;
1308:   ksp->nmax = bs;
1309:   PetscFunctionReturn(PETSC_SUCCESS);
1310: }

1312: /*@
1313:      KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1315:    Input Parameter:
1316: .     ksp - iterative context

1318:    Output Parameter:
1319: .     bs - batch size

1321:    Level: advanced

1323: .seealso: [](chapter_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1324: @*/
1325: PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
1326: {
1327:   PetscFunctionBegin;
1330:   *bs = ksp->nmax;
1331:   PetscFunctionReturn(PETSC_SUCCESS);
1332: }

1334: /*@
1335:    KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`

1337:    Collective

1339:    Input Parameter:
1340: .  ksp - iterative context obtained from `KSPCreate()`

1342:    Level: beginner

1344: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1345: @*/
1346: PetscErrorCode KSPResetViewers(KSP ksp)
1347: {
1348:   PetscFunctionBegin;
1350:   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1351:   PetscCall(PetscViewerDestroy(&ksp->viewer));
1352:   PetscCall(PetscViewerDestroy(&ksp->viewerPre));
1353:   PetscCall(PetscViewerDestroy(&ksp->viewerRate));
1354:   PetscCall(PetscViewerDestroy(&ksp->viewerMat));
1355:   PetscCall(PetscViewerDestroy(&ksp->viewerPMat));
1356:   PetscCall(PetscViewerDestroy(&ksp->viewerRhs));
1357:   PetscCall(PetscViewerDestroy(&ksp->viewerSol));
1358:   PetscCall(PetscViewerDestroy(&ksp->viewerMatExp));
1359:   PetscCall(PetscViewerDestroy(&ksp->viewerEV));
1360:   PetscCall(PetscViewerDestroy(&ksp->viewerSV));
1361:   PetscCall(PetscViewerDestroy(&ksp->viewerEVExp));
1362:   PetscCall(PetscViewerDestroy(&ksp->viewerFinalRes));
1363:   PetscCall(PetscViewerDestroy(&ksp->viewerPOpExp));
1364:   PetscCall(PetscViewerDestroy(&ksp->viewerDScale));
1365:   ksp->view         = PETSC_FALSE;
1366:   ksp->viewPre      = PETSC_FALSE;
1367:   ksp->viewMat      = PETSC_FALSE;
1368:   ksp->viewPMat     = PETSC_FALSE;
1369:   ksp->viewRhs      = PETSC_FALSE;
1370:   ksp->viewSol      = PETSC_FALSE;
1371:   ksp->viewMatExp   = PETSC_FALSE;
1372:   ksp->viewEV       = PETSC_FALSE;
1373:   ksp->viewSV       = PETSC_FALSE;
1374:   ksp->viewEVExp    = PETSC_FALSE;
1375:   ksp->viewFinalRes = PETSC_FALSE;
1376:   ksp->viewPOpExp   = PETSC_FALSE;
1377:   ksp->viewDScale   = PETSC_FALSE;
1378:   PetscFunctionReturn(PETSC_SUCCESS);
1379: }

1381: /*@
1382:    KSPReset - Resets a `KSP` context to the kspsetupcalled = 0 state and removes any allocated Vecs and Mats

1384:    Collective

1386:    Input Parameter:
1387: .  ksp - iterative context obtained from `KSPCreate()`

1389:    Level: beginner

1391: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1392: @*/
1393: PetscErrorCode KSPReset(KSP ksp)
1394: {
1395:   PetscFunctionBegin;
1397:   if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1398:   PetscTryTypeMethod(ksp, reset);
1399:   if (ksp->pc) PetscCall(PCReset(ksp->pc));
1400:   if (ksp->guess) {
1401:     KSPGuess guess = ksp->guess;
1402:     PetscTryTypeMethod(guess, reset);
1403:   }
1404:   PetscCall(VecDestroyVecs(ksp->nwork, &ksp->work));
1405:   PetscCall(VecDestroy(&ksp->vec_rhs));
1406:   PetscCall(VecDestroy(&ksp->vec_sol));
1407:   PetscCall(VecDestroy(&ksp->diagonal));
1408:   PetscCall(VecDestroy(&ksp->truediagonal));

1410:   PetscCall(KSPResetViewers(ksp));

1412:   ksp->setupstage = KSP_SETUP_NEW;
1413:   ksp->nmax       = PETSC_DECIDE;
1414:   PetscFunctionReturn(PETSC_SUCCESS);
1415: }

1417: /*@C
1418:    KSPDestroy - Destroys `KSP` context.

1420:    Collective

1422:    Input Parameter:
1423: .  ksp - iterative context obtained from `KSPCreate()`

1425:    Level: beginner

1427: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1428: @*/
1429: PetscErrorCode KSPDestroy(KSP *ksp)
1430: {
1431:   PC pc;

1433:   PetscFunctionBegin;
1434:   if (!*ksp) PetscFunctionReturn(PETSC_SUCCESS);
1436:   if (--((PetscObject)(*ksp))->refct > 0) {
1437:     *ksp = NULL;
1438:     PetscFunctionReturn(PETSC_SUCCESS);
1439:   }

1441:   PetscCall(PetscObjectSAWsViewOff((PetscObject)*ksp));

1443:   /*
1444:    Avoid a cascading call to PCReset(ksp->pc) from the following call:
1445:    PCReset() shouldn't be called from KSPDestroy() as it is unprotected by pc's
1446:    refcount (and may be shared, e.g., by other ksps).
1447:    */
1448:   pc         = (*ksp)->pc;
1449:   (*ksp)->pc = NULL;
1450:   PetscCall(KSPReset((*ksp)));
1451:   (*ksp)->pc = pc;
1452:   PetscTryTypeMethod((*ksp), destroy);

1454:   if ((*ksp)->transpose.use_explicittranspose) {
1455:     PetscCall(MatDestroy(&(*ksp)->transpose.AT));
1456:     PetscCall(MatDestroy(&(*ksp)->transpose.BT));
1457:     (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
1458:   }

1460:   PetscCall(KSPGuessDestroy(&(*ksp)->guess));
1461:   PetscCall(DMDestroy(&(*ksp)->dm));
1462:   PetscCall(PCDestroy(&(*ksp)->pc));
1463:   PetscCall(PetscFree((*ksp)->res_hist_alloc));
1464:   PetscCall(PetscFree((*ksp)->err_hist_alloc));
1465:   if ((*ksp)->convergeddestroy) PetscCall((*(*ksp)->convergeddestroy)((*ksp)->cnvP));
1466:   PetscCall(KSPMonitorCancel((*ksp)));
1467:   PetscCall(KSPConvergedReasonViewCancel((*ksp)));
1468:   PetscCall(PetscHeaderDestroy(ksp));
1469:   PetscFunctionReturn(PETSC_SUCCESS);
1470: }

1472: /*@
1473:     KSPSetPCSide - Sets the preconditioning side.

1475:     Logically Collective

1477:     Input Parameter:
1478: .   ksp - iterative context obtained from `KSPCreate()`

1480:     Output Parameter:
1481: .   side - the preconditioning side, where side is one of
1482: .vb
1483:       PC_LEFT - left preconditioning (default)
1484:       PC_RIGHT - right preconditioning
1485:       PC_SYMMETRIC - symmetric preconditioning
1486: .ve

1488:     Options Database Key:
1489: .   -ksp_pc_side <right,left,symmetric> - `KSP` preconditioner side

1491:     Notes:
1492:     Left preconditioning is used by default for most Krylov methods except KSPFGMRES which only supports right preconditioning.

1494:     For methods changing the side of the preconditioner changes the norm type that is used, see `KSPSetNormType()`.

1496:     Symmetric preconditioning is currently available only for the KSPQCG method. Note, however, that
1497:     symmetric preconditioning can be emulated by using either right or left
1498:     preconditioning and a pre or post processing step.

1500:     Setting the PC side often affects the default norm type.  See `KSPSetNormType()` for details.

1502:     Level: intermediate

1504: .seealso: [](chapter_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`
1505: @*/
1506: PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
1507: {
1508:   PetscFunctionBegin;
1511:   ksp->pc_side = ksp->pc_side_set = side;
1512:   PetscFunctionReturn(PETSC_SUCCESS);
1513: }

1515: /*@
1516:     KSPGetPCSide - Gets the preconditioning side.

1518:     Not Collective

1520:     Input Parameter:
1521: .   ksp - iterative context obtained from `KSPCreate()`

1523:     Output Parameter:
1524: .   side - the preconditioning side, where side is one of
1525: .vb
1526:       PC_LEFT - left preconditioning (default)
1527:       PC_RIGHT - right preconditioning
1528:       PC_SYMMETRIC - symmetric preconditioning
1529: .ve

1531:     Level: intermediate

1533: .seealso: [](chapter_ksp), `KSPSetPCSide()`, `KSP`
1534: @*/
1535: PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
1536: {
1537:   PetscFunctionBegin;
1540:   PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
1541:   *side = ksp->pc_side;
1542:   PetscFunctionReturn(PETSC_SUCCESS);
1543: }

1545: /*@
1546:    KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1547:    iteration tolerances used by the default `KSP` convergence tests.

1549:    Not Collective

1551:    Input Parameter:
1552: .  ksp - the Krylov subspace context

1554:    Output Parameters:
1555: +  rtol - the relative convergence tolerance
1556: .  abstol - the absolute convergence tolerance
1557: .  dtol - the divergence tolerance
1558: -  maxits - maximum number of iterations

1560:    Notes:
1561:    The user can specify `NULL` for any parameter that is not needed.

1563:    Level: intermediate

1565:            maximum, iterations

1567: .seealso: [](chapter_ksp), `KSPSetTolerances()`, `KSP`
1568: @*/
1569: PetscErrorCode KSPGetTolerances(KSP ksp, PetscReal *rtol, PetscReal *abstol, PetscReal *dtol, PetscInt *maxits)
1570: {
1571:   PetscFunctionBegin;
1573:   if (abstol) *abstol = ksp->abstol;
1574:   if (rtol) *rtol = ksp->rtol;
1575:   if (dtol) *dtol = ksp->divtol;
1576:   if (maxits) *maxits = ksp->max_it;
1577:   PetscFunctionReturn(PETSC_SUCCESS);
1578: }

1580: /*@
1581:    KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1582:    iteration tolerances used by the default `KSP` convergence testers.

1584:    Logically Collective

1586:    Input Parameters:
1587: +  ksp - the Krylov subspace context
1588: .  rtol - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1589: .  abstol - the absolute convergence tolerance   absolute size of the (possibly preconditioned) residual norm
1590: .  dtol - the divergence tolerance,   amount (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1591: -  maxits - maximum number of iterations to use

1593:    Options Database Keys:
1594: +  -ksp_atol <abstol> - Sets abstol
1595: .  -ksp_rtol <rtol> - Sets rtol
1596: .  -ksp_divtol <dtol> - Sets dtol
1597: -  -ksp_max_it <maxits> - Sets maxits

1599:    Level: intermediate

1601:    Notes:
1602:    Use `PETSC_DEFAULT` to retain the default value of any of the tolerances.

1604:    See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test.  See also `KSPSetConvergenceTest()`
1605:    for setting user-defined stopping criteria.

1607: .seealso: [](chapter_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`
1608: @*/
1609: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1610: {
1611:   PetscFunctionBegin;

1618:   if (rtol != (PetscReal)PETSC_DEFAULT) {
1619:     PetscCheck(rtol >= 0.0 && rtol < 1.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Relative tolerance %g must be non-negative and less than 1.0", (double)rtol);
1620:     ksp->rtol = rtol;
1621:   }
1622:   if (abstol != (PetscReal)PETSC_DEFAULT) {
1623:     PetscCheck(abstol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Absolute tolerance %g must be non-negative", (double)abstol);
1624:     ksp->abstol = abstol;
1625:   }
1626:   if (dtol != (PetscReal)PETSC_DEFAULT) {
1627:     PetscCheck(dtol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Divergence tolerance %g must be larger than 1.0", (double)dtol);
1628:     ksp->divtol = dtol;
1629:   }
1630:   if (maxits != PETSC_DEFAULT) {
1631:     PetscCheck(maxits >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Maximum number of iterations %" PetscInt_FMT " must be non-negative", maxits);
1632:     ksp->max_it = maxits;
1633:   }
1634:   PetscFunctionReturn(PETSC_SUCCESS);
1635: }

1637: /*@
1638:    KSPSetInitialGuessNonzero - Tells the iterative solver that the
1639:    initial guess is nonzero; otherwise `KSP` assumes the initial guess
1640:    is to be zero (and thus zeros it out before solving).

1642:    Logically Collective

1644:    Input Parameters:
1645: +  ksp - iterative context obtained from `KSPCreate()`
1646: -  flg - ``PETSC_TRUE`` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero

1648:    Options Database Key:
1649: .  -ksp_initial_guess_nonzero <true,false> - use nonzero initial guess

1651:    Level: beginner

1653:    Notes:
1654:     If this is not called the X vector is zeroed in the call to `KSPSolve()`.

1656: .seealso: [](chapter_ksp), `KSPGetInitialGuessNonzero()`, `KSPSetGuessType()`, `KSPGuessType`, `KSP`
1657: @*/
1658: PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
1659: {
1660:   PetscFunctionBegin;
1663:   ksp->guess_zero = (PetscBool) !(int)flg;
1664:   PetscFunctionReturn(PETSC_SUCCESS);
1665: }

1667: /*@
1668:    KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1669:    a zero initial guess.

1671:    Not Collective

1673:    Input Parameter:
1674: .  ksp - iterative context obtained from `KSPCreate()`

1676:    Output Parameter:
1677: .  flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`

1679:    Level: intermediate

1681: .seealso: [](chapter_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1682: @*/
1683: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1684: {
1685:   PetscFunctionBegin;
1688:   if (ksp->guess_zero) *flag = PETSC_FALSE;
1689:   else *flag = PETSC_TRUE;
1690:   PetscFunctionReturn(PETSC_SUCCESS);
1691: }

1693: /*@
1694:    KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error if the solver has not converged as soon as the error is detected.

1696:    Logically Collective

1698:    Input Parameters:
1699: +  ksp - iterative context obtained from `KSPCreate()`
1700: -  flg - `PETSC_TRUE` indicates you want the error generated

1702:    Options Database Key:
1703: .  -ksp_error_if_not_converged <true,false> - generate an error and stop the program

1705:    Level: intermediate

1707:    Notes:
1708:     Normally PETSc continues if a linear solver fails to converge, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
1709:     to determine if it has converged.

1711:    A `KSP_DIVERGED_ITS` will not generate an error in a `KSPSolve()` inside a nested linear solver

1713: .seealso: [](chapter_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
1714: @*/
1715: PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
1716: {
1717:   PetscFunctionBegin;
1720:   ksp->errorifnotconverged = flg;
1721:   PetscFunctionReturn(PETSC_SUCCESS);
1722: }

1724: /*@
1725:    KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?

1727:    Not Collective

1729:    Input Parameter:
1730: .  ksp - iterative context obtained from KSPCreate()

1732:    Output Parameter:
1733: .  flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`

1735:    Level: intermediate

1737: .seealso: [](chapter_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
1738: @*/
1739: PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
1740: {
1741:   PetscFunctionBegin;
1744:   *flag = ksp->errorifnotconverged;
1745:   PetscFunctionReturn(PETSC_SUCCESS);
1746: }

1748: /*@
1749:    KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` to compute the initial guess (The Knoll trick)

1751:    Logically Collective

1753:    Input Parameters:
1754: +  ksp - iterative context obtained from `KSPCreate()`
1755: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1757:    Level: advanced

1759:    Developer Note:
1760:    The Knoll trick is not currently implemented using the `KSPGuess` class

1762: .seealso: [](chapter_ksp), `KSPGetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1763: @*/
1764: PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
1765: {
1766:   PetscFunctionBegin;
1769:   ksp->guess_knoll = flg;
1770:   PetscFunctionReturn(PETSC_SUCCESS);
1771: }

1773: /*@
1774:    KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using PCApply(pc,b,...) to compute
1775:      the initial guess

1777:    Not Collective

1779:    Input Parameter:
1780: .  ksp - iterative context obtained from `KSPCreate()`

1782:    Output Parameter:
1783: .  flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`

1785:    Level: advanced

1787: .seealso: [](chapter_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1788: @*/
1789: PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
1790: {
1791:   PetscFunctionBegin;
1794:   *flag = ksp->guess_knoll;
1795:   PetscFunctionReturn(PETSC_SUCCESS);
1796: }

1798: /*@
1799:    KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
1800:    values will be calculated via a Lanczos or Arnoldi process as the linear
1801:    system is solved.

1803:    Not Collective

1805:    Input Parameter:
1806: .  ksp - iterative context obtained from `KSPCreate()`

1808:    Output Parameter:
1809: .  flg - `PETSC_TRUE` or `PETSC_FALSE`

1811:    Options Database Key:
1812: .  -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1814:    Notes:
1815:    Currently this option is not valid for all iterative methods.

1817:    Many users may just want to use the monitoring routine
1818:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1819:    to print the singular values at each iteration of the linear solve.

1821:    Level: advanced

1823: .seealso: [](chapter_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1824: @*/
1825: PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
1826: {
1827:   PetscFunctionBegin;
1830:   *flg = ksp->calc_sings;
1831:   PetscFunctionReturn(PETSC_SUCCESS);
1832: }

1834: /*@
1835:    KSPSetComputeSingularValues - Sets a flag so that the extreme singular
1836:    values will be calculated via a Lanczos or Arnoldi process as the linear
1837:    system is solved.

1839:    Logically Collective

1841:    Input Parameters:
1842: +  ksp - iterative context obtained from `KSPCreate()`
1843: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1845:    Options Database Key:
1846: .  -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1848:    Level: advanced

1850:    Notes:
1851:    Currently this option is not valid for all iterative methods.

1853:    Many users may just want to use the monitoring routine
1854:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1855:    to print the singular values at each iteration of the linear solve.

1857: .seealso: [](chapter_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1858: @*/
1859: PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
1860: {
1861:   PetscFunctionBegin;
1864:   ksp->calc_sings = flg;
1865:   PetscFunctionReturn(PETSC_SUCCESS);
1866: }

1868: /*@
1869:    KSPGetComputeEigenvalues - Gets the flag indicating that the extreme eigenvalues
1870:    values will be calculated via a Lanczos or Arnoldi process as the linear
1871:    system is solved.

1873:    Not Collective

1875:    Input Parameter:
1876: .  ksp - iterative context obtained from `KSPCreate()`

1878:    Output Parameter:
1879: .  flg - `PETSC_TRUE` or `PETSC_FALSE`

1881:    Level: advanced

1883:    Note:
1884:    Currently this option is not valid for all iterative methods.

1886: .seealso: [](chapter_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1887: @*/
1888: PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
1889: {
1890:   PetscFunctionBegin;
1893:   *flg = ksp->calc_sings;
1894:   PetscFunctionReturn(PETSC_SUCCESS);
1895: }

1897: /*@
1898:    KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
1899:    values will be calculated via a Lanczos or Arnoldi process as the linear
1900:    system is solved.

1902:    Logically Collective

1904:    Input Parameters:
1905: +  ksp - iterative context obtained from `KSPCreate()`
1906: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1908:    Level: advanced

1910:    Note:
1911:    Currently this option is not valid for all iterative methods.

1913: .seealso: [](chapter_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1914: @*/
1915: PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
1916: {
1917:   PetscFunctionBegin;
1920:   ksp->calc_sings = flg;
1921:   PetscFunctionReturn(PETSC_SUCCESS);
1922: }

1924: /*@
1925:    KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
1926:    will be calculated via a Lanczos or Arnoldi process as the linear
1927:    system is solved.

1929:    Logically Collective

1931:    Input Parameters:
1932: +  ksp - iterative context obtained from `KSPCreate()`
1933: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1935:    Level: advanced

1937:    Note:
1938:    Currently this option is only valid for the GMRES method.

1940: .seealso: [](chapter_ksp), `KSPComputeRitz()`, `KSP`
1941: @*/
1942: PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
1943: {
1944:   PetscFunctionBegin;
1947:   ksp->calc_ritz = flg;
1948:   PetscFunctionReturn(PETSC_SUCCESS);
1949: }

1951: /*@
1952:    KSPGetRhs - Gets the right-hand-side vector for the linear system to
1953:    be solved.

1955:    Not Collective

1957:    Input Parameter:
1958: .  ksp - iterative context obtained from `KSPCreate()`

1960:    Output Parameter:
1961: .  r - right-hand-side vector

1963:    Level: developer

1965: .seealso: [](chapter_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
1966: @*/
1967: PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
1968: {
1969:   PetscFunctionBegin;
1972:   *r = ksp->vec_rhs;
1973:   PetscFunctionReturn(PETSC_SUCCESS);
1974: }

1976: /*@
1977:    KSPGetSolution - Gets the location of the solution for the
1978:    linear system to be solved.  Note that this may not be where the solution
1979:    is stored during the iterative process; see `KSPBuildSolution()`.

1981:    Not Collective

1983:    Input Parameter:
1984: .  ksp - iterative context obtained from `KSPCreate()`

1986:    Output Parameter:
1987: .  v - solution vector

1989:    Level: developer

1991: .seealso: [](chapter_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
1992: @*/
1993: PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
1994: {
1995:   PetscFunctionBegin;
1998:   *v = ksp->vec_sol;
1999:   PetscFunctionReturn(PETSC_SUCCESS);
2000: }

2002: /*@
2003:    KSPSetPC - Sets the preconditioner to be used to calculate the
2004:    application of the preconditioner on a vector.

2006:    Collective

2008:    Input Parameters:
2009: +  ksp - iterative context obtained from `KSPCreate()`
2010: -  pc   - the preconditioner object (can be `NULL`)

2012:    Level: developer

2014:    Note:
2015:    Use `KSPGetPC()` to retrieve the preconditioner context.

2017: .seealso: [](chapter_ksp), `KSPGetPC()`, `KSP`
2018: @*/
2019: PetscErrorCode KSPSetPC(KSP ksp, PC pc)
2020: {
2021:   PetscFunctionBegin;
2023:   if (pc) {
2025:     PetscCheckSameComm(ksp, 1, pc, 2);
2026:   }
2027:   PetscCall(PetscObjectReference((PetscObject)pc));
2028:   PetscCall(PCDestroy(&ksp->pc));
2029:   ksp->pc = pc;
2030:   PetscFunctionReturn(PETSC_SUCCESS);
2031: }

2033: /*@
2034:    KSPGetPC - Returns a pointer to the preconditioner context
2035:    set with `KSPSetPC()`.

2037:    Not Collective

2039:    Input Parameter:
2040: .  ksp - iterative context obtained from `KSPCreate()`

2042:    Output Parameter:
2043: .  pc - preconditioner context

2045:    Level: developer

2047: .seealso: [](chapter_ksp), `KSPSetPC()`, `KSP`
2048: @*/
2049: PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
2050: {
2051:   PetscFunctionBegin;
2054:   if (!ksp->pc) {
2055:     PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc));
2056:     PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0));
2057:     PetscCall(PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options));
2058:   }
2059:   *pc = ksp->pc;
2060:   PetscFunctionReturn(PETSC_SUCCESS);
2061: }

2063: /*@
2064:    KSPMonitor - runs the user provided monitor routines, if they exist

2066:    Collective

2068:    Input Parameters:
2069: +  ksp - iterative context obtained from `KSPCreate()`
2070: .  it - iteration number
2071: -  rnorm - relative norm of the residual

2073:    Level: developer

2075:    Notes:
2076:    This routine is called by the `KSP` implementations.
2077:    It does not typically need to be called by the user.

2079: .seealso: [](chapter_ksp), `KSPMonitorSet()`
2080: @*/
2081: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2082: {
2083:   PetscInt i, n = ksp->numbermonitors;

2085:   PetscFunctionBegin;
2086:   for (i = 0; i < n; i++) PetscCall((*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]));
2087:   PetscFunctionReturn(PETSC_SUCCESS);
2088: }

2090: /*@C
2091:    KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor
2092:    the residual/error etc.

2094:    Logically Collective

2096:    Input Parameters:
2097: +  ksp - iterative context obtained from `KSPCreate()`
2098: .  monitor - pointer to function (if this is NULL, it turns off monitoring
2099: .  mctx    - [optional] context for private data for the
2100:              monitor routine (use `NULL` if no context is desired)
2101: -  monitordestroy - [optional] routine that frees monitor context
2102:           (may be `NULL`)

2104:    Calling Sequence of `monitor`:
2105: $  PetscErrorCode  monitor(KSP ksp, PetscInt it, PetscReal rnorm, void *mctx)
2106: +  ksp - iterative context obtained from `KSPCreate()`
2107: .  it - iteration number
2108: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2109: -  mctx  - optional monitoring context, as set by `KSPMonitorSet()`

2111:    Calling Sequence of `monitordestroy`:
2112: $  PetscErrorCode destroy(void *mctx)

2114:    Options Database Keys:
2115: +    -ksp_monitor               - sets `KSPMonitorResidual()`
2116: .    -ksp_monitor draw          - sets `KSPMonitorResidualDraw()` and plots residual
2117: .    -ksp_monitor draw::draw_lg - sets `KSPMonitorResidualDrawLG()` and plots residual
2118: .    -ksp_monitor_pause_final   - Pauses any graphics when the solve finishes (only works for internal monitors)
2119: .    -ksp_monitor_true_residual - sets `KSPMonitorTrueResidual()`
2120: .    -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2121: .    -ksp_monitor_max           - sets `KSPMonitorTrueResidualMax()`
2122: .    -ksp_monitor_singular_value - sets `KSPMonitorSingularValue()`
2123: -    -ksp_monitor_cancel - cancels all monitors that have
2124:                           been hardwired into a code by
2125:                           calls to `KSPMonitorSet()`, but
2126:                           does not cancel those set via
2127:                           the options database.

2129:    Level: beginner

2131:    Notes:
2132:    The default is to do nothing.  To print the residual, or preconditioned
2133:    residual if `KSPSetNormType`(ksp,`KSP_NORM_PRECONDITIONED`) was called, use
2134:    `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2135:    context.

2137:    Several different monitoring routines may be set by calling
2138:    `KSPMonitorSet()` multiple times; all will be called in the
2139:    order in which they were set.

2141:    Fortran Notes:
2142:     Only a single monitor function can be set for each `KSP` object

2144: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSPMonitorCancel()`, `KSP`
2145: @*/
2146: PetscErrorCode KSPMonitorSet(KSP ksp, PetscErrorCode (*monitor)(KSP, PetscInt, PetscReal, void *), void *mctx, PetscErrorCode (*monitordestroy)(void **))
2147: {
2148:   PetscInt  i;
2149:   PetscBool identical;

2151:   PetscFunctionBegin;
2153:   for (i = 0; i < ksp->numbermonitors; i++) {
2154:     PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))monitor, mctx, monitordestroy, (PetscErrorCode(*)(void))ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical));
2155:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
2156:   }
2157:   PetscCheck(ksp->numbermonitors < MAXKSPMONITORS, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP monitors set");
2158:   ksp->monitor[ksp->numbermonitors]          = monitor;
2159:   ksp->monitordestroy[ksp->numbermonitors]   = monitordestroy;
2160:   ksp->monitorcontext[ksp->numbermonitors++] = (void *)mctx;
2161:   PetscFunctionReturn(PETSC_SUCCESS);
2162: }

2164: /*@
2165:    KSPMonitorCancel - Clears all monitors for a `KSP` object.

2167:    Logically Collective

2169:    Input Parameter:
2170: .  ksp - iterative context obtained from `KSPCreate()`

2172:    Options Database Key:
2173: .  -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.

2175:    Level: intermediate

2177: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2178: @*/
2179: PetscErrorCode KSPMonitorCancel(KSP ksp)
2180: {
2181:   PetscInt i;

2183:   PetscFunctionBegin;
2185:   for (i = 0; i < ksp->numbermonitors; i++) {
2186:     if (ksp->monitordestroy[i]) PetscCall((*ksp->monitordestroy[i])(&ksp->monitorcontext[i]));
2187:   }
2188:   ksp->numbermonitors = 0;
2189:   PetscFunctionReturn(PETSC_SUCCESS);
2190: }

2192: /*@C
2193:    KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.

2195:    Not Collective

2197:    Input Parameter:
2198: .  ksp - iterative context obtained from `KSPCreate()`

2200:    Output Parameter:
2201: .  ctx - monitoring context

2203:    Level: intermediate

2205: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSP`
2206: @*/
2207: PetscErrorCode KSPGetMonitorContext(KSP ksp, void *ctx)
2208: {
2209:   PetscFunctionBegin;
2211:   *(void **)ctx = ksp->monitorcontext[0];
2212:   PetscFunctionReturn(PETSC_SUCCESS);
2213: }

2215: /*@
2216:    KSPSetResidualHistory - Sets the array used to hold the residual history.
2217:    If set, this array will contain the residual norms computed at each
2218:    iteration of the solver.

2220:    Not Collective

2222:    Input Parameters:
2223: +  ksp - iterative context obtained from `KSPCreate()`
2224: .  a   - array to hold history
2225: .  na  - size of a
2226: -  reset - `PETSC_TRUE` indicates the history counter is reset to zero
2227:            for each new linear solve

2229:    Level: advanced

2231:    Notes:
2232:    If provided, he array is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2233:    If 'a' is `NULL` then space is allocated for the history. If 'na' `PETSC_DECIDE` or `PETSC_DEFAULT` then a
2234:    default array of length 10000 is allocated.

2236:    If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history

2238: .seealso: [](chapter_ksp), `KSPGetResidualHistory()`, `KSP`
2239: @*/
2240: PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2241: {
2242:   PetscFunctionBegin;

2245:   PetscCall(PetscFree(ksp->res_hist_alloc));
2246:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2247:     ksp->res_hist     = a;
2248:     ksp->res_hist_max = (size_t)na;
2249:   } else {
2250:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
2251:     else ksp->res_hist_max = 10000; /* like default ksp->max_it */
2252:     PetscCall(PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc));

2254:     ksp->res_hist = ksp->res_hist_alloc;
2255:   }
2256:   ksp->res_hist_len   = 0;
2257:   ksp->res_hist_reset = reset;
2258:   PetscFunctionReturn(PETSC_SUCCESS);
2259: }

2261: /*@C
2262:    KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.

2264:    Not Collective

2266:    Input Parameter:
2267: .  ksp - iterative context obtained from `KSPCreate()`

2269:    Output Parameters:
2270: +  a   - pointer to array to hold history (or `NULL`)
2271: -  na  - number of used entries in a (or `NULL`)

2273:    Level: advanced

2275:    Note:
2276:      This array is borrowed and should not be freed by the caller.

2278:      Can only be called after a `KSPSetResidualHistory()` otherwise a and na are set to zero

2280:    Fortran Note:
2281:      The Fortran version of this routine has a calling sequence
2282: $   call KSPGetResidualHistory(KSP ksp, integer na, integer ierr)
2283:     note that you have passed a Fortran array into `KSPSetResidualHistory()` and you need
2284:     to access the residual values from this Fortran array you provided. Only the `na` (number of
2285:     residual norms currently held) is set.

2287: .seealso: [](chapter_ksp), `KSPSetResidualHistory()`, `KSP`
2288: @*/
2289: PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2290: {
2291:   PetscFunctionBegin;
2293:   if (a) *a = ksp->res_hist;
2294:   if (na) *na = (PetscInt)ksp->res_hist_len;
2295:   PetscFunctionReturn(PETSC_SUCCESS);
2296: }

2298: /*@
2299:   KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.

2301:   Not Collective

2303:   Input Parameters:
2304: + ksp   - iterative context obtained from `KSPCreate()`
2305: . a     - array to hold history
2306: . na    - size of `a`
2307: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve

2309:   Level: advanced

2311:   Notes:
2312:   If provided, the array is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2313:   If 'a' is `NULL` then space is allocated for the history. If 'na' is `PETSC_DECIDE` or `PETSC_DEFAULT` then a default array of length 10000 is allocated.

2315:    If the array is not long enough then once the iterations is longer than the array length `KSPSolve()` stops recording the history

2317: .seealso: [](chapter_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2318: @*/
2319: PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2320: {
2321:   PetscFunctionBegin;

2324:   PetscCall(PetscFree(ksp->err_hist_alloc));
2325:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2326:     ksp->err_hist     = a;
2327:     ksp->err_hist_max = (size_t)na;
2328:   } else {
2329:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
2330:     else ksp->err_hist_max = 10000; /* like default ksp->max_it */
2331:     PetscCall(PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc));

2333:     ksp->err_hist = ksp->err_hist_alloc;
2334:   }
2335:   ksp->err_hist_len   = 0;
2336:   ksp->err_hist_reset = reset;
2337:   PetscFunctionReturn(PETSC_SUCCESS);
2338: }

2340: /*@C
2341:   KSPGetErrorHistory - Gets the array used to hold the error history and the number of residuals it contains.

2343:   Not Collective

2345:   Input Parameter:
2346: . ksp - iterative context obtained from `KSPCreate()`

2348:   Output Parameters:
2349: + a  - pointer to array to hold history (or `NULL`)
2350: - na - number of used entries in a (or `NULL`)

2352:   Level: advanced

2354:   Notes:
2355:   This array is borrowed and should not be freed by the caller.
2356:   Can only be called after a `KSPSetErrorHistory()` otherwise a and na are set to zero

2358:   Fortran Note:
2359:   The Fortran version of this routine has a calling sequence
2360: $   call KSPGetErrorHistory(KSP ksp, integer na, integer ierr)
2361:   note that you have passed a Fortran array into `KSPSetErrorHistory()` and you need
2362:   to access the residual values from this Fortran array you provided. Only the `na` (number of
2363:   residual norms currently held) is set.

2365: .seealso: [](chapter_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2366: @*/
2367: PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2368: {
2369:   PetscFunctionBegin;
2371:   if (a) *a = ksp->err_hist;
2372:   if (na) *na = (PetscInt)ksp->err_hist_len;
2373:   PetscFunctionReturn(PETSC_SUCCESS);
2374: }

2376: /*
2377:   KSPComputeConvergenceRate - Compute the convergence rate for the iteration

2379:   Not collective

2381:   Input Parameter:
2382: . ksp - The `KSP`

2384:   Output Parameters:
2385: + cr   - The residual contraction rate
2386: . rRsq - The coefficient of determination, R^2, indicating the linearity of the data
2387: . ce   - The error contraction rate
2388: - eRsq - The coefficient of determination, R^2, indicating the linearity of the data

2390:   Level: advanced

2392:   Note:
2393:   Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $log r_k = log r_0 + k log c$. After linear regression,
2394:   the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)}$,
2395:   see also https://en.wikipedia.org/wiki/Coefficient_of_determination

2397: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedRateView()`
2398: */
2399: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2400: {
2401:   PetscReal const *hist;
2402:   PetscReal       *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2403:   PetscInt         n, k;

2405:   PetscFunctionBegin;
2406:   if (cr || rRsq) {
2407:     PetscCall(KSPGetResidualHistory(ksp, &hist, &n));
2408:     if (!n) {
2409:       if (cr) *cr = 0.0;
2410:       if (rRsq) *rRsq = -1.0;
2411:     } else {
2412:       PetscCall(PetscMalloc2(n, &x, n, &y));
2413:       for (k = 0; k < n; ++k) {
2414:         x[k] = k;
2415:         y[k] = PetscLogReal(hist[k]);
2416:         mean += y[k];
2417:       }
2418:       mean /= n;
2419:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2420:       for (k = 0; k < n; ++k) {
2421:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2422:         var += PetscSqr(y[k] - mean);
2423:       }
2424:       PetscCall(PetscFree2(x, y));
2425:       if (cr) *cr = PetscExpReal(slope);
2426:       if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2427:     }
2428:   }
2429:   if (ce || eRsq) {
2430:     PetscCall(KSPGetErrorHistory(ksp, &hist, &n));
2431:     if (!n) {
2432:       if (ce) *ce = 0.0;
2433:       if (eRsq) *eRsq = -1.0;
2434:     } else {
2435:       PetscCall(PetscMalloc2(n, &x, n, &y));
2436:       for (k = 0; k < n; ++k) {
2437:         x[k] = k;
2438:         y[k] = PetscLogReal(hist[k]);
2439:         mean += y[k];
2440:       }
2441:       mean /= n;
2442:       PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2443:       for (k = 0; k < n; ++k) {
2444:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2445:         var += PetscSqr(y[k] - mean);
2446:       }
2447:       PetscCall(PetscFree2(x, y));
2448:       if (ce) *ce = PetscExpReal(slope);
2449:       if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2450:     }
2451:   }
2452:   PetscFunctionReturn(PETSC_SUCCESS);
2453: }

2455: /*@C
2456:    KSPSetConvergenceTest - Sets the function to be used to determine convergence.

2458:    Logically Collective

2460:    Input Parameters:
2461: +  ksp - iterative context obtained from `KSPCreate()`
2462: .  converge - pointer to the function
2463: .  cctx    - context for private data for the convergence routine (may be null)
2464: -  destroy - a routine for destroying the context (may be null)

2466:    Calling sequence of `converge`:
2467: $  PetscErrorCode converge(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *mctx)
2468: +  ksp - iterative context obtained from `KSPCreate()`
2469: .  it - iteration number
2470: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2471: .  reason - the reason why it has converged or diverged
2472: -  cctx  - optional convergence context, as set by `KSPSetConvergenceTest()`

2474:    Calling Sequence of `destroy`:
2475: $  PetscErrorCode destroy(void *cctx)

2477:    Level: advanced

2479:    Notes:
2480:    Must be called after the `KSP` type has been set so put this after
2481:    a call to `KSPSetType()`, or `KSPSetFromOptions()`.

2483:    The default convergence test, `KSPConvergedDefault()`, aborts if the
2484:    residual grows to more than 10000 times the initial residual.

2486:    The default is a combination of relative and absolute tolerances.
2487:    The residual value that is tested may be an approximation; routines
2488:    that need exact values should compute them.

2490:    In the default PETSc convergence test, the precise values of reason
2491:    are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.

2493: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2494: @*/
2495: PetscErrorCode KSPSetConvergenceTest(KSP ksp, PetscErrorCode (*converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void *cctx, PetscErrorCode (*destroy)(void *))
2496: {
2497:   PetscFunctionBegin;
2499:   if (ksp->convergeddestroy) PetscCall((*ksp->convergeddestroy)(ksp->cnvP));
2500:   ksp->converged        = converge;
2501:   ksp->convergeddestroy = destroy;
2502:   ksp->cnvP             = (void *)cctx;
2503:   PetscFunctionReturn(PETSC_SUCCESS);
2504: }

2506: /*@C
2507:    KSPGetConvergenceTest - Gets the function to be used to determine convergence.

2509:    Logically Collective

2511:    Input Parameter:
2512: .   ksp - iterative context obtained from `KSPCreate()`

2514:    Output Parameters:
2515: +  converge - pointer to convergence test function
2516: .  cctx    - context for private data for the convergence routine (may be null)
2517: -  destroy - a routine for destroying the context (may be null)

2519:    Calling sequence of `converge`:
2520: $  PetscErrorCode converge(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *mctx)
2521: +  ksp - iterative context obtained from `KSPCreate()`
2522: .  it - iteration number
2523: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2524: .  reason - the reason why it has converged or diverged
2525: -  cctx  - optional convergence context, as set by `KSPSetConvergenceTest()`

2527:   Calling Sequence of `destroy`:
2528: $ PetscErrorCode destroy(void *cctx)

2530:    Level: advanced

2532: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2533: @*/
2534: PetscErrorCode KSPGetConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void **cctx, PetscErrorCode (**destroy)(void *))
2535: {
2536:   PetscFunctionBegin;
2538:   if (converge) *converge = ksp->converged;
2539:   if (destroy) *destroy = ksp->convergeddestroy;
2540:   if (cctx) *cctx = ksp->cnvP;
2541:   PetscFunctionReturn(PETSC_SUCCESS);
2542: }

2544: /*@C
2545:    KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context

2547:    Logically Collective

2549:    Input Parameter:
2550: .   ksp - iterative context obtained from `KSPCreate()`

2552:    Output Parameters:
2553: +  converge - pointer to convergence test function
2554: .  cctx    - context for private data for the convergence routine
2555: -  destroy - a routine for destroying the context

2557:    Calling sequence of `converge`:
2558: $  PetscErrorCode converge(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *mctx)
2559: +  ksp - iterative context obtained from `KSPCreate()`
2560: .  it - iteration number
2561: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2562: .  reason - the reason why it has converged or diverged
2563: -  cctx  - optional convergence context, as set by `KSPSetConvergenceTest()`

2565:    Calling Sequence of `destroy`:
2566: $  PetscErrorCode destroy(void *cctx)

2568:    Level: advanced

2570:    Note:
2571:    This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`) and then calling
2572:    `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed by `KSPSetConvergenceTest()` the original context information
2573:    would be destroyed and hence the transferred context would be invalid and trigger a crash on use

2575: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2576: @*/
2577: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void **cctx, PetscErrorCode (**destroy)(void *))
2578: {
2579:   PetscFunctionBegin;
2581:   *converge             = ksp->converged;
2582:   *destroy              = ksp->convergeddestroy;
2583:   *cctx                 = ksp->cnvP;
2584:   ksp->converged        = NULL;
2585:   ksp->cnvP             = NULL;
2586:   ksp->convergeddestroy = NULL;
2587:   PetscFunctionReturn(PETSC_SUCCESS);
2588: }

2590: /*@C
2591:    KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.

2593:    Not Collective

2595:    Input Parameter:
2596: .  ksp - iterative context obtained from `KSPCreate()`

2598:    Output Parameter:
2599: .  ctx - monitoring context

2601:    Level: advanced

2603: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2604: @*/
2605: PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
2606: {
2607:   PetscFunctionBegin;
2609:   *(void **)ctx = ksp->cnvP;
2610:   PetscFunctionReturn(PETSC_SUCCESS);
2611: }

2613: /*@C
2614:    KSPBuildSolution - Builds the approximate solution in a vector provided.

2616:    Collective

2618:    Input Parameter:
2619: .  ctx - iterative context obtained from `KSPCreate()`

2621:    Output Parameter:
2622:    Provide exactly one of
2623: +  v - location to stash solution.
2624: -  V - the solution is returned in this location. This vector is created
2625:        internally. This vector should NOT be destroyed by the user with
2626:        `VecDestroy()`.

2628:    Level: developer

2630:    Notes:
2631:    This routine can be used in one of two ways
2632: .vb
2633:       KSPBuildSolution(ksp,NULL,&V);
2634:    or
2635:       KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2636: .ve
2637:    In the first case an internal vector is allocated to store the solution
2638:    (the user cannot destroy this vector). In the second case the solution
2639:    is generated in the vector that the user provides. Note that for certain
2640:    methods, such as `KSPCG`, the second case requires a copy of the solution,
2641:    while in the first case the call is essentially free since it simply
2642:    returns the vector where the solution already is stored. For some methods
2643:    like `KSPGMRES` this is a reasonably expensive operation and should only be
2644:    used in truly needed.

2646: .seealso: [](chapter_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2647: @*/
2648: PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
2649: {
2650:   PetscFunctionBegin;
2652:   PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
2653:   if (!V) V = &v;
2654:   PetscUseTypeMethod(ksp, buildsolution, v, V);
2655:   PetscFunctionReturn(PETSC_SUCCESS);
2656: }

2658: /*@C
2659:    KSPBuildResidual - Builds the residual in a vector provided.

2661:    Collective

2663:    Input Parameter:
2664: .  ksp - iterative context obtained from `KSPCreate()`

2666:    Output Parameters:
2667: +  v - optional location to stash residual.  If `v` is not provided,
2668:        then a location is generated.
2669: .  t - work vector.  If not provided then one is generated.
2670: -  V - the residual

2672:    Level: advanced

2674:    Note:
2675:    Regardless of whether or not `v` is provided, the residual is
2676:    returned in `V`.

2678: .seealso: [](chapter_ksp), `KSP`, `KSPBuildSolution()`
2679: @*/
2680: PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
2681: {
2682:   PetscBool flag = PETSC_FALSE;
2683:   Vec       w = v, tt = t;

2685:   PetscFunctionBegin;
2687:   if (!w) PetscCall(VecDuplicate(ksp->vec_rhs, &w));
2688:   if (!tt) {
2689:     PetscCall(VecDuplicate(ksp->vec_sol, &tt));
2690:     flag = PETSC_TRUE;
2691:   }
2692:   PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
2693:   if (flag) PetscCall(VecDestroy(&tt));
2694:   PetscFunctionReturn(PETSC_SUCCESS);
2695: }

2697: /*@
2698:    KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2699:      before solving. This actually CHANGES the matrix (and right hand side).

2701:    Logically Collective

2703:    Input Parameters:
2704: +  ksp - the `KSP` context
2705: -  scale - `PETSC_TRUE` or `PETSC_FALSE`

2707:    Options Database Keys:
2708: +   -ksp_diagonal_scale - perform a diagonal scaling before the solve
2709: -   -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve

2711:    Level: advanced

2713:     Notes:
2714:     Scales the matrix by  D^(-1/2)  A  D^(-1/2)  [D^(1/2) x ] = D^(-1/2) b
2715:        where D_{ii} is 1/abs(A_{ii}) unless A_{ii} is zero and then it is 1.

2717:     BE CAREFUL with this routine: it actually scales the matrix and right
2718:     hand side that define the system. After the system is solved the matrix
2719:     and right hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`

2721:     This should NOT be used within the `SNES` solves if you are using a line
2722:     search.

2724:     If you use this with the `PCType` `PCEISENSTAT` preconditioner than you can
2725:     use the `PCEisenstatSetNoDiagonalScaling()` option, or -pc_eisenstat_no_diagonal_scaling
2726:     to save some unneeded, redundant flops.

2728: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2729: @*/
2730: PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
2731: {
2732:   PetscFunctionBegin;
2735:   ksp->dscale = scale;
2736:   PetscFunctionReturn(PETSC_SUCCESS);
2737: }

2739: /*@
2740:    KSPGetDiagonalScale - Checks if `KSP` solver scales the matrix and right hand side, that is if `KSPSetDiagonalScale()` has been called

2742:    Not Collective

2744:    Input Parameter:
2745: .  ksp - the `KSP` context

2747:    Output Parameter:
2748: .  scale - `PETSC_TRUE` or `PETSC_FALSE`

2750:    Level: intermediate

2752: .seealso: [](chapter_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2753: @*/
2754: PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
2755: {
2756:   PetscFunctionBegin;
2759:   *scale = ksp->dscale;
2760:   PetscFunctionReturn(PETSC_SUCCESS);
2761: }

2763: /*@
2764:    KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.

2766:    Logically Collective

2768:    Input Parameters:
2769: +  ksp - the `KSP` context
2770: -  fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2771:          rescale (default)

2773:    Level: intermediate

2775:    Notes:
2776:      Must be called after `KSPSetDiagonalScale()`

2778:      Using this will slow things down, because it rescales the matrix before and
2779:      after each linear solve. This is intended mainly for testing to allow one
2780:      to easily get back the original system to make sure the solution computed is
2781:      accurate enough.

2783: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2784: @*/
2785: PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
2786: {
2787:   PetscFunctionBegin;
2790:   ksp->dscalefix = fix;
2791:   PetscFunctionReturn(PETSC_SUCCESS);
2792: }

2794: /*@
2795:    KSPGetDiagonalScaleFix - Determines if `KSP` diagonally scales the system back after solving. That is `KSPSetDiagonalScaleFix()` has been called

2797:    Not Collective

2799:    Input Parameter:
2800: .  ksp - the `KSP` context

2802:    Output Parameter:
2803: .  fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2804:          rescale (default)

2806:    Level: intermediate

2808: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2809: @*/
2810: PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
2811: {
2812:   PetscFunctionBegin;
2815:   *fix = ksp->dscalefix;
2816:   PetscFunctionReturn(PETSC_SUCCESS);
2817: }

2819: /*@C
2820:    KSPSetComputeOperators - set routine to compute the linear operators

2822:    Logically Collective

2824:    Input Parameters:
2825: +  ksp - the `KSP` context
2826: .  func - function to compute the operators
2827: -  ctx - optional context

2829:    Calling sequence of `func`:
2830: $  PetscErrorCode func(KSP ksp, Mat A, Mat B, void *ctx)
2831: +  ksp - the `KSP` context
2832: .  A - the linear operator
2833: .  B - preconditioning matrix
2834: -  ctx - optional user-provided context

2836:    Level: beginner

2838:    Notes:
2839:    The user provided func() will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
2840:    unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()` is called. This allows the same system to be solved several times
2841:    with different right hand side functions but is a confusing API since one might expect it to be called for each `KSPSolve()`

2843:    To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`

2845:    Developer Note:
2846:    Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes clear when new matrices are computing without requiring call this
2847:    routine to indicate when the new matrix should be computed.

2849: .seealso: [](chapter_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`
2850: @*/
2851: PetscErrorCode KSPSetComputeOperators(KSP ksp, PetscErrorCode (*func)(KSP, Mat, Mat, void *), void *ctx)
2852: {
2853:   DM dm;

2855:   PetscFunctionBegin;
2857:   PetscCall(KSPGetDM(ksp, &dm));
2858:   PetscCall(DMKSPSetComputeOperators(dm, func, ctx));
2859:   if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX;
2860:   PetscFunctionReturn(PETSC_SUCCESS);
2861: }

2863: /*@C
2864:    KSPSetComputeRHS - set routine to compute the right hand side of the linear system

2866:    Logically Collective

2868:    Input Parameters:
2869: +  ksp - the `KSP` context
2870: .  func - function to compute the right hand side
2871: -  ctx - optional context

2873:    Calling sequence of `func`:
2874: $  PetscErrorCode func(KSP ksp, Vec b, void *ctx)
2875: +  ksp - the `KSP` context
2876: .  b - right hand side of linear system
2877: -  ctx - optional user-provided context

2879:    Level: beginner

2881:    Notes:
2882:     The routine you provide will be called EACH you call `KSPSolve()` to prepare the new right hand side for that solve

2884: .seealso: [](chapter_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`
2885: @*/
2886: PetscErrorCode KSPSetComputeRHS(KSP ksp, PetscErrorCode (*func)(KSP, Vec, void *), void *ctx)
2887: {
2888:   DM dm;

2890:   PetscFunctionBegin;
2892:   PetscCall(KSPGetDM(ksp, &dm));
2893:   PetscCall(DMKSPSetComputeRHS(dm, func, ctx));
2894:   PetscFunctionReturn(PETSC_SUCCESS);
2895: }

2897: /*@C
2898:    KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system

2900:    Logically Collective

2902:    Input Parameters:
2903: +  ksp - the `KSP` context
2904: .  func - function to compute the initial guess
2905: -  ctx - optional context

2907:    Calling sequence of `func`:
2908: $  PetscErrorCode func(KSP ksp, Vec x, void *ctx)
2909: +  ksp - the `KSP` context
2910: .  x - solution vector
2911: -  ctx - optional user-provided context

2913:    Level: beginner

2915:    Notes:
2916:    This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`, otherwise
2917:    call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver

2919: .seealso: [](chapter_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`
2920: @*/
2921: PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, PetscErrorCode (*func)(KSP, Vec, void *), void *ctx)
2922: {
2923:   DM dm;

2925:   PetscFunctionBegin;
2927:   PetscCall(KSPGetDM(ksp, &dm));
2928:   PetscCall(DMKSPSetComputeInitialGuess(dm, func, ctx));
2929:   PetscFunctionReturn(PETSC_SUCCESS);
2930: }

2932: /*@
2933:    KSPSetUseExplicitTranspose - Determines the explicit transpose of the operator is formed in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
2934:    be explicitly formed when possible since the solve is much more efficient.

2936:    Logically Collective

2938:    Input Parameter:
2939: .  ksp - the `KSP` context

2941:    Output Parameter:
2942: .  flg - `PETSC_TRUE` to transpose the system in `KSPSolveTranspose()`, `PETSC_FALSE` to not transpose (default)

2944:    Level: advanced

2946: .seealso: [](chapter_ksp), `KSPSolveTranspose()`, `KSP`
2947: @*/
2948: PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
2949: {
2950:   PetscFunctionBegin;
2953:   ksp->transpose.use_explicittranspose = flg;
2954:   PetscFunctionReturn(PETSC_SUCCESS);
2955: }