Actual source code: itfunc.c

  1: /*
  2:       Interface KSP routines that the user calls.
  3: */

  5: #include <petsc/private/kspimpl.h>
  6: #include <petsc/private/matimpl.h>
  7: #include <petscdm.h>

  9: /* number of nested levels of KSPSetUp/Solve(). This is used to determine if KSP_DIVERGED_ITS should be fatal. */
 10: static PetscInt level = 0;

 12: static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
 13: {
 14:   PetscViewerPushFormat(viewer, format);
 15:   PetscObjectView(obj, viewer);
 16:   PetscViewerPopFormat(viewer);
  17:   return 0;
 18: }

 20: /*@
 21:    KSPComputeExtremeSingularValues - Computes the extreme singular values
 22:    for the preconditioned operator. Called after or during `KSPSolve()`.

 24:    Not Collective

 26:    Input Parameter:
 27: .  ksp - iterative context obtained from `KSPCreate()`

 29:    Output Parameters:
  30: .  emax, emin - the extreme singular values (emax is the largest, emin the smallest)

 32:    Options Database Keys:
 33: .  -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.

 35:    Notes:
 36:    One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
  37:    (or use the option -ksp_view_singularvalues) in order for this routine to work correctly.

 39:    Many users may just want to use the monitoring routine
 40:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
 41:    to print the extreme singular values at each iteration of the linear solve.

 43:    Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
 44:    The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
 45:    intended for eigenanalysis. Consider the excellent package `SLEPc` if accurate values are required.

  47:    Disable restarts if using `KSPGMRES`, otherwise this estimate will use only those iterations after the last
 48:    restart. See `KSPGMRESSetRestart()` for more details.
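
   Example usage, as a minimal sketch (it assumes the `KSP` ksp, its operators, and the vectors b and x have already been set up by the caller):
.vb
   PetscReal emax, emin;

   KSPSetComputeSingularValues(ksp, PETSC_TRUE); // must precede KSPSetUp()
   KSPSetUp(ksp);
   KSPSolve(ksp, b, x);
   KSPComputeExtremeSingularValues(ksp, &emax, &emin);
   PetscPrintf(PETSC_COMM_WORLD, "estimated condition number %g\n", (double)(emax / emin));
.ve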

 50:    Level: advanced

 52: .seealso: [](chapter_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`
 53: @*/
 54: PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
 55: {

 61:   if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
 62:   else {
 63:     *emin = -1.0;
 64:     *emax = -1.0;
 65:   }
 66:   return 0;
 67: }

 69: /*@
 70:    KSPComputeEigenvalues - Computes the extreme eigenvalues for the
 71:    preconditioned operator. Called after or during `KSPSolve()`.

 73:    Not Collective

 75:    Input Parameters:
 76: +  ksp - iterative context obtained from `KSPCreate()`
 77: -  n - size of arrays r and c. The number of eigenvalues computed (neig) will, in
 78:        general, be less than this.

 80:    Output Parameters:
 81: +  r - real part of computed eigenvalues, provided by user with a dimension of at least n
 82: .  c - complex part of computed eigenvalues, provided by user with a dimension of at least n
 83: -  neig - actual number of eigenvalues computed (will be less than or equal to n)

 85:    Options Database Keys:
 86: .  -ksp_view_eigenvalues - Prints eigenvalues to stdout

 88:    Notes:
 89:    The number of eigenvalues estimated depends on the size of the Krylov space
  90:    generated during the `KSPSolve()`; for example, with
 91:    CG it corresponds to the number of CG iterations, for GMRES it is the number
 92:    of GMRES iterations SINCE the last restart. Any extra space in r[] and c[]
 93:    will be ignored.

 95:    `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
 96:    intended only for assistance in understanding the convergence of iterative
 97:    methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
 98:    the excellent package SLEPc.

100:    One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
101:    in order for this routine to work correctly.

103:    Many users may just want to use the monitoring routine
104:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
105:    to print the singular values at each iteration of the linear solve.

107:    `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.
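
   Example usage, as a minimal sketch (it assumes ksp, b, and x were created and set up by the caller; the array length used here is simply the iteration count):
.vb
   PetscReal *r, *c;
   PetscInt   its, neig, i;

   KSPSetComputeEigenvalues(ksp, PETSC_TRUE); // must precede KSPSetUp()
   KSPSetUp(ksp);
   KSPSolve(ksp, b, x);
   KSPGetIterationNumber(ksp, &its);
   PetscMalloc2(its, &r, its, &c);
   KSPComputeEigenvalues(ksp, its, r, c, &neig);
   for (i = 0; i < neig; i++) PetscPrintf(PETSC_COMM_WORLD, "eigenvalue %" PetscInt_FMT ": %g + %gi\n", i, (double)r[i], (double)c[i]);
   PetscFree2(r, c);
.ve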

109:    Level: advanced

111: .seealso: [](chapter_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
112: @*/
113: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
114: {

122:   if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
123:   else *neig = 0;
124:   return 0;
125: }

127: /*@
128:    KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the
 129:    smallest or largest (in modulus) eigenvalues of the preconditioned operator.

131:    Not Collective

133:    Input Parameters:
134: +  ksp   - iterative context obtained from `KSPCreate()`
135: .  ritz  - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
136: -  small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively

138:    Output Parameters:
139: +  nrit  - On input number of (harmonic) Ritz pairs to compute; on output, actual number of computed (harmonic) Ritz pairs
140: .  S     - an array of the Ritz vectors, pass in an array of vectors of size nrit
141: .  tetar - real part of the Ritz values, pass in an array of size nrit
142: -  tetai - imaginary part of the Ritz values, pass in an array of size nrit

144:    Notes:
145:    This only works with a `KSPType` of `KSPGMRES`.

147:    One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.

149:    This routine must be called after `KSPSolve()`.

151:    In GMRES, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
152:    the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
153:    a restart (that is a complete GMRES cycle was never achieved).

 155:    The number of (harmonic) Ritz pairs actually computed is less than or equal to the restart
 156:    parameter for GMRES if a complete cycle has been performed, or otherwise less than or equal to the
 157:    number of GMRES iterations.

159:    `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).

161:    For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
162:    the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
163:    vectors S are equal to the real and the imaginary parts of the associated vectors.
164:    When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
165:    values are still returned in tetar and tetai, as is done in `KSPComputeEigenvalues()`, but
166:    the Ritz vectors S are complex.

168:    The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.

 170:    The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator; consider the
 171:    excellent package `SLEPc` if accurate values are required.
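
   Example usage, as a minimal sketch (it assumes ksp uses `KSPGMRES` with restart rst, and that b and x were set up by the caller):
.vb
   Vec       *S;
   PetscReal *tetar, *tetai;
   PetscInt   nrit = rst; // rst: the GMRES restart chosen by the caller

   KSPSetComputeRitz(ksp, PETSC_TRUE); // must precede KSPSetUp()
   KSPSolve(ksp, b, x);
   VecDuplicateVecs(x, nrit, &S);
   PetscMalloc2(nrit, &tetar, nrit, &tetai);
   KSPComputeRitz(ksp, PETSC_TRUE, PETSC_TRUE, &nrit, S, tetar, tetai); // smallest Ritz pairs
   PetscFree2(tetar, tetai);
   VecDestroyVecs(rst, &S);
.ve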

173:    Level: advanced

175: .seealso: [](chapter_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
176: @*/
177: PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
178: {
181:   PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
182:   return 0;
 183: }

184: /*@
185:    KSPSetUpOnBlocks - Sets up the preconditioner for each block in
186:    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
187:    methods.

189:    Collective

191:    Input Parameter:
192: .  ksp - the `KSP` context

194:    Notes:
195:    `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
196:    more precise profiling (via -log_view) of the setup phase for these
197:    block preconditioners.  If the user does not call `KSPSetUpOnBlocks()`,
198:    it will automatically be called from within `KSPSolve()`.

200:    Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
201:    on the PC context within the `KSP` context.
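
   Example usage, as a minimal sketch of the explicit setup sequence (it assumes ksp, A, b, and x were created by the caller):
.vb
   KSPSetOperators(ksp, A, A);
   KSPSetUp(ksp);          // set up the KSP and the outer PC
   KSPSetUpOnBlocks(ksp);  // set up the subsolvers on the blocks, e.g. for PCBJACOBI
   KSPSolve(ksp, b, x);    // block setup time is now excluded from the solve's -log_view timing
.ve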

203:    Level: advanced

205: .seealso: [](chapter_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
206: @*/
207: PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
208: {
209:   PC             pc;
210:   PCFailedReason pcreason;

213:   level++;
214:   KSPGetPC(ksp, &pc);
215:   PCSetUpOnBlocks(pc);
216:   PCGetFailedReasonRank(pc, &pcreason);
217:   level--;
218:   /*
219:      This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
220:      this flag and initializing an appropriate vector with VecSetInf() so that the first norm computation can
221:      produce a result at KSPCheckNorm() thus communicating the known problem to all MPI ranks so they may
222:      terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual()
223:   */
224:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
225:   return 0;
226: }

228: /*@
 229:    KSPSetReusePreconditioner - Reuse the current preconditioner; do not construct a new one even if the operator changes

231:    Collective

233:    Input Parameters:
234: +  ksp   - iterative context obtained from `KSPCreate()`
235: -  flag - `PETSC_TRUE` to reuse the current preconditioner

237:    Level: intermediate

239: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
240: @*/
241: PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
242: {
243:   PC pc;

246:   KSPGetPC(ksp, &pc);
247:   PCSetReusePreconditioner(pc, flag);
248:   return 0;
249: }

251: /*@
252:    KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the operator in the preconditioner has changed.

254:    Collective

 256:    Input Parameter:
 257: .  ksp   - iterative context obtained from `KSPCreate()`

 259:    Output Parameter:
 260: .  flag - `PETSC_TRUE` if the current preconditioner is reused even when the operator changes

262:    Level: intermediate

264: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
265: @*/
266: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
267: {
270:   *flag = PETSC_FALSE;
271:   if (ksp->pc) PCGetReusePreconditioner(ksp->pc, flag);
272:   return 0;
273: }

275: /*@
 276:    KSPSetSkipPCSetFromOptions - Prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`. This is used when the same `PC` is shared by more than one `KSP`, so that its options are not reset for each `KSP`.

278:    Collective

280:    Input Parameters:
281: +  ksp   - iterative context obtained from `KSPCreate()`
282: -  flag - `PETSC_TRUE` to skip calling the `PCSetFromOptions()`
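
   Example usage, as a minimal sketch of two `KSP` objects sharing one `PC` (it assumes ksp1 and ksp2 were created by the caller):
.vb
   PC pc;

   KSPGetPC(ksp1, &pc);
   KSPSetPC(ksp2, pc);                           // ksp2 shares ksp1's preconditioner
   KSPSetSkipPCSetFromOptions(ksp2, PETSC_TRUE); // do not reset the shared PC's options from ksp2
   KSPSetFromOptions(ksp1);
   KSPSetFromOptions(ksp2);
.ve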

284:    Level: intermediate

286: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
287: @*/
288: PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
289: {
291:   ksp->skippcsetfromoptions = flag;
292:   return 0;
293: }

295: /*@
296:    KSPSetUp - Sets up the internal data structures for the
297:    later use of an iterative solver.

299:    Collective

301:    Input Parameter:
302: .  ksp   - iterative context obtained from `KSPCreate()`

304:    Level: developer

306: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`
307: @*/
308: PetscErrorCode KSPSetUp(KSP ksp)
309: {
310:   Mat            A, B;
311:   Mat            mat, pmat;
312:   MatNullSpace   nullsp;
313:   PCFailedReason pcreason;

316:   level++;

318:   /* reset the convergence flag from the previous solves */
319:   ksp->reason = KSP_CONVERGED_ITERATING;

321:   if (!((PetscObject)ksp)->type_name) KSPSetType(ksp, KSPGMRES);
322:   KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side);

324:   if (ksp->dmActive && !ksp->setupstage) {
325:     /* first time in so build matrix and vector data structures using DM */
326:     if (!ksp->vec_rhs) DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs);
327:     if (!ksp->vec_sol) DMCreateGlobalVector(ksp->dm, &ksp->vec_sol);
328:     DMCreateMatrix(ksp->dm, &A);
329:     KSPSetOperators(ksp, A, A);
330:     PetscObjectDereference((PetscObject)A);
331:   }

333:   if (ksp->dmActive) {
334:     DMKSP kdm;
335:     DMGetDMKSP(ksp->dm, &kdm);

337:     if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS) {
338:       /* only computes initial guess the first time through */
339:       PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
340:       KSPSetInitialGuessNonzero(ksp, PETSC_TRUE);
341:     }
342:     if (kdm->ops->computerhs) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));

344:     if (ksp->setupstage != KSP_SETUP_NEWRHS) {
345:       if (kdm->ops->computeoperators) {
346:         KSPGetOperators(ksp, &A, &B);
347:         PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
348:       } else SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp,PETSC_FALSE);");
349:     }
350:   }

352:   if (ksp->setupstage == KSP_SETUP_NEWRHS) {
353:     level--;
354:     return 0;
355:   }
356:   PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0);

358:   switch (ksp->setupstage) {
359:   case KSP_SETUP_NEW:
360:     PetscUseTypeMethod(ksp, setup);
361:     break;
362:   case KSP_SETUP_NEWMATRIX: { /* This should be replaced with a more general mechanism */
363:     if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
364:   } break;
365:   default:
366:     break;
367:   }

369:   if (!ksp->pc) KSPGetPC(ksp, &ksp->pc);
370:   PCGetOperators(ksp->pc, &mat, &pmat);
371:   /* scale the matrix if requested */
372:   if (ksp->dscale) {
373:     PetscScalar *xx;
374:     PetscInt     i, n;
375:     PetscBool    zeroflag = PETSC_FALSE;
376:     if (!ksp->pc) KSPGetPC(ksp, &ksp->pc);
377:     if (!ksp->diagonal) { /* allocate vector to hold diagonal */
378:       MatCreateVecs(pmat, &ksp->diagonal, NULL);
379:     }
380:     MatGetDiagonal(pmat, ksp->diagonal);
381:     VecGetLocalSize(ksp->diagonal, &n);
382:     VecGetArray(ksp->diagonal, &xx);
383:     for (i = 0; i < n; i++) {
384:       if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
385:       else {
386:         xx[i]    = 1.0;
387:         zeroflag = PETSC_TRUE;
388:       }
389:     }
390:     VecRestoreArray(ksp->diagonal, &xx);
391:     if (zeroflag) PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n");
392:     MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal);
393:     if (mat != pmat) MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal);
394:     ksp->dscalefix2 = PETSC_FALSE;
395:   }
396:   PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0);
397:   PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged);
398:   PCSetUp(ksp->pc);
399:   PCGetFailedReasonRank(ksp->pc, &pcreason);
 400:   /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; there is no code to handle a ksp->reason set on only some ranks */
401:   if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;

403:   MatGetNullSpace(mat, &nullsp);
404:   if (nullsp) {
405:     PetscBool test = PETSC_FALSE;
406:     PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL);
407:     if (test) MatNullSpaceTest(nullsp, mat, NULL);
408:   }
409:   ksp->setupstage = KSP_SETUP_NEWRHS;
410:   level--;
411:   return 0;
412: }

414: /*@C
415:    KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged to a viewer

417:    Collective

 419:    Input Parameters:
420: +  ksp - iterative context obtained from `KSPCreate()`
421: -  viewer - the viewer to display the reason

423:    Options Database Keys:
424: +  -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
425: -  -ksp_converged_reason ::failed - only print reason and number of iterations when diverged

427:    Notes:
 428:      To change the format of the output call `PetscViewerPushFormat`(viewer,format) before this call. Use `PETSC_VIEWER_DEFAULT` for the default format,
 429:      or `PETSC_VIEWER_FAILED` to display the reason only when the solve fails.
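
   Example usage, as a minimal sketch that prints the reason only when the solve fails (it assumes ksp, b, and x were set up by the caller):
.vb
   KSPSolve(ksp, b, x);
   PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_FAILED);
   KSPConvergedReasonView(ksp, PETSC_VIEWER_STDOUT_WORLD);
   PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
.ve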

431:    Level: beginner

433: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
434:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
435: @*/
436: PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
437: {
438:   PetscBool         isAscii;
439:   PetscViewerFormat format;

441:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
442:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii);
443:   if (isAscii) {
444:     PetscViewerGetFormat(viewer, &format);
445:     PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel);
446:     if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
447:       if (((PetscObject)ksp)->prefix) {
448:         PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its);
449:       } else {
450:         PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its);
451:       }
452:     } else if (ksp->reason <= 0) {
453:       if (((PetscObject)ksp)->prefix) {
454:         PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its);
455:       } else {
456:         PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its);
457:       }
458:       if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
459:         PCFailedReason reason;
460:         PCGetFailedReason(ksp->pc, &reason);
461:         PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]);
462:       }
463:     }
464:     PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel);
465:   }
466:   return 0;
467: }

469: /*@C
 470:    KSPConvergedReasonViewSet - Sets an ADDITIONAL function that is to be used at the
 471:     end of the linear solve to display the convergence reason of the linear solver.

473:    Logically Collective

475:    Input Parameters:
476: +  ksp - the `KSP` context
477: .  f - the ksp converged reason view function
478: .  vctx - [optional] user-defined context for private data for the
479:           ksp converged reason view routine (use NULL if no context is desired)
480: -  reasonviewdestroy - [optional] routine that frees reasonview context
481:           (may be NULL)

483:    Options Database Keys:
484: +    -ksp_converged_reason        - sets a default `KSPConvergedReasonView()`
485: -    -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have
486:                             been hardwired into a code by
487:                             calls to `KSPConvergedReasonViewSet()`, but
488:                             does not cancel those set via
489:                             the options database.

491:    Notes:
492:    Several different converged reason view routines may be set by calling
493:    `KSPConvergedReasonViewSet()` multiple times; all will be called in the
494:    order in which they were set.
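
   Example usage, as a minimal sketch of registering a user-defined reason viewer (the callback name MyReasonView and its message are made up for illustration):
.vb
   // hypothetical user-defined callback
   static PetscErrorCode MyReasonView(KSP ksp, void *ctx)
   {
     KSPConvergedReason reason;

     KSPGetConvergedReason(ksp, &reason);
     PetscPrintf(PetscObjectComm((PetscObject)ksp), "custom view: solve finished with reason %s\n", KSPConvergedReasons[reason]);
     return 0;
   }

   // in the calling code, before KSPSolve()
   KSPConvergedReasonViewSet(ksp, MyReasonView, NULL, NULL);
.ve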

496:    Level: intermediate

498: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewCancel()`
499: @*/
500: PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, PetscErrorCode (*f)(KSP, void *), void *vctx, PetscErrorCode (*reasonviewdestroy)(void **))
501: {
502:   PetscInt  i;
503:   PetscBool identical;

506:   for (i = 0; i < ksp->numberreasonviews; i++) {
507:     PetscMonitorCompare((PetscErrorCode(*)(void))f, vctx, reasonviewdestroy, (PetscErrorCode(*)(void))ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical);
508:     if (identical) return 0;
509:   }
511:   ksp->reasonview[ksp->numberreasonviews]          = f;
512:   ksp->reasonviewdestroy[ksp->numberreasonviews]   = reasonviewdestroy;
513:   ksp->reasonviewcontext[ksp->numberreasonviews++] = (void *)vctx;
514:   return 0;
515: }

517: /*@
518:    KSPConvergedReasonViewCancel - Clears all the reasonview functions for a `KSP` object.

520:    Collective

522:    Input Parameter:
523: .  ksp - iterative context obtained from `KSPCreate()`

525:    Level: intermediate

527: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`
528: @*/
529: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
530: {
531:   PetscInt i;

534:   for (i = 0; i < ksp->numberreasonviews; i++) {
535:     if (ksp->reasonviewdestroy[i]) (*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]);
536:   }
537:   ksp->numberreasonviews = 0;
538:   return 0;
539: }

541: /*@
 542:   KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how the `KSPConvergedReason` is to be viewed.

544:   Collective

 546:   Input Parameter:
547: . ksp   - the `KSP` object

549:   Level: intermediate

551: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`
552: @*/
553: PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
554: {
555:   PetscViewer       viewer;
556:   PetscBool         flg;
557:   PetscViewerFormat format;
558:   PetscInt          i;


 561:   /* Call all user-provided reason view routines */
562:   for (i = 0; i < ksp->numberreasonviews; i++) (*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]);

564:   /* Call the default PETSc routine */
565:   PetscOptionsGetViewer(PetscObjectComm((PetscObject)ksp), ((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_converged_reason", &viewer, &format, &flg);
566:   if (flg) {
567:     PetscViewerPushFormat(viewer, format);
568:     KSPConvergedReasonView(ksp, viewer);
569:     PetscViewerPopFormat(viewer);
570:     PetscViewerDestroy(&viewer);
571:   }
572:   return 0;
573: }

575: /*@C
 576:   KSPConvergedRateView - Displays the reason a `KSP` solve converged or diverged, together with the estimated convergence rate, to a viewer

578:   Collective

580:   Input Parameters:
581: +  ksp    - iterative context obtained from `KSPCreate()`
582: -  viewer - the viewer to display the reason

584:   Options Database Keys:
585: . -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)

587:   Notes:
588:   To change the format of the output, call PetscViewerPushFormat(viewer,format) before this call.

 590:   Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
 591:   the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$,
592:   see also https://en.wikipedia.org/wiki/Coefficient_of_determination
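
  For reference, with the data points taken as $x_i = i$ and $y_i = \log r_i$, the fitted slope is the standard least-squares estimate
  $\log c = \frac{\sum_i (x_i - \bar x)(y_i - \bar y)}{\sum_i (x_i - \bar x)^2}$, and the reported rate is $c = \exp(\log c)$.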

594:   Level: intermediate

596: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
597: @*/
598: PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
599: {
600:   PetscViewerFormat format;
601:   PetscBool         isAscii;
602:   PetscReal         rrate, rRsq, erate = 0.0, eRsq = 0.0;
603:   PetscInt          its;
604:   const char       *prefix, *reason = KSPConvergedReasons[ksp->reason];

606:   KSPGetOptionsPrefix(ksp, &prefix);
607:   KSPGetIterationNumber(ksp, &its);
608:   KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq);
609:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
610:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii);
611:   if (isAscii) {
612:     PetscViewerGetFormat(viewer, &format);
613:     PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel);
614:     if (ksp->reason > 0) {
615:       if (prefix) PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its);
616:       else PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its);
617:       PetscViewerASCIIUseTabs(viewer, PETSC_FALSE);
618:       if (rRsq >= 0.0) PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq);
619:       if (eRsq >= 0.0) PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq);
620:       PetscViewerASCIIPrintf(viewer, "\n");
621:       PetscViewerASCIIUseTabs(viewer, PETSC_TRUE);
622:     } else if (ksp->reason <= 0) {
623:       if (prefix) PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its);
624:       else PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its);
625:       PetscViewerASCIIUseTabs(viewer, PETSC_FALSE);
626:       if (rRsq >= 0.0) PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq);
627:       if (eRsq >= 0.0) PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq);
628:       PetscViewerASCIIPrintf(viewer, "\n");
629:       PetscViewerASCIIUseTabs(viewer, PETSC_TRUE);
630:       if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
631:         PCFailedReason reason;
632:         PCGetFailedReason(ksp->pc, &reason);
633:         PetscViewerASCIIPrintf(viewer, "               PC failed due to %s \n", PCFailedReasons[reason]);
634:       }
635:     }
636:     PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel);
637:   }
638:   return 0;
639: }

641: #include <petscdraw.h>

643: static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
644: {
645:   PetscReal  *r, *c;
646:   PetscInt    n, i, neig;
647:   PetscBool   isascii, isdraw;
648:   PetscMPIInt rank;

650:   MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank);
651:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);
652:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw);
653:   if (isExplicit) {
654:     VecGetSize(ksp->vec_sol, &n);
655:     PetscMalloc2(n, &r, n, &c);
656:     KSPComputeEigenvaluesExplicitly(ksp, n, r, c);
657:     neig = n;
658:   } else {
659:     PetscInt nits;

661:     KSPGetIterationNumber(ksp, &nits);
662:     n = nits + 2;
663:     if (!nits) {
664:       PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n");
665:       return 0;
666:     }
667:     PetscMalloc2(n, &r, n, &c);
668:     KSPComputeEigenvalues(ksp, n, r, c, &neig);
669:   }
670:   if (isascii) {
671:     PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively");
672:     for (i = 0; i < neig; ++i) {
673:       if (c[i] >= 0.0) PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]);
674:       else PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]);
675:     }
676:   } else if (isdraw && rank == 0) {
677:     PetscDraw   draw;
678:     PetscDrawSP drawsp;

680:     if (format == PETSC_VIEWER_DRAW_CONTOUR) {
681:       KSPPlotEigenContours_Private(ksp, neig, r, c);
682:     } else {
683:       if (!ksp->eigviewer) PetscViewerDrawOpen(PETSC_COMM_SELF, NULL, isExplicit ? "Explicitly Computed Eigenvalues" : "Iteratively Computed Eigenvalues", PETSC_DECIDE, PETSC_DECIDE, 400, 400, &ksp->eigviewer);
684:       PetscViewerDrawGetDraw(ksp->eigviewer, 0, &draw);
685:       PetscDrawSPCreate(draw, 1, &drawsp);
686:       PetscDrawSPReset(drawsp);
687:       for (i = 0; i < neig; ++i) PetscDrawSPAddPoint(drawsp, r + i, c + i);
688:       PetscDrawSPDraw(drawsp, PETSC_TRUE);
689:       PetscDrawSPSave(drawsp);
690:       PetscDrawSPDestroy(&drawsp);
691:     }
692:   }
693:   PetscFree2(r, c);
694:   return 0;
695: }

697: static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
698: {
699:   PetscReal smax, smin;
700:   PetscInt  nits;
701:   PetscBool isascii;

703:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);
704:   KSPGetIterationNumber(ksp, &nits);
705:   if (!nits) {
706:     PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n");
707:     return 0;
708:   }
709:   KSPComputeExtremeSingularValues(ksp, &smax, &smin);
710:   if (isascii) PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme singular values: max %g min %g max/min %g\n", (double)smax, (double)smin, (double)(smax / smin));
711:   return 0;
712: }

714: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
715: {
716:   PetscBool isascii;

718:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);
720:   if (isascii) {
721:     Mat       A;
722:     Vec       t;
723:     PetscReal norm;

725:     PCGetOperators(ksp->pc, &A, NULL);
726:     VecDuplicate(ksp->vec_rhs, &t);
727:     KSP_MatMult(ksp, A, ksp->vec_sol, t);
728:     VecAYPX(t, -1.0, ksp->vec_rhs);
729:     VecNorm(t, NORM_2, &norm);
730:     VecDestroy(&t);
731:     PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm);
732:   }
733:   return 0;
734: }

736: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
737: {
738:   PetscInt i;

740:   if (!ksp->pauseFinal) return 0;
741:   for (i = 0; i < ksp->numbermonitors; ++i) {
742:     PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ksp->monitorcontext[i];
743:     PetscDraw             draw;
744:     PetscReal             lpause;

746:     if (!vf) continue;
747:     if (vf->lg) {
749:       if (((PetscObject)vf->lg)->classid != PETSC_DRAWLG_CLASSID) continue;
750:       PetscDrawLGGetDraw(vf->lg, &draw);
751:       PetscDrawGetPause(draw, &lpause);
752:       PetscDrawSetPause(draw, -1.0);
753:       PetscDrawPause(draw);
754:       PetscDrawSetPause(draw, lpause);
755:     } else {
756:       PetscBool isdraw;

759:       if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
760:       PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw);
761:       if (!isdraw) continue;
762:       PetscViewerDrawGetDraw(vf->viewer, 0, &draw);
763:       PetscDrawGetPause(draw, &lpause);
764:       PetscDrawSetPause(draw, -1.0);
765:       PetscDrawPause(draw);
766:       PetscDrawSetPause(draw, lpause);
767:     }
768:   }
769:   return 0;
770: }

772: static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
773: {
774:   PetscBool    flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
775:   Mat          mat, pmat;
776:   MPI_Comm     comm;
777:   MatNullSpace nullsp;
778:   Vec          btmp, vec_rhs = NULL;

780:   level++;
781:   comm = PetscObjectComm((PetscObject)ksp);
782:   if (x && x == b) {
784:     VecDuplicate(b, &x);
785:     inXisinB = PETSC_TRUE;
786:   }
787:   if (b) {
788:     PetscObjectReference((PetscObject)b);
789:     VecDestroy(&ksp->vec_rhs);
790:     ksp->vec_rhs = b;
791:   }
792:   if (x) {
793:     PetscObjectReference((PetscObject)x);
794:     VecDestroy(&ksp->vec_sol);
795:     ksp->vec_sol = x;
796:   }

798:   if (ksp->viewPre) ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre);

800:   if (ksp->presolve) (*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx);

802:   /* reset the residual history list if requested */
803:   if (ksp->res_hist_reset) ksp->res_hist_len = 0;
804:   if (ksp->err_hist_reset) ksp->err_hist_len = 0;

806:   /* KSPSetUp() scales the matrix if needed */
807:   KSPSetUp(ksp);
808:   KSPSetUpOnBlocks(ksp);

810:   if (ksp->guess) {
811:     PetscObjectState ostate, state;

813:     KSPGuessSetUp(ksp->guess);
814:     PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate);
815:     KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol);
816:     PetscObjectStateGet((PetscObject)ksp->vec_sol, &state);
817:     if (state != ostate) {
818:       ksp->guess_zero = PETSC_FALSE;
819:     } else {
820:       PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n");
821:       ksp->guess_zero = PETSC_TRUE;
822:     }
823:   }

825:   VecSetErrorIfLocked(ksp->vec_sol, 3);

827:   PetscLogEventBegin(KSP_Solve, ksp, ksp->vec_rhs, ksp->vec_sol, 0);
828:   PCGetOperators(ksp->pc, &mat, &pmat);
829:   /* diagonal scale RHS if called for */
830:   if (ksp->dscale) {
831:     VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal);
832:     /* second time in, but matrix was scaled back to original */
833:     if (ksp->dscalefix && ksp->dscalefix2) {
834:       Mat mat, pmat;

836:       PCGetOperators(ksp->pc, &mat, &pmat);
837:       MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal);
838:       if (mat != pmat) MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal);
839:     }

841:     /* scale initial guess */
842:     if (!ksp->guess_zero) {
843:       if (!ksp->truediagonal) {
844:         VecDuplicate(ksp->diagonal, &ksp->truediagonal);
845:         VecCopy(ksp->diagonal, ksp->truediagonal);
846:         VecReciprocal(ksp->truediagonal);
847:       }
848:       VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal);
849:     }
850:   }
851:   PCPreSolve(ksp->pc, ksp);

853:   if (ksp->guess_zero) VecSet(ksp->vec_sol, 0.0);
 854:   if (ksp->guess_knoll) { /* The Knoll trick is independent of the KSPGuess specified */
855:     PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol);
856:     KSP_RemoveNullSpace(ksp, ksp->vec_sol);
857:     ksp->guess_zero = PETSC_FALSE;
858:   }

860:   /* can we mark the initial guess as zero for this solve? */
861:   guess_zero = ksp->guess_zero;
862:   if (!ksp->guess_zero) {
863:     PetscReal norm;

865:     VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm);
866:     if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
867:   }
868:   if (ksp->transpose_solve) {
869:     MatGetNullSpace(pmat, &nullsp);
870:   } else {
871:     MatGetTransposeNullSpace(pmat, &nullsp);
872:   }
873:   if (nullsp) {
874:     VecDuplicate(ksp->vec_rhs, &btmp);
875:     VecCopy(ksp->vec_rhs, btmp);
876:     MatNullSpaceRemove(nullsp, btmp);
877:     vec_rhs      = ksp->vec_rhs;
878:     ksp->vec_rhs = btmp;
879:   }
880:   VecLockReadPush(ksp->vec_rhs);
881:   PetscUseTypeMethod(ksp, solve);
882:   KSPMonitorPauseFinal_Internal(ksp);

884:   VecLockReadPop(ksp->vec_rhs);
885:   if (nullsp) {
886:     ksp->vec_rhs = vec_rhs;
887:     VecDestroy(&btmp);
888:   }

890:   ksp->guess_zero = guess_zero;

893:   ksp->totalits += ksp->its;

895:   KSPConvergedReasonViewFromOptions(ksp);

897:   if (ksp->viewRate) {
898:     PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate);
899:     KSPConvergedRateView(ksp, ksp->viewerRate);
900:     PetscViewerPopFormat(ksp->viewerRate);
901:   }
902:   PCPostSolve(ksp->pc, ksp);

904:   /* diagonal scale solution if called for */
905:   if (ksp->dscale) {
906:     VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal);
907:     /* unscale right hand side and matrix */
908:     if (ksp->dscalefix) {
909:       Mat mat, pmat;

911:       VecReciprocal(ksp->diagonal);
912:       VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal);
913:       PCGetOperators(ksp->pc, &mat, &pmat);
914:       MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal);
915:       if (mat != pmat) MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal);
916:       VecReciprocal(ksp->diagonal);
917:       ksp->dscalefix2 = PETSC_TRUE;
918:     }
919:   }
920:   PetscLogEventEnd(KSP_Solve, ksp, ksp->vec_rhs, ksp->vec_sol, 0);
921:   if (ksp->guess) KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol);
922:   if (ksp->postsolve) (*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx);

924:   PCGetOperators(ksp->pc, &mat, &pmat);
925:   if (ksp->viewEV) KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV);
926:   if (ksp->viewEVExp) KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp);
927:   if (ksp->viewSV) KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV);
928:   if (ksp->viewFinalRes) KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes);
929:   if (ksp->viewMat) ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat);
930:   if (ksp->viewPMat) ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat);
931:   if (ksp->viewRhs) ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs);
932:   if (ksp->viewSol) ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol);
933:   if (ksp->view) ObjectView((PetscObject)ksp, ksp->viewer, ksp->format);
934:   if (ksp->viewDScale) ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale);
935:   if (ksp->viewMatExp) {
936:     Mat A, B;

938:     PCGetOperators(ksp->pc, &A, NULL);
939:     if (ksp->transpose_solve) {
940:       Mat AT;

942:       MatCreateTranspose(A, &AT);
943:       MatComputeOperator(AT, MATAIJ, &B);
944:       MatDestroy(&AT);
945:     } else {
946:       MatComputeOperator(A, MATAIJ, &B);
947:     }
948:     ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp);
949:     MatDestroy(&B);
950:   }
951:   if (ksp->viewPOpExp) {
952:     Mat B;

954:     KSPComputeOperator(ksp, MATAIJ, &B);
955:     ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp);
956:     MatDestroy(&B);
957:   }

959:   if (inXisinB) {
960:     VecCopy(x, b);
961:     VecDestroy(&x);
962:   }
963:   PetscObjectSAWsBlock((PetscObject)ksp);
964:   if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
965:     if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
966:       PCFailedReason reason;
967:       PCGetFailedReason(ksp->pc, &reason);
968:       SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve has not converged, reason %s PC failed due to %s", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
969:     } else SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve has not converged, reason %s", KSPConvergedReasons[ksp->reason]);
970:   }
971:   level--;
972:   return 0;
973: }

975: /*@
 976:    KSPSolve - Solves a linear system.

978:    Collective

 980:    Input Parameters:
981: +  ksp - iterative context obtained from `KSPCreate()`
982: .  b - the right hand side vector
983: -  x - the solution (this may be the same vector as b, then b will be overwritten with answer)

985:    Options Database Keys:
 986: +  -ksp_view_eigenvalues - compute the preconditioned operator's eigenvalues
987: .  -ksp_view_eigenvalues_explicit - compute the eigenvalues by forming the dense operator and using LAPACK
988: .  -ksp_view_mat binary - save matrix to the default binary viewer
989: .  -ksp_view_pmat binary - save matrix used to build preconditioner to the default binary viewer
990: .  -ksp_view_rhs binary - save right hand side vector to the default binary viewer
991: .  -ksp_view_solution binary - save computed solution vector to the default binary viewer
992:            (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
993: .  -ksp_view_mat_explicit - for matrix-free operators, computes the matrix entries and views them
994: .  -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
995: .  -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
996: .  -ksp_view_final_residual - print 2-norm of true linear system residual at the end of the solution process
997: .  -ksp_error_if_not_converged - stop the program as soon as an error is detected in a `KSPSolve()`
998: -  -ksp_view - print the ksp data structure at the end of the system solution

1000:    Notes:

1002:    If one uses `KSPSetDM()` then x or b need not be passed. Use `KSPGetSolution()` to access the solution in this case.

1004:    The operator is specified with `KSPSetOperators()`.

1006:    `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or if constructing the preconditioner failed.
1007:    Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or function `KSPSetErrorIfNotConverged()`
1008:    will cause `KSPSolve()` to error as soon as an error occurs in the linear solver.  In inner `KSPSolve()` calls `KSP_DIVERGED_ITS` is not treated as an error because when using nested solvers
1009:    it may be fine that inner solvers in the preconditioner do not converge during the solution process.

1011:    The number of iterations can be obtained from `KSPGetIterationNumber()`.
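
   Example usage, as a minimal sketch of a typical calling sequence (it assumes the Mat A and the Vecs b and x are already assembled):
.vb
   KSP                ksp;
   KSPConvergedReason reason;
   PetscInt           its;

   KSPCreate(PETSC_COMM_WORLD, &ksp);
   KSPSetOperators(ksp, A, A);
   KSPSetFromOptions(ksp);
   KSPSolve(ksp, b, x);
   KSPGetConvergedReason(ksp, &reason);
   KSPGetIterationNumber(ksp, &its);
   PetscPrintf(PETSC_COMM_WORLD, "%s in %" PetscInt_FMT " iterations\n", KSPConvergedReasons[reason], its);
   KSPDestroy(&ksp);
.ve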

1013:    If you provide a matrix that has had `MatSetNullSpace()` and `MatSetTransposeNullSpace()` called on it, this will use that information to solve singular systems
1014:    in the least squares sense with a norm minimizing solution.

1016:                    A x = b   where b = b_p + b_t, with b_t not in the range of A (and hence, by the fundamental theorem of linear algebra, in the nullspace(A')); see `MatSetNullSpace()`.

1018:    `KSP` first removes b_t, producing the linear system  A x = b_p (which has multiple solutions), and solves this to find the ||x|| minimizing solution (and hence
1019:    it finds the solution x orthogonal to the nullspace(A)). The algorithm is simple: in each iteration of the Krylov method we remove the nullspace(A) from the search
1020:    direction, so the solution, which is a linear combination of the search directions, has no component in the nullspace(A).

1022:     We recommend always using `KSPGMRES` for such singular systems.
1023:     If nullspace(A) = nullspace(A') (note that symmetric matrices always satisfy this property), then both left and right preconditioning will work.
1024:     If nullspace(A) != nullspace(A'), then left preconditioning will work but right preconditioning may not work (or it may).
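
    Example usage, as a minimal sketch for a singular system whose nullspace consists of the constant vector (it assumes A, b, x, and ksp are otherwise set up by the caller):
.vb
    MatNullSpace nullsp;

    MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp);
    MatSetNullSpace(A, nullsp);   // also call MatSetTransposeNullSpace() if nullspace(A') is known
    MatNullSpaceDestroy(&nullsp); // the matrix keeps its own reference
    KSPSetType(ksp, KSPGMRES);
    KSPSetOperators(ksp, A, A);
    KSPSolve(ksp, b, x);          // returns the norm-minimizing least-squares solution
.ve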

1026:    Developer Note: The reason we cannot always solve  nullspace(A) != nullspace(A') systems with right preconditioning is that we need to remove, at each iteration,
1027:        the nullspace(AB) from the search direction. While we know the nullspace(A), the nullspace(AB) equals B^-1 times the nullspace(A); but except for trivial preconditioners
1028:        such as diagonal scaling, we cannot apply the inverse of the preconditioner to a vector and thus cannot compute the nullspace(AB).

1030:    If using a direct method (e.g., via the `KSP` solver
1031:    `KSPPREONLY` and a preconditioner such as `PCLU` or `PCILU`),
1032:    then its=1.  See `KSPSetTolerances()` and `KSPConvergedDefault()`
1033:    for more details.

1035:    Understanding Convergence:
1036:    The routines `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1037:    `KSPComputeEigenvaluesExplicitly()` provide information on additional
1038:    options to monitor convergence and print eigenvalue information.

1040:    Level: beginner

1042: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1043:           `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1044:           `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1045: @*/
1046: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1047: {
1051:   ksp->transpose_solve = PETSC_FALSE;
1052:   KSPSolve_Private(ksp, b, x);
1053:   return 0;
1054: }

1056: /*@
1057:    KSPSolveTranspose - Solves the transpose of a linear system.

1059:    Collective

1061:    Input Parameters:
1062: +  ksp - iterative context obtained from `KSPCreate()`
1063: .  b - right hand side vector
1064: -  x - solution vector

1066:    Notes:
1067:     For complex numbers this solves the non-Hermitian transpose system.

1069:    Developer Notes:
1070:     We need to implement a `KSPSolveHermitianTranspose()`

1072:    Level: developer

1074: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1075:           `KSPSolve()`, `KSP`
1076: @*/
1077: PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
1078: {
1082:   if (ksp->transpose.use_explicittranspose) {
1083:     Mat J, Jpre;
1084:     KSPGetOperators(ksp, &J, &Jpre);
1085:     if (!ksp->transpose.reuse_transpose) {
1086:       MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT);
1087:       if (J != Jpre) MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT);
1088:       ksp->transpose.reuse_transpose = PETSC_TRUE;
1089:     } else {
1090:       MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT);
1091:       if (J != Jpre) MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT);
1092:     }
1093:     if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1094:       PetscObjectReference((PetscObject)ksp->transpose.AT);
1095:       ksp->transpose.BT = ksp->transpose.AT;
1096:     }
1097:     KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT);
1098:   } else {
1099:     ksp->transpose_solve = PETSC_TRUE;
1100:   }
1101:   KSPSolve_Private(ksp, b, x);
1102:   return 0;
1103: }

1105: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1106: {
1107:   Mat        A, R;
1108:   PetscReal *norms;
1109:   PetscInt   i, N;
1110:   PetscBool  flg;

1112:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg);
1113:   if (flg) {
1114:     PCGetOperators(ksp->pc, &A, NULL);
1115:     MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &R);
1116:     MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN);
1117:     MatGetSize(R, NULL, &N);
1118:     PetscMalloc1(N, &norms);
1119:     MatGetColumnNorms(R, NORM_2, norms);
1120:     MatDestroy(&R);
1121:     for (i = 0; i < N; ++i) PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : "                          ", shift + i, (double)norms[i]);
1122:     PetscFree(norms);
1123:   }
1124:   return 0;
1125: }

1127: /*@
1128:      KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a MATDENSE. Unlike `KSPSolve()`, B and X must be different matrices.

1130:    Input Parameters:
1131: +     ksp - iterative context
1132: -     B - block of right-hand sides

1134:    Output Parameter:
1135: .     X - block of solutions

1137:    Notes:
1138:      This is a stripped-down version of `KSPSolve()`, which only handles -ksp_view, -ksp_converged_reason, and -ksp_view_final_residual.
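
     Example usage, as a minimal sketch (it assumes ksp has its operators set; nrhs is the number of right-hand sides, and the columns of B are filled by the caller):
.vb
     Mat      B, X;
     PetscInt m, M;

     MatGetLocalSize(A, &m, NULL);
     MatGetSize(A, &M, NULL);
     MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, M, nrhs, NULL, &B);
     MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X);
     // ... fill the columns of B ...
     KSPMatSolve(ksp, B, X);
     MatDestroy(&X);
     MatDestroy(&B);
.ve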

1140:    Level: intermediate

1142: .seealso: [](chapter_ksp), `KSPSolve()`, `MatMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1143: @*/
1144: PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
1145: {
1146:   Mat       A, P, vB, vX;
1147:   Vec       cb, cx;
1148:   PetscInt  n1, N1, n2, N2, Bbn = PETSC_DECIDE;
1149:   PetscBool match;

1157:   MatCheckPreallocated(X, 3);
1158:   if (!X->assembled) {
1159:     MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE);
1160:     MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY);
1161:     MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY);
1162:   }
1164:   KSPGetOperators(ksp, &A, &P);
1165:   MatGetLocalSize(B, NULL, &n2);
1166:   MatGetLocalSize(X, NULL, &n1);
1167:   MatGetSize(B, NULL, &N2);
1168:   MatGetSize(X, NULL, &N1);
1170:   PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, "");
1172:   PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");
1174:   KSPSetUp(ksp);
1175:   KSPSetUpOnBlocks(ksp);
1176:   if (ksp->ops->matsolve) {
1177:     if (ksp->guess_zero) MatZeroEntries(X);
1178:     PetscLogEventBegin(KSP_MatSolve, ksp, B, X, 0);
1179:     KSPGetMatSolveBatchSize(ksp, &Bbn);
1180:     /* by default, do a single solve with all columns */
1181:     if (Bbn == PETSC_DECIDE) Bbn = N2;
1183:     PetscInfo(ksp, "KSP type %s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, Bbn);
1184:     /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
1185:     if (Bbn >= N2) {
1186:       PetscUseTypeMethod(ksp, matsolve, B, X);
1187:       if (ksp->viewFinalRes) KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0);

1189:       KSPConvergedReasonViewFromOptions(ksp);

1191:       if (ksp->viewRate) {
1192:         PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT);
1193:         KSPConvergedRateView(ksp, ksp->viewerRate);
1194:         PetscViewerPopFormat(ksp->viewerRate);
1195:       }
1196:     } else {
1197:       for (n2 = 0; n2 < N2; n2 += Bbn) {
1198:         MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB);
1199:         MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX);
1200:         PetscUseTypeMethod(ksp, matsolve, vB, vX);
1201:         if (ksp->viewFinalRes) KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2);

1203:         KSPConvergedReasonViewFromOptions(ksp);

1205:         if (ksp->viewRate) {
1206:           PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT);
1207:           KSPConvergedRateView(ksp, ksp->viewerRate);
1208:           PetscViewerPopFormat(ksp->viewerRate);
1209:         }
1210:         MatDenseRestoreSubMatrix(B, &vB);
1211:         MatDenseRestoreSubMatrix(X, &vX);
1212:       }
1213:     }
1214:     if (ksp->viewMat) ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat);
1215:     if (ksp->viewPMat) ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat);
1216:     if (ksp->viewRhs) ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs);
1217:     if (ksp->viewSol) ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol);
1218:     if (ksp->view) KSPView(ksp, ksp->viewer);
1219:     PetscLogEventEnd(KSP_MatSolve, ksp, B, X, 0);
1220:   } else {
1221:     PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name);
1222:     for (n2 = 0; n2 < N2; ++n2) {
1223:       MatDenseGetColumnVecRead(B, n2, &cb);
1224:       MatDenseGetColumnVecWrite(X, n2, &cx);
1225:       KSPSolve(ksp, cb, cx);
1226:       MatDenseRestoreColumnVecWrite(X, n2, &cx);
1227:       MatDenseRestoreColumnVecRead(B, n2, &cb);
1228:     }
1229:   }
1230:   return 0;
1231: }

1233: /*@
1234:      KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1236:     Logically Collective

1238:    Input Parameters:
1239: +     ksp - iterative context
1240: -     bs - batch size

1242:    Level: advanced

1244: .seealso: [](chapter_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1245: @*/
1246: PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
1247: {
1250:   ksp->nmax = bs;
1251:   return 0;
1252: }

1254: /*@
1255:      KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.

1257:    Input Parameter:
1258: .     ksp - iterative context

1260:    Output Parameter:
1261: .     bs - batch size

1263:    Level: advanced

1265: .seealso: [](chapter_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1266: @*/
1267: PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
1268: {
1271:   *bs = ksp->nmax;
1272:   return 0;
1273: }

1275: /*@
1276:    KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`

1278:    Collective

1280:    Input Parameter:
1281: .  ksp - iterative context obtained from `KSPCreate()`

1283:    Level: beginner

1285: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1286: @*/
1287: PetscErrorCode KSPResetViewers(KSP ksp)
1288: {
1290:   if (!ksp) return 0;
1291:   PetscViewerDestroy(&ksp->viewer);
1292:   PetscViewerDestroy(&ksp->viewerPre);
1293:   PetscViewerDestroy(&ksp->viewerRate);
1294:   PetscViewerDestroy(&ksp->viewerMat);
1295:   PetscViewerDestroy(&ksp->viewerPMat);
1296:   PetscViewerDestroy(&ksp->viewerRhs);
1297:   PetscViewerDestroy(&ksp->viewerSol);
1298:   PetscViewerDestroy(&ksp->viewerMatExp);
1299:   PetscViewerDestroy(&ksp->viewerEV);
1300:   PetscViewerDestroy(&ksp->viewerSV);
1301:   PetscViewerDestroy(&ksp->viewerEVExp);
1302:   PetscViewerDestroy(&ksp->viewerFinalRes);
1303:   PetscViewerDestroy(&ksp->viewerPOpExp);
1304:   PetscViewerDestroy(&ksp->viewerDScale);
1305:   ksp->view         = PETSC_FALSE;
1306:   ksp->viewPre      = PETSC_FALSE;
1307:   ksp->viewMat      = PETSC_FALSE;
1308:   ksp->viewPMat     = PETSC_FALSE;
1309:   ksp->viewRhs      = PETSC_FALSE;
1310:   ksp->viewSol      = PETSC_FALSE;
1311:   ksp->viewMatExp   = PETSC_FALSE;
1312:   ksp->viewEV       = PETSC_FALSE;
1313:   ksp->viewSV       = PETSC_FALSE;
1314:   ksp->viewEVExp    = PETSC_FALSE;
1315:   ksp->viewFinalRes = PETSC_FALSE;
1316:   ksp->viewPOpExp   = PETSC_FALSE;
1317:   ksp->viewDScale   = PETSC_FALSE;
1318:   return 0;
1319: }

1321: /*@
1322:    KSPReset - Resets a `KSP` context to the kspsetupcalled = 0 state and removes any allocated Vecs and Mats

1324:    Collective

1326:    Input Parameter:
1327: .  ksp - iterative context obtained from `KSPCreate()`

1329:    Level: beginner

1331: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1332: @*/
1333: PetscErrorCode KSPReset(KSP ksp)
1334: {
1336:   if (!ksp) return 0;
1337:   PetscTryTypeMethod(ksp, reset);
1338:   if (ksp->pc) PCReset(ksp->pc);
1339:   if (ksp->guess) {
1340:     KSPGuess guess = ksp->guess;
1341:     PetscTryTypeMethod(guess, reset);
1342:   }
1343:   VecDestroyVecs(ksp->nwork, &ksp->work);
1344:   VecDestroy(&ksp->vec_rhs);
1345:   VecDestroy(&ksp->vec_sol);
1346:   VecDestroy(&ksp->diagonal);
1347:   VecDestroy(&ksp->truediagonal);

1349:   KSPResetViewers(ksp);

1351:   ksp->setupstage = KSP_SETUP_NEW;
1352:   ksp->nmax       = PETSC_DECIDE;
1353:   return 0;
1354: }

1356: /*@C
1357:    KSPDestroy - Destroys a `KSP` context.

1359:    Collective

1361:    Input Parameter:
1362: .  ksp - iterative context obtained from `KSPCreate()`

1364:    Level: beginner

1366: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1367: @*/
1368: PetscErrorCode KSPDestroy(KSP *ksp)
1369: {
1370:   PC pc;

1372:   if (!*ksp) return 0;
1374:   if (--((PetscObject)(*ksp))->refct > 0) {
1375:     *ksp = NULL;
1376:     return 0;
1377:   }

1379:   PetscObjectSAWsViewOff((PetscObject)*ksp);

1381:   /*
1382:    Avoid a cascading call to PCReset(ksp->pc) from the following call:
1383:    PCReset() shouldn't be called from KSPDestroy() as it is unprotected by pc's
1384:    refcount (and may be shared, e.g., by other ksps).
1385:    */
1386:   pc         = (*ksp)->pc;
1387:   (*ksp)->pc = NULL;
1388:   KSPReset((*ksp));
1389:   (*ksp)->pc = pc;
1390:   PetscTryTypeMethod((*ksp), destroy);

1392:   if ((*ksp)->transpose.use_explicittranspose) {
1393:     MatDestroy(&(*ksp)->transpose.AT);
1394:     MatDestroy(&(*ksp)->transpose.BT);
1395:     (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
1396:   }

1398:   KSPGuessDestroy(&(*ksp)->guess);
1399:   DMDestroy(&(*ksp)->dm);
1400:   PCDestroy(&(*ksp)->pc);
1401:   PetscFree((*ksp)->res_hist_alloc);
1402:   PetscFree((*ksp)->err_hist_alloc);
1403:   if ((*ksp)->convergeddestroy) (*(*ksp)->convergeddestroy)((*ksp)->cnvP);
1404:   KSPMonitorCancel((*ksp));
1405:   KSPConvergedReasonViewCancel((*ksp));
1406:   PetscViewerDestroy(&(*ksp)->eigviewer);
1407:   PetscHeaderDestroy(ksp);
1408:   return 0;
1409: }

1411: /*@
1412:     KSPSetPCSide - Sets the preconditioning side.

1414:     Logically Collective

1416:     Input Parameters:
1417: +   ksp - iterative context obtained from `KSPCreate()`
1420: -   side - the preconditioning side, where side is one of
1421: .vb
1422:       PC_LEFT - left preconditioning (default)
1423:       PC_RIGHT - right preconditioning
1424:       PC_SYMMETRIC - symmetric preconditioning
1425: .ve

1427:     Options Database Keys:
1428: .   -ksp_pc_side <right,left,symmetric> - `KSP` preconditioner side

1430:     Notes:
1431:     Left preconditioning is used by default for most Krylov methods except `KSPFGMRES`, which only supports right preconditioning.

1433:     For many methods, changing the side of the preconditioner also changes the norm type that is used; see `KSPSetNormType()`.

1435:     Symmetric preconditioning is currently available only for the `KSPQCG` method. Note, however, that
1436:     symmetric preconditioning can be emulated by using either right or left
1437:     preconditioning and a pre or post processing step.

1439:     Setting the PC side often affects the default norm type.  See `KSPSetNormType()` for details.

1441:     Level: intermediate

1443: .seealso: [](chapter_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`
1444: @*/
1445: PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
1446: {
1449:   ksp->pc_side = ksp->pc_side_set = side;
1450:   return 0;
1451: }
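
/*
   Illustrative usage sketch (not part of the original itfunc.c): selecting right
   preconditioning before the solve. The objects A, b, x are assumed to have been
   created and filled elsewhere.

     KSP ksp;
     KSPCreate(PETSC_COMM_WORLD, &ksp);
     KSPSetOperators(ksp, A, A);
     KSPSetType(ksp, KSPGMRES);
     KSPSetPCSide(ksp, PC_RIGHT);   // with right preconditioning GMRES defaults to the unpreconditioned residual norm
     KSPSetFromOptions(ksp);
     KSPSolve(ksp, b, x);
     KSPDestroy(&ksp);
*/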

1453: /*@
1454:     KSPGetPCSide - Gets the preconditioning side.

1456:     Not Collective

1458:     Input Parameter:
1459: .   ksp - iterative context obtained from `KSPCreate()`

1461:     Output Parameter:
1462: .   side - the preconditioning side, where side is one of
1463: .vb
1464:       PC_LEFT - left preconditioning (default)
1465:       PC_RIGHT - right preconditioning
1466:       PC_SYMMETRIC - symmetric preconditioning
1467: .ve

1469:     Level: intermediate

1471: .seealso: [](chapter_ksp), `KSPSetPCSide()`, `KSP`
1472: @*/
1473: PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
1474: {
1477:   KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side);
1478:   *side = ksp->pc_side;
1479:   return 0;
1480: }

1482: /*@
1483:    KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1484:    iteration tolerances used by the default `KSP` convergence tests.

1486:    Not Collective

1488:    Input Parameter:
1489: .  ksp - the Krylov subspace context

1491:    Output Parameters:
1492: +  rtol - the relative convergence tolerance
1493: .  abstol - the absolute convergence tolerance
1494: .  dtol - the divergence tolerance
1495: -  maxits - maximum number of iterations

1497:    Notes:
1498:    The user can specify NULL for any parameter that is not needed.

1500:    Level: intermediate

1504: .seealso: [](chapter_ksp), `KSPSetTolerances()`, `KSP`
1505: @*/
1506: PetscErrorCode KSPGetTolerances(KSP ksp, PetscReal *rtol, PetscReal *abstol, PetscReal *dtol, PetscInt *maxits)
1507: {
1509:   if (abstol) *abstol = ksp->abstol;
1510:   if (rtol) *rtol = ksp->rtol;
1511:   if (dtol) *dtol = ksp->divtol;
1512:   if (maxits) *maxits = ksp->max_it;
1513:   return 0;
1514: }

1516: /*@
1517:    KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1518:    iteration tolerances used by the default `KSP` convergence testers.

1520:    Logically Collective

1522:    Input Parameters:
1523: +  ksp - the Krylov subspace context
1524: .  rtol - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1525: .  abstol - the absolute convergence tolerance, absolute size of the (possibly preconditioned) residual norm
1526: .  dtol - the divergence tolerance, amount the (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1527: -  maxits - maximum number of iterations to use

1529:    Options Database Keys:
1530: +  -ksp_atol <abstol> - Sets abstol
1531: .  -ksp_rtol <rtol> - Sets rtol
1532: .  -ksp_divtol <dtol> - Sets dtol
1533: -  -ksp_max_it <maxits> - Sets maxits

1535:    Notes:
1536:    Use `PETSC_DEFAULT` to retain the default value of any of the tolerances.

1538:    See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test.  See also `KSPSetConvergenceTest()`
1539:    for setting user-defined stopping criteria.

1541:    Level: intermediate

1545: .seealso: [](chapter_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`
1546: @*/
1547: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1548: {

1555:   if (rtol != PETSC_DEFAULT) {
1557:     ksp->rtol = rtol;
1558:   }
1559:   if (abstol != PETSC_DEFAULT) {
1561:     ksp->abstol = abstol;
1562:   }
1563:   if (dtol != PETSC_DEFAULT) {
1565:     ksp->divtol = dtol;
1566:   }
1567:   if (maxits != PETSC_DEFAULT) {
1569:     ksp->max_it = maxits;
1570:   }
1571:   return 0;
1572: }
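
/*
   Illustrative usage sketch (not part of the original itfunc.c): tightening only the
   relative tolerance and the iteration cap while keeping the other defaults via
   PETSC_DEFAULT; the same effect as -ksp_rtol 1e-8 -ksp_max_it 200 on the command line.

     KSPSetTolerances(ksp, 1.e-8, PETSC_DEFAULT, PETSC_DEFAULT, 200);
*/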

1574: /*@
1575:    KSPSetInitialGuessNonzero - Tells the iterative solver that the
1576:    initial guess is nonzero; otherwise `KSP` assumes the initial guess
1577:    is to be zero (and thus zeros it out before solving).

1579:    Logically Collective

1581:    Input Parameters:
1582: +  ksp - iterative context obtained from `KSPCreate()`
1583: -  flg - `PETSC_TRUE` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero

1585:    Options Database Key:
1586: .  -ksp_initial_guess_nonzero <true,false> - use nonzero initial guess

1588:    Level: beginner

1590:    Notes:
1591:     If this is not called, the X vector is zeroed in the call to `KSPSolve()`.

1593: .seealso: [](chapter_ksp), `KSPGetInitialGuessNonzero()`, `KSPSetGuessType()`, `KSPGuessType`, `KSP`
1594: @*/
1595: PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
1596: {
1599:   ksp->guess_zero = (PetscBool) !(int)flg;
1600:   return 0;
1601: }
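
/*
   Illustrative usage sketch (not part of the original itfunc.c): reusing the previous
   solution as the starting point for the next solve instead of starting from zero.
   The vector x is assumed to still hold the result of an earlier KSPSolve().

     KSPSetInitialGuessNonzero(ksp, PETSC_TRUE);  // do not zero x inside KSPSolve()
     KSPSolve(ksp, b, x);                         // iteration starts from the current entries of x
*/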

1603: /*@
1604:    KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1605:    a zero initial guess.

1607:    Not Collective

1609:    Input Parameter:
1610: .  ksp - iterative context obtained from `KSPCreate()`

1612:    Output Parameter:
1613: .  flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`

1615:    Level: intermediate

1617: .seealso: [](chapter_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1618: @*/
1619: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1620: {
1623:   if (ksp->guess_zero) *flag = PETSC_FALSE;
1624:   else *flag = PETSC_TRUE;
1625:   return 0;
1626: }

1628: /*@
1629:    KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error as soon as it detects that the solver has failed to converge.

1631:    Logically Collective

1633:    Input Parameters:
1634: +  ksp - iterative context obtained from `KSPCreate()`
1635: -  flg - `PETSC_TRUE` indicates you want the error generated

1637:    Options Database Key:
1638: .  -ksp_error_if_not_converged <true,false> - generate an error and stop the program

1640:    Level: intermediate

1642:    Notes:
1643:     Normally PETSc continues if a linear solver fails to converge; you can call `KSPGetConvergedReason()` after a `KSPSolve()`
1644:     to determine if it has converged.

1646:    A `KSP_DIVERGED_ITS` will not generate an error in a `KSPSolve()` inside a nested linear solver

1648: .seealso: [](chapter_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
1649: @*/
1650: PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
1651: {
1654:   ksp->errorifnotconverged = flg;
1655:   return 0;
1656: }

1658: /*@
1659:    KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?

1661:    Not Collective

1663:    Input Parameter:
1664: .  ksp - iterative context obtained from `KSPCreate()`

1666:    Output Parameter:
1667: .  flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`

1669:    Level: intermediate

1671: .seealso: [](chapter_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
1672: @*/
1673: PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
1674: {
1677:   *flag = ksp->errorifnotconverged;
1678:   return 0;
1679: }

1681: /*@
1682:    KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` to compute the initial guess (The Knoll trick)

1684:    Logically Collective

1686:    Input Parameters:
1687: +  ksp - iterative context obtained from `KSPCreate()`
1688: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1690:    Level: advanced

1692:    Developer Note: the Knoll trick is not currently implemented using the `KSPGuess` class

1694: .seealso: [](chapter_ksp), `KSPGetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1695: @*/
1696: PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
1697: {
1700:   ksp->guess_knoll = flg;
1701:   return 0;
1702: }

1704: /*@
1705:    KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using `PCApply(pc,b,...)` to compute
1706:      the initial guess)

1708:    Not Collective

1710:    Input Parameter:
1711: .  ksp - iterative context obtained from `KSPCreate()`

1713:    Output Parameter:
1714: .  flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`

1716:    Level: advanced

1718: .seealso: [](chapter_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1719: @*/
1720: PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
1721: {
1724:   *flag = ksp->guess_knoll;
1725:   return 0;
1726: }

1728: /*@
1729:    KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
1730:    values will be calculated via a Lanczos or Arnoldi process as the linear
1731:    system is solved.

1733:    Not Collective

1735:    Input Parameter:
1736: .  ksp - iterative context obtained from `KSPCreate()`

1738:    Output Parameter:
1739: .  flg - `PETSC_TRUE` or `PETSC_FALSE`

1741:    Options Database Key:
1742: .  -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1744:    Notes:
1745:    Currently this option is not valid for all iterative methods.

1747:    Many users may just want to use the monitoring routine
1748:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1749:    to print the singular values at each iteration of the linear solve.

1751:    Level: advanced

1753: .seealso: [](chapter_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1754: @*/
1755: PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
1756: {
1759:   *flg = ksp->calc_sings;
1760:   return 0;
1761: }

1763: /*@
1764:    KSPSetComputeSingularValues - Sets a flag so that the extreme singular
1765:    values will be calculated via a Lanczos or Arnoldi process as the linear
1766:    system is solved.

1768:    Logically Collective

1770:    Input Parameters:
1771: +  ksp - iterative context obtained from `KSPCreate()`
1772: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1774:    Options Database Key:
1775: .  -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`

1777:    Notes:
1778:    Currently this option is not valid for all iterative methods.

1780:    Many users may just want to use the monitoring routine
1781:    `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1782:    to print the singular values at each iteration of the linear solve.

1784:    Level: advanced

1786: .seealso: [](chapter_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1787: @*/
1788: PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
1789: {
1792:   ksp->calc_sings = flg;
1793:   return 0;
1794: }
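
/*
   Illustrative usage sketch (not part of the original itfunc.c): requesting singular value
   estimates before KSPSetUp()/KSPSolve() and querying the extreme values afterwards.

     PetscReal emax, emin;

     KSPSetComputeSingularValues(ksp, PETSC_TRUE);       // must be set before KSPSetUp()
     KSPSolve(ksp, b, x);
     KSPComputeExtremeSingularValues(ksp, &emax, &emin); // estimates from the Krylov process
*/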

1796: /*@
1797:    KSPGetComputeEigenvalues - Gets the flag indicating whether the extreme eigenvalues
1798:    will be calculated via a Lanczos or Arnoldi process as the linear
1799:    system is solved.

1801:    Not Collective

1803:    Input Parameter:
1804: .  ksp - iterative context obtained from `KSPCreate()`

1806:    Output Parameter:
1807: .  flg - `PETSC_TRUE` or `PETSC_FALSE`

1809:    Notes:
1810:    Currently this option is not valid for all iterative methods.

1812:    Level: advanced

1814: .seealso: [](chapter_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1815: @*/
1816: PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
1817: {
1820:   *flg = ksp->calc_sings;
1821:   return 0;
1822: }

1824: /*@
1825:    KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
1826:    will be calculated via a Lanczos or Arnoldi process as the linear
1827:    system is solved.

1829:    Logically Collective

1831:    Input Parameters:
1832: +  ksp - iterative context obtained from `KSPCreate()`
1833: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1835:    Notes:
1836:    Currently this option is not valid for all iterative methods.

1838:    Level: advanced

1840: .seealso: [](chapter_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1841: @*/
1842: PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
1843: {
1846:   ksp->calc_sings = flg;
1847:   return 0;
1848: }

1850: /*@
1851:    KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
1852:    will be calculated via a Lanczos or Arnoldi process as the linear
1853:    system is solved.

1855:    Logically Collective

1857:    Input Parameters:
1858: +  ksp - iterative context obtained from `KSPCreate()`
1859: -  flg - `PETSC_TRUE` or `PETSC_FALSE`

1861:    Notes:
1862:    Currently this option is only valid for the GMRES method.

1864:    Level: advanced

1866: .seealso: [](chapter_ksp), `KSPComputeRitz()`, `KSP`
1867: @*/
1868: PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
1869: {
1872:   ksp->calc_ritz = flg;
1873:   return 0;
1874: }

1876: /*@
1877:    KSPGetRhs - Gets the right-hand-side vector for the linear system to
1878:    be solved.

1880:    Not Collective

1882:    Input Parameter:
1883: .  ksp - iterative context obtained from `KSPCreate()`

1885:    Output Parameter:
1886: .  r - right-hand-side vector

1888:    Level: developer

1890: .seealso: [](chapter_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
1891: @*/
1892: PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
1893: {
1896:   *r = ksp->vec_rhs;
1897:   return 0;
1898: }

1900: /*@
1901:    KSPGetSolution - Gets the location of the solution for the
1902:    linear system to be solved.  Note that this may not be where the solution
1903:    is stored during the iterative process; see `KSPBuildSolution()`.

1905:    Not Collective

1907:    Input Parameters:
1908: .  ksp - iterative context obtained from `KSPCreate()`

1910:    Output Parameters:
1911: .  v - solution vector

1913:    Level: developer

1915: .seealso: [](chapter_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
1916: @*/
1917: PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
1918: {
1921:   *v = ksp->vec_sol;
1922:   return 0;
1923: }

1925: /*@
1926:    KSPSetPC - Sets the preconditioner to be used to calculate the
1927:    application of the preconditioner on a vector.

1929:    Collective

1931:    Input Parameters:
1932: +  ksp - iterative context obtained from `KSPCreate()`
1933: -  pc   - the preconditioner object (can be NULL)

1935:    Notes:
1936:    Use `KSPGetPC()` to retrieve the preconditioner context.

1938:    Level: developer

1940: .seealso: [](chapter_ksp), `KSPGetPC()`, `KSP`
1941: @*/
1942: PetscErrorCode KSPSetPC(KSP ksp, PC pc)
1943: {
1945:   if (pc) {
1948:   }
1949:   PetscObjectReference((PetscObject)pc);
1950:   PCDestroy(&ksp->pc);
1951:   ksp->pc = pc;
1952:   return 0;
1953: }

1955: /*@
1956:    KSPGetPC - Returns a pointer to the preconditioner context
1957:    set with `KSPSetPC()`.

1959:    Not Collective

1961:    Input Parameters:
1962: .  ksp - iterative context obtained from `KSPCreate()`

1964:    Output Parameter:
1965: .  pc - preconditioner context

1967:    Level: developer

1969: .seealso: [](chapter_ksp), `KSPSetPC()`, `KSP`
1970: @*/
1971: PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
1972: {
1975:   if (!ksp->pc) {
1976:     PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc);
1977:     PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0);
1978:     PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options);
1979:   }
1980:   *pc = ksp->pc;
1981:   return 0;
1982: }
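
/*
   Illustrative usage sketch (not part of the original itfunc.c): the common pattern of
   retrieving the inner PC (created lazily by KSPGetPC() if necessary) and configuring it
   directly from code rather than from the options database.

     PC pc;
     KSPGetPC(ksp, &pc);
     PCSetType(pc, PCJACOBI);   // any PCType could be chosen here
*/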

1984: /*@
1985:    KSPMonitor - runs the user provided monitor routines, if they exist

1987:    Collective

1989:    Input Parameters:
1990: +  ksp - iterative context obtained from `KSPCreate()`
1991: .  it - iteration number
1992: -  rnorm - relative norm of the residual

1994:    Notes:
1995:    This routine is called by the `KSP` implementations.
1996:    It does not typically need to be called by the user.

1998:    Level: developer

2000: .seealso: [](chapter_ksp), `KSPMonitorSet()`
2001: @*/
2002: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2003: {
2004:   PetscInt i, n = ksp->numbermonitors;

2006:   for (i = 0; i < n; i++) (*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]);
2007:   return 0;
2008: }

2010: /*@C
2011:    KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor
2012:    the residual/error etc.

2014:    Logically Collective

2016:    Input Parameters:
2017: +  ksp - iterative context obtained from `KSPCreate()`
2018: .  monitor - pointer to function (if this is NULL, it turns off monitoring)
2019: .  mctx    - [optional] context for private data for the
2020:              monitor routine (use NULL if no context is desired)
2021: -  monitordestroy - [optional] routine that frees monitor context
2022:           (may be NULL)

2024:    Calling Sequence of monitor:
2025: $     monitor (KSP ksp, PetscInt it, PetscReal rnorm, void *mctx)

2027: +  ksp - iterative context obtained from `KSPCreate()`
2028: .  it - iteration number
2029: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2030: -  mctx  - optional monitoring context, as set by `KSPMonitorSet()`

2032:    Options Database Keys:
2033: +    -ksp_monitor               - sets `KSPMonitorResidual()`
2034: .    -ksp_monitor draw          - sets `KSPMonitorResidualDraw()` and plots residual
2035: .    -ksp_monitor draw::draw_lg - sets `KSPMonitorResidualDrawLG()` and plots residual
2036: .    -ksp_monitor_pause_final   - Pauses any graphics when the solve finishes (only works for internal monitors)
2037: .    -ksp_monitor_true_residual - sets `KSPMonitorTrueResidual()`
2038: .    -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2039: .    -ksp_monitor_max           - sets `KSPMonitorTrueResidualMax()`
2040: .    -ksp_monitor_singular_value - sets `KSPMonitorSingularValue()`
2041: -    -ksp_monitor_cancel - cancels all monitors that have
2042:                           been hardwired into a code by
2043:                           calls to `KSPMonitorSet()`, but
2044:                           does not cancel those set via
2045:                           the options database.

2047:    Notes:
2048:    The default is to do nothing.  To print the residual, or preconditioned
2049:    residual if `KSPSetNormType`(ksp,`KSP_NORM_PRECONDITIONED`) was called, use
2050:    `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2051:    context.

2053:    Several different monitoring routines may be set by calling
2054:    `KSPMonitorSet()` multiple times; all will be called in the
2055:    order in which they were set.

2057:    Fortran Notes:
2058:     Only a single monitor function can be set for each `KSP` object

2060:    Level: beginner

2062: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSPMonitorCancel()`, `KSP`
2063: @*/
2064: PetscErrorCode KSPMonitorSet(KSP ksp, PetscErrorCode (*monitor)(KSP, PetscInt, PetscReal, void *), void *mctx, PetscErrorCode (*monitordestroy)(void **))
2065: {
2066:   PetscInt  i;
2067:   PetscBool identical;

2070:   for (i = 0; i < ksp->numbermonitors; i++) {
2071:     PetscMonitorCompare((PetscErrorCode(*)(void))monitor, mctx, monitordestroy, (PetscErrorCode(*)(void))ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical);
2072:     if (identical) return 0;
2073:   }
2075:   ksp->monitor[ksp->numbermonitors]          = monitor;
2076:   ksp->monitordestroy[ksp->numbermonitors]   = monitordestroy;
2077:   ksp->monitorcontext[ksp->numbermonitors++] = (void *)mctx;
2078:   return 0;
2079: }
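
/*
   Illustrative sketch (not part of the original itfunc.c): a minimal user monitor with
   the calling sequence documented above, registered with no context and no destroy
   routine. The name MyMonitor is hypothetical.

     static PetscErrorCode MyMonitor(KSP ksp, PetscInt it, PetscReal rnorm, void *mctx)
     {
       PetscPrintf(PetscObjectComm((PetscObject)ksp), "  iteration %" PetscInt_FMT ": residual norm %g\n", it, (double)rnorm);
       return 0;
     }

     KSPMonitorSet(ksp, MyMonitor, NULL, NULL);
*/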

2081: /*@
2082:    KSPMonitorCancel - Clears all monitors for a `KSP` object.

2084:    Logically Collective

2086:    Input Parameters:
2087: .  ksp - iterative context obtained from `KSPCreate()`

2089:    Options Database Key:
2090: .  -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.

2092:    Level: intermediate

2094: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2095: @*/
2096: PetscErrorCode KSPMonitorCancel(KSP ksp)
2097: {
2098:   PetscInt i;

2101:   for (i = 0; i < ksp->numbermonitors; i++) {
2102:     if (ksp->monitordestroy[i]) (*ksp->monitordestroy[i])(&ksp->monitorcontext[i]);
2103:   }
2104:   ksp->numbermonitors = 0;
2105:   return 0;
2106: }

2108: /*@C
2109:    KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.

2111:    Not Collective

2113:    Input Parameter:
2114: .  ksp - iterative context obtained from `KSPCreate()`

2116:    Output Parameter:
2117: .  ctx - monitoring context

2119:    Level: intermediate

2121: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSP`
2122: @*/
2123: PetscErrorCode KSPGetMonitorContext(KSP ksp, void *ctx)
2124: {
2126:   *(void **)ctx = ksp->monitorcontext[0];
2127:   return 0;
2128: }

2130: /*@
2131:    KSPSetResidualHistory - Sets the array used to hold the residual history.
2132:    If set, this array will contain the residual norms computed at each
2133:    iteration of the solver.

2135:    Not Collective

2137:    Input Parameters:
2138: +  ksp - iterative context obtained from `KSPCreate()`
2139: .  a   - array to hold history
2140: .  na  - size of a
2141: -  reset - `PETSC_TRUE` indicates the history counter is reset to zero
2142:            for each new linear solve

2144:    Level: advanced

2146:    Notes:
2147:    If provided, the array is NOT freed by PETSc so the user needs to keep track of it and destroy it once the `KSP` object is destroyed.
2148:    If 'a' is NULL then space is allocated for the history. If 'na' is `PETSC_DECIDE` or `PETSC_DEFAULT` then a
2149:    default array of length 10000 is allocated.

2151:    If the array is not long enough then once the number of iterations exceeds the array length `KSPSolve()` stops recording the history

2153: .seealso: [](chapter_ksp), `KSPGetResidualHistory()`, `KSP`
2154: @*/
2155: PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2156: {

2159:   PetscFree(ksp->res_hist_alloc);
2160:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2161:     ksp->res_hist     = a;
2162:     ksp->res_hist_max = (size_t)na;
2163:   } else {
2164:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
2165:     else ksp->res_hist_max = 10000; /* like default ksp->max_it */
2166:     PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc);

2168:     ksp->res_hist = ksp->res_hist_alloc;
2169:   }
2170:   ksp->res_hist_len   = 0;
2171:   ksp->res_hist_reset = reset;
2172:   return 0;
2173: }
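
/*
   Illustrative usage sketch (not part of the original itfunc.c): letting PETSc allocate
   the history array, solving, and then reading back the recorded residual norms with
   KSPGetResidualHistory().

     const PetscReal *hist;
     PetscInt         nhist;

     KSPSetResidualHistory(ksp, NULL, PETSC_DECIDE, PETSC_TRUE); // PETSc-allocated array, reset at each solve
     KSPSolve(ksp, b, x);
     KSPGetResidualHistory(ksp, &hist, &nhist);
     for (PetscInt i = 0; i < nhist; i++) PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)hist[i]);
*/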

2175: /*@C
2176:    KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.

2178:    Not Collective

2180:    Input Parameter:
2181: .  ksp - iterative context obtained from `KSPCreate()`

2183:    Output Parameters:
2184: +  a   - pointer to array to hold history (or NULL)
2185: -  na  - number of used entries in a (or NULL)

2187:    Level: advanced

2189:    Note:
2190:      This array is borrowed and should not be freed by the caller.

2192:      Can only be called after a `KSPSetResidualHistory()`; otherwise a and na are set to zero

2194:    Fortran Note:
2195:      The Fortran version of this routine has a calling sequence
2196: $   call `KSPGetResidualHistory`(`KSP` ksp, integer na, integer ierr)
2197:     Note that you must have previously passed a Fortran array into `KSPSetResidualHistory()`; the residual
2198:     values are accessed through that array. Only na (the number of
2199:     residual norms currently held) is set here.

2201: .seealso: [](chapter_ksp), `KSPSetResidualHistory()`, `KSP`
2202: @*/
2203: PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2204: {
2206:   if (a) *a = ksp->res_hist;
2207:   if (na) *na = (PetscInt)ksp->res_hist_len;
2208:   return 0;
2209: }

2211: /*@
2212:   KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.

2214:   Not Collective

2216:   Input Parameters:
2217: + ksp   - iterative context obtained from `KSPCreate()`
2218: . a     - array to hold history
2219: . na    - size of a
2220: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve

2222:   Level: advanced

2224:   Notes:
2225:   If provided, the array is NOT freed by PETSc so the user needs to keep track of it and destroy it once the `KSP` object is destroyed.
2226:   If 'a' is NULL then space is allocated for the history. If 'na' is `PETSC_DECIDE` or `PETSC_DEFAULT` then a default array of length 10000 is allocated.

2228:    If the array is not long enough then once the number of iterations exceeds the array length `KSPSolve()` stops recording the history

2230: .seealso: [](chapter_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2231: @*/
2232: PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2233: {

2236:   PetscFree(ksp->err_hist_alloc);
2237:   if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2238:     ksp->err_hist     = a;
2239:     ksp->err_hist_max = (size_t)na;
2240:   } else {
2241:     if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
2242:     else ksp->err_hist_max = 10000; /* like default ksp->max_it */
2243:     PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc);

2245:     ksp->err_hist = ksp->err_hist_alloc;
2246:   }
2247:   ksp->err_hist_len   = 0;
2248:   ksp->err_hist_reset = reset;
2249:   return 0;
2250: }

2252: /*@C
2253:   KSPGetErrorHistory - Gets the array used to hold the error history and the number of error norms it contains.

2255:   Not Collective

2257:   Input Parameter:
2258: . ksp - iterative context obtained from `KSPCreate()`

2260:   Output Parameters:
2261: + a  - pointer to array to hold history (or NULL)
2262: - na - number of used entries in a (or NULL)

2264:   Level: advanced

2266:   Notes:
2267:   This array is borrowed and should not be freed by the caller.
2268:   Can only be called after a `KSPSetErrorHistory()`; otherwise a and na are set to zero

2270:   Fortran Note:
2271:   The Fortran version of this routine has a calling sequence
2272: $   call KSPGetErrorHistory(KSP ksp, integer na, integer ierr)
2273:   Note that you must have previously passed a Fortran array into `KSPSetErrorHistory()`; the error
2274:   values are accessed through that array. Only na (the number of
2275:   error norms currently held) is set here.

2277: .seealso: [](chapter_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2278: @*/
2279: PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2280: {
2282:   if (a) *a = ksp->err_hist;
2283:   if (na) *na = (PetscInt)ksp->err_hist_len;
2284:   return 0;
2285: }

2287: /*
2288:   KSPComputeConvergenceRate - Compute the convergence rate for the iteration

2290:   Not collective

2292:   Input Parameter:
2293: . ksp - The `KSP`

2295:   Output Parameters:
2296: + cr   - The residual contraction rate
2297: . rRsq - The coefficient of determination, R^2, indicating the linearity of the data
2298: . ce   - The error contraction rate
2299: - eRsq - The coefficient of determination, R^2, indicating the linearity of the data

2301:   Note:
2302:   Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
2303:   the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$,
2304:   see also https://en.wikipedia.org/wiki/Coefficient_of_determination

2306:   Level: advanced

2308: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedRateView()`
2309: */
2310: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2311: {
2312:   PetscReal const *hist;
2313:   PetscReal       *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2314:   PetscInt         n, k;

2316:   if (cr || rRsq) {
2317:     KSPGetResidualHistory(ksp, &hist, &n);
2318:     if (!n) {
2319:       if (cr) *cr = 0.0;
2320:       if (rRsq) *rRsq = -1.0;
2321:     } else {
2322:       PetscMalloc2(n, &x, n, &y);
2323:       for (k = 0; k < n; ++k) {
2324:         x[k] = k;
2325:         y[k] = PetscLogReal(hist[k]);
2326:         mean += y[k];
2327:       }
2328:       mean /= n;
2329:       PetscLinearRegression(n, x, y, &slope, &intercept);
2330:       for (k = 0; k < n; ++k) {
2331:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2332:         var += PetscSqr(y[k] - mean);
2333:       }
2334:       PetscFree2(x, y);
2335:       if (cr) *cr = PetscExpReal(slope);
2336:       if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2337:     }
2338:   }
2339:   if (ce || eRsq) {
2340:     KSPGetErrorHistory(ksp, &hist, &n);
2341:     if (!n) {
2342:       if (ce) *ce = 0.0;
2343:       if (eRsq) *eRsq = -1.0;
2344:     } else {
2345:       PetscMalloc2(n, &x, n, &y);
2346:       for (k = 0; k < n; ++k) {
2347:         x[k] = k;
2348:         y[k] = PetscLogReal(hist[k]);
2349:         mean += y[k];
2350:       }
2351:       mean /= n;
2352:       PetscLinearRegression(n, x, y, &slope, &intercept);
2353:       for (k = 0; k < n; ++k) {
2354:         res += PetscSqr(y[k] - (slope * x[k] + intercept));
2355:         var += PetscSqr(y[k] - mean);
2356:       }
2357:       PetscFree2(x, y);
2358:       if (ce) *ce = PetscExpReal(slope);
2359:       if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2360:     }
2361:   }
2362:   return 0;
2363: }

2365: /*@C
2366:    KSPSetConvergenceTest - Sets the function to be used to determine convergence.

2368:    Logically Collective

2370:    Input Parameters:
2371: +  ksp - iterative context obtained from `KSPCreate()`
2372: .  converge - pointer to the function
2373: .  cctx    - context for private data for the convergence routine (may be null)
2374: -  destroy - a routine for destroying the context (may be null)

2376:    Calling sequence of converge:
2377: $     converge (`KSP` ksp, `PetscInt` it, `PetscReal` rnorm, `KSPConvergedReason` *reason,void *mctx)

2379: +  ksp - iterative context obtained from `KSPCreate()`
2380: .  it - iteration number
2381: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2382: .  reason - the reason why it has converged or diverged
2383: -  cctx  - optional convergence context, as set by `KSPSetConvergenceTest()`

2385:    Level: advanced

2387:    Notes:
2388:    Must be called after the `KSP` type has been set so put this after
2389:    a call to `KSPSetType()`, or `KSPSetFromOptions()`.

2391:    The default convergence test, `KSPConvergedDefault()`, aborts if the
2392:    residual grows to more than 10000 times the initial residual.

2394:    The default is a combination of relative and absolute tolerances.
2395:    The residual value that is tested may be an approximation; routines
2396:    that need exact values should compute them.

2398:    In the default PETSc convergence test, the precise values of reason
2399:    are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.

2401: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2402: @*/
2403: PetscErrorCode KSPSetConvergenceTest(KSP ksp, PetscErrorCode (*converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void *cctx, PetscErrorCode (*destroy)(void *))
2404: {
2406:   if (ksp->convergeddestroy) (*ksp->convergeddestroy)(ksp->cnvP);
2407:   ksp->converged        = converge;
2408:   ksp->convergeddestroy = destroy;
2409:   ksp->cnvP             = (void *)cctx;
2410:   return 0;
2411: }
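
/*
   Illustrative sketch (not part of the original itfunc.c): a simple user-defined test
   that declares convergence once the residual norm drops below a fixed value and never
   reports divergence. MyConverged and the hard-coded tolerance are hypothetical.

     static PetscErrorCode MyConverged(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *cctx)
     {
       *reason = KSP_CONVERGED_ITERATING;
       if (rnorm < 1.e-10) *reason = KSP_CONVERGED_ATOL;
       return 0;
     }

     KSPSetConvergenceTest(ksp, MyConverged, NULL, NULL);
*/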

2413: /*@C
2414:    KSPGetConvergenceTest - Gets the function to be used to determine convergence.

2416:    Logically Collective

2418:    Input Parameter:
2419: .   ksp - iterative context obtained from `KSPCreate()`

2421:    Output Parameters:
2422: +  converge - pointer to convergence test function
2423: .  cctx    - context for private data for the convergence routine (may be null)
2424: -  destroy - a routine for destroying the context (may be null)

2426:    Calling sequence of converge:
2427: $     converge (`KSP` ksp, `PetscInt` it, `PetscReal` rnorm, `KSPConvergedReason` *reason,void *mctx)

2429: +  ksp - iterative context obtained from `KSPCreate()`
2430: .  it - iteration number
2431: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2432: .  reason - the reason why it has converged or diverged
2433: -  cctx  - optional convergence context, as set by `KSPSetConvergenceTest()`

2435:    Level: advanced

2437: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2438: @*/
2439: PetscErrorCode KSPGetConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void **cctx, PetscErrorCode (**destroy)(void *))
2440: {
2442:   if (converge) *converge = ksp->converged;
2443:   if (destroy) *destroy = ksp->convergeddestroy;
2444:   if (cctx) *cctx = ksp->cnvP;
2445:   return 0;
2446: }

2448: /*@C
2449:    KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context

2451:    Logically Collective

2453:    Input Parameter:
2454: .   ksp - iterative context obtained from `KSPCreate()`

2456:    Output Parameters:
2457: +  converge - pointer to convergence test function
2458: .  cctx    - context for private data for the convergence routine
2459: -  destroy - a routine for destroying the context

2461:    Calling sequence of converge:
2462: $     converge (`KSP` ksp, `PetscInt` it, `PetscReal` rnorm, `KSPConvergedReason` *reason,void *mctx)

2464: +  ksp - iterative context obtained from `KSPCreate()`
2465: .  it - iteration number
2466: .  rnorm - (estimated) 2-norm of (preconditioned) residual
2467: .  reason - the reason why it has converged or diverged
2468: -  cctx  - optional convergence context, as set by `KSPSetConvergenceTest()`

2470:    Level: advanced

2472:    Note:
2473:    This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`) and then calling
2474:    `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed by `KSPSetConvergenceTest()` the original context information
2475:    would be destroyed and hence the transferred context would be invalid and trigger a crash on use

2477: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2478: @*/
2479: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void **cctx, PetscErrorCode (**destroy)(void *))
2480: {
2482:   *converge             = ksp->converged;
2483:   *destroy              = ksp->convergeddestroy;
2484:   *cctx                 = ksp->cnvP;
2485:   ksp->converged        = NULL;
2486:   ksp->cnvP             = NULL;
2487:   ksp->convergeddestroy = NULL;
2488:   return 0;
2489: }

2491: /*@C
2492:    KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.

2494:    Not Collective

2496:    Input Parameter:
2497: .  ksp - iterative context obtained from `KSPCreate()`

2499:    Output Parameter:
2500: .  ctx - monitoring context

2502:    Level: advanced

2504: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2505: @*/
2506: PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
2507: {
2509:   *(void **)ctx = ksp->cnvP;
2510:   return 0;
2511: }

2513: /*@C
2514:    KSPBuildSolution - Builds the approximate solution in a vector provided.

2516:    Collective

2518:    Input Parameter:
2519: .  ctx - iterative context obtained from `KSPCreate()`

2521:    Output Parameter:
2522:    Provide exactly one of
2523: +  v - location to stash solution.
2524: -  V - the solution is returned in this location. This vector is created
2525:        internally. This vector should NOT be destroyed by the user with
2526:        `VecDestroy()`.

2528:    Notes:
2529:    This routine can be used in one of two ways
2530: .vb
2531:       KSPBuildSolution(ksp,NULL,&V);
2532:    or
2533:       KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2534: .ve
2535:    In the first case an internal vector is allocated to store the solution
2536:    (the user cannot destroy this vector). In the second case the solution
2537:    is generated in the vector that the user provides. Note that for certain
2538:    methods, such as `KSPCG`, the second case requires a copy of the solution,
2539:    while in the first case the call is essentially free since it simply
2540:    returns the vector where the solution already is stored. For some methods
2541:    like `KSPGMRES` this is a reasonably expensive operation and should only be
2542:    used if truly needed.

2544:    Level: developer

2546: .seealso: [](chapter_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2547: @*/
2548: PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
2549: {
2552:   if (!V) V = &v;
2553:   PetscUseTypeMethod(ksp, buildsolution, v, V);
2554:   return 0;
2555: }

2557: /*@C
2558:    KSPBuildResidual - Builds the residual in a vector provided.

2560:    Collective

2562:    Input Parameter:
2563: .  ksp - iterative context obtained from `KSPCreate()`

2565:    Output Parameters:
2566: +  v - optional location to stash residual.  If v is not provided,
2567:        then a location is generated.
2568: .  t - work vector.  If not provided then one is generated.
2569: -  V - the residual

2571:    Note:
2572:    Regardless of whether or not v is provided, the residual is
2573:    returned in V.

2575:    Level: advanced

2577: .seealso: [](chapter_ksp), `KSP`, `KSPBuildSolution()`
2578: @*/
2579: PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
2580: {
2581:   PetscBool flag = PETSC_FALSE;
2582:   Vec       w = v, tt = t;

2585:   if (!w) { VecDuplicate(ksp->vec_rhs, &w); }
2586:   if (!tt) {
2587:     VecDuplicate(ksp->vec_sol, &tt);
2588:     flag = PETSC_TRUE;
2589:   }
2590:   PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
2591:   if (flag) VecDestroy(&tt);
2592:   return 0;
2593: }

2595: /*@
2596:    KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2597:      before solving. This actually CHANGES the matrix (and right hand side).

2599:    Logically Collective

2601:    Input Parameters:
2602: +  ksp - the `KSP` context
2603: -  scale - `PETSC_TRUE` or `PETSC_FALSE`

2605:    Options Database Keys:
2606: +   -ksp_diagonal_scale - diagonally scale the system before the solve
2607: -   -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve

2609:    Level: advanced

2611:     Notes:
2612:     Transforms the system to (D^(-1/2) A D^(-1/2)) (D^(1/2) x) = D^(-1/2) b,
2613:        where D_{ii} is 1/abs(A_{ii}) unless A_{ii} is zero, in which case it is 1.

2615:     BE CAREFUL with this routine: it actually scales the matrix and right
2616:     hand side that define the system. After the system is solved the matrix
2617:     and right hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`

2619:     This should NOT be used within the `SNES` solves if you are using a line
2620:     search.

2622:     If you use this with the `PCType` `PCEISENSTAT` preconditioner then you can
2623:     use the `PCEisenstatSetNoDiagonalScaling()` option, or -pc_eisenstat_no_diagonal_scaling
2624:     to save some unneeded, redundant flops.

2626: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2627: @*/
2628: PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
2629: {
2632:   ksp->dscale = scale;
2633:   return 0;
2634: }

2636: /*@
2637:    KSPGetDiagonalScale - Checks if `KSP` solver scales the matrix and right hand side, that is if `KSPSetDiagonalScale()` has been called

2639:    Not Collective

2641:    Input Parameter:
2642: .  ksp - the `KSP` context

2644:    Output Parameter:
2645: .  scale - `PETSC_TRUE` or `PETSC_FALSE`

2647:    Level: intermediate

2649: .seealso: [](chapter_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2650: @*/
2651: PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
2652: {
2655:   *scale = ksp->dscale;
2656:   return 0;
2657: }

2659: /*@
2660:    KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.

2662:    Logically Collective

2664:    Input Parameters:
2665: +  ksp - the `KSP` context
2666: -  fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2667:          rescale (default)

2669:    Notes:
2670:      Must be called after `KSPSetDiagonalScale()`

2672:      Using this will slow things down, because it rescales the matrix before and
2673:      after each linear solve. This is intended mainly for testing to allow one
2674:      to easily get back the original system to make sure the solution computed is
2675:      accurate enough.

2677:    Level: intermediate

2679: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2680: @*/
2681: PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
2682: {
2685:   ksp->dscalefix = fix;
2686:   return 0;
2687: }

2689: /*@
2690:    KSPGetDiagonalScaleFix - Determines if `KSP` diagonally scales the system back after solving. That is `KSPSetDiagonalScaleFix()` has been called

2692:    Not Collective

2694:    Input Parameter:
2695: .  ksp - the `KSP` context

2697:    Output Parameter:
2698: .  fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2699:          rescale (default)

2701:    Level: intermediate

2703: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2704: @*/
2705: PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
2706: {
2709:   *fix = ksp->dscalefix;
2710:   return 0;
2711: }

2713: /*@C
2714:    KSPSetComputeOperators - set routine to compute the linear operators

2716:    Logically Collective

2718:    Input Parameters:
2719: +  ksp - the `KSP` context
2720: .  func - function to compute the operators
2721: -  ctx - optional context

2723:    Calling sequence of func:
2724: $  func(KSP ksp,Mat A,Mat B,void *ctx)

2726: +  ksp - the `KSP` context
2727: .  A - the linear operator
2728: .  B - preconditioning matrix
2729: -  ctx - optional user-provided context

2731:    Level: beginner

2733:    Notes:
2734:    The user provided func() will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
2735:    unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()` is called. This allows the same system to be solved several times
2736:    with different right hand side functions, but it is a confusing API since one might expect it to be called for each `KSPSolve()`

2738:    To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`

2740:    Developer Note:
2741:    Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes clear when new matrices are being computed without requiring a call to this
2742:    routine to indicate when the new matrix should be computed.

2744: .seealso: [](chapter_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`
2745: @*/
2746: PetscErrorCode KSPSetComputeOperators(KSP ksp, PetscErrorCode (*func)(KSP, Mat, Mat, void *), void *ctx)
2747: {
2748:   DM dm;

2751:   KSPGetDM(ksp, &dm);
2752:   DMKSPSetComputeOperators(dm, func, ctx);
2753:   if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX;
2754:   return 0;
2755: }
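
/*
   Illustrative sketch (not part of the original itfunc.c): providing the operators through
   a callback, as is typically done when a DM drives the solve. ComputeMatrix is hypothetical;
   it must fill B (and A if different) before the next KSPSolve().

     static PetscErrorCode ComputeMatrix(KSP ksp, Mat A, Mat B, void *ctx)
     {
       // assemble the matrix entries into B here; A == B unless separate operators were requested
       return 0;
     }

     KSPSetComputeOperators(ksp, ComputeMatrix, NULL);
     KSPSolve(ksp, b, x);   // ComputeMatrix is called before this solve
*/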

2757: /*@C
2758:    KSPSetComputeRHS - set routine to compute the right hand side of the linear system

2760:    Logically Collective

2762:    Input Parameters:
2763: +  ksp - the `KSP` context
2764: .  func - function to compute the right hand side
2765: -  ctx - optional context

2767:    Calling sequence of func:
2768: $  func(KSP ksp,Vec b,void *ctx)

2770: +  ksp - the `KSP` context
2771: .  b - right hand side of linear system
2772: -  ctx - optional user-provided context

2774:    Notes:
2775:     The routine you provide will be called EACH time you call `KSPSolve()` to prepare the new right hand side for that solve

2777:    Level: beginner

2779: .seealso: [](chapter_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`
2780: @*/
2781: PetscErrorCode KSPSetComputeRHS(KSP ksp, PetscErrorCode (*func)(KSP, Vec, void *), void *ctx)
2782: {
2783:   DM dm;

2786:   KSPGetDM(ksp, &dm);
2787:   DMKSPSetComputeRHS(dm, func, ctx);
2788:   return 0;
2789: }
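
/*
   Illustrative sketch (not part of the original itfunc.c): a right hand side callback,
   evaluated before every KSPSolve() as described above. ComputeRHS is hypothetical.

     static PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
     {
       VecSet(b, 1.0);   // fill b with the right hand side for the upcoming solve
       return 0;
     }

     KSPSetComputeRHS(ksp, ComputeRHS, NULL);
*/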

2791: /*@C
2792:    KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system

2794:    Logically Collective

2796:    Input Parameters:
2797: +  ksp - the `KSP` context
2798: .  func - function to compute the initial guess
2799: -  ctx - optional context

2801:    Calling sequence of func:
2802: $  func(KSP ksp,Vec x,void *ctx)

2804: +  ksp - the `KSP` context
2805: .  x - solution vector
2806: -  ctx - optional user-provided context

2808:    Notes:
2809:    This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`; otherwise
2810:    call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver

2812:    Level: beginner

2814: .seealso: [](chapter_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`
2815: @*/
2816: PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, PetscErrorCode (*func)(KSP, Vec, void *), void *ctx)
2817: {
2818:   DM dm;

2821:   KSPGetDM(ksp, &dm);
2822:   DMKSPSetComputeInitialGuess(dm, func, ctx);
2823:   return 0;
2824: }

2826: /*@
2827:    KSPSetUseExplicitTranspose - Determines whether the explicit transpose of the operator is formed in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
2828:    be explicitly formed when possible since the solve is then much more efficient.

2830:    Logically Collective

2832:    Input Parameters:
2833: +  ksp - the `KSP` context
2836: -  flg - `PETSC_TRUE` to transpose the system in `KSPSolveTranspose()`, `PETSC_FALSE` to not transpose (default)

2838:    Level: advanced

2840: .seealso: [](chapter_ksp), `KSPSolveTranspose()`, `KSP`
2841: @*/
2842: PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
2843: {
2846:   ksp->transpose.use_explicittranspose = flg;
2847:   return 0;
2848: }