Actual source code: itfunc.c
1: /*
2: Interface KSP routines that the user calls.
3: */
5: #include <petsc/private/kspimpl.h>
6: #include <petsc/private/matimpl.h>
7: #include <petscdm.h>
9: /* number of nested levels of KSPSetUp/Solve(). This is used to determine if KSP_DIVERGED_ITS should be fatal. */
10: static PetscInt level = 0;
12: static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
13: {
14: PetscCall(PetscViewerPushFormat(viewer, format));
15: PetscCall(PetscObjectView(obj, viewer));
16: PetscCall(PetscViewerPopFormat(viewer));
17: return PETSC_SUCCESS;
18: }
20: /*@
21: KSPComputeExtremeSingularValues - Computes the extreme singular values
22: for the preconditioned operator. Called after or during `KSPSolve()`.
24: Not Collective
26: Input Parameter:
27: . ksp - iterative context obtained from `KSPCreate()`
29: Output Parameters:
30: + emax - maximum estimated singular value
31: - emin - minimum estimated singular value
33: Options Database Key:
34: . -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.
36: Level: advanced
38: Notes:
39: One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
40: (or use the option -ksp_view_singularvalues) in order for this routine to work correctly.
42: Many users may just want to use the monitoring routine
43: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
44: to print the extreme singular values at each iteration of the linear solve.
46: Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
47: The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
48: intended for eigenanalysis. Consider the excellent package `SLEPc` if accurate values are required.
50: Disable restarts if using `KSPGMRES`; otherwise this estimate will use only the iterations after the last
51: restart. See `KSPGMRESSetRestart()` for more details.
53: .seealso: [](ch_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`, `KSPComputeRitz()`
54: @*/
55: PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
56: {
57: PetscFunctionBegin;
59: PetscAssertPointer(emax, 2);
60: PetscAssertPointer(emin, 3);
61: PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Singular values not requested before KSPSetUp()");
63: if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
64: else {
65: *emin = -1.0;
66: *emax = -1.0;
67: }
68: PetscFunctionReturn(PETSC_SUCCESS);
69: }
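/*
   Example usage (a minimal sketch; assumes A, b, and x are an already assembled Mat and Vecs on
   PETSC_COMM_WORLD): estimating the condition number of the preconditioned operator with
   KSPComputeExtremeSingularValues().

     KSP       ksp;
     PetscReal emax, emin;

     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPSetComputeSingularValues(ksp, PETSC_TRUE));  // must be requested before KSPSetUp()
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPComputeExtremeSingularValues(ksp, &emax, &emin));
     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "estimated condition number %g\n", (double)(emax / emin)));
     PetscCall(KSPDestroy(&ksp));
*/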
71: /*@
72: KSPComputeEigenvalues - Computes the extreme eigenvalues for the
73: preconditioned operator. Called after or during `KSPSolve()`.
75: Not Collective
77: Input Parameters:
78: + ksp - iterative context obtained from `KSPCreate()`
79: - n - size of arrays `r` and `c`. The number of eigenvalues computed `neig` will, in
80: general, be less than this.
82: Output Parameters:
83: + r - real part of computed eigenvalues, provided by user with a dimension of at least `n`
84: . c - complex part of computed eigenvalues, provided by user with a dimension of at least `n`
85: - neig - actual number of eigenvalues computed (will be less than or equal to `n`)
87: Options Database Key:
88: . -ksp_view_eigenvalues - Prints eigenvalues to stdout
90: Level: advanced
92: Notes:
93: The number of eigenvalues estimated depends on the size of the Krylov space
94: generated during the `KSPSolve()`; for example, with
95: `KSPCG` it corresponds to the number of CG iterations, for `KSPGMRES` it is the number
96: of GMRES iterations SINCE the last restart. Any extra space in `r` and `c`
97: will be ignored.
99: `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
100: intended only for assistance in understanding the convergence of iterative
101: methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
102: the excellent package SLEPc.
104: One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
105: in order for this routine to work correctly.
107: Many users may just want to use the monitoring routine
108: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
109: to print the singular values at each iteration of the linear solve.
111: `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.
113: .seealso: [](ch_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
114: @*/
115: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
116: {
117: PetscFunctionBegin;
119: if (n) PetscAssertPointer(r, 3);
120: if (n) PetscAssertPointer(c, 4);
121: PetscCheck(n >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Requested < 0 Eigenvalues");
122: PetscAssertPointer(neig, 5);
123: PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Eigenvalues not requested before KSPSetUp()");
125: if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
126: else *neig = 0;
127: PetscFunctionReturn(PETSC_SUCCESS);
128: }
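/*
   Example usage (a minimal sketch; assumes ksp, b, and x have been set up as for KSPSolve()): printing
   rough eigenvalue estimates of the preconditioned operator accumulated during the solve.

     PetscReal *r, *c;
     PetscInt   n = 30, neig, i;

     PetscCall(KSPSetComputeEigenvalues(ksp, PETSC_TRUE));  // must be requested before KSPSetUp()
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(PetscMalloc2(n, &r, n, &c));
     PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
     for (i = 0; i < neig; i++) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%g + %gi\n", (double)r[i], (double)c[i]));
     PetscCall(PetscFree2(r, c));
*/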
130: /*@
131: KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the part of the spectrum
132: smallest or largest in modulus, for the preconditioned operator.
134: Not Collective
136: Input Parameters:
137: + ksp - iterative context obtained from `KSPCreate()`
138: . ritz - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
139: - small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively
141: Output Parameters:
142: + nrit - on input, the number of (harmonic) Ritz pairs to compute; on output, the actual number of computed (harmonic) Ritz pairs
143: . S - an array of the Ritz vectors, pass in an array of vectors of size `nrit`
144: . tetar - real part of the Ritz values, pass in an array of size `nrit`
145: - tetai - imaginary part of the Ritz values, pass in an array of size `nrit`
147: Level: advanced
149: Notes:
150: This only works with a `KSPType` of `KSPGMRES`.
152: One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.
154: This routine must be called after `KSPSolve()`.
156: In `KSPGMRES`, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
157: the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
158: a restart (that is a complete GMRES cycle was never achieved).
160: The number of (harmonic) Ritz pairs actually computed is less than or equal to the restart
161: parameter for GMRES if a complete cycle has been performed, or otherwise less than or equal to the number of GMRES
162: iterations.
164: `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).
166: For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
167: the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
168: vectors `S` are equal to the real and the imaginary parts of the associated vectors.
169: When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
170: values are still returned in `tetar` and `tetai`, as is done in `KSPComputeEigenvalues()`, but
171: the Ritz vectors `S` are complex.
173: The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.
175: The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator, consider the
176: excellent package `SLEPc` if accurate values are required.
178: .seealso: [](ch_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
179: @*/
180: PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
181: {
182: PetscFunctionBegin;
184: PetscCheck(ksp->calc_ritz, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Ritz pairs not requested before KSPSetUp()");
185: PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
186: PetscFunctionReturn(PETSC_SUCCESS);
187: }
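/*
   Example usage (a minimal sketch; assumes ksp is a KSPGMRES solver and b, x are assembled Vecs):
   computing the five smallest harmonic Ritz pairs after the solve.

     Vec       *S;
     PetscReal *tetar, *tetai;
     PetscInt   nrit = 5, i;

     PetscCall(KSPSetComputeRitz(ksp, PETSC_TRUE));  // must be requested before KSPSetUp()
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(VecDuplicateVecs(x, nrit, &S));
     PetscCall(PetscMalloc2(nrit, &tetar, nrit, &tetai));
     PetscCall(KSPComputeRitz(ksp, PETSC_FALSE, PETSC_TRUE, &nrit, S, tetar, tetai));  // harmonic Ritz, smallest in modulus
     for (i = 0; i < nrit; i++) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%g + %gi\n", (double)tetar[i], (double)tetai[i]));
     PetscCall(PetscFree2(tetar, tetai));
     PetscCall(VecDestroyVecs(5, &S));
*/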
189: /*@
190: KSPSetUpOnBlocks - Sets up the preconditioner for each block in
191: the block Jacobi, overlapping Schwarz, and fieldsplit methods.
193: Collective
195: Input Parameter:
196: . ksp - the `KSP` context
198: Level: advanced
200: Notes:
201: `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
202: more precise profiling (via -log_view) of the setup phase for these
203: block preconditioners. If the user does not call `KSPSetUpOnBlocks()`,
204: it will automatically be called from within `KSPSolve()`.
206: Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
207: on the `PC` context within the `KSP` context.
209: .seealso: [](ch_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
210: @*/
211: PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
212: {
213: PC pc;
214: PCFailedReason pcreason;
216: PetscFunctionBegin;
218: level++;
219: PetscCall(KSPGetPC(ksp, &pc));
220: PetscCall(PCSetUpOnBlocks(pc));
221: PetscCall(PCGetFailedReason(pc, &pcreason));
222: level--;
223: /*
224: This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
225: this flag and initializing an appropriate vector with VecFlag() so that the first norm computation can
226: produce a result at KSPCheckNorm() thus communicating the known problem to all MPI ranks so they may
227: terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual()
228: */
229: if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
230: PetscFunctionReturn(PETSC_SUCCESS);
231: }
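/*
   Example usage (a minimal sketch; assumes ksp uses a block preconditioner such as PCBJACOBI or PCASM):
   calling KSPSetUpOnBlocks() explicitly so the block setup cost shows up separately under -log_view.

     PetscCall(KSPSetUp(ksp));
     PetscCall(KSPSetUpOnBlocks(ksp));  // otherwise called automatically inside KSPSolve()
     PetscCall(KSPSolve(ksp, b, x));
*/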
233: /*@
234: KSPSetReusePreconditioner - Reuse the current preconditioner; do not construct a new one even if the operator changes
236: Collective
238: Input Parameters:
239: + ksp - iterative context obtained from `KSPCreate()`
240: - flag - `PETSC_TRUE` to reuse the current preconditioner
242: Level: intermediate
244: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
245: @*/
246: PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
247: {
248: PC pc;
250: PetscFunctionBegin;
252: PetscCall(KSPGetPC(ksp, &pc));
253: PetscCall(PCSetReusePreconditioner(pc, flag));
254: PetscFunctionReturn(PETSC_SUCCESS);
255: }
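/*
   Example usage (a minimal sketch; assumes ksp has already been solved once so a preconditioner exists):
   freezing the current preconditioner while the operator changes between solves.

     PetscCall(KSPSetReusePreconditioner(ksp, PETSC_TRUE));
     // ... modify the entries of the matrix passed to KSPSetOperators() ...
     PetscCall(KSPSolve(ksp, b, x));                          // reuses the existing preconditioner
     PetscCall(KSPSetReusePreconditioner(ksp, PETSC_FALSE));  // the next solve rebuilds it
*/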
257: /*@
258: KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the operator in the preconditioner has changed.
260: Collective
262: Input Parameter:
263: . ksp - iterative context obtained from `KSPCreate()`
265: Output Parameter:
266: . flag - the boolean flag
268: Level: intermediate
270: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
271: @*/
272: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
273: {
274: PetscFunctionBegin;
276: PetscAssertPointer(flag, 2);
277: *flag = PETSC_FALSE;
278: if (ksp->pc) PetscCall(PCGetReusePreconditioner(ksp->pc, flag));
279: PetscFunctionReturn(PETSC_SUCCESS);
280: }
282: /*@
283: KSPSetSkipPCSetFromOptions - Prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`. This is used when the same `PC` is shared by more than one `KSP`, so that its options are not reset for each `KSP`
285: Collective
287: Input Parameters:
288: + ksp - iterative context obtained from `KSPCreate()`
289: - flag - `PETSC_TRUE` to skip calling `PCSetFromOptions()`
291: Level: intermediate
293: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
294: @*/
295: PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
296: {
297: PetscFunctionBegin;
299: ksp->skippcsetfromoptions = flag;
300: PetscFunctionReturn(PETSC_SUCCESS);
301: }
303: /*@
304: KSPSetUp - Sets up the internal data structures for the
305: later use of an iterative solver.
307: Collective
309: Input Parameter:
310: . ksp - iterative context obtained from `KSPCreate()`
312: Level: developer
314: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`
315: @*/
316: PetscErrorCode KSPSetUp(KSP ksp)
317: {
318: Mat A, B;
319: Mat mat, pmat;
320: MatNullSpace nullsp;
321: PCFailedReason pcreason;
322: PC pc;
323: PetscBool pcmpi;
325: PetscFunctionBegin;
327: PetscCall(KSPGetPC(ksp, &pc));
328: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCMPI, &pcmpi));
329: if (pcmpi) {
330: PetscBool ksppreonly;
331: PetscCall(PetscObjectTypeCompare((PetscObject)ksp, KSPPREONLY, &ksppreonly));
332: if (!ksppreonly) PetscCall(KSPSetType(ksp, KSPPREONLY));
333: }
334: level++;
336: /* reset the convergence flag from the previous solves */
337: ksp->reason = KSP_CONVERGED_ITERATING;
339: if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES));
340: PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
342: if (ksp->dmActive && !ksp->setupstage) {
343: /* first time in so build matrix and vector data structures using DM */
344: if (!ksp->vec_rhs) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs));
345: if (!ksp->vec_sol) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_sol));
346: PetscCall(DMCreateMatrix(ksp->dm, &A));
347: PetscCall(KSPSetOperators(ksp, A, A));
348: PetscCall(PetscObjectDereference((PetscObject)A));
349: }
351: if (ksp->dmActive) {
352: DMKSP kdm;
353: PetscCall(DMGetDMKSP(ksp->dm, &kdm));
355: if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS) {
356: /* only computes initial guess the first time through */
357: PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
358: PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
359: }
360: if (kdm->ops->computerhs) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));
362: if (ksp->setupstage != KSP_SETUP_NEWRHS) {
363: PetscCheck(kdm->ops->computeoperators, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp,PETSC_FALSE);");
364: PetscCall(KSPGetOperators(ksp, &A, &B));
365: PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
366: }
367: }
369: if (ksp->setupstage == KSP_SETUP_NEWRHS) {
370: level--;
371: PetscFunctionReturn(PETSC_SUCCESS);
372: }
373: PetscCall(PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
375: switch (ksp->setupstage) {
376: case KSP_SETUP_NEW:
377: PetscUseTypeMethod(ksp, setup);
378: break;
379: case KSP_SETUP_NEWMATRIX: /* This should be replaced with a more general mechanism */
380: if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
381: break;
382: default:
383: break;
384: }
386: if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
387: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
388: /* scale the matrix if requested */
389: if (ksp->dscale) {
390: PetscScalar *xx;
391: PetscInt i, n;
392: PetscBool zeroflag = PETSC_FALSE;
394: if (!ksp->diagonal) { /* allocate vector to hold diagonal */
395: PetscCall(MatCreateVecs(pmat, &ksp->diagonal, NULL));
396: }
397: PetscCall(MatGetDiagonal(pmat, ksp->diagonal));
398: PetscCall(VecGetLocalSize(ksp->diagonal, &n));
399: PetscCall(VecGetArray(ksp->diagonal, &xx));
400: for (i = 0; i < n; i++) {
401: if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
402: else {
403: xx[i] = 1.0;
404: zeroflag = PETSC_TRUE;
405: }
406: }
407: PetscCall(VecRestoreArray(ksp->diagonal, &xx));
408: if (zeroflag) PetscCall(PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n"));
409: PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
410: if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
411: ksp->dscalefix2 = PETSC_FALSE;
412: }
413: PetscCall(PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
414: PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
415: PetscCall(PCSetUp(ksp->pc));
416: PetscCall(PCGetFailedReason(ksp->pc, &pcreason));
417: /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; there is no code to handle a ksp->reason on only some ranks */
418: if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
420: PetscCall(MatGetNullSpace(mat, &nullsp));
421: if (nullsp) {
422: PetscBool test = PETSC_FALSE;
423: PetscCall(PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL));
424: if (test) PetscCall(MatNullSpaceTest(nullsp, mat, NULL));
425: }
426: ksp->setupstage = KSP_SETUP_NEWRHS;
427: level--;
428: PetscFunctionReturn(PETSC_SUCCESS);
429: }
431: /*@
432: KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged to a viewer
434: Collective
436: Input Parameters:
437: + ksp - iterative context obtained from `KSPCreate()`
438: - viewer - the viewer to display the reason
440: Options Database Keys:
441: + -ksp_converged_reason - print the reason the solve converged or diverged, along with the number of iterations
442: - -ksp_converged_reason ::failed - only print the reason and number of iterations when the solve diverged
444: Level: beginner
446: Note:
447: To change the format of the output, call `PetscViewerPushFormat`(`viewer`,`format`) before this call. Use `PETSC_VIEWER_DEFAULT` for the default format,
448: or `PETSC_VIEWER_FAILED` to display a reason only if the solve fails.
450: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
451: `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
452: @*/
453: PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
454: {
455: PetscBool isAscii;
456: PetscViewerFormat format;
458: PetscFunctionBegin;
459: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
460: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
461: if (isAscii) {
462: PetscCall(PetscViewerGetFormat(viewer, &format));
463: PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel + 1));
464: if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
465: if (((PetscObject)ksp)->prefix) {
466: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
467: } else {
468: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
469: }
470: } else if (ksp->reason <= 0) {
471: if (((PetscObject)ksp)->prefix) {
472: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
473: } else {
474: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
475: }
476: if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
477: PCFailedReason reason;
478: PetscCall(PCGetFailedReason(ksp->pc, &reason));
479: PetscCall(PetscViewerASCIIPrintf(viewer, " PC failed due to %s \n", PCFailedReasons[reason]));
480: }
481: }
482: PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel + 1));
483: }
484: PetscFunctionReturn(PETSC_SUCCESS);
485: }
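/*
   Example usage (a minimal sketch; assumes ksp has just completed a solve): printing the converged
   reason only when the solve fails, by pushing PETSC_VIEWER_FAILED first.

     PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_FAILED));
     PetscCall(KSPConvergedReasonView(ksp, PETSC_VIEWER_STDOUT_WORLD));
     PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
*/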
487: /*@C
488: KSPConvergedReasonViewSet - Sets an ADDITIONAL function to be called at the
489: end of the linear solve to display the convergence reason of the linear solver.
491: Logically Collective
493: Input Parameters:
494: + ksp - the `KSP` context
495: . f - the ksp converged reason view function
496: . vctx - [optional] user-defined context for private data for the
497: `KSPConvergedReason` view routine (use `NULL` if no context is desired)
498: - reasonviewdestroy - [optional] routine that frees `vctx` (may be `NULL`)
500: Options Database Keys:
501: + -ksp_converged_reason - sets a default `KSPConvergedReasonView()`
502: - -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have been hardwired into a code by
503: calls to `KSPConvergedReasonViewSet()`, but does not cancel those set via the options database.
505: Level: intermediate
507: Note:
508: Several different converged reason view routines may be set by calling
509: `KSPConvergedReasonViewSet()` multiple times; all will be called in the
510: order in which they were set.
512: Developer Note:
513: Should be named `KSPConvergedReasonViewAdd()`.
515: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewCancel()`
516: @*/
517: PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, PetscErrorCode (*f)(KSP, void *), void *vctx, PetscErrorCode (*reasonviewdestroy)(void **))
518: {
519: PetscInt i;
520: PetscBool identical;
522: PetscFunctionBegin;
524: for (i = 0; i < ksp->numberreasonviews; i++) {
525: PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))f, vctx, reasonviewdestroy, (PetscErrorCode (*)(void))ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical));
526: if (identical) PetscFunctionReturn(PETSC_SUCCESS);
527: }
528: PetscCheck(ksp->numberreasonviews < MAXKSPREASONVIEWS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP reasonview set");
529: ksp->reasonview[ksp->numberreasonviews] = f;
530: ksp->reasonviewdestroy[ksp->numberreasonviews] = reasonviewdestroy;
531: ksp->reasonviewcontext[ksp->numberreasonviews++] = (void *)vctx;
532: PetscFunctionReturn(PETSC_SUCCESS);
533: }
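/*
   Example usage (a minimal sketch; MyReasonView is a hypothetical user callback): registering an
   additional reason viewer that reports the converged reason and iteration count.

     static PetscErrorCode MyReasonView(KSP ksp, void *ctx)
     {
       KSPConvergedReason reason;
       PetscInt           its;

       PetscFunctionBegin;
       PetscCall(KSPGetConvergedReason(ksp, &reason));
       PetscCall(KSPGetIterationNumber(ksp, &its));
       PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "reason %d after %" PetscInt_FMT " iterations\n", (int)reason, its));
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(KSPConvergedReasonViewSet(ksp, MyReasonView, NULL, NULL));
*/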
535: /*@
536: KSPConvergedReasonViewCancel - Clears all the reasonview functions for a `KSP` object set with `KSPConvergedReasonViewSet()`
537: as well as the default viewer.
539: Collective
541: Input Parameter:
542: . ksp - iterative context obtained from `KSPCreate()`
544: Level: intermediate
546: .seealso: [](ch_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`, `KSPConvergedReasonViewSet()`
547: @*/
548: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
549: {
550: PetscInt i;
552: PetscFunctionBegin;
554: for (i = 0; i < ksp->numberreasonviews; i++) {
555: if (ksp->reasonviewdestroy[i]) PetscCall((*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]));
556: }
557: ksp->numberreasonviews = 0;
558: PetscCall(PetscViewerDestroy(&ksp->convergedreasonviewer));
559: PetscFunctionReturn(PETSC_SUCCESS);
560: }
562: /*@
563: KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how a `KSPConvergedReason` is to be viewed.
565: Collective
567: Input Parameter:
568: . ksp - the `KSP` object
570: Level: intermediate
572: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewSet()`
573: @*/
574: PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
575: {
576: PetscFunctionBegin;
577: /* Call all user-provided reason view routines */
578: for (PetscInt i = 0; i < ksp->numberreasonviews; i++) PetscCall((*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]));
580: /* Call the default PETSc routine */
581: if (ksp->convergedreasonviewer) {
582: PetscCall(PetscViewerPushFormat(ksp->convergedreasonviewer, ksp->convergedreasonformat));
583: PetscCall(KSPConvergedReasonView(ksp, ksp->convergedreasonviewer));
584: PetscCall(PetscViewerPopFormat(ksp->convergedreasonviewer));
585: }
586: PetscFunctionReturn(PETSC_SUCCESS);
587: }
589: /*@
590: KSPConvergedRateView - Displays the convergence rate <https://en.wikipedia.org/wiki/Coefficient_of_determination> of `KSPSolve()` to a viewer
592: Collective
594: Input Parameters:
595: + ksp - iterative context obtained from `KSPCreate()`
596: - viewer - the viewer to display the reason
598: Options Database Key:
599: . -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)
601: Level: intermediate
603: Notes:
604: To change the format of the output, call `PetscViewerPushFormat`(`viewer`,`format`) before this call.
606: Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
607: the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$, where $y_i = \log r_i$, $f$ is the regression line, and $\bar y$ is the mean of the $y_i$.
609: .seealso: [](ch_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
610: @*/
611: PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
612: {
613: PetscViewerFormat format;
614: PetscBool isAscii;
615: PetscReal rrate, rRsq, erate = 0.0, eRsq = 0.0;
616: PetscInt its;
617: const char *prefix, *reason = KSPConvergedReasons[ksp->reason];
619: PetscFunctionBegin;
620: PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
621: PetscCall(KSPGetIterationNumber(ksp, &its));
622: PetscCall(KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq));
623: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
624: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
625: if (isAscii) {
626: PetscCall(PetscViewerGetFormat(viewer, &format));
627: PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
628: if (ksp->reason > 0) {
629: if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its));
630: else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its));
631: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
632: if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
633: if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
634: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
635: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
636: } else if (ksp->reason <= 0) {
637: if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its));
638: else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its));
639: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
640: if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
641: if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
642: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
643: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
644: if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
645: PCFailedReason reason;
646: PetscCall(PCGetFailedReason(ksp->pc, &reason));
647: PetscCall(PetscViewerASCIIPrintf(viewer, " PC failed due to %s \n", PCFailedReasons[reason]));
648: }
649: }
650: PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
651: }
652: PetscFunctionReturn(PETSC_SUCCESS);
653: }
655: #include <petscdraw.h>
657: static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
658: {
659: PetscReal *r, *c;
660: PetscInt n, i, neig;
661: PetscBool isascii, isdraw;
662: PetscMPIInt rank;
664: PetscFunctionBegin;
665: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank));
666: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
667: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
668: if (isExplicit) {
669: PetscCall(VecGetSize(ksp->vec_sol, &n));
670: PetscCall(PetscMalloc2(n, &r, n, &c));
671: PetscCall(KSPComputeEigenvaluesExplicitly(ksp, n, r, c));
672: neig = n;
673: } else {
674: PetscInt nits;
676: PetscCall(KSPGetIterationNumber(ksp, &nits));
677: n = nits + 2;
678: if (!nits) {
679: PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n"));
680: PetscFunctionReturn(PETSC_SUCCESS);
681: }
682: PetscCall(PetscMalloc2(n, &r, n, &c));
683: PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
684: }
685: if (isascii) {
686: PetscCall(PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively"));
687: for (i = 0; i < neig; ++i) {
688: if (c[i] >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]));
689: else PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]));
690: }
691: } else if (isdraw && rank == 0) {
692: PetscDraw draw;
693: PetscDrawSP drawsp;
695: if (format == PETSC_VIEWER_DRAW_CONTOUR) {
696: PetscCall(KSPPlotEigenContours_Private(ksp, neig, r, c));
697: } else {
698: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
699: PetscCall(PetscDrawSPCreate(draw, 1, &drawsp));
700: PetscCall(PetscDrawSPReset(drawsp));
701: for (i = 0; i < neig; ++i) PetscCall(PetscDrawSPAddPoint(drawsp, r + i, c + i));
702: PetscCall(PetscDrawSPDraw(drawsp, PETSC_TRUE));
703: PetscCall(PetscDrawSPSave(drawsp));
704: PetscCall(PetscDrawSPDestroy(&drawsp));
705: }
706: }
707: PetscCall(PetscFree2(r, c));
708: PetscFunctionReturn(PETSC_SUCCESS);
709: }
711: static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
712: {
713: PetscReal smax, smin;
714: PetscInt nits;
715: PetscBool isascii;
717: PetscFunctionBegin;
718: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
719: PetscCall(KSPGetIterationNumber(ksp, &nits));
720: if (!nits) {
721: PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n"));
722: PetscFunctionReturn(PETSC_SUCCESS);
723: }
724: PetscCall(KSPComputeExtremeSingularValues(ksp, &smax, &smin));
725: if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme %svalues: max %g min %g max/min %g\n", smin < 0 ? "eigen" : "singular ", (double)smax, (double)smin, (double)(smax / smin)));
726: PetscFunctionReturn(PETSC_SUCCESS);
727: }
729: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
730: {
731: PetscBool isascii;
733: PetscFunctionBegin;
734: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
735: PetscCheck(!ksp->dscale || ksp->dscalefix, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Cannot compute final scale with -ksp_diagonal_scale except also with -ksp_diagonal_scale_fix");
736: if (isascii) {
737: Mat A;
738: Vec t;
739: PetscReal norm;
741: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
742: PetscCall(VecDuplicate(ksp->vec_rhs, &t));
743: PetscCall(KSP_MatMult(ksp, A, ksp->vec_sol, t));
744: PetscCall(VecAYPX(t, -1.0, ksp->vec_rhs));
745: PetscCall(VecViewFromOptions(t, (PetscObject)ksp, "-ksp_view_final_residual_vec"));
746: PetscCall(VecNorm(t, NORM_2, &norm));
747: PetscCall(VecDestroy(&t));
748: PetscCall(PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm));
749: }
750: PetscFunctionReturn(PETSC_SUCCESS);
751: }
753: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
754: {
755: PetscInt i;
757: PetscFunctionBegin;
758: if (!ksp->pauseFinal) PetscFunctionReturn(PETSC_SUCCESS);
759: for (i = 0; i < ksp->numbermonitors; ++i) {
760: PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ksp->monitorcontext[i];
761: PetscDraw draw;
762: PetscReal lpause;
764: if (!vf) continue;
765: if (vf->lg) {
766: if (!PetscCheckPointer(vf->lg, PETSC_OBJECT)) continue;
767: if (((PetscObject)vf->lg)->classid != PETSC_DRAWLG_CLASSID) continue;
768: PetscCall(PetscDrawLGGetDraw(vf->lg, &draw));
769: PetscCall(PetscDrawGetPause(draw, &lpause));
770: PetscCall(PetscDrawSetPause(draw, -1.0));
771: PetscCall(PetscDrawPause(draw));
772: PetscCall(PetscDrawSetPause(draw, lpause));
773: } else {
774: PetscBool isdraw;
776: if (!PetscCheckPointer(vf->viewer, PETSC_OBJECT)) continue;
777: if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
778: PetscCall(PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw));
779: if (!isdraw) continue;
780: PetscCall(PetscViewerDrawGetDraw(vf->viewer, 0, &draw));
781: PetscCall(PetscDrawGetPause(draw, &lpause));
782: PetscCall(PetscDrawSetPause(draw, -1.0));
783: PetscCall(PetscDrawPause(draw));
784: PetscCall(PetscDrawSetPause(draw, lpause));
785: }
786: }
787: PetscFunctionReturn(PETSC_SUCCESS);
788: }
790: static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
791: {
792: PetscBool flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
793: Mat mat, pmat;
794: MPI_Comm comm;
795: MatNullSpace nullsp;
796: Vec btmp, vec_rhs = NULL;
798: PetscFunctionBegin;
799: level++;
800: comm = PetscObjectComm((PetscObject)ksp);
801: if (x && x == b) {
802: PetscCheck(ksp->guess_zero, comm, PETSC_ERR_ARG_INCOMP, "Cannot use x == b with nonzero initial guess");
803: PetscCall(VecDuplicate(b, &x));
804: inXisinB = PETSC_TRUE;
805: }
806: if (b) {
807: PetscCall(PetscObjectReference((PetscObject)b));
808: PetscCall(VecDestroy(&ksp->vec_rhs));
809: ksp->vec_rhs = b;
810: }
811: if (x) {
812: PetscCall(PetscObjectReference((PetscObject)x));
813: PetscCall(VecDestroy(&ksp->vec_sol));
814: ksp->vec_sol = x;
815: }
817: if (ksp->viewPre) PetscCall(ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre));
819: if (ksp->presolve) PetscCall((*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx));
821: /* reset the residual history list if requested */
822: if (ksp->res_hist_reset) ksp->res_hist_len = 0;
823: if (ksp->err_hist_reset) ksp->err_hist_len = 0;
825: /* KSPSetUp() scales the matrix if needed */
826: PetscCall(KSPSetUp(ksp));
827: PetscCall(KSPSetUpOnBlocks(ksp));
829: if (ksp->guess) {
830: PetscObjectState ostate, state;
832: PetscCall(KSPGuessSetUp(ksp->guess));
833: PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate));
834: PetscCall(KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
835: PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &state));
836: if (state != ostate) {
837: ksp->guess_zero = PETSC_FALSE;
838: } else {
839: PetscCall(PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n"));
840: ksp->guess_zero = PETSC_TRUE;
841: }
842: }
844: PetscCall(VecSetErrorIfLocked(ksp->vec_sol, 3));
846: PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
847: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
848: /* diagonal scale RHS if called for */
849: if (ksp->dscale) {
850: PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
851: /* second time in, but matrix was scaled back to original */
852: if (ksp->dscalefix && ksp->dscalefix2) {
853: Mat mat, pmat;
855: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
856: PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
857: if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
858: }
860: /* scale initial guess */
861: if (!ksp->guess_zero) {
862: if (!ksp->truediagonal) {
863: PetscCall(VecDuplicate(ksp->diagonal, &ksp->truediagonal));
864: PetscCall(VecCopy(ksp->diagonal, ksp->truediagonal));
865: PetscCall(VecReciprocal(ksp->truediagonal));
866: }
867: PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal));
868: }
869: }
870: PetscCall(PCPreSolve(ksp->pc, ksp));
872: if (ksp->guess_zero && !ksp->guess_not_read) PetscCall(VecSet(ksp->vec_sol, 0.0));
873: if (ksp->guess_knoll) { /* The Knoll trick is independent of the KSPGuess specified */
874: PetscCall(PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol));
875: PetscCall(KSP_RemoveNullSpace(ksp, ksp->vec_sol));
876: ksp->guess_zero = PETSC_FALSE;
877: }
879: /* can we mark the initial guess as zero for this solve? */
880: guess_zero = ksp->guess_zero;
881: if (!ksp->guess_zero) {
882: PetscReal norm;
884: PetscCall(VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm));
885: if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
886: }
887: if (ksp->transpose_solve) {
888: PetscCall(MatGetNullSpace(pmat, &nullsp));
889: } else {
890: PetscCall(MatGetTransposeNullSpace(pmat, &nullsp));
891: }
892: if (nullsp) {
893: PetscCall(VecDuplicate(ksp->vec_rhs, &btmp));
894: PetscCall(VecCopy(ksp->vec_rhs, btmp));
895: PetscCall(MatNullSpaceRemove(nullsp, btmp));
896: vec_rhs = ksp->vec_rhs;
897: ksp->vec_rhs = btmp;
898: }
899: PetscCall(VecLockReadPush(ksp->vec_rhs));
900: PetscUseTypeMethod(ksp, solve);
901: PetscCall(KSPMonitorPauseFinal_Internal(ksp));
903: PetscCall(VecLockReadPop(ksp->vec_rhs));
904: if (nullsp) {
905: ksp->vec_rhs = vec_rhs;
906: PetscCall(VecDestroy(&btmp));
907: }
909: ksp->guess_zero = guess_zero;
911: PetscCheck(ksp->reason, comm, PETSC_ERR_PLIB, "Internal error, solver returned without setting converged reason");
912: ksp->totalits += ksp->its;
914: PetscCall(KSPConvergedReasonViewFromOptions(ksp));
916: if (ksp->viewRate) {
917: PetscCall(PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate));
918: PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
919: PetscCall(PetscViewerPopFormat(ksp->viewerRate));
920: }
921: PetscCall(PCPostSolve(ksp->pc, ksp));
923: /* diagonal scale solution if called for */
924: if (ksp->dscale) {
925: PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal));
926: /* unscale right-hand side and matrix */
927: if (ksp->dscalefix) {
928: Mat mat, pmat;
930: PetscCall(VecReciprocal(ksp->diagonal));
931: PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
932: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
933: PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
934: if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
935: PetscCall(VecReciprocal(ksp->diagonal));
936: ksp->dscalefix2 = PETSC_TRUE;
937: }
938: }
939: PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_Solve : KSP_SolveTranspose, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
940: if (ksp->guess) PetscCall(KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
941: if (ksp->postsolve) PetscCall((*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx));
943: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
944: if (ksp->viewEV) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV));
945: if (ksp->viewEVExp) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp));
946: if (ksp->viewSV) PetscCall(KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV));
947: if (ksp->viewFinalRes) PetscCall(KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes));
948: if (ksp->viewMat) PetscCall(ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat));
949: if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat));
950: if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs));
951: if (ksp->viewSol) PetscCall(ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol));
952: if (ksp->view) PetscCall(ObjectView((PetscObject)ksp, ksp->viewer, ksp->format));
953: if (ksp->viewDScale) PetscCall(ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale));
954: if (ksp->viewMatExp) {
955: Mat A, B;
957: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
958: if (ksp->transpose_solve) {
959: Mat AT;
961: PetscCall(MatCreateTranspose(A, &AT));
962: PetscCall(MatComputeOperator(AT, MATAIJ, &B));
963: PetscCall(MatDestroy(&AT));
964: } else {
965: PetscCall(MatComputeOperator(A, MATAIJ, &B));
966: }
967: PetscCall(ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp));
968: PetscCall(MatDestroy(&B));
969: }
970: if (ksp->viewPOpExp) {
971: Mat B;
973: PetscCall(KSPComputeOperator(ksp, MATAIJ, &B));
974: PetscCall(ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp));
975: PetscCall(MatDestroy(&B));
976: }
978: if (inXisinB) {
979: PetscCall(VecCopy(x, b));
980: PetscCall(VecDestroy(&x));
981: }
982: PetscCall(PetscObjectSAWsBlock((PetscObject)ksp));
983: if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
984: PCFailedReason reason;
986: PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
987: PetscCall(PCGetFailedReason(ksp->pc, &reason));
988: SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
989: }
990: level--;
991: PetscFunctionReturn(PETSC_SUCCESS);
992: }
994: /*@
995: KSPSolve - Solves a linear system.
997: Collective
999: Input Parameters:
1000: + ksp - iterative context obtained from `KSPCreate()`
1001: . b - the right-hand side vector
1002: - x - the solution (this may be the same vector as `b`, then `b` will be overwritten with answer)
1004: Options Database Keys:
1005: + -ksp_view_eigenvalues - compute the preconditioned operator's eigenvalues
1006: . -ksp_view_eigenvalues_explicit - compute the eigenvalues by forming the dense operator and using LAPACK
1007: . -ksp_view_mat binary - save matrix to the default binary viewer
1008: . -ksp_view_pmat binary - save matrix used to build preconditioner to the default binary viewer
1009: . -ksp_view_rhs binary - save right-hand side vector to the default binary viewer
1010: . -ksp_view_solution binary - save computed solution vector to the default binary viewer
1011: (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
1012: . -ksp_view_mat_explicit - for matrix-free operators, computes the matrix entries and views them
1013: . -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
1014: . -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
1015: . -ksp_view_final_residual - print 2-norm of true linear system residual at the end of the solution process
1016: . -ksp_error_if_not_converged - stop the program as soon as an error is detected in a `KSPSolve()`
1017: . -ksp_view_pre - print the ksp data structure before the system solution
1018: - -ksp_view - print the ksp data structure at the end of the system solution
1020: Level: beginner
1022: Notes:
1023: If one uses `KSPSetDM()` then `x` or `b` need not be passed. Use `KSPGetSolution()` to access the solution in this case.
1025: The operator is specified with `KSPSetOperators()`.
1027: `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or if constructing the preconditioner failed.
1028: Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or function `KSPSetErrorIfNotConverged()`
1029: will cause `KSPSolve()` to error as soon as an error occurs in the linear solver. In inner `KSPSolve()` calls, `KSP_DIVERGED_ITS` is not treated as an error because, when using nested solvers,
1030: it may be fine that inner solvers in the preconditioner do not converge during the solution process.
1032: The number of iterations can be obtained from `KSPGetIterationNumber()`.
1034: If you provide a matrix whose null spaces have been set with `MatSetNullSpace()` and `MatSetTransposeNullSpace()`, this will use that information to solve singular systems
1035: in the least squares sense with a norm-minimizing solution.
1037: $A x = b$ where $b = b_p + b_t$ and $b_t$ is not in the range of $A$ (and hence, by the fundamental theorem of linear algebra, is in the nullspace(A'); see `MatSetNullSpace()`).
1039: `KSP` first removes $b_t$, producing the linear system $A x = b_p$ (which has multiple solutions), and solves this to find the $\|x\|$ minimizing solution (and hence
1040: it finds the solution $x$ orthogonal to the nullspace(A)). The algorithm is simply that in each iteration of the Krylov method we remove the nullspace(A) from the search
1041: direction; thus the solution, which is a linear combination of the search directions, has no component in the nullspace(A).
1043: We recommend always using `KSPGMRES` for such singular systems.
1044: If nullspace(A) = nullspace(A') (note that symmetric matrices always satisfy this property) then both left and right preconditioning will work.
1045: If nullspace(A) != nullspace(A') then left preconditioning will work but right preconditioning may not work (or it may).
1047: Developer Notes:
1048: The reason we cannot always solve nullspace(A) != nullspace(A') systems with right preconditioning is that we need to remove at each iteration
1049: the nullspace(AB) from the search direction. While we know the nullspace(A), the nullspace(AB) equals B^-1 times the nullspace(A); except for trivial preconditioners
1050: such as diagonal scaling, we cannot apply the inverse of the preconditioner to a vector and thus cannot compute the nullspace(AB).
1052: If using a direct method (e.g., via the `KSP` solver
1053: `KSPPREONLY` and a preconditioner such as `PCLU` or `PCILU`),
1054: then its=1. See `KSPSetTolerances()` and `KSPConvergedDefault()`
1055: for more details.
1057: Understanding Convergence\:
1058: The routines `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1059: `KSPComputeEigenvaluesExplicitly()` provide information on additional
1060: options to monitor convergence and print eigenvalue information.
1062: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1063: `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1064: `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1065: @*/
1066: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1067: {
1068: PetscBool isPCMPI;
1070: PetscFunctionBegin;
1074: ksp->transpose_solve = PETSC_FALSE;
1075: PetscCall(KSPSolve_Private(ksp, b, x));
1076: PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
1077: if (PCMPIServerActive && isPCMPI) {
1078: KSP subksp;
1080: PetscCall(PCMPIGetKSP(ksp->pc, &subksp));
1081: ksp->its = subksp->its;
1082: ksp->reason = subksp->reason;
1083: }
1084: PetscFunctionReturn(PETSC_SUCCESS);
1085: }
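/*
   Example usage (a minimal sketch; assumes A is an assembled Mat and b an assembled Vec on
   PETSC_COMM_WORLD): the basic KSPSolve() workflow.

     KSP                ksp;
     Vec                x;
     KSPConvergedReason reason;

     PetscCall(VecDuplicate(b, &x));
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPSetFromOptions(ksp));  // picks up e.g. -ksp_type gmres -pc_type ilu -ksp_monitor
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPGetConvergedReason(ksp, &reason));  // check why the solve stopped
     PetscCall(KSPDestroy(&ksp));
     PetscCall(VecDestroy(&x));
*/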
1087: /*@
1088: KSPSolveTranspose - Solves a linear system with the transpose of the matrix, $A^T x = b$.
1090: Collective
1092: Input Parameters:
1093: + ksp - iterative context obtained from `KSPCreate()`
1094: . b - right-hand side vector
1095: - x - solution vector
1097: Level: developer
1099: Note:
1100: For complex numbers this solves the non-Hermitian transpose system.
1102: Developer Note:
1103: We need to implement a `KSPSolveHermitianTranspose()`
1105: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1106: `KSPSolve()`, `KSP`
1107: @*/
1108: PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
1109: {
1110: PetscFunctionBegin;
1114: if (ksp->transpose.use_explicittranspose) {
1115: Mat J, Jpre;
1116: PetscCall(KSPGetOperators(ksp, &J, &Jpre));
1117: if (!ksp->transpose.reuse_transpose) {
1118: PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
1119: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
1120: ksp->transpose.reuse_transpose = PETSC_TRUE;
1121: } else {
1122: PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
1123: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
1124: }
1125: if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1126: PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
1127: ksp->transpose.BT = ksp->transpose.AT;
1128: }
1129: PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
1130: } else {
1131: ksp->transpose_solve = PETSC_TRUE;
1132: }
1133: PetscCall(KSPSolve_Private(ksp, b, x));
1134: PetscFunctionReturn(PETSC_SUCCESS);
1135: }
1137: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1138: {
1139: Mat A, R;
1140: PetscReal *norms;
1141: PetscInt i, N;
1142: PetscBool flg;
1144: PetscFunctionBegin;
1145: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg));
1146: if (flg) {
1147: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
1148: if (!ksp->transpose_solve) PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1149: else PetscCall(MatTransposeMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &R));
1150: PetscCall(MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN));
1151: PetscCall(MatGetSize(R, NULL, &N));
1152: PetscCall(PetscMalloc1(N, &norms));
1153: PetscCall(MatGetColumnNorms(R, NORM_2, norms));
1154: PetscCall(MatDestroy(&R));
1155: for (i = 0; i < N; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : " ", shift + i, (double)norms[i]));
1156: PetscCall(PetscFree(norms));
1157: }
1158: PetscFunctionReturn(PETSC_SUCCESS);
1159: }
1161: static PetscErrorCode KSPMatSolve_Private(KSP ksp, Mat B, Mat X)
1162: {
1163: Mat A, P, vB, vX;
1164: Vec cb, cx;
1165: PetscInt n1, N1, n2, N2, Bbn = PETSC_DECIDE;
1166: PetscBool match;
1168: PetscFunctionBegin;
1172: PetscCheckSameComm(ksp, 1, B, 2);
1173: PetscCheckSameComm(ksp, 1, X, 3);
1174: PetscCheckSameType(B, 2, X, 3);
1175: PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
1176: MatCheckPreallocated(X, 3);
1177: if (!X->assembled) {
1178: PetscCall(MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
1179: PetscCall(MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY));
1180: PetscCall(MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY));
1181: }
1182: PetscCheck(B != X, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_IDN, "B and X must be different matrices");
1183: PetscCheck(!ksp->transpose_solve || !ksp->transpose.use_explicittranspose, PetscObjectComm((PetscObject)ksp), PETSC_ERR_SUP, "KSPMatSolveTranspose() does not support -ksp_use_explicittranspose");
1184: PetscCall(KSPGetOperators(ksp, &A, &P));
1185: PetscCall(MatGetLocalSize(B, NULL, &n2));
1186: PetscCall(MatGetLocalSize(X, NULL, &n1));
1187: PetscCall(MatGetSize(B, NULL, &N2));
1188: PetscCall(MatGetSize(X, NULL, &N1));
1189: PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of right-hand sides (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of solutions (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
1190: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, ""));
1191: PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of right-hand sides not stored in a dense Mat");
1192: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
1193: PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of solutions not stored in a dense Mat");
1194: PetscCall(KSPSetUp(ksp));
1195: PetscCall(KSPSetUpOnBlocks(ksp));
1196: if (ksp->ops->matsolve) {
1197: level++;
1198: if (ksp->guess_zero) PetscCall(MatZeroEntries(X));
1199: PetscCall(PetscLogEventBegin(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
1200: PetscCall(KSPGetMatSolveBatchSize(ksp, &Bbn));
1201: /* by default, do a single solve with all columns */
1202: if (Bbn == PETSC_DECIDE) Bbn = N2;
1203: else PetscCheck(Bbn >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "KSPMatSolve() batch size %" PetscInt_FMT " must be positive", Bbn);
1204: PetscCall(PetscInfo(ksp, "KSP type %s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, Bbn));
1205: /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
1206: if (Bbn >= N2) {
1207: PetscUseTypeMethod(ksp, matsolve, B, X);
1208: if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0));
1210: PetscCall(KSPConvergedReasonViewFromOptions(ksp));
1212: if (ksp->viewRate) {
1213: PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1214: PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1215: PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1216: }
1217: } else {
1218: for (n2 = 0; n2 < N2; n2 += Bbn) {
1219: PetscCall(MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB));
1220: PetscCall(MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX));
1221: PetscUseTypeMethod(ksp, matsolve, vB, vX);
1222: if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2));
1224: PetscCall(KSPConvergedReasonViewFromOptions(ksp));
1226: if (ksp->viewRate) {
1227: PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1228: PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1229: PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1230: }
1231: PetscCall(MatDenseRestoreSubMatrix(B, &vB));
1232: PetscCall(MatDenseRestoreSubMatrix(X, &vX));
1233: }
1234: }
1235: if (ksp->viewMat) PetscCall(ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat));
1236: if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat));
1237: if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs));
1238: if (ksp->viewSol) PetscCall(ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol));
1239: if (ksp->view) PetscCall(KSPView(ksp, ksp->viewer));
1240: PetscCall(PetscLogEventEnd(!ksp->transpose_solve ? KSP_MatSolve : KSP_MatSolveTranspose, ksp, B, X, 0));
1241: if (ksp->errorifnotconverged && ksp->reason < 0 && (level == 1 || ksp->reason != KSP_DIVERGED_ITS)) {
1242: PCFailedReason reason;
1244: PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason]);
1245: PetscCall(PCGetFailedReason(ksp->pc, &reason));
1246: SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_NOT_CONVERGED, "KSPMatSolve%s() has not converged, reason %s PC failed due to %s", !ksp->transpose_solve ? "" : "Transpose", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
1247: }
1248: level--;
1249: } else {
1250: PetscCall(PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name));
1251: for (n2 = 0; n2 < N2; ++n2) {
1252: PetscCall(MatDenseGetColumnVecRead(B, n2, &cb));
1253: PetscCall(MatDenseGetColumnVecWrite(X, n2, &cx));
1254: PetscCall(KSPSolve_Private(ksp, cb, cx));
1255: PetscCall(MatDenseRestoreColumnVecWrite(X, n2, &cx));
1256: PetscCall(MatDenseRestoreColumnVecRead(B, n2, &cb));
1257: }
1258: }
1259: PetscFunctionReturn(PETSC_SUCCESS);
1260: }
1262: /*@
1263: KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a `MATDENSE`. Unlike `KSPSolve()`, `B` and `X` must be different matrices.
1265: Input Parameters:
1266: + ksp - iterative context
1267: - B - block of right-hand sides
1269: Output Parameter:
1270: . X - block of solutions
1272: Level: intermediate
1274: Note:
1275: This is a stripped-down version of `KSPSolve()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.
1277: .seealso: [](ch_ksp), `KSPSolve()`, `MatMatSolve()`, `KSPMatSolveTranspose()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1278: @*/
1279: PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
1280: {
1281: PetscFunctionBegin;
1282: ksp->transpose_solve = PETSC_FALSE;
1283: PetscCall(KSPMatSolve_Private(ksp, B, X));
1284: PetscFunctionReturn(PETSC_SUCCESS);
1285: }
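/*
   Example usage (a minimal sketch; assumes A is an assembled square Mat and B a MATDENSE whose columns
   hold the right-hand sides): solving all right-hand sides at once, optionally in batches of columns.

     Mat X;

     PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSetMatSolveBatchSize(ksp, 16));  // optional: treat at most 16 columns per inner solve
     PetscCall(KSPMatSolve(ksp, B, X));
     PetscCall(MatDestroy(&X));
*/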
1287: /*@
1288: KSPMatSolveTranspose - Solves a linear system with the transposed matrix with multiple right-hand sides stored as a `MATDENSE`. Unlike `KSPSolveTranspose()`,
1289: `B` and `X` must be different matrices and the transposed matrix cannot be assembled explicitly for the user.
1291: Input Parameters:
1292: + ksp - iterative context
1293: - B - block of right-hand sides
1295: Output Parameter:
1296: . X - block of solutions
1298: Level: intermediate
1300: Note:
1301: This is a stripped-down version of `KSPSolveTranspose()`, which only handles `-ksp_view`, `-ksp_converged_reason`, `-ksp_converged_rate`, and `-ksp_view_final_residual`.
1303: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `MatMatTransposeSolve()`, `KSPMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1304: @*/
1305: PetscErrorCode KSPMatSolveTranspose(KSP ksp, Mat B, Mat X)
1306: {
1307: PetscFunctionBegin;
1308: ksp->transpose_solve = PETSC_TRUE;
1309: PetscCall(KSPMatSolve_Private(ksp, B, X));
1310: PetscFunctionReturn(PETSC_SUCCESS);
1311: }
1313: /*@
1314: KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.
1316: Logically Collective
1318: Input Parameters:
1319: + ksp - iterative context
1320: - bs - batch size
1322: Level: advanced
1324: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1325: @*/
1326: PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
1327: {
1328: PetscFunctionBegin;
1331: ksp->nmax = bs;
1332: PetscFunctionReturn(PETSC_SUCCESS);
1333: }
1335: /*@
1336: KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.
1338: Input Parameter:
1339: . ksp - iterative context
1341: Output Parameter:
1342: . bs - batch size
1344: Level: advanced
1346: .seealso: [](ch_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1347: @*/
1348: PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
1349: {
1350: PetscFunctionBegin;
1352: PetscAssertPointer(bs, 2);
1353: *bs = ksp->nmax;
1354: PetscFunctionReturn(PETSC_SUCCESS);
1355: }
1357: /*@
1358: KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`
1360: Collective
1362: Input Parameter:
1363: . ksp - iterative context obtained from `KSPCreate()`
1365: Level: beginner
1367: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1368: @*/
1369: PetscErrorCode KSPResetViewers(KSP ksp)
1370: {
1371: PetscFunctionBegin;
1373: if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1374: PetscCall(PetscViewerDestroy(&ksp->viewer));
1375: PetscCall(PetscViewerDestroy(&ksp->viewerPre));
1376: PetscCall(PetscViewerDestroy(&ksp->viewerRate));
1377: PetscCall(PetscViewerDestroy(&ksp->viewerMat));
1378: PetscCall(PetscViewerDestroy(&ksp->viewerPMat));
1379: PetscCall(PetscViewerDestroy(&ksp->viewerRhs));
1380: PetscCall(PetscViewerDestroy(&ksp->viewerSol));
1381: PetscCall(PetscViewerDestroy(&ksp->viewerMatExp));
1382: PetscCall(PetscViewerDestroy(&ksp->viewerEV));
1383: PetscCall(PetscViewerDestroy(&ksp->viewerSV));
1384: PetscCall(PetscViewerDestroy(&ksp->viewerEVExp));
1385: PetscCall(PetscViewerDestroy(&ksp->viewerFinalRes));
1386: PetscCall(PetscViewerDestroy(&ksp->viewerPOpExp));
1387: PetscCall(PetscViewerDestroy(&ksp->viewerDScale));
1388: ksp->view = PETSC_FALSE;
1389: ksp->viewPre = PETSC_FALSE;
1390: ksp->viewMat = PETSC_FALSE;
1391: ksp->viewPMat = PETSC_FALSE;
1392: ksp->viewRhs = PETSC_FALSE;
1393: ksp->viewSol = PETSC_FALSE;
1394: ksp->viewMatExp = PETSC_FALSE;
1395: ksp->viewEV = PETSC_FALSE;
1396: ksp->viewSV = PETSC_FALSE;
1397: ksp->viewEVExp = PETSC_FALSE;
1398: ksp->viewFinalRes = PETSC_FALSE;
1399: ksp->viewPOpExp = PETSC_FALSE;
1400: ksp->viewDScale = PETSC_FALSE;
1401: PetscFunctionReturn(PETSC_SUCCESS);
1402: }
1404: /*@
1405: KSPReset - Resets a `KSP` context to the state it had before `KSPSetUp()` was called and removes any allocated Vecs and Mats
1407: Collective
1409: Input Parameter:
1410: . ksp - iterative context obtained from `KSPCreate()`
1412: Level: beginner
1414: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1415: @*/
1416: PetscErrorCode KSPReset(KSP ksp)
1417: {
1418: PetscFunctionBegin;
1420: if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1421: PetscTryTypeMethod(ksp, reset);
1422: if (ksp->pc) PetscCall(PCReset(ksp->pc));
1423: if (ksp->guess) {
1424: KSPGuess guess = ksp->guess;
1425: PetscTryTypeMethod(guess, reset);
1426: }
1427: PetscCall(VecDestroyVecs(ksp->nwork, &ksp->work));
1428: PetscCall(VecDestroy(&ksp->vec_rhs));
1429: PetscCall(VecDestroy(&ksp->vec_sol));
1430: PetscCall(VecDestroy(&ksp->diagonal));
1431: PetscCall(VecDestroy(&ksp->truediagonal));
1433: ksp->setupstage = KSP_SETUP_NEW;
1434: ksp->nmax = PETSC_DECIDE;
1435: PetscFunctionReturn(PETSC_SUCCESS);
1436: }
1438: /*@
1439: KSPDestroy - Destroys a `KSP` context.
1441: Collective
1443: Input Parameter:
1444: . ksp - iterative context obtained from `KSPCreate()`
1446: Level: beginner
1448: .seealso: [](ch_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1449: @*/
1450: PetscErrorCode KSPDestroy(KSP *ksp)
1451: {
1452: PC pc;
1454: PetscFunctionBegin;
1455: if (!*ksp) PetscFunctionReturn(PETSC_SUCCESS);
1457: if (--((PetscObject)*ksp)->refct > 0) {
1458: *ksp = NULL;
1459: PetscFunctionReturn(PETSC_SUCCESS);
1460: }
1462: PetscCall(PetscObjectSAWsViewOff((PetscObject)*ksp));
1464: /*
1465: Avoid a cascading call to PCReset(ksp->pc) from the following call:
1466: PCReset() shouldn't be called from KSPDestroy() as it is unprotected by pc's
1467: refcount (and may be shared, e.g., by other ksps).
1468: */
1469: pc = (*ksp)->pc;
1470: (*ksp)->pc = NULL;
1471: PetscCall(KSPReset(*ksp));
1472: PetscCall(KSPResetViewers(*ksp));
1473: (*ksp)->pc = pc;
1474: PetscTryTypeMethod(*ksp, destroy);
1476: if ((*ksp)->transpose.use_explicittranspose) {
1477: PetscCall(MatDestroy(&(*ksp)->transpose.AT));
1478: PetscCall(MatDestroy(&(*ksp)->transpose.BT));
1479: (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
1480: }
1482: PetscCall(KSPGuessDestroy(&(*ksp)->guess));
1483: PetscCall(DMDestroy(&(*ksp)->dm));
1484: PetscCall(PCDestroy(&(*ksp)->pc));
1485: PetscCall(PetscFree((*ksp)->res_hist_alloc));
1486: PetscCall(PetscFree((*ksp)->err_hist_alloc));
1487: if ((*ksp)->convergeddestroy) PetscCall((*(*ksp)->convergeddestroy)((*ksp)->cnvP));
1488: PetscCall(KSPMonitorCancel(*ksp));
1489: PetscCall(KSPConvergedReasonViewCancel(*ksp));
1490: PetscCall(PetscHeaderDestroy(ksp));
1491: PetscFunctionReturn(PETSC_SUCCESS);
1492: }
1494: /*@
1495: KSPSetPCSide - Sets the preconditioning side.
1497: Logically Collective
1499: Input Parameters:
1500: + ksp - iterative context obtained from `KSPCreate()`
1503: - side - the preconditioning side, where side is one of
1504: .vb
1505: PC_LEFT - left preconditioning (default)
1506: PC_RIGHT - right preconditioning
1507: PC_SYMMETRIC - symmetric preconditioning
1508: .ve
1510: Options Database Key:
1511: . -ksp_pc_side <right,left,symmetric> - `KSP` preconditioner side
1513: Level: intermediate
1515: Notes:
1516: Left preconditioning is used by default for most Krylov methods except `KSPFGMRES`, which only supports right preconditioning.
1518: For some methods, changing the side of the preconditioner changes the norm type that is used; see `KSPSetNormType()`.
1520: Symmetric preconditioning is currently available only for the `KSPQCG` method. However, note that
1521: symmetric preconditioning can be emulated by using either right or left
1522: preconditioning and a pre or post processing step.
1524: Setting the `PCSide` often affects the default norm type. See `KSPSetNormType()` for details.
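For example, a sketch requesting right preconditioning (with `ksp` assumed to already exist):
.vb
  KSPSetType(ksp, KSPGMRES);
  KSPSetPCSide(ksp, PC_RIGHT);  // the norm type used by the convergence test may change, see KSPSetNormType()
.ve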
1526: .seealso: [](ch_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`
1527: @*/
1528: PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
1529: {
1530: PetscFunctionBegin;
1533: ksp->pc_side = ksp->pc_side_set = side;
1534: PetscFunctionReturn(PETSC_SUCCESS);
1535: }
1537: /*@
1538: KSPGetPCSide - Gets the preconditioning side.
1540: Not Collective
1542: Input Parameter:
1543: . ksp - iterative context obtained from `KSPCreate()`
1545: Output Parameter:
1546: . side - the preconditioning side, where side is one of
1547: .vb
1548: PC_LEFT - left preconditioning (default)
1549: PC_RIGHT - right preconditioning
1550: PC_SYMMETRIC - symmetric preconditioning
1551: .ve
1553: Level: intermediate
1555: .seealso: [](ch_ksp), `KSPSetPCSide()`, `KSP`
1556: @*/
1557: PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
1558: {
1559: PetscFunctionBegin;
1561: PetscAssertPointer(side, 2);
1562: PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
1563: *side = ksp->pc_side;
1564: PetscFunctionReturn(PETSC_SUCCESS);
1565: }
1567: /*@
1568: KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1569: iteration tolerances used by the default `KSP` convergence tests.
1571: Not Collective
1573: Input Parameter:
1574: . ksp - the Krylov subspace context
1576: Output Parameters:
1577: + rtol - the relative convergence tolerance
1578: . abstol - the absolute convergence tolerance
1579: . dtol - the divergence tolerance
1580: - maxits - maximum number of iterations
1582: Level: intermediate
1584: Note:
1585: The user can specify `NULL` for any parameter that is not needed.
1587: .seealso: [](ch_ksp), `KSPSetTolerances()`, `KSP`, `KSPSetMinimumIterations()`, `KSPGetMinimumIterations()`
1588: @*/
1589: PetscErrorCode KSPGetTolerances(KSP ksp, PetscReal *rtol, PetscReal *abstol, PetscReal *dtol, PetscInt *maxits)
1590: {
1591: PetscFunctionBegin;
1593: if (abstol) *abstol = ksp->abstol;
1594: if (rtol) *rtol = ksp->rtol;
1595: if (dtol) *dtol = ksp->divtol;
1596: if (maxits) *maxits = ksp->max_it;
1597: PetscFunctionReturn(PETSC_SUCCESS);
1598: }
1600: /*@
1601: KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1602: iteration tolerances used by the default `KSP` convergence testers.
1604: Logically Collective
1606: Input Parameters:
1607: + ksp - the Krylov subspace context
1608: . rtol - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1609: . abstol - the absolute convergence tolerance, the absolute size of the (possibly preconditioned) residual norm
1610: . dtol - the divergence tolerance, amount (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1611: - maxits - maximum number of iterations to use
1613: Options Database Keys:
1614: + -ksp_atol <abstol> - Sets `abstol`
1615: . -ksp_rtol <rtol> - Sets `rtol`
1616: . -ksp_divtol <dtol> - Sets `dtol`
1617: - -ksp_max_it <maxits> - Sets `maxits`
1619: Level: intermediate
1621: Notes:
1622: All parameters must be non-negative.
1624: Use `PETSC_CURRENT` to retain the current value of any of the parameters. The deprecated `PETSC_DEFAULT` also retains the current value (though the name is confusing).
1626: Use `PETSC_DETERMINE` to use the default value for the given `KSP`. The default value is the value when the object's type is set.
1628: For `dtol` and `maxits` use `PETSC_UNLIMITED` to indicate there is no upper bound on these values.
1630: See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
1631: for setting user-defined stopping criteria.
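For example, a sketch that tightens only the relative tolerance while keeping the other values unchanged (with `ksp` assumed to already exist):
.vb
  KSPSetTolerances(ksp, 1.e-12, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT);
.ve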
1633: Fortran Note:
1634: Use `PETSC_CURRENT_INTEGER`, `PETSC_CURRENT_REAL`, `PETSC_DETERMINE_INTEGER`, or `PETSC_DETERMINE_REAL`
1636: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetMinimumIterations()`
1637: @*/
1638: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1639: {
1640: PetscFunctionBegin;
1647: if (rtol == (PetscReal)PETSC_DETERMINE) {
1648: ksp->rtol = ksp->default_rtol;
1649: } else if (rtol != (PetscReal)PETSC_CURRENT) {
1650: PetscCheck(rtol >= 0.0 && rtol < 1.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Relative tolerance %g must be non-negative and less than 1.0", (double)rtol);
1651: ksp->rtol = rtol;
1652: }
1653: if (abstol == (PetscReal)PETSC_DETERMINE) {
1654: ksp->abstol = ksp->default_abstol;
1655: } else if (abstol != (PetscReal)PETSC_CURRENT) {
1656: PetscCheck(abstol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Absolute tolerance %g must be non-negative", (double)abstol);
1657: ksp->abstol = abstol;
1658: }
1659: if (dtol == (PetscReal)PETSC_DETERMINE) {
1660: ksp->divtol = ksp->default_divtol;
1661: } else if (dtol == (PetscReal)PETSC_UNLIMITED) {
1662: ksp->divtol = PETSC_MAX_REAL;
1663: } else if (dtol != (PetscReal)PETSC_CURRENT) {
1664: PetscCheck(dtol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Divergence tolerance %g must be non-negative", (double)dtol);
1665: ksp->divtol = dtol;
1666: }
1667: if (maxits == (PetscInt)PETSC_DETERMINE) {
1668: ksp->max_it = ksp->default_max_it;
1669: } else if (maxits == (PetscInt)PETSC_UNLIMITED) {
1670: ksp->max_it = PETSC_INT_MAX;
1671: } else if (maxits != (PetscInt)PETSC_CURRENT) {
1672: PetscCheck(maxits >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Maximum number of iterations %" PetscInt_FMT " must be non-negative", maxits);
1673: ksp->max_it = maxits;
1674: }
1675: PetscFunctionReturn(PETSC_SUCCESS);
1676: }
1678: /*@
1679: KSPSetMinimumIterations - Sets the minimum number of iterations to use, regardless of the tolerances
1681: Logically Collective
1683: Input Parameters:
1684: + ksp - the Krylov subspace context
1685: - minit - minimum number of iterations to use
1687: Options Database Key:
1688: . -ksp_min_it <minits> - Sets `minit`
1690: Level: intermediate
1692: Notes:
1693: Use `KSPSetTolerances()` to set a variety of other tolerances
1695: See `KSPConvergedDefault()` for details on how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
1696: for setting user-defined stopping criteria.
1698: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPGetMinimumIterations()`
1699: @*/
1700: PetscErrorCode KSPSetMinimumIterations(KSP ksp, PetscInt minit)
1701: {
1702: PetscFunctionBegin;
1706: PetscCheck(minit >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Minimum number of iterations %" PetscInt_FMT " must be non-negative", minit);
1707: ksp->min_it = minit;
1708: PetscFunctionReturn(PETSC_SUCCESS);
1709: }
1711: /*@
1712: KSPGetMinimumIterations - Gets the minimum number of iterations to use, regardless of the tolerances, that was set with `KSPSetMinimumIterations()` or `-ksp_min_it`
1714: Not Collective
1716: Input Parameter:
1717: . ksp - the Krylov subspace context
1719: Output Parameter:
1720: . minit - minimum number of iterations to use
1722: Level: intermediate
1724: .seealso: [](ch_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`, `KSPSetTolerances()`, `KSPSetMinimumIterations()`
1725: @*/
1726: PetscErrorCode KSPGetMinimumIterations(KSP ksp, PetscInt *minit)
1727: {
1728: PetscFunctionBegin;
1730: PetscAssertPointer(minit, 2);
1732: *minit = ksp->min_it;
1733: PetscFunctionReturn(PETSC_SUCCESS);
1734: }
1736: /*@
1737: KSPSetInitialGuessNonzero - Tells the iterative solver that the
1738: initial guess is nonzero; otherwise `KSP` assumes the initial guess
1739: is to be zero (and thus zeros it out before solving).
1741: Logically Collective
1743: Input Parameters:
1744: + ksp - iterative context obtained from `KSPCreate()`
1745: - flg - `PETSC_TRUE` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero
1747: Options Database Key:
1748: . -ksp_initial_guess_nonzero <true,false> - use nonzero initial guess
1750: Level: beginner
1752: Note:
1753: If this is not called, the solution vector is zeroed in the call to `KSPSolve()`.
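For example, a sketch that supplies a nonzero starting guess (with `ksp`, `b`, and `x` assumed to already exist):
.vb
  VecSet(x, 1.0);                              // or any problem-specific guess
  KSPSetInitialGuessNonzero(ksp, PETSC_TRUE);  // keep the contents of x instead of zeroing them
  KSPSolve(ksp, b, x);
.ve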
1755: .seealso: [](ch_ksp), `KSPGetInitialGuessNonzero()`, `KSPGuessSetType()`, `KSPGuessType`, `KSP`
1756: @*/
1757: PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
1758: {
1759: PetscFunctionBegin;
1762: ksp->guess_zero = (PetscBool)!flg;
1763: PetscFunctionReturn(PETSC_SUCCESS);
1764: }
1766: /*@
1767: KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1768: a zero initial guess.
1770: Not Collective
1772: Input Parameter:
1773: . ksp - iterative context obtained from `KSPCreate()`
1775: Output Parameter:
1776: . flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`
1778: Level: intermediate
1780: .seealso: [](ch_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1781: @*/
1782: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1783: {
1784: PetscFunctionBegin;
1786: PetscAssertPointer(flag, 2);
1787: if (ksp->guess_zero) *flag = PETSC_FALSE;
1788: else *flag = PETSC_TRUE;
1789: PetscFunctionReturn(PETSC_SUCCESS);
1790: }
1792: /*@
1793: KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error as soon as it detects that the solver has not converged.
1795: Logically Collective
1797: Input Parameters:
1798: + ksp - iterative context obtained from `KSPCreate()`
1799: - flg - `PETSC_TRUE` indicates you want the error generated
1801: Options Database Key:
1802: . -ksp_error_if_not_converged <true,false> - generate an error and stop the program
1804: Level: intermediate
1806: Notes:
1807: Normally PETSc continues if a linear solver fails to converge; you can call `KSPGetConvergedReason()` after a `KSPSolve()`
1808: to determine if it has converged.
1810: A `KSP_DIVERGED_ITS` will not generate an error in a `KSPSolve()` inside a nested linear solver.
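The alternative to generating an error is to check the reason yourself after the solve, for example (a sketch, with `ksp`, `b`, and `x` assumed to already exist):
.vb
  KSPConvergedReason reason;

  KSPSolve(ksp, b, x);
  KSPGetConvergedReason(ksp, &reason);
  if (reason < 0) {
    // handle the failure without aborting the program
  }
.ve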
1812: .seealso: [](ch_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
1813: @*/
1814: PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
1815: {
1816: PetscFunctionBegin;
1819: ksp->errorifnotconverged = flg;
1820: PetscFunctionReturn(PETSC_SUCCESS);
1821: }
1823: /*@
1824: KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?
1826: Not Collective
1828: Input Parameter:
1829: . ksp - iterative context obtained from `KSPCreate()`
1831: Output Parameter:
1832: . flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`
1834: Level: intermediate
1836: .seealso: [](ch_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
1837: @*/
1838: PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
1839: {
1840: PetscFunctionBegin;
1842: PetscAssertPointer(flag, 2);
1843: *flag = ksp->errorifnotconverged;
1844: PetscFunctionReturn(PETSC_SUCCESS);
1845: }
1847: /*@
1848: KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` to compute the initial guess (the Knoll trick)
1850: Logically Collective
1852: Input Parameters:
1853: + ksp - iterative context obtained from `KSPCreate()`
1854: - flg - `PETSC_TRUE` or `PETSC_FALSE`
1856: Level: advanced
1858: Developer Note:
1859: The Knoll trick is not currently implemented using the `KSPGuess` class
1861: .seealso: [](ch_ksp), `KSPGetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1862: @*/
1863: PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
1864: {
1865: PetscFunctionBegin;
1868: ksp->guess_knoll = flg;
1869: PetscFunctionReturn(PETSC_SUCCESS);
1870: }
1872: /*@
1873: KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using `PCApply()` applied to the right-hand side to compute
1874: the initial guess)
1876: Not Collective
1878: Input Parameter:
1879: . ksp - iterative context obtained from `KSPCreate()`
1881: Output Parameter:
1882: . flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`
1884: Level: advanced
1886: .seealso: [](ch_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1887: @*/
1888: PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
1889: {
1890: PetscFunctionBegin;
1892: PetscAssertPointer(flag, 2);
1893: *flag = ksp->guess_knoll;
1894: PetscFunctionReturn(PETSC_SUCCESS);
1895: }
1897: /*@
1898: KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
1899: values will be calculated via a Lanczos or Arnoldi process as the linear
1900: system is solved.
1902: Not Collective
1904: Input Parameter:
1905: . ksp - iterative context obtained from `KSPCreate()`
1907: Output Parameter:
1908: . flg - `PETSC_TRUE` or `PETSC_FALSE`
1910: Options Database Key:
1911: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`
1913: Level: advanced
1915: Notes:
1916: Currently this option is not valid for all iterative methods.
1918: Many users may just want to use the monitoring routine
1919: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1920: to print the singular values at each iteration of the linear solve.
1922: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1923: @*/
1924: PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
1925: {
1926: PetscFunctionBegin;
1928: PetscAssertPointer(flg, 2);
1929: *flg = ksp->calc_sings;
1930: PetscFunctionReturn(PETSC_SUCCESS);
1931: }
1933: /*@
1934: KSPSetComputeSingularValues - Sets a flag so that the extreme singular
1935: values will be calculated via a Lanczos or Arnoldi process as the linear
1936: system is solved.
1938: Logically Collective
1940: Input Parameters:
1941: + ksp - iterative context obtained from `KSPCreate()`
1942: - flg - `PETSC_TRUE` or `PETSC_FALSE`
1944: Options Database Key:
1945: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`
1947: Level: advanced
1949: Notes:
1950: Currently this option is not valid for all iterative methods.
1952: Many users may just want to use the monitoring routine
1953: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1954: to print the singular values at each iteration of the linear solve.
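A typical usage sketch, with the flag set before the solve (with `ksp`, `b`, and `x` assumed to already exist):
.vb
  PetscReal emax, emin;

  KSPSetComputeSingularValues(ksp, PETSC_TRUE);
  KSPSolve(ksp, b, x);
  KSPComputeExtremeSingularValues(ksp, &emax, &emin);
.ve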
1956: .seealso: [](ch_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`, `KSPSetComputeRitz()`
1957: @*/
1958: PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
1959: {
1960: PetscFunctionBegin;
1963: ksp->calc_sings = flg;
1964: PetscFunctionReturn(PETSC_SUCCESS);
1965: }
1967: /*@
1968: KSPGetComputeEigenvalues - Gets the flag indicating that the extreme eigenvalues
1969: will be calculated via a Lanczos or Arnoldi process as the linear
1970: system is solved.
1972: Not Collective
1974: Input Parameter:
1975: . ksp - iterative context obtained from `KSPCreate()`
1977: Output Parameter:
1978: . flg - `PETSC_TRUE` or `PETSC_FALSE`
1980: Level: advanced
1982: Note:
1983: Currently this option is not valid for all iterative methods.
1985: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
1986: @*/
1987: PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
1988: {
1989: PetscFunctionBegin;
1991: PetscAssertPointer(flg, 2);
1992: *flg = ksp->calc_sings;
1993: PetscFunctionReturn(PETSC_SUCCESS);
1994: }
1996: /*@
1997: KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
1998: will be calculated via a Lanczos or Arnoldi process as the linear
1999: system is solved.
2001: Logically Collective
2003: Input Parameters:
2004: + ksp - iterative context obtained from `KSPCreate()`
2005: - flg - `PETSC_TRUE` or `PETSC_FALSE`
2007: Level: advanced
2009: Note:
2010: Currently this option is not valid for all iterative methods.
2012: .seealso: [](ch_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`, `KSPSetComputeRitz()`
2013: @*/
2014: PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
2015: {
2016: PetscFunctionBegin;
2019: ksp->calc_sings = flg;
2020: PetscFunctionReturn(PETSC_SUCCESS);
2021: }
2023: /*@
2024: KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
2025: will be calculated via a Lanczos or Arnoldi process as the linear
2026: system is solved.
2028: Logically Collective
2030: Input Parameters:
2031: + ksp - iterative context obtained from `KSPCreate()`
2032: - flg - `PETSC_TRUE` or `PETSC_FALSE`
2034: Level: advanced
2036: Note:
2037: Currently this option is only valid for the `KSPGMRES` method.
2039: .seealso: [](ch_ksp), `KSPComputeRitz()`, `KSP`
2040: @*/
2041: PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
2042: {
2043: PetscFunctionBegin;
2046: ksp->calc_ritz = flg;
2047: PetscFunctionReturn(PETSC_SUCCESS);
2048: }
2050: /*@
2051: KSPGetRhs - Gets the right-hand-side vector for the linear system to
2052: be solved.
2054: Not Collective
2056: Input Parameter:
2057: . ksp - iterative context obtained from `KSPCreate()`
2059: Output Parameter:
2060: . r - right-hand-side vector
2062: Level: developer
2064: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
2065: @*/
2066: PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
2067: {
2068: PetscFunctionBegin;
2070: PetscAssertPointer(r, 2);
2071: *r = ksp->vec_rhs;
2072: PetscFunctionReturn(PETSC_SUCCESS);
2073: }
2075: /*@
2076: KSPGetSolution - Gets the location of the solution for the
2077: linear system to be solved.
2079: Not Collective
2081: Input Parameter:
2082: . ksp - iterative context obtained from `KSPCreate()`
2084: Output Parameter:
2085: . v - solution vector
2087: Level: developer
2089: Note:
2090: If this is called during a `KSPSolve()` the vector's values may not represent the solution
2091: to the linear system.
2093: .seealso: [](ch_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
2094: @*/
2095: PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
2096: {
2097: PetscFunctionBegin;
2099: PetscAssertPointer(v, 2);
2100: *v = ksp->vec_sol;
2101: PetscFunctionReturn(PETSC_SUCCESS);
2102: }
2104: /*@
2105: KSPSetPC - Sets the preconditioner to be used to calculate the
2106: application of the preconditioner on a vector.
2108: Collective
2110: Input Parameters:
2111: + ksp - iterative context obtained from `KSPCreate()`
2112: - pc - the preconditioner object (can be `NULL`)
2114: Level: developer
2116: Note:
2117: Use `KSPGetPC()` to retrieve the preconditioner context.
2119: .seealso: [](ch_ksp), `KSPGetPC()`, `KSP`
2120: @*/
2121: PetscErrorCode KSPSetPC(KSP ksp, PC pc)
2122: {
2123: PetscFunctionBegin;
2125: if (pc) {
2127: PetscCheckSameComm(ksp, 1, pc, 2);
2128: }
2129: if (ksp->pc != pc && ksp->setupstage) ksp->setupstage = KSP_SETUP_NEWMATRIX;
2130: PetscCall(PetscObjectReference((PetscObject)pc));
2131: PetscCall(PCDestroy(&ksp->pc));
2132: ksp->pc = pc;
2133: PetscFunctionReturn(PETSC_SUCCESS);
2134: }
2136: PETSC_INTERN PetscErrorCode PCCreate_MPI(PC);
2138: // PetscClangLinter pragma disable: -fdoc-internal-linkage
2139: /*@C
2140: KSPCheckPCMPI - Checks if `-mpi_linear_solver_server` is active and the `PC` should be changed to `PCMPI`
2142: Collective, No Fortran Support
2144: Input Parameter:
2145: . ksp - iterative context obtained from `KSPCreate()`
2147: Level: developer
2149: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PCMPIServerBegin()`, `PCMPIServerEnd()`
2150: @*/
2151: PETSC_INTERN PetscErrorCode KSPCheckPCMPI(KSP ksp)
2152: {
2153: PetscBool isPCMPI;
2155: PetscFunctionBegin;
2157: PetscCall(PetscObjectTypeCompare((PetscObject)ksp->pc, PCMPI, &isPCMPI));
2158: if (PCMPIServerActive && ksp->nestlevel == 0 && !isPCMPI) {
2159: const char *prefix;
2160: char *found = NULL;
2162: PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
2163: if (prefix) PetscCall(PetscStrstr(prefix, "mpi_linear_solver_server_", &found));
2164: if (!found) PetscCall(KSPAppendOptionsPrefix(ksp, "mpi_linear_solver_server_"));
2165: PetscCall(PetscInfo(NULL, "In MPI Linear Solver Server and detected (root) PC that must be changed to PCMPI\n"));
2166: PetscCall(PCSetType(ksp->pc, PCMPI));
2167: }
2168: PetscFunctionReturn(PETSC_SUCCESS);
2169: }
2171: /*@
2172: KSPGetPC - Returns a pointer to the preconditioner context with the `KSP`
2174: Not Collective
2176: Input Parameter:
2177: . ksp - iterative context obtained from `KSPCreate()`
2179: Output Parameter:
2180: . pc - preconditioner context
2182: Level: developer
2184: Note:
2185: The `PC` is created if it does not already exist.
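A common usage sketch is to obtain the inner `PC` and configure it (with `ksp` assumed to already exist):
.vb
  PC pc;

  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCJACOBI);  // the PC remains owned by the KSP, do not destroy it
.ve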
2187: Developer Note:
2188: Calls `KSPCheckPCMPI()` to check if the `KSP` is affected by `-mpi_linear_solver_server`
2190: .seealso: [](ch_ksp), `KSPSetPC()`, `KSP`, `PC`
2191: @*/
2192: PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
2193: {
2194: PetscFunctionBegin;
2196: PetscAssertPointer(pc, 2);
2197: if (!ksp->pc) {
2198: PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc));
2199: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0));
2200: PetscCall(PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options));
2201: PetscCall(PCSetKSPNestLevel(ksp->pc, ksp->nestlevel));
2202: }
2203: PetscCall(KSPCheckPCMPI(ksp));
2204: *pc = ksp->pc;
2205: PetscFunctionReturn(PETSC_SUCCESS);
2206: }
2208: /*@
2209: KSPMonitor - Runs the user-provided monitor routines, if they exist
2211: Collective
2213: Input Parameters:
2214: + ksp - iterative context obtained from `KSPCreate()`
2215: . it - iteration number
2216: - rnorm - relative norm of the residual
2218: Level: developer
2220: Notes:
2221: This routine is called by the `KSP` implementations.
2222: It does not typically need to be called by the user.
2224: For Krylov methods that do not keep a running value of the current solution (such as `KSPGMRES`) this
2225: cannot be called after the `KSPConvergedReason` has been set but before the final solution has been computed.
2227: .seealso: [](ch_ksp), `KSPMonitorSet()`
2228: @*/
2229: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2230: {
2231: PetscInt i, n = ksp->numbermonitors;
2233: PetscFunctionBegin;
2234: for (i = 0; i < n; i++) PetscCall((*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]));
2235: PetscFunctionReturn(PETSC_SUCCESS);
2236: }
2238: /*@C
2239: KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor
2240: the residual/error etc.
2242: Logically Collective
2244: Input Parameters:
2245: + ksp - iterative context obtained from `KSPCreate()`
2246: . monitor - pointer to function (if this is `NULL`, it turns off monitoring)
2247: . ctx - [optional] context for private data for the monitor routine (use `NULL` if no context is needed)
2248: - monitordestroy - [optional] routine that frees monitor context (may be `NULL`)
2250: Calling sequence of `monitor`:
2251: + ksp - iterative context obtained from `KSPCreate()`
2252: . it - iteration number
2253: . rnorm - (estimated) 2-norm of (preconditioned) residual
2254: - ctx - optional monitoring context, as set by `KSPMonitorSet()`
2256: Calling sequence of `monitordestroy`:
2257: . ctx - optional monitoring context, as set by `KSPMonitorSet()`
2259: Options Database Keys:
2260: + -ksp_monitor - sets `KSPMonitorResidual()`
2261: . -ksp_monitor draw - sets `KSPMonitorResidualDraw()` and plots residual
2262: . -ksp_monitor draw::draw_lg - sets `KSPMonitorResidualDrawLG()` and plots residual
2263: . -ksp_monitor_pause_final - Pauses any graphics when the solve finishes (only works for internal monitors)
2264: . -ksp_monitor_true_residual - sets `KSPMonitorTrueResidual()`
2265: . -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2266: . -ksp_monitor_max - sets `KSPMonitorTrueResidualMax()`
2267: . -ksp_monitor_singular_value - sets `KSPMonitorSingularValue()`
2268: - -ksp_monitor_cancel - cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but
2269: does not cancel those set via the options database.
2271: Level: beginner
2273: Notes:
2274: The default is to do nothing. To print the residual, or preconditioned
2275: residual if `KSPSetNormType`(ksp,`KSP_NORM_PRECONDITIONED`) was called, use
2276: `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2277: context.
2279: Several different monitoring routines may be set by calling
2280: `KSPMonitorSet()` multiple times; all will be called in the
2281: order in which they were set.
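A minimal sketch of a custom monitor (the name `MyMonitor` is illustrative):
.vb
  static PetscErrorCode MyMonitor(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "  it %" PetscInt_FMT " rnorm %g\n", it, (double)rnorm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  // in the calling code, after KSPCreate()
  KSPMonitorSet(ksp, MyMonitor, NULL, NULL);
.ve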
2283: Fortran Note:
2284: Only a single monitor function can be set for each `KSP` object
2286: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorCancel()`, `KSP`
2287: @*/
2288: PetscErrorCode KSPMonitorSet(KSP ksp, PetscErrorCode (*monitor)(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx), void *ctx, PetscErrorCode (*monitordestroy)(void **ctx))
2289: {
2290: PetscInt i;
2291: PetscBool identical;
2293: PetscFunctionBegin;
2295: for (i = 0; i < ksp->numbermonitors; i++) {
2296: PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))monitor, ctx, monitordestroy, (PetscErrorCode (*)(void))ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical));
2297: if (identical) PetscFunctionReturn(PETSC_SUCCESS);
2298: }
2299: PetscCheck(ksp->numbermonitors < MAXKSPMONITORS, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP monitors set");
2300: ksp->monitor[ksp->numbermonitors] = monitor;
2301: ksp->monitordestroy[ksp->numbermonitors] = monitordestroy;
2302: ksp->monitorcontext[ksp->numbermonitors++] = (void *)ctx;
2303: PetscFunctionReturn(PETSC_SUCCESS);
2304: }
2306: /*@
2307: KSPMonitorCancel - Clears all monitors for a `KSP` object.
2309: Logically Collective
2311: Input Parameter:
2312: . ksp - iterative context obtained from `KSPCreate()`
2314: Options Database Key:
2315: . -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.
2317: Level: intermediate
2319: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2320: @*/
2321: PetscErrorCode KSPMonitorCancel(KSP ksp)
2322: {
2323: PetscInt i;
2325: PetscFunctionBegin;
2327: for (i = 0; i < ksp->numbermonitors; i++) {
2328: if (ksp->monitordestroy[i]) PetscCall((*ksp->monitordestroy[i])(&ksp->monitorcontext[i]));
2329: }
2330: ksp->numbermonitors = 0;
2331: PetscFunctionReturn(PETSC_SUCCESS);
2332: }
2334: /*@C
2335: KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.
2337: Not Collective
2339: Input Parameter:
2340: . ksp - iterative context obtained from `KSPCreate()`
2342: Output Parameter:
2343: . ctx - monitoring context
2345: Level: intermediate
2347: .seealso: [](ch_ksp), `KSPMonitorResidual()`, `KSP`
2348: @*/
2349: PetscErrorCode KSPGetMonitorContext(KSP ksp, void *ctx)
2350: {
2351: PetscFunctionBegin;
2353: *(void **)ctx = ksp->monitorcontext[0];
2354: PetscFunctionReturn(PETSC_SUCCESS);
2355: }
2357: /*@
2358: KSPSetResidualHistory - Sets the array used to hold the residual history.
2359: If set, this array will contain the residual norms computed at each
2360: iteration of the solver.
2362: Not Collective
2364: Input Parameters:
2365: + ksp - iterative context obtained from `KSPCreate()`
2366: . a - array to hold history
2367: . na - size of `a`
2368: - reset - `PETSC_TRUE` indicates the history counter is reset to zero
2369: for each new linear solve
2371: Level: advanced
2373: Notes:
2374: If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2375: If `a` is `NULL` then space is allocated for the history. If `na` is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a
2376: default array of length 10,000 is allocated.
2378: If the array is not long enough then, once the number of iterations exceeds the array length, `KSPSolve()` stops recording the history.
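For example, a sketch that lets PETSc manage the storage and retrieves the history after the solve (with `ksp`, `b`, and `x` assumed to already exist):
.vb
  const PetscReal *hist;
  PetscInt         nhist;

  KSPSetResidualHistory(ksp, NULL, PETSC_DECIDE, PETSC_TRUE);
  KSPSolve(ksp, b, x);
  KSPGetResidualHistory(ksp, &hist, &nhist);  // hist is borrowed, do not free it
.ve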
2380: .seealso: [](ch_ksp), `KSPGetResidualHistory()`, `KSP`
2381: @*/
2382: PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
2383: {
2384: PetscFunctionBegin;
2387: PetscCall(PetscFree(ksp->res_hist_alloc));
2388: if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2389: ksp->res_hist = a;
2390: ksp->res_hist_max = na;
2391: } else {
2392: if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
2393: else ksp->res_hist_max = 10000; /* like default ksp->max_it */
2394: PetscCall(PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc));
2396: ksp->res_hist = ksp->res_hist_alloc;
2397: }
2398: ksp->res_hist_len = 0;
2399: ksp->res_hist_reset = reset;
2400: PetscFunctionReturn(PETSC_SUCCESS);
2401: }
2403: /*@C
2404: KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.
2406: Not Collective
2408: Input Parameter:
2409: . ksp - iterative context obtained from `KSPCreate()`
2411: Output Parameters:
2412: + a - pointer to array to hold history (or `NULL`)
2413: - na - number of used entries in a (or `NULL`). Note this has different meanings depending on the `reset` argument to `KSPSetResidualHistory()`
2415: Level: advanced
2417: Notes:
2418: This array is borrowed and should not be freed by the caller.
2420: Can only be called after a `KSPSetResidualHistory()`, otherwise `a` and `na` are set to `NULL` and zero.
2422: When `reset` was `PETSC_TRUE`, since a residual is computed before the first iteration, the value of `na` is generally one more than the value
2423: returned with `KSPGetIterationNumber()`.
2425: Some Krylov methods may not compute the final residual norm when convergence is declared because the maximum number of iterations allowed has been reached.
2426: In this situation, when `reset` was `PETSC_TRUE`, `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`
2428: Some Krylov methods (such as `KSPSTCG`), under certain circumstances, do not compute the final residual norm. In this situation, when `reset` was `PETSC_TRUE`,
2429: `na` will then equal the number of iterations reported with `KSPGetIterationNumber()`
2431: `KSPBCGSL` does not record the residual norms for the "subiterations" hence the results from `KSPGetResidualHistory()` and `KSPGetIterationNumber()` will be different
2433: Fortran Note:
2434: The Fortran version of this routine has a calling sequence
2435: .vb
2436: call KSPGetResidualHistory(KSP ksp, integer na, integer ierr)
2437: .ve
2438: Note that you must have passed a Fortran array into `KSPSetResidualHistory()`; access the residual values
2439: from that array you provided. Only `na` (the number of
2440: residual norms currently held) is set.
2442: .seealso: [](ch_ksp), `KSPSetResidualHistory()`, `KSP`, `KSPGetIterationNumber()`, `KSPSTCG`, `KSPBCGSL`
2443: @*/
2444: PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2445: {
2446: PetscFunctionBegin;
2448: if (a) *a = ksp->res_hist;
2449: if (na) PetscCall(PetscIntCast(ksp->res_hist_len, na));
2450: PetscFunctionReturn(PETSC_SUCCESS);
2451: }
2453: /*@
2454: KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.
2456: Not Collective
2458: Input Parameters:
2459: + ksp - iterative context obtained from `KSPCreate()`
2460: . a - array to hold history
2461: . na - size of `a`
2462: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve
2464: Level: advanced
2466: Notes:
2467: If provided, `a` is NOT freed by PETSc so the user needs to keep track of it and destroy once the `KSP` object is destroyed.
2468: If `a` is `NULL` then space is allocated for the history. If `na` is `PETSC_DECIDE` or (deprecated) `PETSC_DEFAULT` then a default array of length 10,000 is allocated.
2470: If the array is not long enough then, once the number of iterations exceeds the array length, `KSPSolve()` stops recording the history.
2472: .seealso: [](ch_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2473: @*/
2474: PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscCount na, PetscBool reset)
2475: {
2476: PetscFunctionBegin;
2479: PetscCall(PetscFree(ksp->err_hist_alloc));
2480: if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2481: ksp->err_hist = a;
2482: ksp->err_hist_max = na;
2483: } else {
2484: if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
2485: else ksp->err_hist_max = 10000; /* like default ksp->max_it */
2486: PetscCall(PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc));
2487: ksp->err_hist = ksp->err_hist_alloc;
2488: }
2489: ksp->err_hist_len = 0;
2490: ksp->err_hist_reset = reset;
2491: PetscFunctionReturn(PETSC_SUCCESS);
2492: }
2494: /*@C
2495: KSPGetErrorHistory - Gets the array used to hold the error history and the number of entries it contains.
2497: Not Collective
2499: Input Parameter:
2500: . ksp - iterative context obtained from `KSPCreate()`
2502: Output Parameters:
2503: + a - pointer to array to hold history (or `NULL`)
2504: - na - number of used entries in a (or `NULL`)
2506: Level: advanced
2508: Notes:
2509: This array is borrowed and should not be freed by the caller.
2510: Can only be called after a `KSPSetErrorHistory()`, otherwise `a` and `na` are set to `NULL` and zero.
2512: Fortran Note:
2513: The Fortran version of this routine has a calling sequence
2514: .vb
2515: call KSPGetErrorHistory(KSP ksp, integer na, integer ierr)
2516: .ve
2517: Note that you must have passed a Fortran array into `KSPSetErrorHistory()`; access the error values
2518: from that array you provided. Only `na` (the number of
2519: error norms currently held) is set.
2521: .seealso: [](ch_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2522: @*/
2523: PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2524: {
2525: PetscFunctionBegin;
2527: if (a) *a = ksp->err_hist;
2528: if (na) PetscCall(PetscIntCast(ksp->err_hist_len, na));
2529: PetscFunctionReturn(PETSC_SUCCESS);
2530: }
2532: /*@
2533: KSPComputeConvergenceRate - Compute the convergence rate for the iteration <https://en.wikipedia.org/wiki/Coefficient_of_determination>
2535: Not Collective
2537: Input Parameter:
2538: . ksp - The `KSP`
2540: Output Parameters:
2541: + cr - The residual contraction rate
2542: . rRsq - The coefficient of determination, $R^2$, indicating the linearity of the data
2543: . ce - The error contraction rate
2544: - eRsq - The coefficient of determination, $R^2$, indicating the linearity of the data
2546: Level: advanced
2548: Note:
2549: Suppose that the residual is reduced geometrically, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
2550: the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$, where $y_i = \log r_i$ and $f$ is the fitted line.
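The rates are computed from the recorded histories, so enable them before the solve, for example (a sketch, with `ksp`, `b`, and `x` assumed to already exist):
.vb
  PetscReal cr, rRsq;

  KSPSetResidualHistory(ksp, NULL, PETSC_DECIDE, PETSC_TRUE);
  KSPSolve(ksp, b, x);
  KSPComputeConvergenceRate(ksp, &cr, &rRsq, NULL, NULL);
.ve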
2552: .seealso: [](ch_ksp), `KSP`, `KSPConvergedRateView()`
2553: @*/
2554: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2555: {
2556: PetscReal const *hist;
2557: PetscReal *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2558: PetscInt n, k;
2560: PetscFunctionBegin;
2561: if (cr || rRsq) {
2562: PetscCall(KSPGetResidualHistory(ksp, &hist, &n));
2563: if (!n) {
2564: if (cr) *cr = 0.0;
2565: if (rRsq) *rRsq = -1.0;
2566: } else {
2567: PetscCall(PetscMalloc2(n, &x, n, &y));
2568: for (k = 0; k < n; ++k) {
2569: x[k] = k;
2570: y[k] = PetscLogReal(hist[k]);
2571: mean += y[k];
2572: }
2573: mean /= n;
2574: PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2575: for (k = 0; k < n; ++k) {
2576: res += PetscSqr(y[k] - (slope * x[k] + intercept));
2577: var += PetscSqr(y[k] - mean);
2578: }
2579: PetscCall(PetscFree2(x, y));
2580: if (cr) *cr = PetscExpReal(slope);
2581: if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2582: }
2583: }
2584: if (ce || eRsq) {
2585: PetscCall(KSPGetErrorHistory(ksp, &hist, &n));
2586: if (!n) {
2587: if (ce) *ce = 0.0;
2588: if (eRsq) *eRsq = -1.0;
2589: } else {
2590: PetscCall(PetscMalloc2(n, &x, n, &y));
2591: for (k = 0; k < n; ++k) {
2592: x[k] = k;
2593: y[k] = PetscLogReal(hist[k]);
2594: mean += y[k];
2595: }
2596: mean /= n;
2597: PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2598: for (k = 0; k < n; ++k) {
2599: res += PetscSqr(y[k] - (slope * x[k] + intercept));
2600: var += PetscSqr(y[k] - mean);
2601: }
2602: PetscCall(PetscFree2(x, y));
2603: if (ce) *ce = PetscExpReal(slope);
2604: if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2605: }
2606: }
2607: PetscFunctionReturn(PETSC_SUCCESS);
2608: }
2610: /*@C
2611: KSPSetConvergenceTest - Sets the function to be used to determine convergence.
2613: Logically Collective
2615: Input Parameters:
2616: + ksp - iterative context obtained from `KSPCreate()`
2617: . converge - pointer to the function
2618: . ctx - context for private data for the convergence routine (may be `NULL`)
2619: - destroy - a routine for destroying the context (may be `NULL`)
2621: Calling sequence of `converge`:
2622: + ksp - iterative context obtained from `KSPCreate()`
2623: . it - iteration number
2624: . rnorm - (estimated) 2-norm of (preconditioned) residual
2625: . reason - the reason why it has converged or diverged
2626: - ctx - optional convergence context, as set by `KSPSetConvergenceTest()`
2628: Calling sequence of `destroy`:
2629: . ctx - the context
2631: Level: advanced
2633: Notes:
2634: Must be called after the `KSP` type has been set so put this after
2635: a call to `KSPSetType()`, or `KSPSetFromOptions()`.
2637: The default convergence test, `KSPConvergedDefault()`, aborts if the
2638: residual grows to more than 10000 times the initial residual.
2640: The default is a combination of relative and absolute tolerances.
2641: The residual value that is tested may be an approximation; routines
2642: that need exact values should compute them.
2644: In the default PETSc convergence test, the precise values of reason
2645: are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.
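A sketch of a custom test (the name `MyConvergenceTest` and the thresholds are illustrative):
.vb
  static PetscErrorCode MyConvergenceTest(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx)
  {
    PetscFunctionBeginUser;
    *reason = KSP_CONVERGED_ITERATING;
    if (rnorm < 1.e-9) *reason = KSP_CONVERGED_ATOL;  // fixed absolute threshold
    else if (it >= 500) *reason = KSP_DIVERGED_ITS;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  // in the calling code, after KSPSetType() or KSPSetFromOptions()
  KSPSetConvergenceTest(ksp, MyConvergenceTest, NULL, NULL);
.ve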
2647: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2648: @*/
2649: PetscErrorCode KSPSetConvergenceTest(KSP ksp, PetscErrorCode (*converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void *ctx, PetscErrorCode (*destroy)(void *ctx))
2650: {
2651: PetscFunctionBegin;
2653: if (ksp->convergeddestroy) PetscCall((*ksp->convergeddestroy)(ksp->cnvP));
2654: ksp->converged = converge;
2655: ksp->convergeddestroy = destroy;
2656: ksp->cnvP = (void *)ctx;
2657: PetscFunctionReturn(PETSC_SUCCESS);
2658: }
2660: /*@C
2661: KSPGetConvergenceTest - Gets the function to be used to determine convergence.
2663: Logically Collective
2665: Input Parameter:
2666: . ksp - iterative context obtained from `KSPCreate()`
2668: Output Parameters:
2669: + converge - pointer to convergence test function
2670: . ctx - context for private data for the convergence routine (may be `NULL`)
2671: - destroy - a routine for destroying the context (may be `NULL`)
2673: Calling sequence of `converge`:
2674: + ksp - iterative context obtained from `KSPCreate()`
2675: . it - iteration number
2676: . rnorm - (estimated) 2-norm of (preconditioned) residual
2677: . reason - the reason why it has converged or diverged
2678: - ctx - optional convergence context, as set by `KSPSetConvergenceTest()`
2680: Calling sequence of `destroy`:
2681: . ctx - the convergence test context
2683: Level: advanced
2685: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2686: @*/
2687: PetscErrorCode KSPGetConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void **ctx, PetscErrorCode (**destroy)(void *ctx))
2688: {
2689: PetscFunctionBegin;
2691: if (converge) *converge = ksp->converged;
2692: if (destroy) *destroy = ksp->convergeddestroy;
2693: if (ctx) *ctx = ksp->cnvP;
2694: PetscFunctionReturn(PETSC_SUCCESS);
2695: }
2697: /*@C
2698: KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context
2700: Logically Collective
2702: Input Parameter:
2703: . ksp - iterative context obtained from `KSPCreate()`
2705: Output Parameters:
2706: + converge - pointer to convergence test function
2707: . ctx - context for private data for the convergence routine
2708: - destroy - a routine for destroying the context
2710: Calling sequence of `converge`:
2711: + ksp - iterative context obtained from `KSPCreate()`
2712: . it - iteration number
2713: . rnorm - (estimated) 2-norm of (preconditioned) residual
2714: . reason - the reason why it has converged or diverged
2715: - ctx - optional convergence context, as set by `KSPSetConvergenceTest()`
2717: Calling sequence of `destroy`:
2718: . ctx - the convergence test context
2720: Level: advanced
2722: Note:
2723: This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`)
2724: and then calling `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed
2725: by `KSPSetConvergenceTest()`, the original context information
2726: would be destroyed and hence the transferred context would be invalid and trigger a crash on use.
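A sketch of transferring the test from one solver to another (`ksp1` and `ksp2` are illustrative names):
.vb
  PetscErrorCode (*test)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *);
  PetscErrorCode (*destroy)(void *);
  void *ctx;

  KSPGetAndClearConvergenceTest(ksp1, &test, &ctx, &destroy);
  KSPSetConvergenceTest(ksp2, test, ctx, destroy);  // ksp2 now owns the context
.ve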
2728: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2729: @*/
2730: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx), void **ctx, PetscErrorCode (**destroy)(void *ctx))
2731: {
2732: PetscFunctionBegin;
2734: *converge = ksp->converged;
2735: *destroy = ksp->convergeddestroy;
2736: *ctx = ksp->cnvP;
2737: ksp->converged = NULL;
2738: ksp->cnvP = NULL;
2739: ksp->convergeddestroy = NULL;
2740: PetscFunctionReturn(PETSC_SUCCESS);
2741: }
2743: /*@C
2744: KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.
2746: Not Collective
2748: Input Parameter:
2749: . ksp - iterative context obtained from `KSPCreate()`
2751: Output Parameter:
2752: . ctx - monitoring context
2754: Level: advanced
2756: .seealso: [](ch_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2757: @*/
2758: PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
2759: {
2760: PetscFunctionBegin;
2762: *(void **)ctx = ksp->cnvP;
2763: PetscFunctionReturn(PETSC_SUCCESS);
2764: }
2766: /*@
2767: KSPBuildSolution - Builds the approximate solution in a vector provided.
2769: Collective
2771: Input Parameter:
2772: . ksp - iterative context obtained from `KSPCreate()`
2774: Output Parameter:
2775: Provide at least one of
2776: + v - location to stash solution, optional, otherwise pass `NULL`
2777: - V - the solution is returned in this location. This vector is created internally. This vector should NOT be destroyed by the user with `VecDestroy()`.
2779: Level: developer
2781: Notes:
2782: This routine can be used in one of two ways
2783: .vb
2784: KSPBuildSolution(ksp,NULL,&V);
2785: or
2786: KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2787: .ve
2788: In the first case an internal vector is allocated to store the solution
2789: (the user cannot destroy this vector). In the second case the solution
2790: is generated in the vector that the user provides. Note that for certain
2791: methods, such as `KSPCG`, the second case requires a copy of the solution,
2792: while in the first case the call is essentially free since it simply
2793: returns the vector where the solution already is stored. For some methods
2794: like `KSPGMRES` during the solve this is a reasonably expensive operation and should only be
2795: used if truly needed.
2797: .seealso: [](ch_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2798: @*/
2799: PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
2800: {
2801: PetscFunctionBegin;
2803: PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
2804: if (!V) V = &v;
2805: if (ksp->reason != KSP_CONVERGED_ITERATING) {
2806: if (!v) PetscCall(KSPGetSolution(ksp, V));
2807: else PetscCall(VecCopy(ksp->vec_sol, v));
2808: } else {
2809: PetscUseTypeMethod(ksp, buildsolution, v, V);
2810: }
2811: PetscFunctionReturn(PETSC_SUCCESS);
2812: }
2814: /*@
2815: KSPBuildResidual - Builds the residual in a vector provided.
2817: Collective
2819: Input Parameter:
2820: . ksp - iterative context obtained from `KSPCreate()`
2822: Output Parameters:
2823: + v - optional location to stash residual. If `v` is not provided, then a location is generated.
2824: . t - work vector. If not provided then one is generated.
2825: - V - the residual
2827: Level: advanced
2829: Note:
2830: Regardless of whether or not `v` is provided, the residual is
2831: returned in `V`.
2833: .seealso: [](ch_ksp), `KSP`, `KSPBuildSolution()`
2834: @*/
2835: PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
2836: {
2837: PetscBool flag = PETSC_FALSE;
2838: Vec w = v, tt = t;
2840: PetscFunctionBegin;
2842: if (!w) PetscCall(VecDuplicate(ksp->vec_rhs, &w));
2843: if (!tt) {
2844: PetscCall(VecDuplicate(ksp->vec_sol, &tt));
2845: flag = PETSC_TRUE;
2846: }
2847: PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
2848: if (flag) PetscCall(VecDestroy(&tt));
2849: PetscFunctionReturn(PETSC_SUCCESS);
2850: }
2852: /*@
2853: KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2854: before solving. This actually CHANGES the matrix (and right-hand side).
2856: Logically Collective
2858: Input Parameters:
2859: + ksp - the `KSP` context
2860: - scale - `PETSC_TRUE` or `PETSC_FALSE`
2862: Options Database Keys:
2863: + -ksp_diagonal_scale - perform a diagonal scaling before the solve
2864: - -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve
2866: Level: advanced
2868: Notes:
2869: Scales the system to $D^{-1/2} A D^{-1/2} (D^{1/2} x) = D^{-1/2} b$,
2870: where $D_{ii} = 1/|A_{ii}|$ unless $A_{ii}$ is zero, in which case $D_{ii} = 1$.
2872: BE CAREFUL with this routine: it actually scales the matrix and right
2873: hand side that define the system. After the system is solved the matrix
2874: and right-hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`
2876: This should NOT be used within the `SNES` solves if you are using a line
2877: search.
2879: If you use this with the `PCType` `PCEISENSTAT` preconditioner then you can
2880: use the `PCEisenstatSetNoDiagonalScaling()` option, or `-pc_eisenstat_no_diagonal_scaling`
2881: to save some unneeded, redundant flops.
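A sketch that scales the system and restores it after the solve (with `ksp`, `b`, and `x` assumed to already exist):
.vb
  KSPSetDiagonalScale(ksp, PETSC_TRUE);
  KSPSetDiagonalScaleFix(ksp, PETSC_TRUE);  // scale the matrix and right-hand side back after the solve
  KSPSolve(ksp, b, x);
.ve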
2883: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2884: @*/
2885: PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
2886: {
2887: PetscFunctionBegin;
2890: ksp->dscale = scale;
2891: PetscFunctionReturn(PETSC_SUCCESS);
2892: }
2894: /*@
2895: KSPGetDiagonalScale - Checks if `KSP` solver scales the matrix and right-hand side, that is if `KSPSetDiagonalScale()` has been called
2897: Not Collective
2899: Input Parameter:
2900: . ksp - the `KSP` context
2902: Output Parameter:
2903: . scale - `PETSC_TRUE` or `PETSC_FALSE`
2905: Level: intermediate
2907: .seealso: [](ch_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`
2908: @*/
2909: PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
2910: {
2911: PetscFunctionBegin;
2913: PetscAssertPointer(scale, 2);
2914: *scale = ksp->dscale;
2915: PetscFunctionReturn(PETSC_SUCCESS);
2916: }
2918: /*@
2919: KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.
2921: Logically Collective
2923: Input Parameters:
2924: + ksp - the `KSP` context
2925: - fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2926: rescale (default)
2928: Level: intermediate
2930: Notes:
2931: Must be called after `KSPSetDiagonalScale()`
2933: Using this will slow things down, because it rescales the matrix before and
2934: after each linear solve. This is intended mainly for testing to allow one
2935: to easily get back the original system to make sure the solution computed is
2936: accurate enough.
2938: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2939: @*/
2940: PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
2941: {
2942: PetscFunctionBegin;
2945: ksp->dscalefix = fix;
2946: PetscFunctionReturn(PETSC_SUCCESS);
2947: }
2949: /*@
2950: KSPGetDiagonalScaleFix - Determines if `KSP` diagonally scales the system back after solving. That is `KSPSetDiagonalScaleFix()` has been called
2952: Not Collective
2954: Input Parameter:
2955: . ksp - the `KSP` context
2957: Output Parameter:
2958: . fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2959: rescale (default)
2961: Level: intermediate
2963: .seealso: [](ch_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2964: @*/
2965: PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
2966: {
2967: PetscFunctionBegin;
2969: PetscAssertPointer(fix, 2);
2970: *fix = ksp->dscalefix;
2971: PetscFunctionReturn(PETSC_SUCCESS);
2972: }
2974: /*@C
2975: KSPSetComputeOperators - set routine to compute the linear operators
2977: Logically Collective
2979: Input Parameters:
2980: + ksp - the `KSP` context
2981: . func - function to compute the operators, see `KSPComputeOperatorsFn` for the calling sequence
2982: - ctx - optional context
2984: Level: beginner
2986: Notes:
2987:   `func()` will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
2988:   unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()` is called. This allows the same system to be solved several times
2989:   with different right-hand side functions, but it is a confusing API since one might expect `func()` to be called before each `KSPSolve()`
2991: To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`
2993: Developer Note:
2994:   Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes it clear when new matrices are computed, without requiring a call to this
2995:   routine to indicate when the new matrix should be computed.
2997: .seealso: [](ch_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`, `KSPComputeOperatorsFn`
2998: @*/
2999: PetscErrorCode KSPSetComputeOperators(KSP ksp, KSPComputeOperatorsFn *func, void *ctx)
3000: {
3001: DM dm;
3003: PetscFunctionBegin;
3005: PetscCall(KSPGetDM(ksp, &dm));
3006: PetscCall(DMKSPSetComputeOperators(dm, func, ctx));
3007: if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX;
3008: PetscFunctionReturn(PETSC_SUCCESS);
3009: }
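/*
   An illustrative sketch of a user-side `KSPComputeOperatorsFn` that could be registered with the
   routine above: the function name and the 1D Dirichlet Laplacian stencil are hypothetical, and it
   assumes the common case where the operator `A` and the preconditioning matrix `P` are the same `Mat`.
*/
static PetscErrorCode MyComputeOperators_Example(KSP ksp, Mat A, Mat P, void *ctx)
{
  PetscInt i, rstart, rend, N;

  PetscFunctionBeginUser;
  PetscCall(MatGetSize(P, &N, NULL));
  PetscCall(MatGetOwnershipRange(P, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    /* tridiagonal (2, -1, -1) row; boundary rows simply omit the missing neighbor */
    PetscCall(MatSetValue(P, i, i, 2.0, INSERT_VALUES));
    if (i > 0) PetscCall(MatSetValue(P, i, i - 1, -1.0, INSERT_VALUES));
    if (i < N - 1) PetscCall(MatSetValue(P, i, i + 1, -1.0, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(P, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Registered with KSPSetComputeOperators(ksp, MyComputeOperators_Example, NULL); per the note above,
   it is invoked at the very next KSPSolve() only. */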
3011: /*@C
3012: KSPSetComputeRHS - set routine to compute the right-hand side of the linear system
3014: Logically Collective
3016: Input Parameters:
3017: + ksp - the `KSP` context
3018: . func - function to compute the right-hand side, see `KSPComputeRHSFn` for the calling sequence
3019: - ctx - optional context
3021: Level: beginner
3023: Note:
3024:   The routine you provide will be called EACH time you call `KSPSolve()` to prepare the new right-hand side for that solve
3026: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`, `KSPComputeRHSFn`
3027: @*/
3028: PetscErrorCode KSPSetComputeRHS(KSP ksp, KSPComputeRHSFn *func, void *ctx)
3029: {
3030: DM dm;
3032: PetscFunctionBegin;
3034: PetscCall(KSPGetDM(ksp, &dm));
3035: PetscCall(DMKSPSetComputeRHS(dm, func, ctx));
3036: PetscFunctionReturn(PETSC_SUCCESS);
3037: }
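/*
   An illustrative sketch of a user-side `KSPComputeRHSFn` that could be registered with the routine
   above: the function name and the constant forcing value are hypothetical.
*/
static PetscErrorCode MyComputeRHS_Example(KSP ksp, Vec b, void *ctx)
{
  PetscFunctionBeginUser;
  PetscCall(VecSet(b, 1.0)); /* any problem-specific assembly of b goes here */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Registered with KSPSetComputeRHS(ksp, MyComputeRHS_Example, NULL); it is invoked before every
   KSPSolve() to (re)build b. */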
3039: /*@C
3040: KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system
3042: Logically Collective
3044: Input Parameters:
3045: + ksp - the `KSP` context
3046: . func - function to compute the initial guess, see `KSPComputeInitialGuessFn` for calling sequence
3047: - ctx - optional context
3049: Level: beginner
3051: Note:
3052: This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`, otherwise
3053: call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver
3055: .seealso: [](ch_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`,
3056: `KSPComputeInitialGuessFn`
3057: @*/
3058: PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, KSPComputeInitialGuessFn *func, void *ctx)
3059: {
3060: DM dm;
3062: PetscFunctionBegin;
3064: PetscCall(KSPGetDM(ksp, &dm));
3065: PetscCall(DMKSPSetComputeInitialGuess(dm, func, ctx));
3066: PetscFunctionReturn(PETSC_SUCCESS);
3067: }
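/*
   An illustrative sketch of a user-side `KSPComputeInitialGuessFn` that could be registered with the
   routine above: the function name and the constant starting value are hypothetical.
*/
static PetscErrorCode MyComputeInitialGuess_Example(KSP ksp, Vec x, void *ctx)
{
  PetscFunctionBeginUser;
  PetscCall(VecSet(x, 1.0)); /* any problem-specific initial guess goes here */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Registered with KSPSetComputeInitialGuess(ksp, MyComputeInitialGuess_Example, NULL), typically
   together with KSPSetComputeOperators() and KSPSetComputeRHS(). */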
3069: /*@
3070:   KSPSetUseExplicitTranspose - Determines whether the explicit transpose of the operator is formed and used in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
3071:   be worth forming the transpose explicitly since the transpose solve is then much more efficient.
3073: Logically Collective
3075:   Input Parameters:
3076: + ksp - the `KSP` context
3079: - flg - `PETSC_TRUE` to explicitly form and use the transpose of the system in `KSPSolveTranspose()`, `PETSC_FALSE` to not form it (default)
3081: Level: advanced
3083: .seealso: [](ch_ksp), `KSPSolveTranspose()`, `KSP`
3084: @*/
3085: PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
3086: {
3087: PetscFunctionBegin;
3090: ksp->transpose.use_explicittranspose = flg;
3091: PetscFunctionReturn(PETSC_SUCCESS);
3092: }
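/*
   An illustrative usage sketch of the routine above: the helper name and the vectors are
   hypothetical, assuming a `KSP` whose operators are already set. It requests that the transpose
   solve use an explicitly formed transpose (potentially faster on GPUs, at the cost of forming
   and storing it) and then performs the solve.
*/
static PetscErrorCode TransposeSolveExplicit_Example(KSP ksp, Vec b, Vec x)
{
  PetscFunctionBeginUser;
  PetscCall(KSPSetUseExplicitTranspose(ksp, PETSC_TRUE));
  PetscCall(KSPSolveTranspose(ksp, b, x)); /* solves A^T x = b using the explicitly formed transpose */
  PetscFunctionReturn(PETSC_SUCCESS);
}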