Actual source code: fieldsplit.c
1: #include <petsc/private/pcimpl.h>
2: #include <petsc/private/kspimpl.h>
3: #include <petscdm.h>
4: #include <petscdevice.h>
5: #if PetscDefined(HAVE_CUDA)
6: #include <petscdevice_cuda.h>
7: #endif
8: #if PetscDefined(HAVE_HIP)
9: #include <petscdevice_hip.h>
10: #endif
12: const char *const PCFieldSplitSchurPreTypes[] = {"SELF", "SELFP", "A11", "USER", "FULL", "PCFieldSplitSchurPreType", "PC_FIELDSPLIT_SCHUR_PRE_", NULL};
13: const char *const PCFieldSplitSchurFactTypes[] = {"DIAG", "LOWER", "UPPER", "FULL", "PCFieldSplitSchurFactType", "PC_FIELDSPLIT_SCHUR_FACT_", NULL};
15: PetscLogEvent KSP_Solve_FS_0, KSP_Solve_FS_1, KSP_Solve_FS_S, KSP_Solve_FS_U, KSP_Solve_FS_L, KSP_Solve_FS_2, KSP_Solve_FS_3, KSP_Solve_FS_4;
17: typedef struct _PC_FieldSplitLink *PC_FieldSplitLink;
18: struct _PC_FieldSplitLink {
19: KSP ksp;
20: Vec x, y, z;
21: char *splitname;
22: PetscInt nfields;
23: PetscInt *fields, *fields_col;
24: VecScatter sctx;
25: IS is, is_col;
26: PC_FieldSplitLink next, previous;
27: PetscLogEvent event;
29: /* Used only when setting coordinates with PCSetCoordinates */
30: PetscInt dim;
31: PetscInt ndofs;
32: PetscReal *coords;
33: };
35: typedef struct {
36: PCCompositeType type;
37: PetscBool defaultsplit; /* Flag for a system with a set of 'k' scalar fields with the same layout (and bs = k) */
38: PetscBool splitdefined; /* Flag is set after the splits have been defined, to prevent more splits from being added */
39: PetscInt bs; /* Block size for IS and Mat structures */
40: PetscInt nsplits; /* Number of field divisions defined */
41: Vec *x, *y, w1, w2;
42: Mat *mat; /* The diagonal block for each split */
43: Mat *pmat; /* The preconditioning diagonal block for each split */
44: Mat *Afield; /* The rows of the matrix associated with each split */
45: PetscBool issetup;
47: /* Only used when Schur complement preconditioning is used */
48: Mat B; /* The (0,1) block */
49: Mat C; /* The (1,0) block */
50: Mat schur; /* The Schur complement S = A11 - A10 A00^{-1} A01; its inner KSP, kspinner, is H_1 in [El08] */
51: Mat schurp; /* Assembled approximation to S built by MatSchurComplement to be used as a matrix for constructing the preconditioner when solving with S */
52: Mat schur_user; /* User-provided matrix for constructing the preconditioner for the Schur complement */
53: PCFieldSplitSchurPreType schurpre; /* Determines which matrix is used for the Schur complement */
54: PCFieldSplitSchurFactType schurfactorization;
55: KSP kspschur; /* The solver for S */
56: KSP kspupper; /* The solver for A in the upper diagonal part of the factorization (H_2 in [El08]) */
57: PetscScalar schurscale; /* Scaling factor for the Schur complement solution with DIAG factorization */
59: /* Only used when Golub-Kahan bidiagonalization preconditioning is used */
60: Mat H; /* The modified matrix H = A00 + nu*A01*A01' */
61: PetscReal gkbtol; /* Stopping tolerance for lower bound estimate */
62: PetscInt gkbdelay; /* The delay window for the stopping criterion */
63: PetscReal gkbnu; /* Parameter for augmented Lagrangian H = A + nu*A01*A01' */
64: PetscInt gkbmaxit; /* Maximum number of iterations for outer loop */
65: PetscBool gkbmonitor; /* Monitor for gkb iterations and the lower bound error */
66: PetscViewer gkbviewer; /* Viewer context for gkbmonitor */
67: Vec u, v, d, Hu; /* Work vectors for the GKB algorithm */
68: PetscScalar *vecz; /* Contains intermediate values, eg for lower bound */
70: PC_FieldSplitLink head;
71: PetscBool isrestrict; /* indicates PCFieldSplitRestrictIS() has been last called on this object, hack */
72: PetscBool suboptionsset; /* Indicates that the KSPSetFromOptions() has been called on the sub-KSPs */
73: PetscBool dm_splits; /* Whether to use DMCreateFieldDecomposition() whenever possible */
74: PetscBool diag_use_amat; /* Whether to extract diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
75: PetscBool offdiag_use_amat; /* Whether to extract off-diagonal matrix blocks from Amat, rather than Pmat (weaker than -pc_use_amat) */
76: PetscBool detect; /* Whether to form 2-way split by finding zero diagonal entries */
77: PetscBool coordinates_set; /* Whether PCSetCoordinates has been called */
78: } PC_FieldSplit;
80: /*
81: Note:
82: There is no particular reason that pmat, x, and y are stored as arrays in PC_FieldSplit instead of
83: inside PC_FieldSplitLink; it is just historical. If you want to be able to add new fields after the
84: PC has already been used, you could change this.
85: */
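/*
   Example (editor's sketch, not part of the source): typical application code that populates the
   linked list of splits above. The index sets isu and isp are assumed to be created by the caller
   (e.g. with ISCreateStride() or ISCreateGeneral()).

     KSP ksp;
     PC  pc;
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCFIELDSPLIT));
     PetscCall(PCFieldSplitSetIS(pc, "u", isu)); // each call appends one PC_FieldSplitLink
     PetscCall(PCFieldSplitSetIS(pc, "p", isp));
*/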
87: /* This helper is so that setting a user-provided matrix is orthogonal to choosing to use it. This way the
88: * application-provided FormJacobian can provide this matrix without interfering with the user's (command-line) choices. */
89: static Mat FieldSplitSchurPre(PC_FieldSplit *jac)
90: {
91: switch (jac->schurpre) {
92: case PC_FIELDSPLIT_SCHUR_PRE_SELF:
93: return jac->schur;
94: case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
95: return jac->schurp;
96: case PC_FIELDSPLIT_SCHUR_PRE_A11:
97: return jac->pmat[1];
98: case PC_FIELDSPLIT_SCHUR_PRE_FULL: /* We calculate this and store it in schur_user */
99: case PC_FIELDSPLIT_SCHUR_PRE_USER: /* Use a user-provided matrix if it is given, otherwise diagonal block */
100: default:
101: return jac->schur_user ? jac->schur_user : jac->pmat[1];
102: }
103: }
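/*
   Example (editor's sketch): since FieldSplitSchurPre() only selects which matrix to use, an
   application can always attach a user matrix and leave the final choice to the options database.
   Suser below is an assumed application-built approximation of the Schur complement:

     PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, Suser));

   Running with -pc_fieldsplit_schur_precondition a11 then ignores Suser, while
   -pc_fieldsplit_schur_precondition user selects it.
*/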
105: #include <petscdraw.h>
106: static PetscErrorCode PCView_FieldSplit(PC pc, PetscViewer viewer)
107: {
108: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
109: PetscBool isascii, isdraw;
110: PetscInt i, j;
111: PC_FieldSplitLink ilink = jac->head;
113: PetscFunctionBegin;
114: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
115: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
116: if (isascii) {
117: if (jac->bs > 0) {
118: PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
119: } else {
120: PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
121: }
122: if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
123: if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
124: if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));
125: PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for each split is in the following KSP objects:\n"));
126: for (i = 0; i < jac->nsplits; i++) {
127: if (ilink->fields) {
128: PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
129: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
130: for (j = 0; j < ilink->nfields; j++) {
131: if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
132: PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
133: }
134: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
135: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
136: } else {
137: PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
138: }
139: PetscCall(KSPView(ilink->ksp, viewer));
140: ilink = ilink->next;
141: }
142: }
144: if (isdraw) {
145: PetscDraw draw;
146: PetscReal x, y, w, wd;
148: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
149: PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
150: w = 2 * PetscMin(1.0 - x, x);
151: wd = w / (jac->nsplits + 1);
152: x = x - wd * (jac->nsplits - 1) / 2.0;
153: for (i = 0; i < jac->nsplits; i++) {
154: PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
155: PetscCall(KSPView(ilink->ksp, viewer));
156: PetscCall(PetscDrawPopCurrentPoint(draw));
157: x += wd;
158: ilink = ilink->next;
159: }
160: }
161: PetscFunctionReturn(PETSC_SUCCESS);
162: }
164: static PetscErrorCode PCView_FieldSplit_Schur(PC pc, PetscViewer viewer)
165: {
166: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
167: PetscBool isascii, isdraw;
168: PetscInt i, j;
169: PC_FieldSplitLink ilink = jac->head;
170: MatSchurComplementAinvType atype;
172: PetscFunctionBegin;
173: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
174: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
175: if (isascii) {
176: if (jac->bs > 0) {
177: PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, blocksize = %" PetscInt_FMT ", factorization %s\n", jac->bs, PCFieldSplitSchurFactTypes[jac->schurfactorization]));
178: } else {
179: PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with Schur preconditioner, factorization %s\n", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
180: }
181: if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
182: switch (jac->schurpre) {
183: case PC_FIELDSPLIT_SCHUR_PRE_SELF:
184: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from S itself\n"));
185: break;
186: case PC_FIELDSPLIT_SCHUR_PRE_SELFP:
187: if (jac->schur) {
188: PetscCall(MatSchurComplementGetAinvType(jac->schur, &atype));
189: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from Sp, an assembled approximation to S, which uses A00's %sinverse\n", atype == MAT_SCHUR_COMPLEMENT_AINV_DIAG ? "diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_BLOCK_DIAG ? "block diagonal's " : (atype == MAT_SCHUR_COMPLEMENT_AINV_FULL ? "full " : "lumped diagonal's "))));
190: }
191: break;
192: case PC_FIELDSPLIT_SCHUR_PRE_A11:
193: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
194: break;
195: case PC_FIELDSPLIT_SCHUR_PRE_FULL:
196: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from the exact Schur complement\n"));
197: break;
198: case PC_FIELDSPLIT_SCHUR_PRE_USER:
199: if (jac->schur_user) {
200: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from user provided matrix\n"));
201: } else {
202: PetscCall(PetscViewerASCIIPrintf(viewer, " Preconditioner for the Schur complement formed from A11\n"));
203: }
204: break;
205: default:
206: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Invalid Schur preconditioning type: %d", jac->schurpre);
207: }
208: PetscCall(PetscViewerASCIIPrintf(viewer, " Split info:\n"));
209: PetscCall(PetscViewerASCIIPushTab(viewer));
210: for (i = 0; i < jac->nsplits; i++) {
211: if (ilink->fields) {
212: PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Fields ", i));
213: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
214: for (j = 0; j < ilink->nfields; j++) {
215: if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
216: PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
217: }
218: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
219: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
220: } else {
221: PetscCall(PetscViewerASCIIPrintf(viewer, "Split number %" PetscInt_FMT " Defined by IS\n", i));
222: }
223: ilink = ilink->next;
224: }
225: PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for A00 block\n"));
226: PetscCall(PetscViewerASCIIPushTab(viewer));
227: if (jac->head) PetscCall(KSPView(jac->head->ksp, viewer));
228: else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
229: PetscCall(PetscViewerASCIIPopTab(viewer));
230: if (jac->head && jac->kspupper != jac->head->ksp) {
231: PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for upper A00 in upper triangular factor\n"));
232: PetscCall(PetscViewerASCIIPushTab(viewer));
233: if (jac->kspupper) PetscCall(KSPView(jac->kspupper, viewer));
234: else PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
235: PetscCall(PetscViewerASCIIPopTab(viewer));
236: }
237: PetscCall(PetscViewerASCIIPrintf(viewer, "KSP solver for S = A11 - A10 inv(A00) A01\n"));
238: PetscCall(PetscViewerASCIIPushTab(viewer));
239: if (jac->kspschur) {
240: PetscCall(KSPView(jac->kspschur, viewer));
241: } else {
242: PetscCall(PetscViewerASCIIPrintf(viewer, " not yet available\n"));
243: }
244: PetscCall(PetscViewerASCIIPopTab(viewer));
245: PetscCall(PetscViewerASCIIPopTab(viewer));
246: } else if (isdraw && jac->head) {
247: PetscDraw draw;
248: PetscReal x, y, w, wd, h;
249: PetscInt cnt = 2;
250: char str[32];
252: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
253: PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
254: if (jac->kspupper != jac->head->ksp) cnt++;
255: w = 2 * PetscMin(1.0 - x, x);
256: wd = w / (cnt + 1);
258: PetscCall(PetscSNPrintf(str, 32, "Schur fact. %s", PCFieldSplitSchurFactTypes[jac->schurfactorization]));
259: PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
260: y -= h;
261: if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_USER && !jac->schur_user) {
262: PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[PC_FIELDSPLIT_SCHUR_PRE_A11]));
263: } else {
264: PetscCall(PetscSNPrintf(str, 32, "Prec. for Schur from %s", PCFieldSplitSchurPreTypes[jac->schurpre]));
265: }
266: PetscCall(PetscDrawStringBoxed(draw, x + wd * (cnt - 1) / 2.0, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h));
267: y -= h;
268: x = x - wd * (cnt - 1) / 2.0;
270: PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
271: PetscCall(KSPView(jac->head->ksp, viewer));
272: PetscCall(PetscDrawPopCurrentPoint(draw));
273: if (jac->kspupper != jac->head->ksp) {
274: x += wd;
275: PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
276: PetscCall(KSPView(jac->kspupper, viewer));
277: PetscCall(PetscDrawPopCurrentPoint(draw));
278: }
279: x += wd;
280: PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
281: PetscCall(KSPView(jac->kspschur, viewer));
282: PetscCall(PetscDrawPopCurrentPoint(draw));
283: }
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: static PetscErrorCode PCView_FieldSplit_GKB(PC pc, PetscViewer viewer)
288: {
289: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
290: PetscBool isascii, isdraw;
291: PetscInt i, j;
292: PC_FieldSplitLink ilink = jac->head;
294: PetscFunctionBegin;
295: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
296: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
297: if (isascii) {
298: if (jac->bs > 0) {
299: PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT ", blocksize = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits, jac->bs));
300: } else {
301: PetscCall(PetscViewerASCIIPrintf(viewer, " FieldSplit with %s composition: total splits = %" PetscInt_FMT "\n", PCCompositeTypes[jac->type], jac->nsplits));
302: }
303: if (pc->useAmat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for blocks\n"));
304: if (jac->diag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for diagonal blocks\n"));
305: if (jac->offdiag_use_amat) PetscCall(PetscViewerASCIIPrintf(viewer, " using Amat (not Pmat) as operator for off-diagonal blocks\n"));
307: PetscCall(PetscViewerASCIIPrintf(viewer, " Stopping tolerance=%.1e, delay in error estimate=%" PetscInt_FMT ", maximum iterations=%" PetscInt_FMT "\n", (double)jac->gkbtol, jac->gkbdelay, jac->gkbmaxit));
308: PetscCall(PetscViewerASCIIPrintf(viewer, " Solver info for H = A00 + nu*A01*A01' matrix:\n"));
309: PetscCall(PetscViewerASCIIPushTab(viewer));
311: if (ilink->fields) {
312: PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Fields "));
313: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
314: for (j = 0; j < ilink->nfields; j++) {
315: if (j > 0) PetscCall(PetscViewerASCIIPrintf(viewer, ","));
316: PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, ilink->fields[j]));
317: }
318: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
319: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
320: } else {
321: PetscCall(PetscViewerASCIIPrintf(viewer, "Split number 0 Defined by IS\n"));
322: }
323: PetscCall(KSPView(ilink->ksp, viewer));
325: PetscCall(PetscViewerASCIIPopTab(viewer));
326: }
328: if (isdraw) {
329: PetscDraw draw;
330: PetscReal x, y, w, wd;
332: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
333: PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
334: w = 2 * PetscMin(1.0 - x, x);
335: wd = w / (jac->nsplits + 1);
336: x = x - wd * (jac->nsplits - 1) / 2.0;
337: for (i = 0; i < jac->nsplits; i++) {
338: PetscCall(PetscDrawPushCurrentPoint(draw, x, y));
339: PetscCall(KSPView(ilink->ksp, viewer));
340: PetscCall(PetscDrawPopCurrentPoint(draw));
341: x += wd;
342: ilink = ilink->next;
343: }
344: }
345: PetscFunctionReturn(PETSC_SUCCESS);
346: }
348: /* Precondition: jac->bs is set to a meaningful value, or the matrix is of type MATNEST */
349: static PetscErrorCode PCFieldSplitSetRuntimeSplits_Private(PC pc)
350: {
351: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
352: PetscInt bs, i, nfields, *ifields, nfields_col, *ifields_col;
353: PetscBool flg, flg_col, mnest;
354: char optionname[128], splitname[8], optionname_col[128];
356: PetscFunctionBegin;
357: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &mnest));
358: if (mnest) {
359: PetscCall(MatNestGetSize(pc->pmat, &bs, NULL));
360: } else {
361: bs = jac->bs;
362: }
363: PetscCall(PetscMalloc2(bs, &ifields, bs, &ifields_col));
364: for (i = 0, flg = PETSC_TRUE;; i++) {
365: PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
366: PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
367: PetscCall(PetscSNPrintf(optionname_col, sizeof(optionname_col), "-pc_fieldsplit_%" PetscInt_FMT "_fields_col", i));
368: nfields = bs;
369: nfields_col = bs;
370: PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
371: PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname_col, ifields_col, &nfields_col, &flg_col));
372: if (!flg) break;
373: else if (flg && !flg_col) {
374: PetscCheck(nfields, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
375: PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields));
376: } else {
377: PetscCheck(nfields && nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot list zero fields");
378: PetscCheck(nfields == nfields_col, PETSC_COMM_SELF, PETSC_ERR_USER, "Number of row and column fields must match");
379: PetscCall(PCFieldSplitSetFields(pc, splitname, nfields, ifields, ifields_col));
380: }
381: }
382: if (i > 0) {
383: /* Makes command-line setting of splits take precedence over setting them in code.
384: Otherwise subsequent calls to PCFieldSplitSetIS() or PCFieldSplitSetFields() would
385: create new splits, which would probably not be what the user wanted. */
386: jac->splitdefined = PETSC_TRUE;
387: }
388: PetscCall(PetscFree2(ifields, ifields_col));
389: PetscFunctionReturn(PETSC_SUCCESS);
390: }
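/*
   Example (editor's sketch): the loop above parses per-split field lists from the command line.
   For a problem with block size 3, e.g. velocity components 0,1 and pressure 2,

     -pc_fieldsplit_0_fields 0,1 -pc_fieldsplit_1_fields 2

   defines split "0" from fields 0 and 1 and split "1" from field 2; the optional
   -pc_fieldsplit_<n>_fields_col variant selects different column fields for nonsymmetric splits.
*/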
392: static PetscErrorCode PCFieldSplitSetDefaults(PC pc)
393: {
394: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
395: PC_FieldSplitLink ilink = jac->head;
396: PetscBool fieldsplit_default = PETSC_FALSE, coupling = PETSC_FALSE;
397: PetscInt i;
399: PetscFunctionBegin;
400: /*
401: Kinda messy, but at least this now uses DMCreateFieldDecomposition().
402: Should probably be rewritten.
403: */
404: if (!ilink) {
405: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_detect_coupling", &coupling, NULL));
406: if (pc->dm && jac->dm_splits && !jac->detect && !coupling) {
407: PetscInt numFields, f, i, j;
408: char **fieldNames;
409: IS *fields;
410: DM *dms;
411: DM subdm[128];
412: PetscBool flg;
414: PetscCall(DMCreateFieldDecomposition(pc->dm, &numFields, &fieldNames, &fields, &dms));
415: /* Allow the user to prescribe the splits */
416: for (i = 0, flg = PETSC_TRUE;; i++) {
417: PetscInt ifields[128];
418: IS compField;
419: char optionname[128], splitname[8];
420: PetscInt nfields = numFields;
422: PetscCall(PetscSNPrintf(optionname, sizeof(optionname), "-pc_fieldsplit_%" PetscInt_FMT "_fields", i));
423: PetscCall(PetscOptionsGetIntArray(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, optionname, ifields, &nfields, &flg));
424: if (!flg) break;
425: PetscCheck(numFields <= 128, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot currently support %" PetscInt_FMT " > 128 fields", numFields);
426: PetscCall(DMCreateSubDM(pc->dm, nfields, ifields, &compField, &subdm[i]));
427: if (nfields == 1) {
428: PetscCall(PCFieldSplitSetIS(pc, fieldNames[ifields[0]], compField));
429: } else {
430: PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
431: PetscCall(PCFieldSplitSetIS(pc, splitname, compField));
432: }
433: PetscCall(ISDestroy(&compField));
434: for (j = 0; j < nfields; ++j) {
435: f = ifields[j];
436: PetscCall(PetscFree(fieldNames[f]));
437: PetscCall(ISDestroy(&fields[f]));
438: }
439: }
440: if (i == 0) {
441: for (f = 0; f < numFields; ++f) {
442: PetscCall(PCFieldSplitSetIS(pc, fieldNames[f], fields[f]));
443: PetscCall(PetscFree(fieldNames[f]));
444: PetscCall(ISDestroy(&fields[f]));
445: }
446: } else {
447: for (j = 0; j < numFields; j++) PetscCall(DMDestroy(dms + j));
448: PetscCall(PetscFree(dms));
449: PetscCall(PetscMalloc1(i, &dms));
450: for (j = 0; j < i; ++j) dms[j] = subdm[j];
451: }
452: PetscCall(PetscFree(fieldNames));
453: PetscCall(PetscFree(fields));
454: if (dms) {
455: PetscCall(PetscInfo(pc, "Setting up physics based fieldsplit preconditioner using the embedded DM\n"));
456: for (ilink = jac->head, i = 0; ilink; ilink = ilink->next, ++i) {
457: const char *prefix;
458: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ilink->ksp, &prefix));
459: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dms[i], prefix));
460: PetscCall(KSPSetDM(ilink->ksp, dms[i]));
461: PetscCall(KSPSetDMActive(ilink->ksp, PETSC_FALSE));
462: PetscCall(PetscObjectIncrementTabLevel((PetscObject)dms[i], (PetscObject)ilink->ksp, 0));
463: PetscCall(DMDestroy(&dms[i]));
464: }
465: PetscCall(PetscFree(dms));
466: }
467: } else {
468: if (jac->bs <= 0) {
469: if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &jac->bs));
470: else jac->bs = 1;
471: }
473: if (jac->detect) {
474: IS zerodiags, rest;
475: PetscInt nmin, nmax;
477: PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
478: if (jac->diag_use_amat) {
479: PetscCall(MatFindZeroDiagonals(pc->mat, &zerodiags));
480: } else {
481: PetscCall(MatFindZeroDiagonals(pc->pmat, &zerodiags));
482: }
483: PetscCall(ISComplement(zerodiags, nmin, nmax, &rest));
484: PetscCall(PCFieldSplitSetIS(pc, "0", rest));
485: PetscCall(PCFieldSplitSetIS(pc, "1", zerodiags));
486: PetscCall(ISDestroy(&zerodiags));
487: PetscCall(ISDestroy(&rest));
488: } else if (coupling) {
489: IS coupling, rest;
490: PetscInt nmin, nmax;
492: PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
493: if (jac->offdiag_use_amat) {
494: PetscCall(MatFindOffBlockDiagonalEntries(pc->mat, &coupling));
495: } else {
496: PetscCall(MatFindOffBlockDiagonalEntries(pc->pmat, &coupling));
497: }
498: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc->mat), nmax - nmin, nmin, 1, &rest));
499: PetscCall(ISSetIdentity(rest));
500: PetscCall(PCFieldSplitSetIS(pc, "0", rest));
501: PetscCall(PCFieldSplitSetIS(pc, "1", coupling));
502: PetscCall(ISDestroy(&coupling));
503: PetscCall(ISDestroy(&rest));
504: } else {
505: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_default", &fieldsplit_default, NULL));
506: if (!fieldsplit_default) {
507: /* Allow user to set fields from command line, if bs was known at the time of PCSetFromOptions_FieldSplit()
508: then it is set there. This is not ideal because we should only have options set in XXSetFromOptions(). */
509: PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
510: if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
511: }
512: if ((fieldsplit_default || !jac->splitdefined) && !jac->isrestrict) {
513: Mat M = pc->pmat;
514: PetscBool isnest;
515: PetscInt nf;
517: PetscCall(PetscInfo(pc, "Using default splitting of fields\n"));
518: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &isnest));
519: if (!isnest) {
520: M = pc->mat;
521: PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATNEST, &isnest));
522: }
523: if (!isnest) nf = jac->bs;
524: else PetscCall(MatNestGetSize(M, &nf, NULL));
525: for (i = 0; i < nf; i++) {
526: char splitname[8];
528: PetscCall(PetscSNPrintf(splitname, sizeof(splitname), "%" PetscInt_FMT, i));
529: PetscCall(PCFieldSplitSetFields(pc, splitname, 1, &i, &i));
530: }
531: jac->defaultsplit = PETSC_TRUE;
532: }
533: }
534: }
535: } else if (jac->nsplits == 1) {
536: IS is2;
537: PetscInt nmin, nmax;
539: PetscCheck(ilink->is, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Must provide at least two sets of fields to PCFieldSplit()");
540: PetscCall(MatGetOwnershipRange(pc->mat, &nmin, &nmax));
541: PetscCall(ISComplement(ilink->is, nmin, nmax, &is2));
542: PetscCall(PCFieldSplitSetIS(pc, "1", is2));
543: PetscCall(ISDestroy(&is2));
544: }
546: PetscCheck(jac->nsplits >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unhandled case, must have at least two fields, not %" PetscInt_FMT, jac->nsplits);
547: PetscFunctionReturn(PETSC_SUCCESS);
548: }
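/*
   Example (editor's sketch): the default-splitting logic above is typically driven by options such as

     -pc_fieldsplit_detect_coupling   (2-way split: block-diagonal rows vs. off-block-diagonal coupling)
     -pc_fieldsplit_default           (one split per field, even if splits were already set in code)

   With no options and no DM decomposition, one split per field is created from the block size of
   pc->pmat, or one split per nested block when the matrix is MATNEST.
*/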
550: static PetscErrorCode MatGolubKahanComputeExplicitOperator(Mat A, Mat B, Mat C, Mat *H, PetscReal gkbnu)
551: {
552: Mat BT, T;
553: PetscReal nrmT, nrmB;
555: PetscFunctionBegin;
556: PetscCall(MatHermitianTranspose(C, MAT_INITIAL_MATRIX, &T)); /* Test if augmented matrix is symmetric */
557: PetscCall(MatAXPY(T, -1.0, B, DIFFERENT_NONZERO_PATTERN));
558: PetscCall(MatNorm(T, NORM_1, &nrmT));
559: PetscCall(MatNorm(B, NORM_1, &nrmB));
560: PetscCheck(nrmB <= 0 || nrmT / nrmB < PETSC_SMALL, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix is not symmetric/Hermitian, GKB is not applicable.");
562: /* Compute augmented Lagrangian matrix H = A00 + nu*A01*A01'. This corresponds to */
563: /* setting N := 1/nu*I in [Ar13]. */
564: PetscCall(MatHermitianTranspose(B, MAT_INITIAL_MATRIX, &BT));
565: PetscCall(MatMatMult(B, BT, MAT_INITIAL_MATRIX, PETSC_CURRENT, H)); /* H = A01*A01' */
566: PetscCall(MatAYPX(*H, gkbnu, A, DIFFERENT_NONZERO_PATTERN)); /* H = A00 + nu*A01*A01' */
568: PetscCall(MatDestroy(&BT));
569: PetscCall(MatDestroy(&T));
570: PetscFunctionReturn(PETSC_SUCCESS);
571: }
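/*
   Editor's note: in block form, the routine above first checks that the saddle-point system is
   Hermitian, i.e. A10 = A01^H, and then assembles the augmented Lagrangian operator

     H = A00 + nu * A01 * A01^H

   that replaces A00 in the (0,0) block. Larger nu (runtime option -pc_fieldsplit_gkb_nu) typically
   reduces the number of outer GKB iterations at the price of a more ill-conditioned solve with H.
*/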
573: PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char pre[], const char name[], const char *option[], const char *value[], PetscBool *flg);
575: static PetscErrorCode PCSetUp_FieldSplit(PC pc)
576: {
577: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
578: PC_FieldSplitLink ilink;
579: PetscInt i, nsplit;
580: PetscBool matnest = PETSC_FALSE;
582: PetscFunctionBegin;
583: pc->failedreason = PC_NOERROR;
584: PetscCall(PCFieldSplitSetDefaults(pc));
585: nsplit = jac->nsplits;
586: ilink = jac->head;
587: if (pc->pmat) PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
589: /* get the matrices for each split */
590: if (!jac->issetup) {
591: PetscInt rstart, rend, nslots, bs;
593: jac->issetup = PETSC_TRUE;
595: /* This is done here instead of in PCFieldSplitSetFields() because the matrix may not be available at that point */
596: if (jac->defaultsplit || !ilink->is) {
597: if (jac->bs <= 0) jac->bs = nsplit;
598: }
600: /* MatCreateSubMatrix() for [S]BAIJ matrices can only work if the indices include entire blocks of the matrix */
601: PetscCall(MatGetBlockSize(pc->pmat, &bs));
602: if (bs > 1 && (jac->bs <= bs || jac->bs % bs)) {
603: PetscBool blk;
605: PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &blk, MATBAIJ, MATSBAIJ, MATSEQBAIJ, MATSEQSBAIJ, MATMPIBAIJ, MATMPISBAIJ, NULL));
606: PetscCheck(!blk, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Cannot use MATBAIJ with PCFIELDSPLIT and currently set matrix and PC blocksizes");
607: }
609: if (!matnest) { /* use the matrix blocksize and stride IS to determine the index sets that define the submatrices */
610: bs = jac->bs;
611: PetscCall(MatGetOwnershipRange(pc->pmat, &rstart, &rend));
612: nslots = (rend - rstart) / bs;
613: for (i = 0; i < nsplit; i++) {
614: if (jac->defaultsplit) {
615: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + i, nsplit, &ilink->is));
616: PetscCall(PetscObjectReference((PetscObject)ilink->is));
617: ilink->is_col = ilink->is;
618: } else if (!ilink->is) {
619: PetscBool same_fields = PETSC_TRUE;
621: for (PetscInt k = 0; k < ilink->nfields; k++) {
622: if (ilink->fields[k] != ilink->fields_col[k]) same_fields = PETSC_FALSE;
623: }
625: if (ilink->nfields > 1) {
626: PetscInt *ii, *jj, j, k, nfields = ilink->nfields, *fields = ilink->fields, *fields_col = ilink->fields_col;
628: PetscCall(PetscMalloc1(ilink->nfields * nslots, &ii));
629: if (!same_fields) PetscCall(PetscMalloc1(ilink->nfields * nslots, &jj));
630: for (j = 0; j < nslots; j++) {
631: for (k = 0; k < nfields; k++) {
632: ii[nfields * j + k] = rstart + bs * j + fields[k];
633: if (!same_fields) jj[nfields * j + k] = rstart + bs * j + fields_col[k];
634: }
635: }
636: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, ii, PETSC_OWN_POINTER, &ilink->is));
637: if (!same_fields) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), nslots * nfields, jj, PETSC_OWN_POINTER, &ilink->is_col));
638: else {
639: PetscCall(PetscObjectReference((PetscObject)ilink->is));
640: ilink->is_col = ilink->is;
641: }
642: PetscCall(ISSetBlockSize(ilink->is, nfields));
643: PetscCall(ISSetBlockSize(ilink->is_col, nfields));
644: } else {
645: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields[0], bs, &ilink->is));
646: if (!same_fields) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)pc), nslots, rstart + ilink->fields_col[0], bs, &ilink->is_col));
647: else {
648: PetscCall(PetscObjectReference((PetscObject)ilink->is));
649: ilink->is_col = ilink->is;
650: }
651: }
652: }
653: ilink = ilink->next;
654: }
655: } else { /* use the IS that define the MATNEST to determine the index sets that define the submatrices */
656: IS *rowis, *colis, *ises = NULL;
657: PetscInt mis, nis;
659: PetscCall(MatNestGetSize(pc->pmat, &mis, &nis));
660: PetscCall(PetscMalloc2(mis, &rowis, nis, &colis));
661: PetscCall(MatNestGetISs(pc->pmat, rowis, colis));
662: if (!jac->defaultsplit) PetscCall(PetscMalloc1(mis, &ises));
664: for (i = 0; i < nsplit; i++) {
665: if (jac->defaultsplit) {
666: PetscCall(ISDuplicate(rowis[i], &ilink->is));
667: PetscCall(PetscObjectReference((PetscObject)ilink->is));
668: ilink->is_col = ilink->is;
669: } else if (!ilink->is) {
670: if (ilink->nfields > 1) {
671: for (PetscInt j = 0; j < ilink->nfields; j++) ises[j] = rowis[ilink->fields[j]];
672: PetscCall(ISConcatenate(PetscObjectComm((PetscObject)pc), ilink->nfields, ises, &ilink->is));
673: } else {
674: PetscCall(ISDuplicate(rowis[ilink->fields[0]], &ilink->is));
675: }
676: PetscCall(PetscObjectReference((PetscObject)ilink->is));
677: ilink->is_col = ilink->is;
678: }
679: ilink = ilink->next;
680: }
681: PetscCall(PetscFree2(rowis, colis));
682: PetscCall(PetscFree(ises));
683: }
684: }
686: ilink = jac->head;
687: if (!jac->pmat) {
688: Vec xtmp;
690: PetscCall(MatCreateVecs(pc->pmat, &xtmp, NULL));
691: PetscCall(PetscMalloc1(nsplit, &jac->pmat));
692: PetscCall(PetscMalloc2(nsplit, &jac->x, nsplit, &jac->y));
693: for (i = 0; i < nsplit; i++) {
694: MatNullSpace sp;
696: /* Check for matrix attached to IS */
697: PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&jac->pmat[i]));
698: if (jac->pmat[i]) {
699: PetscCall(PetscObjectReference((PetscObject)jac->pmat[i]));
700: if (jac->type == PC_COMPOSITE_SCHUR) {
701: jac->schur_user = jac->pmat[i];
703: PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
704: }
705: } else {
706: const char *prefix;
707: PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->pmat[i]));
708: PetscCall(MatGetOptionsPrefix(jac->pmat[i], &prefix));
709: if (!prefix) {
710: PetscCall(KSPGetOptionsPrefix(ilink->ksp, &prefix));
711: PetscCall(MatSetOptionsPrefix(jac->pmat[i], prefix));
712: }
713: PetscCall(MatSetFromOptions(jac->pmat[i]));
714: PetscCall(MatViewFromOptions(jac->pmat[i], NULL, "-mat_view"));
715: }
716: /* create work vectors for each split */
717: PetscCall(MatCreateVecs(jac->pmat[i], &jac->x[i], &jac->y[i]));
718: ilink->x = jac->x[i];
719: ilink->y = jac->y[i];
720: ilink->z = NULL;
721: /* compute scatter contexts needed by multiplicative versions and non-default splits */
722: PetscCall(VecScatterCreate(xtmp, ilink->is, jac->x[i], NULL, &ilink->sctx));
723: PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nearnullspace", (PetscObject *)&sp));
724: if (sp) PetscCall(MatSetNearNullSpace(jac->pmat[i], sp));
725: ilink = ilink->next;
726: }
727: PetscCall(VecDestroy(&xtmp));
728: } else {
729: MatReuse scall;
730: MatNullSpace *nullsp = NULL;
732: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
733: PetscCall(MatGetNullSpaces(nsplit, jac->pmat, &nullsp));
734: for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->pmat[i]));
735: scall = MAT_INITIAL_MATRIX;
736: } else scall = MAT_REUSE_MATRIX;
738: for (i = 0; i < nsplit; i++) {
739: Mat pmat;
741: /* Check for matrix attached to IS */
742: PetscCall(PetscObjectQuery((PetscObject)ilink->is, "pmat", (PetscObject *)&pmat));
743: if (!pmat) PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, ilink->is_col, scall, &jac->pmat[i]));
744: ilink = ilink->next;
745: }
746: if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->pmat, &nullsp));
747: }
748: if (jac->diag_use_amat) {
749: ilink = jac->head;
750: if (!jac->mat) {
751: PetscCall(PetscMalloc1(nsplit, &jac->mat));
752: for (i = 0; i < nsplit; i++) {
753: PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, MAT_INITIAL_MATRIX, &jac->mat[i]));
754: ilink = ilink->next;
755: }
756: } else {
757: MatReuse scall;
758: MatNullSpace *nullsp = NULL;
760: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
761: PetscCall(MatGetNullSpaces(nsplit, jac->mat, &nullsp));
762: for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->mat[i]));
763: scall = MAT_INITIAL_MATRIX;
764: } else scall = MAT_REUSE_MATRIX;
766: for (i = 0; i < nsplit; i++) {
767: PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, ilink->is_col, scall, &jac->mat[i]));
768: ilink = ilink->next;
769: }
770: if (nullsp) PetscCall(MatRestoreNullSpaces(nsplit, jac->mat, &nullsp));
771: }
772: } else {
773: jac->mat = jac->pmat;
774: }
776: /* Check for null space attached to IS */
777: ilink = jac->head;
778: for (i = 0; i < nsplit; i++) {
779: MatNullSpace sp;
781: PetscCall(PetscObjectQuery((PetscObject)ilink->is, "nullspace", (PetscObject *)&sp));
782: if (sp) PetscCall(MatSetNullSpace(jac->mat[i], sp));
783: ilink = ilink->next;
784: }
786: if (jac->type != PC_COMPOSITE_ADDITIVE && jac->type != PC_COMPOSITE_SCHUR && jac->type != PC_COMPOSITE_GKB) {
787: /* extract the rows of the matrix associated with each field: used for efficient computation of residual inside algorithm */
788: /* FIXME: Can/should we reuse jac->mat whenever (jac->diag_use_amat) is true? */
789: ilink = jac->head;
790: if (nsplit == 2 && jac->type == PC_COMPOSITE_MULTIPLICATIVE) {
791: /* special case where Afield[0] is not needed and only certain columns of Afield[1] are needed, since the update only changes those rows of the solution */
792: if (!jac->Afield) {
793: PetscCall(PetscCalloc1(nsplit, &jac->Afield));
794: if (jac->offdiag_use_amat) {
795: PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
796: } else {
797: PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->Afield[1]));
798: }
799: } else {
800: MatReuse scall;
802: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
803: PetscCall(MatDestroy(&jac->Afield[1]));
804: scall = MAT_INITIAL_MATRIX;
805: } else scall = MAT_REUSE_MATRIX;
807: if (jac->offdiag_use_amat) {
808: PetscCall(MatCreateSubMatrix(pc->mat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
809: } else {
810: PetscCall(MatCreateSubMatrix(pc->pmat, ilink->next->is, ilink->is, scall, &jac->Afield[1]));
811: }
812: }
813: } else {
814: if (!jac->Afield) {
815: PetscCall(PetscMalloc1(nsplit, &jac->Afield));
816: for (i = 0; i < nsplit; i++) {
817: if (jac->offdiag_use_amat) {
818: PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
819: } else {
820: PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, MAT_INITIAL_MATRIX, &jac->Afield[i]));
821: }
822: ilink = ilink->next;
823: }
824: } else {
825: MatReuse scall;
826: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
827: for (i = 0; i < nsplit; i++) PetscCall(MatDestroy(&jac->Afield[i]));
828: scall = MAT_INITIAL_MATRIX;
829: } else scall = MAT_REUSE_MATRIX;
831: for (i = 0; i < nsplit; i++) {
832: if (jac->offdiag_use_amat) {
833: PetscCall(MatCreateSubMatrix(pc->mat, ilink->is, NULL, scall, &jac->Afield[i]));
834: } else {
835: PetscCall(MatCreateSubMatrix(pc->pmat, ilink->is, NULL, scall, &jac->Afield[i]));
836: }
837: ilink = ilink->next;
838: }
839: }
840: }
841: }
843: if (jac->type == PC_COMPOSITE_SCHUR) {
844: PetscBool isset, isspd = PETSC_FALSE, issym = PETSC_FALSE, flg;
845: char lscname[256];
846: PetscObject LSC_L;
848: PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use Schur complement preconditioner you must have exactly 2 fields");
850: /* If pc->mat is SPD, don't scale by -1 the Schur complement */
851: PetscCall(MatIsSPDKnown(pc->pmat, &isset, &isspd));
852: if (jac->schurscale == (PetscScalar)-1.0) jac->schurscale = (isset && isspd) ? 1.0 : -1.0;
853: PetscCall(MatIsSymmetricKnown(pc->pmat, &isset, &issym));
855: PetscCall(PetscObjectTypeCompareAny(jac->offdiag_use_amat ? (PetscObject)pc->mat : (PetscObject)pc->pmat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
857: if (jac->schur) {
858: KSP kspA = jac->head->ksp, kspInner = NULL, kspUpper = jac->kspupper;
859: MatReuse scall;
861: if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
862: scall = MAT_INITIAL_MATRIX;
863: PetscCall(MatDestroy(&jac->B));
864: PetscCall(MatDestroy(&jac->C));
865: } else scall = MAT_REUSE_MATRIX;
867: PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
868: ilink = jac->head;
869: PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, scall, &jac->B));
870: if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, scall, &jac->C));
871: else {
872: PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
873: if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
874: else PetscCall(MatCreateTranspose(jac->B, &jac->C));
875: }
876: ilink = ilink->next;
877: PetscCall(MatSchurComplementUpdateSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
878: if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) {
879: PetscCall(MatDestroy(&jac->schurp));
880: PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
881: } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL && jac->kspupper != jac->head->ksp) {
882: PetscCall(MatDestroy(&jac->schur_user));
883: PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
884: }
885: if (kspA != kspInner) PetscCall(KSPSetOperators(kspA, jac->mat[0], jac->pmat[0]));
886: if (kspUpper != kspA) PetscCall(KSPSetOperators(kspUpper, jac->mat[0], jac->pmat[0]));
887: PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
888: } else {
889: const char *Dprefix;
890: char schurprefix[256], schurmatprefix[256];
891: char schurtestoption[256];
892: MatNullSpace sp;
893: KSP kspt;
895: /* extract the A01 and A10 matrices */
896: ilink = jac->head;
897: PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
898: if (!flg) PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
899: else {
900: PetscCall(MatIsHermitianKnown(jac->offdiag_use_amat ? pc->mat : pc->pmat, &isset, &flg));
901: if (isset && flg) PetscCall(MatCreateHermitianTranspose(jac->B, &jac->C));
902: else PetscCall(MatCreateTranspose(jac->B, &jac->C));
903: }
904: ilink = ilink->next;
905: /* Use mat[0] (diagonal block of Amat) preconditioned by pmat[0] to define Schur complement */
906: PetscCall(MatCreate(((PetscObject)jac->mat[0])->comm, &jac->schur));
907: PetscCall(MatSetType(jac->schur, MATSCHURCOMPLEMENT));
908: PetscCall(MatSchurComplementSetSubMatrices(jac->schur, jac->mat[0], jac->pmat[0], jac->B, jac->C, jac->mat[1]));
909: PetscCall(PetscSNPrintf(schurmatprefix, sizeof(schurmatprefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
910: PetscCall(MatSetOptionsPrefix(jac->schur, schurmatprefix));
911: PetscCall(MatSchurComplementGetKSP(jac->schur, &kspt));
912: PetscCall(KSPSetOptionsPrefix(kspt, schurmatprefix));
914: /* Note: using the null space of A11 for the Schur complement is not correct in general */
915: PetscCall(MatGetNullSpace(jac->mat[1], &sp));
916: if (sp) PetscCall(MatSetNullSpace(jac->schur, sp));
918: PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_inner_", ilink->splitname));
919: PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
920: if (flg) {
921: DM dmInner;
922: KSP kspInner;
923: PC pcInner;
925: PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
926: PetscCall(KSPReset(kspInner));
927: PetscCall(KSPSetOperators(kspInner, jac->mat[0], jac->pmat[0]));
928: PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_inner_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
929: /* Indent this deeper to emphasize the "inner" nature of this solver. */
930: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner, (PetscObject)pc, 2));
931: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspInner->pc, (PetscObject)pc, 2));
932: PetscCall(KSPSetOptionsPrefix(kspInner, schurprefix));
934: /* Set DM for new solver */
935: PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
936: PetscCall(KSPSetDM(kspInner, dmInner));
937: PetscCall(KSPSetDMActive(kspInner, PETSC_FALSE));
939: /* Defaults to PCKSP as preconditioner */
940: PetscCall(KSPGetPC(kspInner, &pcInner));
941: PetscCall(PCSetType(pcInner, PCKSP));
942: PetscCall(PCKSPSetKSP(pcInner, jac->head->ksp));
943: } else {
944: /* Use the outer solver for the inner solve, but revert the KSPPREONLY from PCFieldSplitSetFields_FieldSplit or
945: * PCFieldSplitSetIS_FieldSplit. We don't want KSPPREONLY because it makes the Schur complement inexact,
946: * preventing the Schur complement reduction from being an accurate solve. Usually when an iterative solver is used for
947: * S = D - C A_inner^{-1} B, we expect S to be defined using an accurate definition of A_inner^{-1}, so we make
948: * GMRES the default. Note that it is also common to use PREONLY for S, in which case S may not be used
949: * directly, and the user is responsible for setting an inexact method for fieldsplit's A^{-1}. */
950: PetscCall(KSPSetType(jac->head->ksp, KSPGMRES));
951: PetscCall(MatSchurComplementSetKSP(jac->schur, jac->head->ksp));
952: }
953: PetscCall(KSPSetOperators(jac->head->ksp, jac->mat[0], jac->pmat[0]));
954: PetscCall(KSPSetFromOptions(jac->head->ksp));
955: PetscCall(MatSetFromOptions(jac->schur));
957: PetscCall(PetscObjectTypeCompare((PetscObject)jac->schur, MATSCHURCOMPLEMENT, &flg));
958: if (flg) { /* Need to do this otherwise PCSetUp_KSP will overwrite the amat of jac->head->ksp */
959: KSP kspInner;
960: PC pcInner;
962: PetscCall(MatSchurComplementGetKSP(jac->schur, &kspInner));
963: PetscCall(KSPGetPC(kspInner, &pcInner));
964: PetscCall(PetscObjectTypeCompare((PetscObject)pcInner, PCKSP, &flg));
965: if (flg) {
966: KSP ksp;
968: PetscCall(PCKSPGetKSP(pcInner, &ksp));
969: if (ksp == jac->head->ksp) PetscCall(PCSetUseAmat(pcInner, PETSC_TRUE));
970: }
971: }
972: PetscCall(PetscSNPrintf(schurtestoption, sizeof(schurtestoption), "-fieldsplit_%s_upper_", ilink->splitname));
973: PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, schurtestoption, NULL, NULL, &flg));
974: if (flg) {
975: DM dmInner;
977: PetscCall(PetscSNPrintf(schurprefix, sizeof(schurprefix), "%sfieldsplit_%s_upper_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
978: PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspupper));
979: PetscCall(KSPSetNestLevel(jac->kspupper, pc->kspnestlevel));
980: PetscCall(KSPSetErrorIfNotConverged(jac->kspupper, pc->erroriffailure));
981: PetscCall(KSPSetOptionsPrefix(jac->kspupper, schurprefix));
982: PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper, (PetscObject)pc, 1));
983: PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspupper->pc, (PetscObject)pc, 1));
984: PetscCall(KSPGetDM(jac->head->ksp, &dmInner));
985: PetscCall(KSPSetDM(jac->kspupper, dmInner));
986: PetscCall(KSPSetDMActive(jac->kspupper, PETSC_FALSE));
987: PetscCall(KSPSetFromOptions(jac->kspupper));
988: PetscCall(KSPSetOperators(jac->kspupper, jac->mat[0], jac->pmat[0]));
989: PetscCall(VecDuplicate(jac->head->x, &jac->head->z));
990: } else {
991: jac->kspupper = jac->head->ksp;
992: PetscCall(PetscObjectReference((PetscObject)jac->head->ksp));
993: }
995: if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELFP) PetscCall(MatSchurComplementGetPmat(jac->schur, MAT_INITIAL_MATRIX, &jac->schurp));
996: PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &jac->kspschur));
997: PetscCall(KSPSetNestLevel(jac->kspschur, pc->kspnestlevel));
998: PetscCall(KSPSetErrorIfNotConverged(jac->kspschur, pc->erroriffailure));
999: PetscCall(PetscObjectIncrementTabLevel((PetscObject)jac->kspschur, (PetscObject)pc, 1));
1000: if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_SELF) {
1001: PC pcschur;
1002: PetscCall(KSPGetPC(jac->kspschur, &pcschur));
1003: PetscCall(PCSetType(pcschur, PCNONE));
1004: /* Note: This is bad if there exist preconditioners for MATSCHURCOMPLEMENT */
1005: } else if (jac->schurpre == PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1006: if (jac->schurfactorization != PC_FIELDSPLIT_SCHUR_FACT_FULL || jac->kspupper != jac->head->ksp) PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1007: }
1008: PetscCall(KSPSetOperators(jac->kspschur, jac->schur, FieldSplitSchurPre(jac)));
1009: PetscCall(KSPGetOptionsPrefix(jac->head->next->ksp, &Dprefix));
1010: PetscCall(KSPSetOptionsPrefix(jac->kspschur, Dprefix));
1011: /* propagate DM */
1012: {
1013: DM sdm;
1014: PetscCall(KSPGetDM(jac->head->next->ksp, &sdm));
1015: if (sdm) {
1016: PetscCall(KSPSetDM(jac->kspschur, sdm));
1017: PetscCall(KSPSetDMActive(jac->kspschur, PETSC_FALSE));
1018: }
1019: }
1020: /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1021: /* need to call this every time, since the jac->kspschur is freshly created, otherwise its options never get set */
1022: PetscCall(KSPSetFromOptions(jac->kspschur));
1023: }
1024: PetscCall(MatAssemblyBegin(jac->schur, MAT_FINAL_ASSEMBLY));
1025: PetscCall(MatAssemblyEnd(jac->schur, MAT_FINAL_ASSEMBLY));
1026: if (issym) PetscCall(MatSetOption(jac->schur, MAT_SYMMETRIC, PETSC_TRUE));
1027: if (isspd) PetscCall(MatSetOption(jac->schur, MAT_SPD, PETSC_TRUE));
1029: /* HACK: special support to forward L and Lp matrices that might be used by PCLSC */
1030: PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_L", ilink->splitname));
1031: PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
1032: if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
1033: if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_L", LSC_L));
1034: PetscCall(PetscSNPrintf(lscname, sizeof(lscname), "%s_LSC_Lp", ilink->splitname));
1035: PetscCall(PetscObjectQuery((PetscObject)pc->pmat, lscname, &LSC_L));
1036: if (!LSC_L) PetscCall(PetscObjectQuery((PetscObject)pc->mat, lscname, &LSC_L));
1037: if (LSC_L) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "LSC_Lp", LSC_L));
1038: } else if (jac->type == PC_COMPOSITE_GKB) {
1039: PetscCheck(nsplit == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "To use GKB preconditioner you must have exactly 2 fields");
1040: ilink = jac->head;
1041: PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->is, ilink->next->is, MAT_INITIAL_MATRIX, &jac->B));
1042: /* Create work vectors for GKB algorithm */
1043: PetscCall(VecDuplicate(ilink->x, &jac->u));
1044: PetscCall(VecDuplicate(ilink->x, &jac->Hu));
1045: PetscCall(VecDuplicate(ilink->x, &jac->w2));
1046: PetscCall(MatCreateSubMatrix(jac->offdiag_use_amat ? pc->mat : pc->pmat, ilink->next->is, ilink->is, MAT_INITIAL_MATRIX, &jac->C));
1047: ilink = ilink->next;
1048: /* Create work vectors for GKB algorithm */
1049: PetscCall(VecDuplicate(ilink->x, &jac->v));
1050: PetscCall(VecDuplicate(ilink->x, &jac->d));
1051: PetscCall(VecDuplicate(ilink->x, &jac->w1));
1052: PetscCall(MatGolubKahanComputeExplicitOperator(jac->mat[0], jac->B, jac->C, &jac->H, jac->gkbnu));
1053: PetscCall(PetscCalloc1(jac->gkbdelay, &jac->vecz));
1055: ilink = jac->head;
1056: PetscCall(KSPSetOperators(ilink->ksp, jac->H, jac->H));
1057: if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1058: /* Create gkb_monitor context */
1059: if (jac->gkbmonitor) {
1060: PetscInt tablevel;
1061: PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &jac->gkbviewer));
1062: PetscCall(PetscViewerSetType(jac->gkbviewer, PETSCVIEWERASCII));
1063: PetscCall(PetscObjectGetTabLevel((PetscObject)ilink->ksp, &tablevel));
1064: PetscCall(PetscViewerASCIISetTab(jac->gkbviewer, tablevel));
1065: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)ilink->ksp, 1));
1066: }
1067: } else {
1068: /* set up the individual splits' PCs */
1069: i = 0;
1070: ilink = jac->head;
1071: while (ilink) {
1072: PetscCall(KSPSetOperators(ilink->ksp, jac->mat[i], jac->pmat[i]));
1073: /* really want setfromoptions called in PCSetFromOptions_FieldSplit(), but it is not ready yet */
1074: if (!jac->suboptionsset) PetscCall(KSPSetFromOptions(ilink->ksp));
1075: i++;
1076: ilink = ilink->next;
1077: }
1078: }
1080: /* Set coordinates to the sub PC objects whenever these are set */
1081: if (jac->coordinates_set) {
1082: PC pc_coords;
1083: if (jac->type == PC_COMPOSITE_SCHUR) {
1084: // Head is first block.
1085: PetscCall(KSPGetPC(jac->head->ksp, &pc_coords));
1086: PetscCall(PCSetCoordinates(pc_coords, jac->head->dim, jac->head->ndofs, jac->head->coords));
1087: // Second one is Schur block, but its KSP object is in kspschur.
1088: PetscCall(KSPGetPC(jac->kspschur, &pc_coords));
1089: PetscCall(PCSetCoordinates(pc_coords, jac->head->next->dim, jac->head->next->ndofs, jac->head->next->coords));
1090: } else if (jac->type == PC_COMPOSITE_GKB) {
1091: PetscCall(PetscInfo(pc, "Warning: Setting coordinates does nothing for the GKB FieldSplit preconditioner\n"));
1092: } else {
1093: ilink = jac->head;
1094: while (ilink) {
1095: PetscCall(KSPGetPC(ilink->ksp, &pc_coords));
1096: PetscCall(PCSetCoordinates(pc_coords, ilink->dim, ilink->ndofs, ilink->coords));
1097: ilink = ilink->next;
1098: }
1099: }
1100: }
1102: jac->suboptionsset = PETSC_TRUE;
1103: PetscFunctionReturn(PETSC_SUCCESS);
1104: }
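/*
   Example (editor's sketch): a command line that exercises most of the Schur setup paths above,
   for a two-split (e.g. Stokes-like) problem with splits named "0" and "1":

     -pc_type fieldsplit -pc_fieldsplit_type schur
     -pc_fieldsplit_schur_fact_type full -pc_fieldsplit_schur_precondition selfp
     -fieldsplit_0_ksp_type gmres -fieldsplit_0_pc_type ilu
     -fieldsplit_1_ksp_type fgmres

   Options prefixed -fieldsplit_<splitname>_ reach the per-split KSPs, and the
   -fieldsplit_<splitname>_inner_* / -fieldsplit_<splitname>_upper_* prefixes probed above create
   separate solvers for A00 inside the Schur complement and in the upper triangular factor.
*/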
1106: static PetscErrorCode PCSetUpOnBlocks_FieldSplit_Schur(PC pc)
1107: {
1108: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1109: PC_FieldSplitLink ilinkA = jac->head;
1110: KSP kspA = ilinkA->ksp, kspUpper = jac->kspupper;
1112: PetscFunctionBegin;
1113: if (jac->schurfactorization == PC_FIELDSPLIT_SCHUR_FACT_FULL && kspUpper != kspA) {
1114: PetscCall(KSPSetUp(kspUpper));
1115: PetscCall(KSPSetUpOnBlocks(kspUpper));
1116: }
1117: PetscCall(KSPSetUp(kspA));
1118: PetscCall(KSPSetUpOnBlocks(kspA));
1119: if (jac->schurpre != PC_FIELDSPLIT_SCHUR_PRE_FULL) {
1120: PetscCall(KSPSetUp(jac->kspschur));
1121: PetscCall(KSPSetUpOnBlocks(jac->kspschur));
1122: } else if (kspUpper == kspA) {
1123: Mat A;
1124: PetscInt m, M, N;
1125: VecType vtype;
1126: PetscMemType mtype;
1127: PetscScalar *array;
1129: PetscCall(MatGetSize(jac->B, &M, &N));
1130: PetscCall(MatGetLocalSize(jac->B, &m, NULL));
1131: PetscCall(MatGetVecType(jac->B, &vtype));
1132: PetscCall(VecGetArrayAndMemType(ilinkA->x, &array, &mtype));
1133: PetscCall(VecRestoreArrayAndMemType(ilinkA->x, &array));
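    /* Editor's note: allocate a dense workspace in the same memory space as the split vectors; it is
       composed with jac->schur as "AinvB" and sized to hold A00^{-1} A01 plus one extra column */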
1134: if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(PetscMalloc1(m * (N + 1), &array));
1135: #if PetscDefined(HAVE_CUDA)
1136: else if (PetscMemTypeCUDA(mtype)) PetscCallCUDA(cudaMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
1137: #endif
1138: #if PetscDefined(HAVE_HIP)
1139: else if (PetscMemTypeHIP(mtype)) PetscCallHIP(hipMalloc((void **)&array, sizeof(PetscScalar) * m * (N + 1)));
1140: #endif
1141: PetscCall(MatCreateDenseFromVecType(PetscObjectComm((PetscObject)jac->schur), vtype, m, PETSC_DECIDE, M, N + 1, -1, array, &A)); // number of columns of the Schur complement plus one
1142: PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", (PetscObject)A));
1143: PetscCall(MatDestroy(&A));
1144: }
1145: PetscFunctionReturn(PETSC_SUCCESS);
1146: }
1148: static PetscErrorCode PCSetUpOnBlocks_FieldSplit(PC pc)
1149: {
1150: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1151: PC_FieldSplitLink ilink = jac->head;
1153: PetscFunctionBegin;
1154: while (ilink) {
1155: PetscCall(KSPSetUp(ilink->ksp));
1156: PetscCall(KSPSetUpOnBlocks(ilink->ksp));
1157: ilink = ilink->next;
1158: }
1159: PetscFunctionReturn(PETSC_SUCCESS);
1160: }
1162: static PetscErrorCode PCSetUpOnBlocks_FieldSplit_GKB(PC pc)
1163: {
1164: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1165: PC_FieldSplitLink ilinkA = jac->head;
1166: KSP ksp = ilinkA->ksp;
1168: PetscFunctionBegin;
1169: PetscCall(KSPSetUp(ksp));
1170: PetscCall(KSPSetUpOnBlocks(ksp));
1171: PetscFunctionReturn(PETSC_SUCCESS);
1172: }
1174: static PetscErrorCode PCApply_FieldSplit_Schur(PC pc, Vec x, Vec y)
1175: {
1176: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1177: PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1178: KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1179: Mat AinvB = NULL;
1180: PetscInt N, P;
1182: PetscFunctionBegin;
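  /* Editor's note: the cases below apply the preconditioner obtained from the block factorization
       [A00 A01; A10 A11] = [I 0; A10 A00^{-1} I] [A00 0; 0 S] [I A00^{-1} A01; 0 I],
     where S = A11 - A10 A00^{-1} A01, keeping only the diagonal factor (DIAG), the lower and diagonal
     factors (LOWER), the diagonal and upper factors (UPPER), or all three factors (FULL). */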
1183: switch (jac->schurfactorization) {
1184: case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1185: /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1186: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1187: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1188: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1189: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1190: PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1191: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1192: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1193: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1194: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1195: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1196: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1197: PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1198: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1199: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1200: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1201: PetscCall(VecScale(ilinkD->y, jac->schurscale));
1202: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1203: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1204: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1205: break;
1206: case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1207: /* [A00 0; A10 S], suitable for left preconditioning */
1208: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1209: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1210: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1211: PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1212: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1213: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1214: PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1215: PetscCall(VecScale(ilinkD->x, -1.));
1216: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1217: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1218: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1219: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1220: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1221: PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1222: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1223: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1224: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1225: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1226: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1227: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1228: break;
1229: case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1230: /* [A00 A01; 0 S], suitable for right preconditioning */
1231: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1232: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1233: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1234: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1235: PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1236: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1237: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1238: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1239: PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1240: PetscCall(VecScale(ilinkA->x, -1.));
1241: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1242: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1243: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1244: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1245: PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1246: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1247: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1248: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1249: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1250: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1251: break;
1252: case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1253: /* [1 0; A10 A00^{-1} 1] [A00 0; 0 S] [1 A00^{-1}A01; 0 1] */
1254: PetscCall(MatGetSize(jac->B, NULL, &P));
1255: N = P;
1256: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1257: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1258: PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1259: if (kspUpper == kspA) {
1260: PetscCall(PetscObjectQuery((PetscObject)jac->schur, "AinvB", (PetscObject *)&AinvB));
1261: if (AinvB) {
1262: PetscCall(MatGetSize(AinvB, NULL, &N));
1263: if (N > P) { // first time PCApply_FieldSplit_Schur() is called
1264: PetscMemType mtype;
1265: Vec c = NULL;
1266: PetscScalar *array;
1267: PetscInt m, M;
1269: PetscCall(MatGetSize(jac->B, &M, NULL));
1270: PetscCall(MatGetLocalSize(jac->B, &m, NULL));
1271: PetscCall(MatDenseGetArrayAndMemType(AinvB, &array, &mtype));
1272: if (PetscMemTypeHost(mtype) || (!PetscDefined(HAVE_CUDA) && !PetscDefined(HAVE_HIP))) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1273: #if PetscDefined(HAVE_CUDA)
1274: else if (PetscMemTypeCUDA(mtype)) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1275: #endif
1276: #if PetscDefined(HAVE_HIP)
1277: else if (PetscMemTypeHIP(mtype)) PetscCall(VecCreateMPIHIPWithArray(PetscObjectComm((PetscObject)jac->schur), 1, m, M, array + m * P, &c));
1278: #endif
1279: PetscCall(MatDenseRestoreArrayAndMemType(AinvB, &array));
1280: PetscCall(VecCopy(ilinkA->x, c));
1281: PetscCall(MatSchurComplementComputeExplicitOperator(jac->schur, &jac->schur_user));
1282: PetscCall(KSPSetOperators(jac->kspschur, jac->schur, jac->schur_user));
1283: PetscCall(VecCopy(c, ilinkA->y)); // retrieve the solution as the last column of the composed Mat
1284: PetscCall(VecDestroy(&c));
1285: }
1286: }
1287: }
1288: if (N == P) PetscCall(KSPSolve(kspLower, ilinkA->x, ilinkA->y));
1289: PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->y));
1290: PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->y, NULL));
1291: PetscCall(MatMult(jac->C, ilinkA->y, ilinkD->x));
1292: PetscCall(VecScale(ilinkD->x, -1.0));
1293: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1294: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1296: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1297: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1298: PetscCall(KSPSolve(jac->kspschur, ilinkD->x, ilinkD->y));
1299: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1300: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1301: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1302: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1304: if (kspUpper == kspA) {
1305: if (!AinvB) {
1306: PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->y));
1307: PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1308: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1309: PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1310: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1311: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1312: } else PetscCall(MatMultAdd(AinvB, ilinkD->y, ilinkA->y, ilinkA->y));
1313: } else {
1314: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1315: PetscCall(KSPSolve(kspA, ilinkA->x, ilinkA->y));
1316: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1317: PetscCall(MatMult(jac->B, ilinkD->y, ilinkA->x));
1318: PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1319: PetscCall(KSPSolve(kspUpper, ilinkA->x, ilinkA->z));
1320: PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->z));
1321: PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->z, NULL));
1322: PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1323: }
1324: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1325: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1326: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1327: }
1328: PetscFunctionReturn(PETSC_SUCCESS);
1329: }
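
/*
   A sketch of what the four cases above compute, with x = [x0; x1], y = [y0; y1], B = A01,
   C = A10, and S the Schur complement (this is a reading of the code paths, not taken from
   the PETSc documentation):

     DIAG:  y0 = A00^{-1} x0                  y1 = schurscale * S^{-1} x1
     LOWER: y0 = A00^{-1} x0                  y1 = S^{-1} (x1 - C y0)
     UPPER: y1 = S^{-1} x1                    y0 = A00^{-1} (x0 - B y1)
     FULL:  w = A00^{-1} x0,  y1 = S^{-1} (x1 - C w),  y0 = w - kspUpper^{-1} (B y1)
            (when kspUpper == kspA the last term is computed by re-solving on x0 - B y1,
             or read off the cached AinvB workspace when it is available)
*/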
1331: static PetscErrorCode PCApplyTranspose_FieldSplit_Schur(PC pc, Vec x, Vec y)
1332: {
1333: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1334: PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1335: KSP kspA = ilinkA->ksp, kspLower = kspA, kspUpper = jac->kspupper;
1337: PetscFunctionBegin;
1338: switch (jac->schurfactorization) {
1339: case PC_FIELDSPLIT_SCHUR_FACT_DIAG:
1340: /* [A00 0; 0 -S], positive definite, suitable for MINRES */
1341: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1342: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1343: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1344: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1345: PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1346: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1347: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1348: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1349: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1350: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1351: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1352: PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1353: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1354: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1355: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1356: PetscCall(VecScale(ilinkD->y, jac->schurscale));
1357: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1358: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1359: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1360: break;
1361: case PC_FIELDSPLIT_SCHUR_FACT_UPPER:
1362: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1363: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1364: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1365: PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1366: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1367: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1368: PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
1369: PetscCall(VecScale(ilinkD->x, -1.));
1370: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1371: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1372: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1373: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1374: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1375: PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1376: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1377: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1378: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1379: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1380: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1381: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1382: break;
1383: case PC_FIELDSPLIT_SCHUR_FACT_LOWER:
1384: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1385: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1386: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1387: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1388: PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1389: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1390: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1391: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1392: PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
1393: PetscCall(VecScale(ilinkA->x, -1.));
1394: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1395: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1396: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, ADD_VALUES, SCATTER_FORWARD));
1397: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1398: PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1399: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1400: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1401: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1402: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1403: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1404: break;
1405: case PC_FIELDSPLIT_SCHUR_FACT_FULL:
1406: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1407: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1408: PetscCall(PetscLogEventBegin(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
1409: PetscCall(KSPSolveTranspose(kspUpper, ilinkA->x, ilinkA->y));
1410: PetscCall(KSPCheckSolve(kspUpper, pc, ilinkA->y));
1411: PetscCall(PetscLogEventEnd(KSP_Solve_FS_U, kspUpper, ilinkA->x, ilinkA->y, NULL));
1412: PetscCall(MatMultTranspose(jac->B, ilinkA->y, ilinkD->x));
1413: PetscCall(VecScale(ilinkD->x, -1.0));
1414: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1415: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, ADD_VALUES, SCATTER_FORWARD));
1417: PetscCall(PetscLogEventBegin(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1418: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, 1));
1419: PetscCall(KSPSolveTranspose(jac->kspschur, ilinkD->x, ilinkD->y));
1420: PetscCall(PetscObjectIncrementTabLevel((PetscObject)kspA, (PetscObject)kspA, -1));
1421: PetscCall(KSPCheckSolve(jac->kspschur, pc, ilinkD->y));
1422: PetscCall(PetscLogEventEnd(KSP_Solve_FS_S, jac->kspschur, ilinkD->x, ilinkD->y, NULL));
1423: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1425: if (kspLower == kspA) {
1426: PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->y));
1427: PetscCall(VecAXPY(ilinkA->x, -1.0, ilinkA->y));
1428: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1429: PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1430: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1431: PetscCall(PetscLogEventEnd(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1432: } else {
1433: PetscCall(PetscLogEventBegin(ilinkA->event, kspA, ilinkA->x, ilinkA->y, NULL));
1434: PetscCall(KSPSolveTranspose(kspA, ilinkA->x, ilinkA->y));
1435: PetscCall(KSPCheckSolve(kspA, pc, ilinkA->y));
1436: PetscCall(MatMultTranspose(jac->C, ilinkD->y, ilinkA->x));
1437: PetscCall(PetscLogEventBegin(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
1438: PetscCall(KSPSolveTranspose(kspLower, ilinkA->x, ilinkA->z));
1439: PetscCall(KSPCheckSolve(kspLower, pc, ilinkA->z));
1440: PetscCall(PetscLogEventEnd(KSP_Solve_FS_L, kspLower, ilinkA->x, ilinkA->z, NULL));
1441: PetscCall(VecAXPY(ilinkA->y, -1.0, ilinkA->z));
1442: }
1443: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1444: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1445: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1446: }
1447: PetscFunctionReturn(PETSC_SUCCESS);
1448: }
1450: #define FieldSplitSplitSolveAdd(ilink, xx, yy) \
1451: ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->x, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || \
1452: KSPSolve(ilink->ksp, ilink->x, ilink->y) || KSPCheckSolve(ilink->ksp, pc, ilink->y) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL) || VecScatterBegin(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE) || \
1453: VecScatterEnd(ilink->sctx, ilink->y, yy, ADD_VALUES, SCATTER_REVERSE)))
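
/*
   The || chaining in the macro above exploits the fact that each call returns a PetscErrorCode
   that is zero on success: the first failure short-circuits the remaining calls and becomes the
   value of the whole expression, so wrapping an invocation in PetscCall() checks the entire
   scatter-solve-scatter sequence at once. Note that the macro also uses the variable pc from
   the calling scope for KSPCheckSolve().
*/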
1455: static PetscErrorCode PCApply_FieldSplit(PC pc, Vec x, Vec y)
1456: {
1457: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1458: PC_FieldSplitLink ilink = jac->head;
1459: PetscInt cnt, bs;
1461: PetscFunctionBegin;
1462: if (jac->type == PC_COMPOSITE_ADDITIVE) {
1463: PetscBool matnest;
1465: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
1466: if (jac->defaultsplit && !matnest) {
1467: PetscCall(VecGetBlockSize(x, &bs));
1468: PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1469: PetscCall(VecGetBlockSize(y, &bs));
1470: PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1471: PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
1472: while (ilink) {
1473: PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1474: PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1475: PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1476: PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1477: ilink = ilink->next;
1478: }
1479: PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
1480: } else {
1481: PetscCall(VecSet(y, 0.0));
1482: while (ilink) {
1483: PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
1484: ilink = ilink->next;
1485: }
1486: }
1487: } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE && jac->nsplits == 2) {
1488: PetscCall(VecSet(y, 0.0));
1489: /* solve on first block for first block variables */
1490: PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
1491: PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, INSERT_VALUES, SCATTER_FORWARD));
1492: PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1493: PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1494: PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1495: PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1496: PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1497: PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1499: /* compute the residual only onto second block variables using first block variables */
1500: PetscCall(MatMult(jac->Afield[1], ilink->y, ilink->next->x));
1501: ilink = ilink->next;
1502: PetscCall(VecScale(ilink->x, -1.0));
1503: PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1504: PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1506: /* solve on second block variables */
1507: PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1508: PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1509: PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1510: PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1511: PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1512: PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1513: } else if (jac->type == PC_COMPOSITE_MULTIPLICATIVE || jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1514: if (!jac->w1) {
1515: PetscCall(VecDuplicate(x, &jac->w1));
1516: PetscCall(VecDuplicate(x, &jac->w2));
1517: }
1518: PetscCall(VecSet(y, 0.0));
1519: PetscCall(FieldSplitSplitSolveAdd(ilink, x, y));
1520: cnt = 1;
1521: while (ilink->next) {
1522: ilink = ilink->next;
1523: /* compute the residual only over the part of the vector needed */
1524: PetscCall(MatMult(jac->Afield[cnt++], y, ilink->x));
1525: PetscCall(VecScale(ilink->x, -1.0));
1526: PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1527: PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1528: PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1529: PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1530: PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1531: PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1532: PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1533: PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1534: }
1535: if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1536: cnt -= 2;
1537: while (ilink->previous) {
1538: ilink = ilink->previous;
1539: /* compute the residual only over the part of the vector needed */
1540: PetscCall(MatMult(jac->Afield[cnt--], y, ilink->x));
1541: PetscCall(VecScale(ilink->x, -1.0));
1542: PetscCall(VecScatterBegin(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1543: PetscCall(VecScatterEnd(ilink->sctx, x, ilink->x, ADD_VALUES, SCATTER_FORWARD));
1544: PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1545: PetscCall(KSPSolve(ilink->ksp, ilink->x, ilink->y));
1546: PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1547: PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1548: PetscCall(VecScatterBegin(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1549: PetscCall(VecScatterEnd(ilink->sctx, ilink->y, y, ADD_VALUES, SCATTER_REVERSE));
1550: }
1551: }
1552: } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Unsupported or unknown composition %d", (int)jac->type);
1553: PetscFunctionReturn(PETSC_SUCCESS);
1554: }
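
/*
   Written out, the compositions applied above are (a sketch, with R_i the restriction onto
   split i and A_ii the corresponding diagonal block):

     additive:                  y = sum_i R_i^T A_ii^{-1} R_i x
     multiplicative:            a block Gauss-Seidel sweep, where each split solves against
                                the residual of x updated with the previously computed splits
     symmetric multiplicative:  the forward sweep above followed by the same sweep in reverse
*/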
1556: static PetscErrorCode PCApply_FieldSplit_GKB(PC pc, Vec x, Vec y)
1557: {
1558: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1559: PC_FieldSplitLink ilinkA = jac->head, ilinkD = ilinkA->next;
1560: KSP ksp = ilinkA->ksp;
1561: Vec u, v, Hu, d, work1, work2;
1562: PetscScalar alpha, z, nrmz2, *vecz;
1563: PetscReal lowbnd, nu, beta;
1564: PetscInt j, iterGKB;
1566: PetscFunctionBegin;
1567: PetscCall(VecScatterBegin(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1568: PetscCall(VecScatterBegin(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1569: PetscCall(VecScatterEnd(ilinkA->sctx, x, ilinkA->x, INSERT_VALUES, SCATTER_FORWARD));
1570: PetscCall(VecScatterEnd(ilinkD->sctx, x, ilinkD->x, INSERT_VALUES, SCATTER_FORWARD));
1572: u = jac->u;
1573: v = jac->v;
1574: Hu = jac->Hu;
1575: d = jac->d;
1576: work1 = jac->w1;
1577: work2 = jac->w2;
1578: vecz = jac->vecz;
1580: /* Change RHS to comply with matrix regularization H = A + nu*B*B' */
1581: /* Add q = q + nu*B*b */
1582: if (jac->gkbnu) {
1583: nu = jac->gkbnu;
1584: PetscCall(VecScale(ilinkD->x, jac->gkbnu));
1585: PetscCall(MatMultAdd(jac->B, ilinkD->x, ilinkA->x, ilinkA->x)); /* q = q + nu*B*b */
1586: } else {
1587: /* Situation when no augmented Lagrangian is used. Then we set inner */
1588: /* matrix N = I in [Ar13], and thus nu = 1. */
1589: nu = 1;
1590: }
1592: /* Transform rhs from [q,tilde{b}] to [0,b] */
1593: PetscCall(PetscLogEventBegin(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
1594: PetscCall(KSPSolve(ksp, ilinkA->x, ilinkA->y));
1595: PetscCall(KSPCheckSolve(ksp, pc, ilinkA->y));
1596: PetscCall(PetscLogEventEnd(ilinkA->event, ksp, ilinkA->x, ilinkA->y, NULL));
1597: PetscCall(MatMultHermitianTranspose(jac->B, ilinkA->y, work1));
1598: PetscCall(VecAXPBY(work1, 1.0 / nu, -1.0, ilinkD->x)); /* c = b - B'*x */
1600: /* First step of algorithm */
1601:   PetscCall(VecNorm(work1, NORM_2, &beta)); /* beta = sqrt(nu*c'*c) */
1602: KSPCheckDot(ksp, beta);
1603: beta = PetscSqrtReal(nu) * beta;
1604: PetscCall(VecAXPBY(v, nu / beta, 0.0, work1)); /* v = nu/beta *c */
1605: PetscCall(MatMult(jac->B, v, work2)); /* u = H^{-1}*B*v */
1606: PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
1607: PetscCall(KSPSolve(ksp, work2, u));
1608: PetscCall(KSPCheckSolve(ksp, pc, u));
1609: PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
1610: PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */
1611: PetscCall(VecDot(Hu, u, &alpha));
1612: KSPCheckDot(ksp, alpha);
1613: PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
1614: alpha = PetscSqrtReal(PetscAbsScalar(alpha));
1615: PetscCall(VecScale(u, 1.0 / alpha));
1616:   PetscCall(VecAXPBY(d, 1.0 / alpha, 0.0, v)); /* d = v/alpha */
1618: z = beta / alpha;
1619: vecz[1] = z;
1621: /* Computation of first iterate x(1) and p(1) */
1622: PetscCall(VecAXPY(ilinkA->y, z, u));
1623: PetscCall(VecCopy(d, ilinkD->y));
1624: PetscCall(VecScale(ilinkD->y, -z));
1626: iterGKB = 1;
1627: lowbnd = 2 * jac->gkbtol;
1628: if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
1630: while (iterGKB < jac->gkbmaxit && lowbnd > jac->gkbtol) {
1631: iterGKB += 1;
1632: PetscCall(MatMultHermitianTranspose(jac->B, u, work1)); /* v <- nu*(B'*u-alpha/nu*v) */
1633: PetscCall(VecAXPBY(v, nu, -alpha, work1));
1634:     PetscCall(VecNorm(v, NORM_2, &beta)); /* beta = sqrt(v'*v/nu) */
1635: beta = beta / PetscSqrtReal(nu);
1636: PetscCall(VecScale(v, 1.0 / beta));
1637: PetscCall(MatMult(jac->B, v, work2)); /* u <- H^{-1}*(B*v-beta*H*u) */
1638: PetscCall(MatMult(jac->H, u, Hu));
1639: PetscCall(VecAXPY(work2, -beta, Hu));
1640: PetscCall(PetscLogEventBegin(ilinkA->event, ksp, work2, u, NULL));
1641: PetscCall(KSPSolve(ksp, work2, u));
1642: PetscCall(KSPCheckSolve(ksp, pc, u));
1643: PetscCall(PetscLogEventEnd(ilinkA->event, ksp, work2, u, NULL));
1644: PetscCall(MatMult(jac->H, u, Hu)); /* alpha = u'*H*u */
1645: PetscCall(VecDot(Hu, u, &alpha));
1646: KSPCheckDot(ksp, alpha);
1647: PetscCheck(PetscRealPart(alpha) > 0.0, PETSC_COMM_SELF, PETSC_ERR_NOT_CONVERGED, "GKB preconditioner diverged, H is not positive definite");
1648: alpha = PetscSqrtReal(PetscAbsScalar(alpha));
1649: PetscCall(VecScale(u, 1.0 / alpha));
1651:     z = -beta / alpha * z; /* z <- -beta/alpha*z */
1652: vecz[0] = z;
1654: /* Computation of new iterate x(i+1) and p(i+1) */
1655: PetscCall(VecAXPBY(d, 1.0 / alpha, -beta / alpha, v)); /* d = (v-beta*d)/alpha */
1656: PetscCall(VecAXPY(ilinkA->y, z, u)); /* r = r + z*u */
1657: PetscCall(VecAXPY(ilinkD->y, -z, d)); /* p = p - z*d */
1658:     PetscCall(MatMult(jac->H, ilinkA->y, Hu)); /* squared H-norm of the current iterate: nrmz2 = y'*H*y */
1659: PetscCall(VecDot(Hu, ilinkA->y, &nrmz2));
1661: /* Compute Lower Bound estimate */
1662: if (iterGKB > jac->gkbdelay) {
1663: lowbnd = 0.0;
1664: for (j = 0; j < jac->gkbdelay; j++) lowbnd += PetscAbsScalar(vecz[j] * vecz[j]);
1665: lowbnd = PetscSqrtReal(lowbnd / PetscAbsScalar(nrmz2));
1666: }
1668: for (j = 0; j < jac->gkbdelay - 1; j++) vecz[jac->gkbdelay - j - 1] = vecz[jac->gkbdelay - j - 2];
1669: if (jac->gkbmonitor) PetscCall(PetscViewerASCIIPrintf(jac->gkbviewer, "%3" PetscInt_FMT " GKB Lower bound estimate %14.12e\n", iterGKB, (double)lowbnd));
1670: }
1672: PetscCall(VecScatterBegin(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1673: PetscCall(VecScatterEnd(ilinkA->sctx, ilinkA->y, y, INSERT_VALUES, SCATTER_REVERSE));
1674: PetscCall(VecScatterBegin(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1675: PetscCall(VecScatterEnd(ilinkD->sctx, ilinkD->y, y, INSERT_VALUES, SCATTER_REVERSE));
1676: PetscFunctionReturn(PETSC_SUCCESS);
1677: }
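
/*
   The loop above implements the Golub-Kahan bidiagonalization recurrences annotated inline;
   roughly, in the notation of those comments (a sketch, with H the regularized (0,0) block):

     v_{i+1} = nu (B' u_i - (alpha_i/nu) v_i) / beta_{i+1}           beta_{i+1} = ||.||_2 / sqrt(nu)
     u_{i+1} = H^{-1} (B v_{i+1} - beta_{i+1} H u_i) / alpha_{i+1}   alpha chosen so that u'*H*u = 1
     z_{i+1} = -(beta_{i+1}/alpha_{i+1}) z_i
     d_{i+1} = (v_{i+1} - beta_{i+1} d_i) / alpha_{i+1}
     x_{i+1} = x_i + z_{i+1} u_{i+1}          p_{i+1} = p_i - z_{i+1} d_{i+1}

   The sweep stops once the lower bound estimate assembled from the last gkbdelay values of z
   falls below gkbtol, or after gkbmaxit iterations.
*/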
1679: #define FieldSplitSplitSolveAddTranspose(ilink, xx, yy) \
1680: ((PetscErrorCode)(VecScatterBegin(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || VecScatterEnd(ilink->sctx, xx, ilink->y, INSERT_VALUES, SCATTER_FORWARD) || PetscLogEventBegin(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || \
1681: KSPSolveTranspose(ilink->ksp, ilink->y, ilink->x) || KSPCheckSolve(ilink->ksp, pc, ilink->x) || PetscLogEventEnd(ilink->event, ilink->ksp, ilink->y, ilink->x, NULL) || VecScatterBegin(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE) || \
1682: VecScatterEnd(ilink->sctx, ilink->x, yy, ADD_VALUES, SCATTER_REVERSE)))
1684: static PetscErrorCode PCApplyTranspose_FieldSplit(PC pc, Vec x, Vec y)
1685: {
1686: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1687: PC_FieldSplitLink ilink = jac->head;
1688: PetscInt bs;
1690: PetscFunctionBegin;
1691: if (jac->type == PC_COMPOSITE_ADDITIVE) {
1692: PetscBool matnest;
1694: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATNEST, &matnest));
1695: if (jac->defaultsplit && !matnest) {
1696: PetscCall(VecGetBlockSize(x, &bs));
1697: PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of x vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1698: PetscCall(VecGetBlockSize(y, &bs));
1699: PetscCheck(jac->bs <= 0 || bs == jac->bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Blocksize of y vector %" PetscInt_FMT " does not match fieldsplit blocksize %" PetscInt_FMT, bs, jac->bs);
1700: PetscCall(VecStrideGatherAll(x, jac->x, INSERT_VALUES));
1701: while (ilink) {
1702: PetscCall(PetscLogEventBegin(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1703: PetscCall(KSPSolveTranspose(ilink->ksp, ilink->x, ilink->y));
1704: PetscCall(KSPCheckSolve(ilink->ksp, pc, ilink->y));
1705: PetscCall(PetscLogEventEnd(ilink->event, ilink->ksp, ilink->x, ilink->y, NULL));
1706: ilink = ilink->next;
1707: }
1708: PetscCall(VecStrideScatterAll(jac->y, y, INSERT_VALUES));
1709: } else {
1710: PetscCall(VecSet(y, 0.0));
1711: while (ilink) {
1712: PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1713: ilink = ilink->next;
1714: }
1715: }
1716: } else {
1717: if (!jac->w1) {
1718: PetscCall(VecDuplicate(x, &jac->w1));
1719: PetscCall(VecDuplicate(x, &jac->w2));
1720: }
1721: PetscCall(VecSet(y, 0.0));
1722: if (jac->type == PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE) {
1723: PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1724: while (ilink->next) {
1725: ilink = ilink->next;
1726: PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1727: PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1728: PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1729: }
1730: while (ilink->previous) {
1731: ilink = ilink->previous;
1732: PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1733: PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1734: PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1735: }
1736: } else {
1737: while (ilink->next) { /* get to last entry in linked list */
1738: ilink = ilink->next;
1739: }
1740: PetscCall(FieldSplitSplitSolveAddTranspose(ilink, x, y));
1741: while (ilink->previous) {
1742: ilink = ilink->previous;
1743: PetscCall(MatMultTranspose(pc->mat, y, jac->w1));
1744: PetscCall(VecWAXPY(jac->w2, -1.0, jac->w1, x));
1745: PetscCall(FieldSplitSplitSolveAddTranspose(ilink, jac->w2, y));
1746: }
1747: }
1748: }
1749: PetscFunctionReturn(PETSC_SUCCESS);
1750: }
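
/*
   For the transposed application above: the additive composition is symmetric in the splits,
   so it is applied unchanged, while a (non-symmetric) multiplicative sweep must be applied in
   the reverse split order with transposed solves, since (P_1 P_2)^T = P_2^T P_1^T; hence the
   walk to the end of the linked list before sweeping backward.
*/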
1752: static PetscErrorCode PCReset_FieldSplit(PC pc)
1753: {
1754: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1755: PC_FieldSplitLink ilink = jac->head, next;
1757: PetscFunctionBegin;
1758: while (ilink) {
1759: PetscCall(KSPDestroy(&ilink->ksp));
1760: PetscCall(VecDestroy(&ilink->x));
1761: PetscCall(VecDestroy(&ilink->y));
1762: PetscCall(VecDestroy(&ilink->z));
1763: PetscCall(VecScatterDestroy(&ilink->sctx));
1764: PetscCall(ISDestroy(&ilink->is));
1765: PetscCall(ISDestroy(&ilink->is_col));
1766: PetscCall(PetscFree(ilink->splitname));
1767: PetscCall(PetscFree(ilink->fields));
1768: PetscCall(PetscFree(ilink->fields_col));
1769: next = ilink->next;
1770: PetscCall(PetscFree(ilink));
1771: ilink = next;
1772: }
1773: jac->head = NULL;
1774: PetscCall(PetscFree2(jac->x, jac->y));
1775: if (jac->mat && jac->mat != jac->pmat) {
1776: PetscCall(MatDestroyMatrices(jac->nsplits, &jac->mat));
1777: } else if (jac->mat) {
1778: jac->mat = NULL;
1779: }
1780: if (jac->pmat) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->pmat));
1781: if (jac->Afield) PetscCall(MatDestroyMatrices(jac->nsplits, &jac->Afield));
1782: jac->nsplits = 0;
1783: PetscCall(VecDestroy(&jac->w1));
1784: PetscCall(VecDestroy(&jac->w2));
1785: if (jac->schur) PetscCall(PetscObjectCompose((PetscObject)jac->schur, "AinvB", NULL));
1786: PetscCall(MatDestroy(&jac->schur));
1787: PetscCall(MatDestroy(&jac->schurp));
1788: PetscCall(MatDestroy(&jac->schur_user));
1789: PetscCall(KSPDestroy(&jac->kspschur));
1790: PetscCall(KSPDestroy(&jac->kspupper));
1791: PetscCall(MatDestroy(&jac->B));
1792: PetscCall(MatDestroy(&jac->C));
1793: PetscCall(MatDestroy(&jac->H));
1794: PetscCall(VecDestroy(&jac->u));
1795: PetscCall(VecDestroy(&jac->v));
1796: PetscCall(VecDestroy(&jac->Hu));
1797: PetscCall(VecDestroy(&jac->d));
1798: PetscCall(PetscFree(jac->vecz));
1799: PetscCall(PetscViewerDestroy(&jac->gkbviewer));
1800: jac->isrestrict = PETSC_FALSE;
1801: PetscFunctionReturn(PETSC_SUCCESS);
1802: }
1804: static PetscErrorCode PCDestroy_FieldSplit(PC pc)
1805: {
1806: PetscFunctionBegin;
1807: PetscCall(PCReset_FieldSplit(pc));
1808: PetscCall(PetscFree(pc->data));
1809: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
1810: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", NULL));
1811: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", NULL));
1812: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", NULL));
1813: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", NULL));
1814: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", NULL));
1815: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", NULL));
1816: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
1817: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
1818: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
1819: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
1820: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
1821: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
1822: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
1823: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
1824: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
1825: PetscFunctionReturn(PETSC_SUCCESS);
1826: }
1828: static PetscErrorCode PCSetFromOptions_FieldSplit(PC pc, PetscOptionItems PetscOptionsObject)
1829: {
1830: PetscInt bs;
1831: PetscBool flg;
1832: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1833: PCCompositeType ctype;
1835: PetscFunctionBegin;
1836: PetscOptionsHeadBegin(PetscOptionsObject, "FieldSplit options");
1837: PetscCall(PetscOptionsBool("-pc_fieldsplit_dm_splits", "Whether to use DMCreateFieldDecomposition() for splits", "PCFieldSplitSetDMSplits", jac->dm_splits, &jac->dm_splits, NULL));
1838: PetscCall(PetscOptionsInt("-pc_fieldsplit_block_size", "Blocksize that defines number of fields", "PCFieldSplitSetBlockSize", jac->bs, &bs, &flg));
1839: if (flg) PetscCall(PCFieldSplitSetBlockSize(pc, bs));
1840: jac->diag_use_amat = pc->useAmat;
1841: PetscCall(PetscOptionsBool("-pc_fieldsplit_diag_use_amat", "Use Amat (not Pmat) to extract diagonal fieldsplit blocks", "PCFieldSplitSetDiagUseAmat", jac->diag_use_amat, &jac->diag_use_amat, NULL));
1842: jac->offdiag_use_amat = pc->useAmat;
1843: PetscCall(PetscOptionsBool("-pc_fieldsplit_off_diag_use_amat", "Use Amat (not Pmat) to extract off-diagonal fieldsplit blocks", "PCFieldSplitSetOffDiagUseAmat", jac->offdiag_use_amat, &jac->offdiag_use_amat, NULL));
1844: PetscCall(PetscOptionsBool("-pc_fieldsplit_detect_saddle_point", "Form 2-way split by detecting zero diagonal entries", "PCFieldSplitSetDetectSaddlePoint", jac->detect, &jac->detect, NULL));
1845: PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, jac->detect)); /* Sets split type and Schur PC type */
1846: PetscCall(PetscOptionsEnum("-pc_fieldsplit_type", "Type of composition", "PCFieldSplitSetType", PCCompositeTypes, (PetscEnum)jac->type, (PetscEnum *)&ctype, &flg));
1847: if (flg) PetscCall(PCFieldSplitSetType(pc, ctype));
1848: /* Only setup fields once */
1849: if (jac->bs > 0 && jac->nsplits == 0) {
1850:     /* only allow the user to set fields from the command line here;
1851:        otherwise they can be set in PCFieldSplitSetDefaults() */
1852: PetscCall(PCFieldSplitSetRuntimeSplits_Private(pc));
1853: if (jac->splitdefined) PetscCall(PetscInfo(pc, "Splits defined using the options database\n"));
1854: }
1855: if (jac->type == PC_COMPOSITE_SCHUR) {
1856: PetscCall(PetscOptionsGetEnum(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_fieldsplit_schur_factorization_type", PCFieldSplitSchurFactTypes, (PetscEnum *)&jac->schurfactorization, &flg));
1857: if (flg) PetscCall(PetscInfo(pc, "Deprecated use of -pc_fieldsplit_schur_factorization_type\n"));
1858: PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_fact_type", "Which off-diagonal parts of the block factorization to use", "PCFieldSplitSetSchurFactType", PCFieldSplitSchurFactTypes, (PetscEnum)jac->schurfactorization, (PetscEnum *)&jac->schurfactorization, NULL));
1859: PetscCall(PetscOptionsEnum("-pc_fieldsplit_schur_precondition", "How to build preconditioner for Schur complement", "PCFieldSplitSetSchurPre", PCFieldSplitSchurPreTypes, (PetscEnum)jac->schurpre, (PetscEnum *)&jac->schurpre, NULL));
1860: PetscCall(PetscOptionsScalar("-pc_fieldsplit_schur_scale", "Scale Schur complement", "PCFieldSplitSetSchurScale", jac->schurscale, &jac->schurscale, NULL));
1861: } else if (jac->type == PC_COMPOSITE_GKB) {
1862: PetscCall(PetscOptionsReal("-pc_fieldsplit_gkb_tol", "The tolerance for the lower bound stopping criterion", "PCFieldSplitSetGKBTol", jac->gkbtol, &jac->gkbtol, NULL));
1863: PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_delay", "The delay value for lower bound criterion", "PCFieldSplitSetGKBDelay", jac->gkbdelay, &jac->gkbdelay, NULL));
1864: PetscCall(PetscOptionsBoundedReal("-pc_fieldsplit_gkb_nu", "Parameter in augmented Lagrangian approach", "PCFieldSplitSetGKBNu", jac->gkbnu, &jac->gkbnu, NULL, 0.0));
1865: PetscCall(PetscOptionsInt("-pc_fieldsplit_gkb_maxit", "Maximum allowed number of iterations", "PCFieldSplitSetGKBMaxit", jac->gkbmaxit, &jac->gkbmaxit, NULL));
1866: PetscCall(PetscOptionsBool("-pc_fieldsplit_gkb_monitor", "Prints number of GKB iterations and error", "PCFieldSplitGKB", jac->gkbmonitor, &jac->gkbmonitor, NULL));
1867: }
1868: /*
1869:     In the initial call to this routine the sub-solver data structures do not yet exist, so we cannot call KSPSetFromOptions() on them.
1870:     But after the initial setup of ALL the layers of sub-solvers is completed, we do want to call KSPSetFromOptions() on the sub-solvers every time it
1871:     is called on the outer solver, in case changes were made in the options database.
1873:     Even after PCSetUp_FieldSplit() is called, the options inside the inner levels of sub-solvers may still not have been set, so we only call KSPSetFromOptions()
1874:     if we know that the entire stack of sub-solvers below this one has been completely instantiated; we check this by seeing if any solver iterations have completed.
1875:     Without this extra check, the test p2p1fetidp_olof_full and others fail with incorrect matrix types.
1877:     Calling the KSPSetFromOptions() below could have a negative side effect.
1879:     If one captured the PetscObjectState of the options database, one could skip these calls when the database has not changed since the previous call.
1880: */
1881: if (jac->issetup) {
1882: PC_FieldSplitLink ilink = jac->head;
1883: if (jac->type == PC_COMPOSITE_SCHUR) {
1884: if (jac->kspupper && jac->kspupper->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspupper));
1885: if (jac->kspschur && jac->kspschur->totalits > 0) PetscCall(KSPSetFromOptions(jac->kspschur));
1886: }
1887: while (ilink) {
1888: if (ilink->ksp->totalits > 0) PetscCall(KSPSetFromOptions(ilink->ksp));
1889: ilink = ilink->next;
1890: }
1891: }
1892: PetscOptionsHeadEnd();
1893: PetscFunctionReturn(PETSC_SUCCESS);
1894: }
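
/*
   A representative options set consumed by the routine above (a sketch using only option names
   registered there; the -fieldsplit_<splitname>_ prefixes are attached to the split solvers in
   PCFieldSplitSetFields_FieldSplit() and PCFieldSplitSetIS_FieldSplit() below):

     -pc_type fieldsplit -pc_fieldsplit_type schur
     -pc_fieldsplit_schur_fact_type full -pc_fieldsplit_schur_precondition selfp
     -fieldsplit_0_ksp_type gmres -fieldsplit_1_pc_type jacobi
*/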
1896: static PetscErrorCode PCFieldSplitSetFields_FieldSplit(PC pc, const char splitname[], PetscInt n, const PetscInt *fields, const PetscInt *fields_col)
1897: {
1898: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1899: PC_FieldSplitLink ilink, next = jac->head;
1900: char prefix[128];
1901: PetscInt i;
1902: PetscLogEvent nse;
1904: PetscFunctionBegin;
1905: if (jac->splitdefined) {
1906: PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
1907: PetscFunctionReturn(PETSC_SUCCESS);
1908: }
1909: for (i = 0; i < n; i++) PetscCheck(fields[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", fields[i]);
1910: PetscCall(PetscNew(&ilink));
1911: if (splitname) {
1912: PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
1913: } else {
1914:     PetscCall(PetscMalloc1(8, &ilink->splitname));
1915:     PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits)); /* match PCFieldSplitSetIS_FieldSplit() so multi-digit split numbers are not truncated */
1916: }
1917: PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
1918:   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* splits with index greater than 4 are all logged under the event of split 4 */
1919: PetscCall(PetscMalloc1(n, &ilink->fields));
1920: PetscCall(PetscArraycpy(ilink->fields, fields, n));
1921: PetscCall(PetscMalloc1(n, &ilink->fields_col));
1922: PetscCall(PetscArraycpy(ilink->fields_col, fields_col, n));
1924: ilink->nfields = n;
1925: ilink->next = NULL;
1926: PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
1927: PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
1928: PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
1929: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
1930: PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
1932: PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
1933: PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
1935: if (!next) {
1936: jac->head = ilink;
1937: ilink->previous = NULL;
1938: } else {
1939: while (next->next) next = next->next;
1940: next->next = ilink;
1941: ilink->previous = next;
1942: }
1943: jac->nsplits++;
1944: PetscFunctionReturn(PETSC_SUCCESS);
1945: }
1947: static PetscErrorCode PCFieldSplitSchurGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1948: {
1949: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1951: PetscFunctionBegin;
1952: *subksp = NULL;
1953: if (n) *n = 0;
1954: if (jac->type == PC_COMPOSITE_SCHUR) {
1955: PetscInt nn;
1957: PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitSchurGetSubKSP()");
1958: PetscCheck(jac->nsplits == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unexpected number of splits %" PetscInt_FMT " != 2", jac->nsplits);
1959: nn = jac->nsplits + (jac->kspupper != jac->head->ksp ? 1 : 0);
1960: PetscCall(PetscMalloc1(nn, subksp));
1961: (*subksp)[0] = jac->head->ksp;
1962: (*subksp)[1] = jac->kspschur;
1963: if (jac->kspupper != jac->head->ksp) (*subksp)[2] = jac->kspupper;
1964: if (n) *n = nn;
1965: }
1966: PetscFunctionReturn(PETSC_SUCCESS);
1967: }
1969: static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit_Schur(PC pc, PetscInt *n, KSP **subksp)
1970: {
1971: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1973: PetscFunctionBegin;
1974: PetscCheck(jac->schur, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must call KSPSetUp() or PCSetUp() before calling PCFieldSplitGetSubKSP()");
1975: PetscCall(PetscMalloc1(jac->nsplits, subksp));
1976: PetscCall(MatSchurComplementGetKSP(jac->schur, *subksp));
1978: (*subksp)[1] = jac->kspschur;
1979: if (n) *n = jac->nsplits;
1980: PetscFunctionReturn(PETSC_SUCCESS);
1981: }
1983: static PetscErrorCode PCFieldSplitGetSubKSP_FieldSplit(PC pc, PetscInt *n, KSP **subksp)
1984: {
1985: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
1986: PetscInt cnt = 0;
1987: PC_FieldSplitLink ilink = jac->head;
1989: PetscFunctionBegin;
1990: PetscCall(PetscMalloc1(jac->nsplits, subksp));
1991: while (ilink) {
1992: (*subksp)[cnt++] = ilink->ksp;
1993: ilink = ilink->next;
1994: }
1995: PetscCheck(cnt == jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupt PCFIELDSPLIT object: number of splits in linked list %" PetscInt_FMT " does not match number in object %" PetscInt_FMT, cnt, jac->nsplits);
1996: if (n) *n = jac->nsplits;
1997: PetscFunctionReturn(PETSC_SUCCESS);
1998: }
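
/*
   A minimal caller-side sketch for the getters above (via the public PCFieldSplitGetSubKSP()
   wrapper that dispatches here; the array is obtained with PetscMalloc1(), so the caller is
   responsible for freeing it):

     PetscInt n;
     KSP     *subksp;

     PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
     for (PetscInt i = 0; i < n; i++) PetscCall(KSPSetType(subksp[i], KSPGMRES));
     PetscCall(PetscFree(subksp));
*/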
2000: /*@
2001: PCFieldSplitRestrictIS - Restricts the fieldsplit `IS`s to be within a given `IS`.
2003: Input Parameters:
2004: + pc - the preconditioner context
2005: - isy - the index set that defines the indices to which the fieldsplit is to be restricted
2007: Level: advanced
2009: Developer Notes:
2010: It seems the resulting `IS`s will not cover the entire space, so
2011: how can they define a convergent preconditioner? Needs explaining.
2013: .seealso: [](sec_block_matrices), `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2014: @*/
2015: PetscErrorCode PCFieldSplitRestrictIS(PC pc, IS isy)
2016: {
2017: PetscFunctionBegin;
2020: PetscTryMethod(pc, "PCFieldSplitRestrictIS_C", (PC, IS), (pc, isy));
2021: PetscFunctionReturn(PETSC_SUCCESS);
2022: }
2024: static PetscErrorCode PCFieldSplitRestrictIS_FieldSplit(PC pc, IS isy)
2025: {
2026: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2027: PC_FieldSplitLink ilink = jac->head, next;
2028: PetscInt localsize, size, sizez, i;
2029: const PetscInt *ind, *indz;
2030: PetscInt *indc, *indcz;
2031: PetscBool flg;
2033: PetscFunctionBegin;
2034: PetscCall(ISGetLocalSize(isy, &localsize));
2035: PetscCallMPI(MPI_Scan(&localsize, &size, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)isy)));
2036: size -= localsize;
2037: while (ilink) {
2038: IS isrl, isr;
2039: PC subpc;
2040: PetscCall(ISEmbed(ilink->is, isy, PETSC_TRUE, &isrl));
2041: PetscCall(ISGetLocalSize(isrl, &localsize));
2042: PetscCall(PetscMalloc1(localsize, &indc));
2043: PetscCall(ISGetIndices(isrl, &ind));
2044: PetscCall(PetscArraycpy(indc, ind, localsize));
2045: PetscCall(ISRestoreIndices(isrl, &ind));
2046: PetscCall(ISDestroy(&isrl));
2047:     for (i = 0; i < localsize; i++) indc[i] += size;
2048: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)isy), localsize, indc, PETSC_OWN_POINTER, &isr));
2049: PetscCall(PetscObjectReference((PetscObject)isr));
2050: PetscCall(ISDestroy(&ilink->is));
2051: ilink->is = isr;
2052: PetscCall(PetscObjectReference((PetscObject)isr));
2053: PetscCall(ISDestroy(&ilink->is_col));
2054: ilink->is_col = isr;
2055: PetscCall(ISDestroy(&isr));
2056: PetscCall(KSPGetPC(ilink->ksp, &subpc));
2057: PetscCall(PetscObjectTypeCompare((PetscObject)subpc, PCFIELDSPLIT, &flg));
2058: if (flg) {
2059: IS iszl, isz;
2060: MPI_Comm comm;
2061: PetscCall(ISGetLocalSize(ilink->is, &localsize));
2062: comm = PetscObjectComm((PetscObject)ilink->is);
2063: PetscCall(ISEmbed(isy, ilink->is, PETSC_TRUE, &iszl));
2064: PetscCallMPI(MPI_Scan(&localsize, &sizez, 1, MPIU_INT, MPI_SUM, comm));
2065: sizez -= localsize;
2066: PetscCall(ISGetLocalSize(iszl, &localsize));
2067: PetscCall(PetscMalloc1(localsize, &indcz));
2068: PetscCall(ISGetIndices(iszl, &indz));
2069: PetscCall(PetscArraycpy(indcz, indz, localsize));
2070: PetscCall(ISRestoreIndices(iszl, &indz));
2071: PetscCall(ISDestroy(&iszl));
2072:       for (i = 0; i < localsize; i++) indcz[i] += sizez;
2073: PetscCall(ISCreateGeneral(comm, localsize, indcz, PETSC_OWN_POINTER, &isz));
2074: PetscCall(PCFieldSplitRestrictIS(subpc, isz));
2075: PetscCall(ISDestroy(&isz));
2076: }
2077: next = ilink->next;
2078: ilink = next;
2079: }
2080: jac->isrestrict = PETSC_TRUE;
2081: PetscFunctionReturn(PETSC_SUCCESS);
2082: }
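
/*
   A small worked example of the restriction above (serial case, so the MPI_Scan offset is 0):
   with ilink->is = {3,7,9} and isy = {1,3,5,7}, ISEmbed() with drop = PETSC_TRUE returns the
   positions of the split's indices inside isy, here {1,3} (the index 9 does not occur in isy
   and is dropped), and those positions become the split's indices in the restricted numbering.
*/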
2084: static PetscErrorCode PCFieldSplitSetIS_FieldSplit(PC pc, const char splitname[], IS is)
2085: {
2086: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2087: PC_FieldSplitLink ilink, next = jac->head;
2088: char prefix[128];
2089: PetscLogEvent nse;
2091: PetscFunctionBegin;
2092: if (jac->splitdefined) {
2093: PetscCall(PetscInfo(pc, "Ignoring new split \"%s\" because the splits have already been defined\n", splitname));
2094: PetscFunctionReturn(PETSC_SUCCESS);
2095: }
2096: PetscCall(PetscNew(&ilink));
2097: if (splitname) {
2098: PetscCall(PetscStrallocpy(splitname, &ilink->splitname));
2099: } else {
2100: PetscCall(PetscMalloc1(8, &ilink->splitname));
2101: PetscCall(PetscSNPrintf(ilink->splitname, 7, "%" PetscInt_FMT, jac->nsplits));
2102: }
2103: PetscCall(PetscMPIIntCast(jac->nsplits, &nse));
2104:   ilink->event = jac->nsplits < 5 ? KSP_Solve_FS_0 + nse : KSP_Solve_FS_0 + 4; /* splits with index greater than 4 are all logged under the event of split 4 */
2105: PetscCall(PetscObjectReference((PetscObject)is));
2106: PetscCall(ISDestroy(&ilink->is));
2107: ilink->is = is;
2108: PetscCall(PetscObjectReference((PetscObject)is));
2109: PetscCall(ISDestroy(&ilink->is_col));
2110: ilink->is_col = is;
2111: ilink->next = NULL;
2112: PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &ilink->ksp));
2113: PetscCall(KSPSetNestLevel(ilink->ksp, pc->kspnestlevel));
2114: PetscCall(KSPSetErrorIfNotConverged(ilink->ksp, pc->erroriffailure));
2115: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ilink->ksp, (PetscObject)pc, 1));
2116: PetscCall(KSPSetType(ilink->ksp, KSPPREONLY));
2118: PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sfieldsplit_%s_", ((PetscObject)pc)->prefix ? ((PetscObject)pc)->prefix : "", ilink->splitname));
2119: PetscCall(KSPSetOptionsPrefix(ilink->ksp, prefix));
2121: if (!next) {
2122: jac->head = ilink;
2123: ilink->previous = NULL;
2124: } else {
2125: while (next->next) next = next->next;
2126: next->next = ilink;
2127: ilink->previous = next;
2128: }
2129: jac->nsplits++;
2130: PetscFunctionReturn(PETSC_SUCCESS);
2131: }
2133: /*@
2134: PCFieldSplitSetFields - Sets the fields that define one particular split in `PCFIELDSPLIT`
2136: Logically Collective
2138: Input Parameters:
2139: + pc - the preconditioner context
2140: . splitname - name of this split, if `NULL` the number of the split is used
2141: . n - the number of fields in this split
2142: . fields - the fields in this split
2143: - fields_col - generally the same as `fields`, if it does not match `fields` then the submatrix that is solved for this set of fields comes from an off-diagonal block
2144: of the matrix and `fields_col` provides the column indices for that block
2146: Options Database Key:
2147: . -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
2149: Level: intermediate
2151: Notes:
2152: Use `PCFieldSplitSetIS()` to set a general set of indices as a split.
2154: If the matrix used to construct the preconditioner is `MATNEST` then field i refers to the `is_row[i]` `IS` passed to `MatCreateNest()`.
2156: If the matrix used to construct the preconditioner is not `MATNEST` then
2157: `PCFieldSplitSetFields()` is for defining fields as strided blocks (based on the block size provided to the matrix with `MatSetBlockSize()` or
2158: to the `PC` with `PCFieldSplitSetBlockSize()`). For example, if the block
2159: size is three then one can define a split as 0, or 1 or 2 or 0,1 or 0,2 or 1,2 which mean
2160: 0xx3xx6xx9xx12 ... x1xx4xx7xx ... xx2xx5xx8xx.. 01x34x67x... 0x23x56x8.. x12x45x78x....
2161: where the numbered entries indicate what is in the split.
2163: This function is called once per split (it creates a new split each time). Solve options
2164: for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
2166: `PCFieldSplitSetIS()` does not support having a `fields_col` different from `fields`
2168: Developer Notes:
2169: This routine does not actually create the `IS` representing the split, that is delayed
2170: until `PCSetUp_FieldSplit()`, because information about the vector/matrix layouts may not be
2171: available when this routine is called.
2173: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetIS()`, `PCFieldSplitRestrictIS()`,
2174: `MatSetBlockSize()`, `MatCreateNest()`
2175: @*/
2176: PetscErrorCode PCFieldSplitSetFields(PC pc, const char splitname[], PetscInt n, const PetscInt fields[], const PetscInt fields_col[])
2177: {
2178: PetscFunctionBegin;
2180:   if (splitname) PetscAssertPointer(splitname, 2); /* splitname may be NULL, in which case the number of the split is used */
2181: PetscCheck(n >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Provided number of fields %" PetscInt_FMT " in split \"%s\" not positive", n, splitname);
2182: PetscAssertPointer(fields, 4);
2183: PetscTryMethod(pc, "PCFieldSplitSetFields_C", (PC, const char[], PetscInt, const PetscInt *, const PetscInt *), (pc, splitname, n, fields, fields_col));
2184: PetscFunctionReturn(PETSC_SUCCESS);
2185: }
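
/*
   A minimal usage sketch (assuming a blocked problem with block size 3, two velocity-like
   components and one pressure-like component, as in the Notes above):

     const PetscInt vfields[] = {0, 1}, pfields[] = {2};

     PetscCall(PCSetType(pc, PCFIELDSPLIT));
     PetscCall(PCFieldSplitSetBlockSize(pc, 3));
     PetscCall(PCFieldSplitSetFields(pc, "v", 2, vfields, vfields));
     PetscCall(PCFieldSplitSetFields(pc, "p", 1, pfields, pfields));

   after which the two split solvers answer to the -fieldsplit_v_ and -fieldsplit_p_ prefixes.
*/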
2187: /*@
2188:   PCFieldSplitSetDiagUseAmat - set a flag indicating whether to extract the diagonal blocks from Amat (rather than Pmat) to build
2189:   the sub-matrices associated with each split, where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2191: Logically Collective
2193: Input Parameters:
2194: + pc - the preconditioner object
2195: - flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2197: Options Database Key:
2198: . -pc_fieldsplit_diag_use_amat - use the Amat to provide the diagonal blocks
2200: Level: intermediate
2202: .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetDiagUseAmat()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFIELDSPLIT`
2203: @*/
2204: PetscErrorCode PCFieldSplitSetDiagUseAmat(PC pc, PetscBool flg)
2205: {
2206: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2207: PetscBool isfs;
2209: PetscFunctionBegin;
2211: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2212: PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2213: jac->diag_use_amat = flg;
2214: PetscFunctionReturn(PETSC_SUCCESS);
2215: }
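
/*
   A minimal usage sketch (assuming distinct Amat and Pmat were supplied to KSPSetOperators();
   PCFieldSplitSetOffDiagUseAmat() below is the analogous switch for the off-diagonal blocks):

     PetscCall(KSPSetOperators(ksp, Amat, Pmat));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCFIELDSPLIT));
     PetscCall(PCFieldSplitSetDiagUseAmat(pc, PETSC_TRUE));
*/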
2217: /*@
2218:   PCFieldSplitGetDiagUseAmat - get the flag indicating whether to extract the diagonal blocks from Amat (rather than Pmat) to build
2219:   the sub-matrices associated with each split, where `KSPSetOperators`(ksp,Amat,Pmat) was used to supply the operators.
2221: Logically Collective
2223: Input Parameter:
2224: . pc - the preconditioner object
2226: Output Parameter:
2227: . flg - boolean flag indicating whether or not to use Amat to extract the diagonal blocks from
2229: Level: intermediate
2231: .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetDiagUseAmat()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFIELDSPLIT`
2232: @*/
2233: PetscErrorCode PCFieldSplitGetDiagUseAmat(PC pc, PetscBool *flg)
2234: {
2235: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2236: PetscBool isfs;
2238: PetscFunctionBegin;
2240: PetscAssertPointer(flg, 2);
2241: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2242: PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2243: *flg = jac->diag_use_amat;
2244: PetscFunctionReturn(PETSC_SUCCESS);
2245: }
2247: /*@
2248: PCFieldSplitSetOffDiagUseAmat - set a flag indicating whether to extract the off-diagonal blocks from `Amat` (rather than `Pmat`) to build
2249: the sub-matrices associated with each split, where `KSPSetOperators(ksp,Amat,Pmat)` was used to supply the operators.
2251: Logically Collective
2253: Input Parameters:
2254: + pc - the preconditioner object
2255: - flg - boolean flag indicating whether to use `Amat` to extract the off-diagonal blocks
2257: Options Database Key:
2258: . -pc_fieldsplit_off_diag_use_amat <bool> - use the Amat to extract the off-diagonal blocks
2260: Level: intermediate
2262: .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitGetOffDiagUseAmat()`, `PCFieldSplitSetDiagUseAmat()`, `PCFIELDSPLIT`
2263: @*/
2264: PetscErrorCode PCFieldSplitSetOffDiagUseAmat(PC pc, PetscBool flg)
2265: {
2266: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2267: PetscBool isfs;
2269: PetscFunctionBegin;
2271: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2272: PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2273: jac->offdiag_use_amat = flg;
2274: PetscFunctionReturn(PETSC_SUCCESS);
2275: }
2277: /*@
2278: PCFieldSplitGetOffDiagUseAmat - get the flag indicating whether the off-diagonal blocks are extracted from `Amat` (rather than `Pmat`) to build
2279: the sub-matrices associated with each split, where `KSPSetOperators(ksp,Amat,Pmat)` was used to supply the operators.
2281: Logically Collective
2283: Input Parameter:
2284: . pc - the preconditioner object
2286: Output Parameter:
2287: . flg - boolean flag indicating whether `Amat` is used to extract the off-diagonal blocks
2289: Level: intermediate
2291: .seealso: [](sec_block_matrices), `PC`, `PCSetOperators()`, `KSPSetOperators()`, `PCFieldSplitSetOffDiagUseAmat()`, `PCFieldSplitGetDiagUseAmat()`, `PCFIELDSPLIT`
2292: @*/
2293: PetscErrorCode PCFieldSplitGetOffDiagUseAmat(PC pc, PetscBool *flg)
2294: {
2295: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2296: PetscBool isfs;
2298: PetscFunctionBegin;
2300: PetscAssertPointer(flg, 2);
2301: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
2302: PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "PC not of type %s", PCFIELDSPLIT);
2303: *flg = jac->offdiag_use_amat;
2304: PetscFunctionReturn(PETSC_SUCCESS);
2305: }
2307: /*@
2308: PCFieldSplitSetIS - Sets the exact elements for a split in a `PCFIELDSPLIT`
2310: Logically Collective
2312: Input Parameters:
2313: + pc - the preconditioner context
2314: . splitname - name of this split, if `NULL` the number of the split is used
2315: - is - the index set that defines the elements in this split
2317: Level: intermediate
2319: Notes:
2320: Use `PCFieldSplitSetFields()` for splits defined by strided `IS` based on the matrix block size or by the `is_rows[]` passed into `MATNEST`.
2322: This function is called once per split (it creates a new split each time). Solve options
2323: for this split will be available under the prefix `-fieldsplit_SPLITNAME_`.
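Example Usage:
A minimal sketch defining two splits from index sets; the sizes `nu` and `np` are illustrative, and the example is written for a single process for clarity:
.vb
  IS       isu, isp;
  PetscInt nu = 100, np = 50; // illustrative sizes of the two fields

  // rows [0, nu) form split "u"; rows [nu, nu+np) form split "p"
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nu, 0, 1, &isu));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, np, nu, 1, &isp));
  PetscCall(PCFieldSplitSetIS(pc, "u", isu));
  PetscCall(PCFieldSplitSetIS(pc, "p", isp));
  PetscCall(ISDestroy(&isu)); // PCFIELDSPLIT keeps its own reference
  PetscCall(ISDestroy(&isp));
.ve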
2325: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetBlockSize()`, `PCFieldSplitSetFields()`
2326: @*/
2327: PetscErrorCode PCFieldSplitSetIS(PC pc, const char splitname[], IS is)
2328: {
2329: PetscFunctionBegin;
2331: if (splitname) PetscAssertPointer(splitname, 2);
2333: PetscTryMethod(pc, "PCFieldSplitSetIS_C", (PC, const char[], IS), (pc, splitname, is));
2334: PetscFunctionReturn(PETSC_SUCCESS);
2335: }
2337: /*@
2338: PCFieldSplitGetIS - Retrieves the elements for a split as an `IS`
2340: Logically Collective
2342: Input Parameters:
2343: + pc - the preconditioner context
2344: - splitname - name of this split
2346: Output Parameter:
2347: . is - the index set that defines the elements in this split, or `NULL` if the split is not found
2349: Level: intermediate
2351: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetIS()`, `PCFieldSplitGetISByIndex()`
2352: @*/
2353: PetscErrorCode PCFieldSplitGetIS(PC pc, const char splitname[], IS *is)
2354: {
2355: PetscFunctionBegin;
2357: PetscAssertPointer(splitname, 2);
2358: PetscAssertPointer(is, 3);
2359: {
2360: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2361: PC_FieldSplitLink ilink = jac->head;
2362: PetscBool found;
2364: *is = NULL;
2365: while (ilink) {
2366: PetscCall(PetscStrcmp(ilink->splitname, splitname, &found));
2367: if (found) {
2368: *is = ilink->is;
2369: break;
2370: }
2371: ilink = ilink->next;
2372: }
2373: }
2374: PetscFunctionReturn(PETSC_SUCCESS);
2375: }
2377: /*@
2378: PCFieldSplitGetISByIndex - Retrieves the elements for a given split as an `IS`
2380: Logically Collective
2382: Input Parameters:
2383: + pc - the preconditioner context
2384: - index - index of this split
2386: Output Parameter:
2387: . is - the index set that defines the elements in this split
2389: Level: intermediate
2391: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitGetIS()`, `PCFieldSplitSetIS()`
2393: @*/
2394: PetscErrorCode PCFieldSplitGetISByIndex(PC pc, PetscInt index, IS *is)
2395: {
2396: PetscFunctionBegin;
2397: PetscCheck(index >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative field %" PetscInt_FMT " requested", index);
2399: PetscAssertPointer(is, 3);
2400: {
2401: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2402: PC_FieldSplitLink ilink = jac->head;
2403: PetscInt i = 0;
2404: PetscCheck(index < jac->nsplits, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Field %" PetscInt_FMT " requested but only %" PetscInt_FMT " exist", index, jac->nsplits);
2406: while (i < index) {
2407: ilink = ilink->next;
2408: ++i;
2409: }
2410: PetscCall(PCFieldSplitGetIS(pc, ilink->splitname, is));
2411: }
2412: PetscFunctionReturn(PETSC_SUCCESS);
2413: }
2415: /*@
2416: PCFieldSplitSetBlockSize - Sets the block size for defining where fields start in the
2417: fieldsplit preconditioner when calling `PCFieldSplitSetFields()`. If not set, the matrix block size is used.
2419: Logically Collective
2421: Input Parameters:
2422: + pc - the preconditioner context
2423: - bs - the block size
2425: Level: intermediate
2427: Note:
2428: If the matrix is a `MATNEST` then the `is_rows[]` passed to `MatCreateNest()` determines the fields.
2430: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
2431: @*/
2432: PetscErrorCode PCFieldSplitSetBlockSize(PC pc, PetscInt bs)
2433: {
2434: PetscFunctionBegin;
2437: PetscTryMethod(pc, "PCFieldSplitSetBlockSize_C", (PC, PetscInt), (pc, bs));
2438: PetscFunctionReturn(PETSC_SUCCESS);
2439: }
2441: /*@C
2442: PCFieldSplitGetSubKSP - Gets the `KSP` contexts for all splits
2444: Collective
2446: Input Parameter:
2447: . pc - the preconditioner context
2449: Output Parameters:
2450: + n - the number of splits
2451: - subksp - the array of `KSP` contexts
2453: Level: advanced
2455: Notes:
2456: After `PCFieldSplitGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2457: (not the `KSP`, just the array that contains them).
2459: You must call `PCSetUp()` before calling `PCFieldSplitGetSubKSP()`.
2461: If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns the `KSP` object used inside the
2462: Schur complement and the `KSP` object used to iterate over the Schur complement.
2463: To access all the `KSP` objects used in `PC_COMPOSITE_SCHUR`, use `PCFieldSplitSchurGetSubKSP()`.
2465: If the fieldsplit is of type `PC_COMPOSITE_GKB`, it returns the `KSP` object used to solve the
2466: inner linear system defined by the matrix H in each loop.
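Example Usage:
A minimal sketch, assuming `pc` is a `PCFIELDSPLIT` whose operators have been set; the tolerance is illustrative:
.vb
  PetscInt nsplits;
  KSP     *subksp;

  PetscCall(PCSetUp(pc));
  PetscCall(PCFieldSplitGetSubKSP(pc, &nsplits, &subksp));
  // tighten the relative tolerance of the solver for each split
  for (PetscInt i = 0; i < nsplits; ++i) PetscCall(KSPSetTolerances(subksp[i], 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
  PetscCall(PetscFree(subksp)); // free only the array, not the KSPs it contains
.ve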
2468: Fortran Note:
2469: Call `PCFieldSplitRestoreSubKSP()` when the array of `KSP` is no longer needed
2471: Developer Notes:
2472: There should be a `PCFieldSplitRestoreSubKSP()` instead of requiring the user to call `PetscFree()`
2474: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitSchurGetSubKSP()`
2475: @*/
2476: PetscErrorCode PCFieldSplitGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
2477: {
2478: PetscFunctionBegin;
2480: if (n) PetscAssertPointer(n, 2);
2481: PetscUseMethod(pc, "PCFieldSplitGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
2482: PetscFunctionReturn(PETSC_SUCCESS);
2483: }
2485: /*@C
2486: PCFieldSplitSchurGetSubKSP - Gets the `KSP` contexts used inside the Schur complement based `PCFIELDSPLIT`
2488: Collective
2490: Input Parameter:
2491: . pc - the preconditioner context
2493: Output Parameters:
2494: + n - the number of splits
2495: - subksp - the array of `KSP` contexts
2497: Level: advanced
2499: Notes:
2500: After `PCFieldSplitSchurGetSubKSP()` the array of `KSP`s is to be freed by the user with `PetscFree()`
2501: (not the `KSP`s, just the array that contains them).
2503: You must call `PCSetUp()` before calling `PCFieldSplitSchurGetSubKSP()`.
2505: If the fieldsplit is of type `PC_COMPOSITE_SCHUR`, it returns (in order)
2506: + 1 - the `KSP` used for the (0,0) block
2507: . 2 - the `KSP` used for the Schur complement (not the one used for the interior Schur solver)
2508: - 3 - the `KSP` used for the (0,0) block in the upper triangular factor (if different from that of the (0,0) block).
2510: It returns a null array if the fieldsplit is not of type `PC_COMPOSITE_SCHUR`; in this case, you should use `PCFieldSplitGetSubKSP()`.
2512: Fortran Note:
2513: Call `PCFieldSplitSchurRestoreSubKSP()` when the array of `KSP` is no longer needed
2515: Developer Notes:
2516: There should be a `PCFieldSplitSchurRestoreSubKSP()` in C as well, instead of requiring the user to call `PetscFree()`
2518: Should the functionality of `PCFieldSplitSchurGetSubKSP()` and `PCFieldSplitGetSubKSP()` be merged?
2520: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`, `PCFieldSplitGetSubKSP()`
2521: @*/
2522: PetscErrorCode PCFieldSplitSchurGetSubKSP(PC pc, PetscInt *n, KSP *subksp[])
2523: {
2524: PetscFunctionBegin;
2526: if (n) PetscAssertPointer(n, 2);
2527: PetscUseMethod(pc, "PCFieldSplitSchurGetSubKSP_C", (PC, PetscInt *, KSP **), (pc, n, subksp));
2528: PetscFunctionReturn(PETSC_SUCCESS);
2529: }
2531: /*@
2532: PCFieldSplitSetSchurPre - Indicates from what operator the preconditioner is constructed for the Schur complement.
2533: The default is the A11 matrix.
2535: Collective
2537: Input Parameters:
2538: + pc - the preconditioner context
2539: . ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11` (default),
2540: `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`,
2541: `PC_FIELDSPLIT_SCHUR_PRE_SELFP`, and `PC_FIELDSPLIT_SCHUR_PRE_FULL`
2542: - pre - matrix to use for preconditioning, or `NULL`
2544: Options Database Keys:
2545: + -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`. See notes for meaning of various arguments
2546: - -fieldsplit_1_pc_type <pctype> - the preconditioner algorithm that is used to construct the preconditioner from the operator
2548: Level: intermediate
2550: Notes:
2551: If `ptype` is
2552: + a11 - the preconditioner for the Schur complement is generated from the block diagonal part of the preconditioner
2553: matrix associated with the Schur complement (i.e. $A_{11}$), not the Schur complement matrix
2554: . self - the preconditioner for the Schur complement is generated from the symbolic representation of the Schur complement matrix:
2555: The only preconditioners that currently work with this symbolic representation matrix object are `PCLSC` and `PCHPDDM`
2556: . user - the preconditioner for the Schur complement is generated from the user-provided matrix (`pre` argument
2557: to this function).
2558: . selfp - the preconditioner for the Schur complement is generated from an explicitly assembled approximation $S_p = A_{11} - A_{10} \mathrm{diag}(A_{00})^{-1} A_{01}$.
2559: This is only a good preconditioner when $\mathrm{diag}(A_{00})$ is a good preconditioner for $A_{00}$. Optionally, $A_{00}$ can be
2560: lumped before extracting the diagonal using the additional option `-fieldsplit_1_mat_schur_complement_ainv_type lump`
2561: - full - the preconditioner for the Schur complement is generated from the exact Schur complement matrix representation
2562: computed internally by `PCFIELDSPLIT` (this is expensive); it is
2563: useful mostly as a test that the Schur complement approach can work for your problem
2565: When solving a saddle point problem, where the $A_{11}$ block is identically zero, using `a11` as the `ptype` only makes sense
2566: with the additional option `-fieldsplit_1_pc_type none`. Usually for saddle point problems one would use a `ptype` of `self` and
2567: `-fieldsplit_1_pc_type lsc`, which uses the least squares commutator to compute a preconditioner for the Schur complement.
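Example Usage:
A minimal sketch providing a user-assembled matrix from which the Schur complement preconditioner is constructed; `Sapprox` is a hypothetical, already-assembled approximation of $S$:
.vb
  PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
  PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, Sapprox));
.ve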
2569: Developer Note:
2570: The name of this function and the option `-pc_fieldsplit_schur_precondition` are inconsistent; precondition should be used everywhere.
2572: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`,
2573: `MatSchurComplementSetAinvType()`, `PCLSC`, `PCFieldSplitSetSchurFactType()`
2574: @*/
2575: PetscErrorCode PCFieldSplitSetSchurPre(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2576: {
2577: PetscFunctionBegin;
2579: PetscTryMethod(pc, "PCFieldSplitSetSchurPre_C", (PC, PCFieldSplitSchurPreType, Mat), (pc, ptype, pre));
2580: PetscFunctionReturn(PETSC_SUCCESS);
2581: }
2583: PetscErrorCode PCFieldSplitSchurPrecondition(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2584: {
2585: return PCFieldSplitSetSchurPre(pc, ptype, pre);
2586: } /* Deprecated name */
2588: /*@
2589: PCFieldSplitGetSchurPre - For Schur complement fieldsplit, determine how the Schur complement will be
2590: preconditioned. See `PCFieldSplitSetSchurPre()` for details.
2592: Logically Collective
2594: Input Parameter:
2595: . pc - the preconditioner context
2597: Output Parameters:
2598: + ptype - which matrix to use for preconditioning the Schur complement: `PC_FIELDSPLIT_SCHUR_PRE_A11`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`, `PC_FIELDSPLIT_SCHUR_PRE_USER`
2599: - pre - matrix to use for preconditioning (with `PC_FIELDSPLIT_SCHUR_PRE_USER`), or `NULL`
2601: Level: intermediate
2603: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCLSC`
2604: @*/
2605: PetscErrorCode PCFieldSplitGetSchurPre(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2606: {
2607: PetscFunctionBegin;
2609: PetscUseMethod(pc, "PCFieldSplitGetSchurPre_C", (PC, PCFieldSplitSchurPreType *, Mat *), (pc, ptype, pre));
2610: PetscFunctionReturn(PETSC_SUCCESS);
2611: }
2613: /*@
2614: PCFieldSplitSchurGetS - extract the `MATSCHURCOMPLEMENT` object used by this `PCFIELDSPLIT` in case it needs to be configured separately
2616: Not Collective
2618: Input Parameter:
2619: . pc - the preconditioner context
2621: Output Parameter:
2622: . S - the Schur complement matrix
2624: Level: advanced
2626: Note:
2627: This matrix should not be destroyed using `MatDestroy()`; rather, use `PCFieldSplitSchurRestoreS()`.
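Example Usage:
A minimal sketch that retrieves $S$ after `PCSetUp()`, configures the $A_{00}$ solver embedded in it, and restores it; the inner solver choice is illustrative:
.vb
  Mat S;
  KSP inner;

  PetscCall(PCSetUp(pc));
  PetscCall(PCFieldSplitSchurGetS(pc, &S));
  PetscCall(MatSchurComplementGetKSP(S, &inner));
  PetscCall(KSPSetType(inner, KSPGMRES));
  PetscCall(PCFieldSplitSchurRestoreS(pc, &S));
.ve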
2629: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MATSCHURCOMPLEMENT`, `PCFieldSplitSchurRestoreS()`,
2630: `MatCreateSchurComplement()`, `MatSchurComplementGetKSP()`, `MatSchurComplementComputeExplicitOperator()`, `MatGetSchurComplement()`
2631: @*/
2632: PetscErrorCode PCFieldSplitSchurGetS(PC pc, Mat *S)
2633: {
2634: const char *t;
2635: PetscBool isfs;
2636: PC_FieldSplit *jac;
2638: PetscFunctionBegin;
2640: PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2641: PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2642: PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2643: jac = (PC_FieldSplit *)pc->data;
2644: PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2645: if (S) *S = jac->schur;
2646: PetscFunctionReturn(PETSC_SUCCESS);
2647: }
2649: /*@
2650: PCFieldSplitSchurRestoreS - restores the `MATSCHURCOMPLEMENT` matrix obtained with `PCFieldSplitSchurGetS()`
2652: Not Collective
2654: Input Parameters:
2655: + pc - the preconditioner context
2656: - S - the Schur complement matrix
2658: Level: advanced
2660: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurPre()`, `MatSchurComplement`, `PCFieldSplitSchurGetS()`
2661: @*/
2662: PetscErrorCode PCFieldSplitSchurRestoreS(PC pc, Mat *S)
2663: {
2664: const char *t;
2665: PetscBool isfs;
2666: PC_FieldSplit *jac;
2668: PetscFunctionBegin;
2670: PetscCall(PetscObjectGetType((PetscObject)pc, &t));
2671: PetscCall(PetscStrcmp(t, PCFIELDSPLIT, &isfs));
2672: PetscCheck(isfs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PC of type PCFIELDSPLIT, got %s instead", t);
2673: jac = (PC_FieldSplit *)pc->data;
2674: PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Expected PCFIELDSPLIT of type SCHUR, got %d instead", jac->type);
2675: PetscCheck(S && (*S == jac->schur), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatSchurComplement restored is not the same as gotten");
2676: PetscFunctionReturn(PETSC_SUCCESS);
2677: }
2679: static PetscErrorCode PCFieldSplitSetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType ptype, Mat pre)
2680: {
2681: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2683: PetscFunctionBegin;
2684: jac->schurpre = ptype;
2685: if (ptype == PC_FIELDSPLIT_SCHUR_PRE_USER && pre) {
2686: PetscCall(MatDestroy(&jac->schur_user));
2687: jac->schur_user = pre;
2688: PetscCall(PetscObjectReference((PetscObject)jac->schur_user));
2689: }
2690: PetscFunctionReturn(PETSC_SUCCESS);
2691: }
2693: static PetscErrorCode PCFieldSplitGetSchurPre_FieldSplit(PC pc, PCFieldSplitSchurPreType *ptype, Mat *pre)
2694: {
2695: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2697: PetscFunctionBegin;
2698: if (ptype) *ptype = jac->schurpre;
2699: if (pre) *pre = jac->schur_user;
2700: PetscFunctionReturn(PETSC_SUCCESS);
2701: }
2703: /*@
2704: PCFieldSplitSetSchurFactType - sets which blocks of the approximate block factorization to retain in the preconditioner {cite}`murphy2000note` and {cite}`ipsen2001note`
2706: Collective
2708: Input Parameters:
2709: + pc - the preconditioner context
2710: - ftype - which blocks of factorization to retain, `PC_FIELDSPLIT_SCHUR_FACT_FULL` is default
2712: Options Database Key:
2713: . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - default is `full`
2715: Level: intermediate
2717: Notes:
2718: The `full` factorization is
2720: ```{math}
2721: \left(\begin{array}{cc} A & B \\
2722: C & E \\
2723: \end{array}\right) =
2724: \left(\begin{array}{cc} I & 0 \\
2725: C A^{-1} & I \\
2726: \end{array}\right)
2727: \left(\begin{array}{cc} A & 0 \\
2728: 0 & S \\
2729: \end{array}\right)
2730: \left(\begin{array}{cc} I & A^{-1}B \\
2731: 0 & I \\
2732: \end{array}\right) = L D U,
2733: ```
2735: where $ S = E - C A^{-1} B $. In practice, the full factorization is applied via block triangular solves with the grouping $L(DU)$. `upper` uses $DU$, `lower` uses $LD$,
2736: and `diag` is the diagonal part with the sign of $S$ flipped (because this makes the preconditioner positive definite for many formulations,
2737: thus allowing the use of `KSPMINRES`). Sign flipping of $S$ can be turned off with `PCFieldSplitSetSchurScale()`.
2739: If $A$ and $S$ are solved exactly
2740: + 1 - `full` factorization is a direct solver.
2741: . 2 - The preconditioned operator with `lower` or `upper` has all eigenvalues equal to 1 and minimal polynomial of degree 2, so `KSPGMRES` converges in 2 iterations.
2742: - 3 - With `diag`, the preconditioned operator has three distinct nonzero eigenvalues and minimal polynomial of degree at most 4, so `KSPGMRES` converges in at most 4 iterations.
2744: If the iteration count is very low, consider using `KSPFGMRES` or `KSPGCR`, which can use one less preconditioner
2745: application in this case. Note that the preconditioned operator may be highly non-normal, so such fast convergence may not be observed in practice.
2747: For symmetric problems in which $A$ is positive definite and $S$ is negative definite, `diag` can be used with `KSPMINRES`.
2749: A flexible method like `KSPFGMRES` or `KSPGCR`, [](sec_flexibleksp), must be used if the fieldsplit preconditioner is nonlinear (e.g., a few iterations of a Krylov method is used to solve with $A$ or $S$).
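Example Usage:
A minimal sketch selecting the `upper` factorization together with a flexible outer Krylov method, assuming `ksp` is the outer solver whose `PC` is `pc`; the solver choices are illustrative:
.vb
  PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
  PetscCall(PCFieldSplitSetSchurFactType(pc, PC_FIELDSPLIT_SCHUR_FACT_UPPER));
  PetscCall(KSPSetType(ksp, KSPFGMRES)); // flexible, since the inner solves may be inexact
.ve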
2751: .seealso: [](sec_block_matrices), `PC`, `PCFieldSplitGetSubKSP()`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurPreType`, `PCFieldSplitSetSchurScale()`,
2752: [](sec_flexibleksp), `PCFieldSplitSetSchurPre()`
2753: @*/
2754: PetscErrorCode PCFieldSplitSetSchurFactType(PC pc, PCFieldSplitSchurFactType ftype)
2755: {
2756: PetscFunctionBegin;
2758: PetscTryMethod(pc, "PCFieldSplitSetSchurFactType_C", (PC, PCFieldSplitSchurFactType), (pc, ftype));
2759: PetscFunctionReturn(PETSC_SUCCESS);
2760: }
2762: static PetscErrorCode PCFieldSplitSetSchurFactType_FieldSplit(PC pc, PCFieldSplitSchurFactType ftype)
2763: {
2764: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2766: PetscFunctionBegin;
2767: jac->schurfactorization = ftype;
2768: PetscFunctionReturn(PETSC_SUCCESS);
2769: }
2771: /*@
2772: PCFieldSplitSetSchurScale - Controls the sign flip of S for `PC_FIELDSPLIT_SCHUR_FACT_DIAG`.
2774: Collective
2776: Input Parameters:
2777: + pc - the preconditioner context
2778: - scale - scaling factor for the Schur complement
2780: Options Database Key:
2781: . -pc_fieldsplit_schur_scale <scale> - default is -1.0
2783: Level: intermediate
2785: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetFields()`, `PCFieldSplitSchurFactType`, `PCFieldSplitSetSchurFactType()`
2786: @*/
2787: PetscErrorCode PCFieldSplitSetSchurScale(PC pc, PetscScalar scale)
2788: {
2789: PetscFunctionBegin;
2792: PetscTryMethod(pc, "PCFieldSplitSetSchurScale_C", (PC, PetscScalar), (pc, scale));
2793: PetscFunctionReturn(PETSC_SUCCESS);
2794: }
2796: static PetscErrorCode PCFieldSplitSetSchurScale_FieldSplit(PC pc, PetscScalar scale)
2797: {
2798: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2800: PetscFunctionBegin;
2801: jac->schurscale = scale;
2802: PetscFunctionReturn(PETSC_SUCCESS);
2803: }
2805: /*@C
2806: PCFieldSplitGetSchurBlocks - Gets all matrix blocks for the Schur complement
2808: Collective
2810: Input Parameter:
2811: . pc - the preconditioner context
2813: Output Parameters:
2814: + A00 - the (0,0) block
2815: . A01 - the (0,1) block
2816: . A10 - the (1,0) block
2817: - A11 - the (1,1) block
2819: Level: advanced
2821: Note:
2822: Use `NULL` for any unneeded output arguments
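Example Usage:
A minimal sketch retrieving only the off-diagonal blocks, passing `NULL` for the blocks that are not needed:
.vb
  Mat A01, A10;

  // pc must be a PCFIELDSPLIT of type PC_COMPOSITE_SCHUR
  PetscCall(PCFieldSplitGetSchurBlocks(pc, NULL, &A01, &A10, NULL));
.ve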
2824: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `MatSchurComplementGetSubMatrices()`, `MatSchurComplementSetSubMatrices()`
2825: @*/
2826: PetscErrorCode PCFieldSplitGetSchurBlocks(PC pc, Mat *A00, Mat *A01, Mat *A10, Mat *A11)
2827: {
2828: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2830: PetscFunctionBegin;
2832: PetscCheck(jac->type == PC_COMPOSITE_SCHUR, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "FieldSplit is not using a Schur complement approach.");
2833: if (A00) *A00 = jac->pmat[0];
2834: if (A01) *A01 = jac->B;
2835: if (A10) *A10 = jac->C;
2836: if (A11) *A11 = jac->pmat[1];
2837: PetscFunctionReturn(PETSC_SUCCESS);
2838: }
2840: /*@
2841: PCFieldSplitSetGKBTol - Sets the solver tolerance for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2843: Collective
2845: Input Parameters:
2846: + pc - the preconditioner context
2847: - tolerance - the solver tolerance
2849: Options Database Key:
2850: . -pc_fieldsplit_gkb_tol <tolerance> - default is 1e-5
2852: Level: intermediate
2854: Note:
2855: The generalized GKB algorithm {cite}`arioli2013` uses a lower bound estimate of the error in energy norm as stopping criterion.
2856: It stops once the lower bound estimate undershoots the required solver tolerance. Although the actual error might be bigger than
2857: this estimate, the stopping criterion is satisfactory in practical cases.
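Example Usage:
A minimal sketch configuring the GKB preconditioner; the parameter values are illustrative, not recommendations:
.vb
  PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_GKB));
  PetscCall(PCFieldSplitSetGKBTol(pc, 1e-6));
  PetscCall(PCFieldSplitSetGKBDelay(pc, 5));
  PetscCall(PCFieldSplitSetGKBNu(pc, 1.0));
  PetscCall(PCFieldSplitSetGKBMaxit(pc, 100));
.ve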
2859: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBMaxit()`
2860: @*/
2861: PetscErrorCode PCFieldSplitSetGKBTol(PC pc, PetscReal tolerance)
2862: {
2863: PetscFunctionBegin;
2866: PetscTryMethod(pc, "PCFieldSplitSetGKBTol_C", (PC, PetscReal), (pc, tolerance));
2867: PetscFunctionReturn(PETSC_SUCCESS);
2868: }
2870: static PetscErrorCode PCFieldSplitSetGKBTol_FieldSplit(PC pc, PetscReal tolerance)
2871: {
2872: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2874: PetscFunctionBegin;
2875: jac->gkbtol = tolerance;
2876: PetscFunctionReturn(PETSC_SUCCESS);
2877: }
2879: /*@
2880: PCFieldSplitSetGKBMaxit - Sets the maximum number of iterations for the generalized Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2882: Collective
2884: Input Parameters:
2885: + pc - the preconditioner context
2886: - maxit - the maximum number of iterations
2888: Options Database Key:
2889: . -pc_fieldsplit_gkb_maxit <maxit> - default is 100
2891: Level: intermediate
2893: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBNu()`
2894: @*/
2895: PetscErrorCode PCFieldSplitSetGKBMaxit(PC pc, PetscInt maxit)
2896: {
2897: PetscFunctionBegin;
2900: PetscTryMethod(pc, "PCFieldSplitSetGKBMaxit_C", (PC, PetscInt), (pc, maxit));
2901: PetscFunctionReturn(PETSC_SUCCESS);
2902: }
2904: static PetscErrorCode PCFieldSplitSetGKBMaxit_FieldSplit(PC pc, PetscInt maxit)
2905: {
2906: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2908: PetscFunctionBegin;
2909: jac->gkbmaxit = maxit;
2910: PetscFunctionReturn(PETSC_SUCCESS);
2911: }
2913: /*@
2914: PCFieldSplitSetGKBDelay - Sets the delay in the lower bound error estimate in the generalized Golub-Kahan bidiagonalization {cite}`arioli2013` in `PCFIELDSPLIT`
2915: preconditioner.
2917: Collective
2919: Input Parameters:
2920: + pc - the preconditioner context
2921: - delay - the delay window in the lower bound estimate
2923: Options Database Key:
2924: . -pc_fieldsplit_gkb_delay <delay> - default is 5
2926: Level: intermediate
2928: Notes:
2929: The algorithm uses a lower bound estimate of the error in the energy norm as its stopping criterion. The lower bound of the error $\|u-u^k\|_H$
2930: is expressed as a truncated sum. The error at iteration $k$ can only be measured at iteration $k$ + `delay`, and thus the algorithm needs
2931: at least `delay` + 1 iterations to stop.
2933: For more details on the generalized Golub-Kahan bidiagonalization method and its lower bound stopping criterion, please refer to {cite}`arioli2013`
2935: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBNu()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2936: @*/
2937: PetscErrorCode PCFieldSplitSetGKBDelay(PC pc, PetscInt delay)
2938: {
2939: PetscFunctionBegin;
2942: PetscTryMethod(pc, "PCFieldSplitSetGKBDelay_C", (PC, PetscInt), (pc, delay));
2943: PetscFunctionReturn(PETSC_SUCCESS);
2944: }
2946: static PetscErrorCode PCFieldSplitSetGKBDelay_FieldSplit(PC pc, PetscInt delay)
2947: {
2948: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2950: PetscFunctionBegin;
2951: jac->gkbdelay = delay;
2952: PetscFunctionReturn(PETSC_SUCCESS);
2953: }
2955: /*@
2956: PCFieldSplitSetGKBNu - Sets the scalar value $\nu \geq 0$ in the transformation $H = A_{00} + \nu A_{01} A_{01}'$ of the (0,0) block in the
2957: Golub-Kahan bidiagonalization preconditioner {cite}`arioli2013` in `PCFIELDSPLIT`
2959: Collective
2961: Input Parameters:
2962: + pc - the preconditioner context
2963: - nu - the shift parameter
2965: Options Database Key:
2966: . -pc_fieldsplit_gkb_nu <nu> - default is 1
2968: Level: intermediate
2970: Notes:
2971: This shift is generally done to obtain better convergence properties for the outer loop of the algorithm. This is often achieved by choosing `nu` sufficiently large. However,
2972: if `nu` is chosen too large, the matrix $H$ might be badly conditioned and the solution of the linear system $Hx = b$ in the inner loop becomes difficult. It is therefore
2973: necessary to find a good balance between the convergence of the inner and outer loops.
2975: For `nu` = 0, no shift is done. In this case $A_{00}$ has to be positive definite. The matrix $N$ in {cite}`arioli2013` is then chosen as the identity.
2977: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetGKBDelay()`, `PCFieldSplitSetGKBTol()`, `PCFieldSplitSetGKBMaxit()`
2978: @*/
2979: PetscErrorCode PCFieldSplitSetGKBNu(PC pc, PetscReal nu)
2980: {
2981: PetscFunctionBegin;
2984: PetscTryMethod(pc, "PCFieldSplitSetGKBNu_C", (PC, PetscReal), (pc, nu));
2985: PetscFunctionReturn(PETSC_SUCCESS);
2986: }
2988: static PetscErrorCode PCFieldSplitSetGKBNu_FieldSplit(PC pc, PetscReal nu)
2989: {
2990: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
2992: PetscFunctionBegin;
2993: jac->gkbnu = nu;
2994: PetscFunctionReturn(PETSC_SUCCESS);
2995: }
2997: static PetscErrorCode PCFieldSplitSetType_FieldSplit(PC pc, PCCompositeType type)
2998: {
2999: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3001: PetscFunctionBegin;
3002: jac->type = type;
3003: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", NULL));
3004: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", NULL));
3005: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", NULL));
3006: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", NULL));
3007: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", NULL));
3008: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", NULL));
3009: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", NULL));
3010: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", NULL));
3011: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", NULL));
3013: if (type == PC_COMPOSITE_SCHUR) {
3014: pc->ops->apply = PCApply_FieldSplit_Schur;
3015: pc->ops->applytranspose = PCApplyTranspose_FieldSplit_Schur;
3016: pc->ops->view = PCView_FieldSplit_Schur;
3017: pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit_Schur;
3019: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit_Schur));
3020: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurPre_C", PCFieldSplitSetSchurPre_FieldSplit));
3021: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSchurPre_C", PCFieldSplitGetSchurPre_FieldSplit));
3022: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurFactType_C", PCFieldSplitSetSchurFactType_FieldSplit));
3023: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetSchurScale_C", PCFieldSplitSetSchurScale_FieldSplit));
3024: } else if (type == PC_COMPOSITE_GKB) {
3025: pc->ops->apply = PCApply_FieldSplit_GKB;
3026: pc->ops->applytranspose = NULL;
3027: pc->ops->view = PCView_FieldSplit_GKB;
3028: pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit_GKB;
3030: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3031: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBTol_C", PCFieldSplitSetGKBTol_FieldSplit));
3032: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBMaxit_C", PCFieldSplitSetGKBMaxit_FieldSplit));
3033: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBNu_C", PCFieldSplitSetGKBNu_FieldSplit));
3034: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetGKBDelay_C", PCFieldSplitSetGKBDelay_FieldSplit));
3035: } else {
3036: pc->ops->apply = PCApply_FieldSplit;
3037: pc->ops->applytranspose = PCApplyTranspose_FieldSplit;
3038: pc->ops->view = PCView_FieldSplit;
3039: pc->ops->setuponblocks = PCSetUpOnBlocks_FieldSplit;
3041: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitGetSubKSP_C", PCFieldSplitGetSubKSP_FieldSplit));
3042: }
3043: PetscFunctionReturn(PETSC_SUCCESS);
3044: }
3046: static PetscErrorCode PCFieldSplitSetBlockSize_FieldSplit(PC pc, PetscInt bs)
3047: {
3048: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3050: PetscFunctionBegin;
3051: PetscCheck(bs >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Blocksize must be positive, you gave %" PetscInt_FMT, bs);
3052: PetscCheck(jac->bs <= 0 || jac->bs == bs, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Cannot change fieldsplit blocksize from %" PetscInt_FMT " to %" PetscInt_FMT " after it has been set", jac->bs, bs);
3053: jac->bs = bs;
3054: PetscFunctionReturn(PETSC_SUCCESS);
3055: }
3057: static PetscErrorCode PCSetCoordinates_FieldSplit(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
3058: {
3059: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3060: PC_FieldSplitLink ilink_current = jac->head;
3061: IS is_owned;
3063: PetscFunctionBegin;
3064: jac->coordinates_set = PETSC_TRUE; // Internal flag
3065: PetscCall(MatGetOwnershipIS(pc->mat, &is_owned, NULL));
3067: while (ilink_current) {
3068: // For each IS, embed it to get local coordinate indices
3069: IS is_coords;
3070: PetscInt ndofs_block;
3071: const PetscInt *block_dofs_enumeration; // Numbering of the dofs relevant to the current block
3073: // Setting drop to true for safety. It should make no difference.
3074: PetscCall(ISEmbed(ilink_current->is, is_owned, PETSC_TRUE, &is_coords));
3075: PetscCall(ISGetLocalSize(is_coords, &ndofs_block));
3076: PetscCall(ISGetIndices(is_coords, &block_dofs_enumeration));
3078: // Allocate coordinates vector and set it directly
3079: PetscCall(PetscMalloc1(ndofs_block * dim, &ilink_current->coords));
3080: for (PetscInt dof = 0; dof < ndofs_block; ++dof) {
3081: for (PetscInt d = 0; d < dim; ++d) (ilink_current->coords)[dim * dof + d] = coords[dim * block_dofs_enumeration[dof] + d];
3082: }
3083: ilink_current->dim = dim;
3084: ilink_current->ndofs = ndofs_block;
3085: PetscCall(ISRestoreIndices(is_coords, &block_dofs_enumeration));
3086: PetscCall(ISDestroy(&is_coords));
3087: ilink_current = ilink_current->next;
3088: }
3089: PetscCall(ISDestroy(&is_owned));
3090: PetscFunctionReturn(PETSC_SUCCESS);
3091: }
3093: /*@
3094: PCFieldSplitSetType - Sets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3096: Collective
3098: Input Parameters:
3099: + pc - the preconditioner context
3100: - type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`,
3101: `PC_COMPOSITE_GKB`
3103: Options Database Key:
3104: . -pc_fieldsplit_type <one of multiplicative, additive, symmetric_multiplicative, special, schur, gkb> - Sets the fieldsplit preconditioner type
3106: Level: intermediate
3108: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCCompositeType`, `PCCompositeGetType()`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3109: `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PCFieldSplitSetSchurFactType()`
3110: @*/
3111: PetscErrorCode PCFieldSplitSetType(PC pc, PCCompositeType type)
3112: {
3113: PetscFunctionBegin;
3115: PetscTryMethod(pc, "PCFieldSplitSetType_C", (PC, PCCompositeType), (pc, type));
3116: PetscFunctionReturn(PETSC_SUCCESS);
3117: }
3119: /*@
3120: PCFieldSplitGetType - Gets the type, `PCCompositeType`, of a `PCFIELDSPLIT`
3122: Not Collective
3124: Input Parameter:
3125: . pc - the preconditioner context
3127: Output Parameter:
3128: . type - `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE` (default), `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`, `PC_COMPOSITE_GKB`
3130: Level: intermediate
3132: .seealso: [](sec_block_matrices), `PC`, `PCCompositeSetType()`, `PCFIELDSPLIT`, `PCCompositeType`, `PC_COMPOSITE_ADDITIVE`, `PC_COMPOSITE_MULTIPLICATIVE`,
3133: `PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE`, `PC_COMPOSITE_SPECIAL`, `PC_COMPOSITE_SCHUR`
3134: @*/
3135: PetscErrorCode PCFieldSplitGetType(PC pc, PCCompositeType *type)
3136: {
3137: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3139: PetscFunctionBegin;
3141: PetscAssertPointer(type, 2);
3142: *type = jac->type;
3143: PetscFunctionReturn(PETSC_SUCCESS);
3144: }
3146: /*@
3147: PCFieldSplitSetDMSplits - Flags whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3149: Logically Collective
3151: Input Parameters:
3152: + pc - the preconditioner context
3153: - flg - boolean indicating whether to use field splits defined by the `DM`
3155: Options Database Key:
3156: . -pc_fieldsplit_dm_splits <bool> - use the field splits defined by the `DM`
3158: Level: intermediate
3160: Developer Note:
3161: The name should be `PCFieldSplitSetUseDMSplits()`, similar change to options database
3163: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3164: @*/
3165: PetscErrorCode PCFieldSplitSetDMSplits(PC pc, PetscBool flg)
3166: {
3167: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3168: PetscBool isfs;
3170: PetscFunctionBegin;
3173: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3174: if (isfs) jac->dm_splits = flg;
3175: PetscFunctionReturn(PETSC_SUCCESS);
3176: }
3178: /*@
3179: PCFieldSplitGetDMSplits - Returns flag indicating whether `DMCreateFieldDecomposition()` should be used to define the splits in a `PCFIELDSPLIT`, whenever possible.
3181: Logically Collective
3183: Input Parameter:
3184: . pc - the preconditioner context
3186: Output Parameter:
3187: . flg - boolean indicating whether to use field splits defined by the `DM`
3189: Level: intermediate
3191: Developer Note:
3192: The name should be `PCFieldSplitGetUseDMSplits()`
3194: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDMSplits()`, `DMCreateFieldDecomposition()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetIS()`
3195: @*/
3196: PetscErrorCode PCFieldSplitGetDMSplits(PC pc, PetscBool *flg)
3197: {
3198: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3199: PetscBool isfs;
3201: PetscFunctionBegin;
3203: PetscAssertPointer(flg, 2);
3204: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &isfs));
3205: if (isfs) {
3206: if (flg) *flg = jac->dm_splits;
3207: }
3208: PetscFunctionReturn(PETSC_SUCCESS);
3209: }
3211: /*@
3212: PCFieldSplitGetDetectSaddlePoint - Returns flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3214: Logically Collective
3216: Input Parameter:
3217: . pc - the preconditioner context
3219: Output Parameter:
3220: . flg - boolean indicating whether to detect fields or not
3222: Level: intermediate
3224: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitSetDetectSaddlePoint()`
3225: @*/
3226: PetscErrorCode PCFieldSplitGetDetectSaddlePoint(PC pc, PetscBool *flg)
3227: {
3228: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3230: PetscFunctionBegin;
3231: *flg = jac->detect;
3232: PetscFunctionReturn(PETSC_SUCCESS);
3233: }
3235: /*@
3236: PCFieldSplitSetDetectSaddlePoint - Sets flag indicating whether `PCFIELDSPLIT` will attempt to automatically determine fields based on zero diagonal entries.
3238: Logically Collective
3240: Input Parameters:
3241: + pc - the preconditioner context
3244: - flg - boolean indicating whether to detect fields or not
3246: Options Database Key:
3247: . -pc_fieldsplit_detect_saddle_point <bool> - detect and use the saddle point
3249: Level: intermediate
3251: Note:
3252: Also sets the split type to `PC_COMPOSITE_SCHUR` (see `PCFieldSplitSetType()`) and the Schur preconditioner type to `PC_FIELDSPLIT_SCHUR_PRE_SELF` (see `PCFieldSplitSetSchurPre()`).
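Example Usage:
A minimal sketch for a saddle point system whose lower-right block has a zero diagonal (e.g. Stokes); the splits are then determined automatically:
.vb
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetDetectSaddlePoint(pc, PETSC_TRUE)); // same as -pc_fieldsplit_detect_saddle_point
.ve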
3254: .seealso: [](sec_block_matrices), `PC`, `PCFIELDSPLIT`, `PCFieldSplitGetDetectSaddlePoint()`, `PCFieldSplitSetType()`, `PCFieldSplitSetSchurPre()`, `PC_FIELDSPLIT_SCHUR_PRE_SELF`
3255: @*/
3256: PetscErrorCode PCFieldSplitSetDetectSaddlePoint(PC pc, PetscBool flg)
3257: {
3258: PC_FieldSplit *jac = (PC_FieldSplit *)pc->data;
3260: PetscFunctionBegin;
3261: jac->detect = flg;
3262: if (jac->detect) {
3263: PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
3264: PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
3265: }
3266: PetscFunctionReturn(PETSC_SUCCESS);
3267: }
3269: /*MC
3270: PCFIELDSPLIT - Preconditioner created by combining separate preconditioners for individual
3271: collections of variables (that may overlap) called fields or splits. Each field often represents a different continuum variable
3272: represented on a grid, such as velocity, pressure, or temperature.
3273: In the literature these are sometimes called block preconditioners, but they should not be confused with `PCBJACOBI`.
3274: See [the users manual section on "Solving Block Matrices"](sec_block_matrices) for more details.
3276: Options Database Keys:
3277: + -pc_fieldsplit_%d_fields <a,b,..> - indicates the fields to be used in the `%d`'th split
3278: . -pc_fieldsplit_default - automatically add any fields to additional splits that have not
3279: been supplied explicitly by `-pc_fieldsplit_%d_fields`
3280: . -pc_fieldsplit_block_size <bs> - size of block that defines fields (i.e. there are bs fields)
3281: when the matrix is not of `MatType` `MATNEST`
3282: . -pc_fieldsplit_type <additive,multiplicative,symmetric_multiplicative,schur,gkb> - type of relaxation or factorization splitting
3283: . -pc_fieldsplit_schur_precondition <self,selfp,user,a11,full> - default is `a11`; see `PCFieldSplitSetSchurPre()`
3284: . -pc_fieldsplit_schur_fact_type <diag,lower,upper,full> - set factorization type when using `-pc_fieldsplit_type schur`;
3285: see `PCFieldSplitSetSchurFactType()`
3286: . -pc_fieldsplit_dm_splits <true,false> (default is true) - Whether to use `DMCreateFieldDecomposition()` for splits
3287: - -pc_fieldsplit_detect_saddle_point - automatically finds rows with zero diagonal and uses Schur complement with no preconditioner as the solver
3289: The options prefixes for the inner solvers when using the Schur complement preconditioner are `-fieldsplit_0_` and `-fieldsplit_1_`.
3290: The options prefix for the inner solver when using the Golub-Kahan bidiagonalization preconditioner is `-fieldsplit_0_`.
3291: For all other solvers they are `-fieldsplit_%d_` for the `%d`'th field; use `-fieldsplit_` for all fields.
3293: To set options on the solvers for all blocks, prepend `-fieldsplit_` to all the `PC`
3294: options database keys. For example, `-fieldsplit_pc_type ilu` `-fieldsplit_pc_factor_levels 1`.
3296: To set the options on the solvers separately for each block, call `PCFieldSplitGetSubKSP()`
3297: and set the options directly on the resulting `KSP` objects.
3299: Level: intermediate
3301: Notes:
3302: Use `PCFieldSplitSetFields()` to set splits defined by "strided" entries or with a `MATNEST` and `PCFieldSplitSetIS()`
3303: to define a split by an arbitrary collection of entries.
3305: If no splits are set, the default is used. If a `DM` is associated with the `PC` and it supports
3306: `DMCreateFieldDecomposition()`, then that is used for the default. Otherwise, if the matrix is not `MATNEST`, the splits are defined by entries strided by `bs`,
3307: beginning at 0, then 1, etc., up to `bs`-1. The block size can be set with `PCFieldSplitSetBlockSize()`;
3308: if this is not called, the block size defaults to the block size of the second matrix passed
3309: to `KSPSetOperators()`/`PCSetOperators()`.
3311: For the Schur complement preconditioner if
3312: ```{math}
3313: J = \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{10} & A_{11} \end{array}\right]
3314: ```
3316: the preconditioner using `full` factorization is logically
3317: ```{math}
3318: \left[\begin{array}{cc} I & -\text{ksp}(A_{00}) A_{01} \\ 0 & I \end{array}\right] \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & \text{ksp}(S) \end{array}\right] \left[\begin{array}{cc} I & 0 \\ -A_{10} \text{ksp}(A_{00}) & I \end{array}\right]
3319: ```
3320: where the action of $\text{ksp}(A_{00})$ is applied using the `KSP` solver with prefix `-fieldsplit_0_`. $S$ is the Schur complement
3321: ```{math}
3322: S = A_{11} - A_{10} \text{ksp}(A_{00}) A_{01}
3323: ```
3324: which is usually dense and not stored explicitly. The action of $\text{ksp}(S)$ is computed using the `KSP` solver with prefix `-fieldsplit_splitname_` (where `splitname`
3325: was given in providing the SECOND split or 1 if not given). Accordingly, if using `PCFieldSplitGetSubKSP()`, the array of sub-`KSP` contexts will hold two `KSP`s: at its
3326: 0th index, the `KSP` associated with `-fieldsplit_0_`, and at its 1st index, the `KSP` corresponding to `-fieldsplit_1_`.
3327: By default, $A_{11}$ is used to construct a preconditioner for $S$, use `PCFieldSplitSetSchurPre()` for all the possible ways to construct the preconditioner for $S$.
3329: The factorization type is set using `-pc_fieldsplit_schur_fact_type <diag, lower, upper, full>`. `full` is shown above,
3330: `diag` gives
3331: ```{math}
3332: \left[\begin{array}{cc} \text{ksp}(A_{00}) & 0 \\ 0 & -\text{ksp}(S) \end{array}\right]
3333: ```
3334: Note that, slightly counterintuitively, there is a negative sign in front of $\text{ksp}(S)$ so that the preconditioner is positive definite. For SPD matrices $J$, the sign flip
3335: can be turned off with `PCFieldSplitSetSchurScale()` or on the command line with `-pc_fieldsplit_schur_scale 1.0`. The `lower` factorization is the inverse of
3336: ```{math}
3337: \left[\begin{array}{cc} A_{00} & 0 \\ A_{10} & S \end{array}\right]
3338: ```
3339: where the inverses of $A_{00}$ and $S$ are applied using `KSP`s. The upper factorization is the inverse of
3340: ```{math}
3341: \left[\begin{array}{cc} A_{00} & A_{01} \\ 0 & S \end{array}\right]
3342: ```
3343: where again the inverses of $A_{00}$ and $S$ are applied using `KSP`s.
3345: If only one set of indices (one `IS`) is provided with `PCFieldSplitSetIS()` then the complement of that `IS`
3346: is used automatically for a second submatrix.
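Example Usage:
One plausible set of command line options for a Schur complement solve of a saddle point problem; the solver choices are illustrative, not prescriptive:
.vb
  -ksp_type fgmres -pc_type fieldsplit -pc_fieldsplit_type schur
  -pc_fieldsplit_schur_fact_type full -pc_fieldsplit_schur_precondition selfp
  -fieldsplit_0_ksp_type preonly -fieldsplit_0_pc_type gamg
  -fieldsplit_1_ksp_type preonly -fieldsplit_1_pc_type jacobi
.ve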
3348: The fieldsplit preconditioner cannot currently be used with the `MATBAIJ` or `MATSBAIJ` data formats if the blocksize is larger than 1.
3349: Generally it should be used with the `MATAIJ` or `MATNEST` `MatType`
3351: The forms of these preconditioners are closely related, if not identical, to forms derived as "Distributive Iterations", see,
3352: for example, page 294 in "Principles of Computational Fluid Dynamics" by Pieter Wesseling {cite}`wesseling2009`.
3353: One can also use `PCFIELDSPLIT` inside a smoother resulting in "Distributive Smoothers".
3355: See "A taxonomy and comparison of parallel block multi-level preconditioners for the incompressible Navier-Stokes equations" {cite}`elman2008tcp`.
3357: The Constrained Pressure Preconditioner (CPR) can be implemented using `PCCOMPOSITE` with `PCGALERKIN`. CPR first solves an $R A P$ subsystem, updates the
3358: residual on all variables (`PCCompositeSetType(pc,PC_COMPOSITE_MULTIPLICATIVE)`), and then applies a simple ILU-like preconditioner on all the variables.
3360: The generalized Golub-Kahan bidiagonalization preconditioner (GKB) can be applied to symmetric $2 \times 2$ block matrices of the shape
3361: ```{math}
3362: \left[\begin{array}{cc} A_{00} & A_{01} \\ A_{01}' & 0 \end{array}\right]
3363: ```
3364: with $A_{00}$ positive semi-definite. The implementation follows {cite}`arioli2013`. Therein, we choose $N := (1/\nu) I$ and the (0,0) block of the matrix is modified to $H = A_{00} + \nu A_{01} A_{01}'$.
3365: A linear system $Hx = b$ has to be solved in each iteration of the GKB algorithm. This solver is chosen with the option prefix `-fieldsplit_0_`.
3367: Some `PCFIELDSPLIT` variants are called physics-based preconditioners, since the preconditioner takes into account the underlying physics of the
3368: problem. But this nomenclature is not well-defined.
3370: Developer Note:
3371: The Schur complement functionality of `PCFIELDSPLIT` should likely be factored into its own `PC` thus simplifying the implementation of the preconditioners and their
3372: user API.
3374: .seealso: [](sec_block_matrices), `PC`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCLSC`,
3375: `PCFieldSplitGetSubKSP()`, `PCFieldSplitSchurGetSubKSP()`, `PCFieldSplitSetFields()`,
3376: `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`, `PCFieldSplitSetSchurFactType()`,
3377: `MatSchurComplementSetAinvType()`, `PCFieldSplitSetSchurScale()`, `PCFieldSplitSetDetectSaddlePoint()`
3378: M*/
3380: PETSC_EXTERN PetscErrorCode PCCreate_FieldSplit(PC pc)
3381: {
3382: PC_FieldSplit *jac;
3384: PetscFunctionBegin;
3385: PetscCall(PetscNew(&jac));
3387: jac->bs = -1;
3388: jac->type = PC_COMPOSITE_MULTIPLICATIVE;
3389: jac->schurpre = PC_FIELDSPLIT_SCHUR_PRE_USER; /* Try user preconditioner first, fall back on diagonal */
3390: jac->schurfactorization = PC_FIELDSPLIT_SCHUR_FACT_FULL;
3391: jac->schurscale = -1.0;
3392: jac->dm_splits = PETSC_TRUE;
3393: jac->gkbtol = 1e-5;
3394: jac->gkbdelay = 5;
3395: jac->gkbnu = 1;
3396: jac->gkbmaxit = 100;
3398: pc->data = (void *)jac;
3400: pc->ops->setup = PCSetUp_FieldSplit;
3401: pc->ops->reset = PCReset_FieldSplit;
3402: pc->ops->destroy = PCDestroy_FieldSplit;
3403: pc->ops->setfromoptions = PCSetFromOptions_FieldSplit;
3404: pc->ops->applyrichardson = NULL;
3406: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSchurGetSubKSP_C", PCFieldSplitSchurGetSubKSP_FieldSplit));
3407: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetFields_C", PCFieldSplitSetFields_FieldSplit));
3408: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetIS_C", PCFieldSplitSetIS_FieldSplit));
3409: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetType_C", PCFieldSplitSetType_FieldSplit));
3410: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitSetBlockSize_C", PCFieldSplitSetBlockSize_FieldSplit));
3411: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCFieldSplitRestrictIS_C", PCFieldSplitRestrictIS_FieldSplit));
3412: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_FieldSplit));
3414: /* Initialize function pointers */
3415: PetscCall(PCFieldSplitSetType(pc, jac->type));
3416: PetscFunctionReturn(PETSC_SUCCESS);
3417: }