/* fetidp.c: implementation of the FETI-DP method as a KSP */
1: #include <petsc/private/kspimpl.h>
2: #include <petsc/private/pcbddcimpl.h>
3: #include <petsc/private/pcbddcprivateimpl.h>
4: #include <petscdm.h>
6: static PetscBool cited = PETSC_FALSE;
7: static PetscBool cited2 = PETSC_FALSE;
8: static const char citation[] = "@article{ZampiniPCBDDC,\n"
9: "author = {Stefano Zampini},\n"
10: "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n"
11: "journal = {SIAM Journal on Scientific Computing},\n"
12: "volume = {38},\n"
13: "number = {5},\n"
14: "pages = {S282-S306},\n"
15: "year = {2016},\n"
16: "doi = {10.1137/15M1025785},\n"
17: "URL = {http://dx.doi.org/10.1137/15M1025785},\n"
18: "eprint = {http://dx.doi.org/10.1137/15M1025785}\n"
19: "}\n"
20: "@article{ZampiniDualPrimal,\n"
21: "author = {Stefano Zampini},\n"
22: "title = {{D}ual-{P}rimal methods for the cardiac {B}idomain model},\n"
23: "volume = {24},\n"
24: "number = {04},\n"
25: "pages = {667-696},\n"
26: "year = {2014},\n"
27: "doi = {10.1142/S0218202513500632},\n"
28: "URL = {https://www.worldscientific.com/doi/abs/10.1142/S0218202513500632},\n"
29: "eprint = {https://www.worldscientific.com/doi/pdf/10.1142/S0218202513500632}\n"
30: "}\n";
31: static const char citation2[] = "@article{li2013nonoverlapping,\n"
32: "title={A nonoverlapping domain decomposition method for incompressible Stokes equations with continuous pressures},\n"
33: "author={Li, Jing and Tu, Xuemin},\n"
34: "journal={SIAM Journal on Numerical Analysis},\n"
35: "volume={51},\n"
36: "number={2},\n"
37: "pages={1235--1253},\n"
38: "year={2013},\n"
39: "publisher={Society for Industrial and Applied Mathematics}\n"
40: "}\n";
42: /*
43: This file implements the FETI-DP method in PETSc as part of KSP.
44: */
45: typedef struct {
46: KSP parentksp;
47: } KSP_FETIDPMon;
49: typedef struct {
50: KSP innerksp; /* the KSP for the Lagrange multipliers */
51: PC innerbddc; /* the inner BDDC object */
52: PetscBool fully_redundant; /* true for using a fully redundant set of multipliers */
53: PetscBool userbddc; /* true if the user provided the PCBDDC object */
54: PetscBool saddlepoint; /* support for saddle point problems */
55: IS pP; /* index set for pressure variables */
56: Vec rhs_flip; /* see KSPFETIDPSetUpOperators */
57: KSP_FETIDPMon *monctx; /* monitor context, used to pass user defined monitors
58: in the physical space */
59: PetscObjectState matstate; /* these are needed just in the saddle point case */
60: PetscObjectState matnnzstate; /* where we are going to use MatZeroRows on pmat */
61: PetscBool statechanged;
62: PetscBool check;
63: } KSP_FETIDP;
65: static PetscErrorCode KSPFETIDPSetPressureOperator_FETIDP(KSP ksp, Mat P)
66: {
67: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
69: PetscFunctionBegin;
70: if (P) fetidp->saddlepoint = PETSC_TRUE;
71: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_PPmat", (PetscObject)P));
72: PetscFunctionReturn(PETSC_SUCCESS);
73: }
75: /*@
  KSPFETIDPSetPressureOperator - Sets the operator used to set up the pressure preconditioner for the saddle point `KSPFETIDP` solver.
78: Collective
80: Input Parameters:
81: + ksp - the `KSPFETIDP` solver
82: - P - the linear operator to be preconditioned, usually the mass matrix.
84: Level: advanced
86: Notes:
87: The operator can be either passed in
88: .vb
89: a) monolithic global ordering,
90: b) pressure-only global ordering, or
91: c) interface pressure ordering (if `-ksp_fetidp_pressure_all false`).
92: .ve
93: In cases b) and c), the pressure ordering of dofs needs to satisfy
94: pid_1 < pid_2 iff gid_1 < gid_2
95: where pid_1 and pid_2 are two different pressure dof numbers and gid_1 and gid_2 the corresponding
96: id in the monolithic global ordering.
98: .seealso: [](ch_ksp), `KSPFETIDP`, `MATIS`, `PCBDDC`, `KSPFETIDPGetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`, `KSPSetOperators()`
99: @*/
100: PetscErrorCode KSPFETIDPSetPressureOperator(KSP ksp, Mat P)
101: {
102: PetscFunctionBegin;
105: PetscTryMethod(ksp, "KSPFETIDPSetPressureOperator_C", (KSP, Mat), (ksp, P));
106: PetscFunctionReturn(PETSC_SUCCESS);
107: }
109: static PetscErrorCode KSPFETIDPGetInnerKSP_FETIDP(KSP ksp, KSP *innerksp)
110: {
111: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
113: PetscFunctionBegin;
114: *innerksp = fetidp->innerksp;
115: PetscFunctionReturn(PETSC_SUCCESS);
116: }
118: /*@
119: KSPFETIDPGetInnerKSP - Gets the `KSP` object for the Lagrange multipliers from inside a `KSPFETIDP`
121: Input Parameter:
122: . ksp - the `KSPFETIDP`
124: Output Parameter:
125: . innerksp - the `KSP` for the multipliers
127: Level: advanced
129: .seealso: [](ch_ksp), `KSPFETIDP`, `MATIS`, `PCBDDC`, `KSPFETIDPSetInnerBDDC()`, `KSPFETIDPGetInnerBDDC()`
130: @*/
131: PetscErrorCode KSPFETIDPGetInnerKSP(KSP ksp, KSP *innerksp)
132: {
133: PetscFunctionBegin;
135: PetscAssertPointer(innerksp, 2);
136: PetscUseMethod(ksp, "KSPFETIDPGetInnerKSP_C", (KSP, KSP *), (ksp, innerksp));
137: PetscFunctionReturn(PETSC_SUCCESS);
138: }
140: static PetscErrorCode KSPFETIDPGetInnerBDDC_FETIDP(KSP ksp, PC *pc)
141: {
142: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
144: PetscFunctionBegin;
145: *pc = fetidp->innerbddc;
146: PetscFunctionReturn(PETSC_SUCCESS);
147: }
149: /*@
150: KSPFETIDPGetInnerBDDC - Gets the `PCBDDC` preconditioner used to set up the `KSPFETIDP` matrix for the Lagrange multipliers
152: Input Parameter:
153: . ksp - the `KSPFETIDP` Krylov solver
155: Output Parameter:
156: . pc - the `PCBDDC` preconditioner
158: Level: advanced
160: .seealso: [](ch_ksp), `MATIS`, `PCBDDC`, `KSPFETIDP`, `KSPFETIDPSetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`
161: @*/
162: PetscErrorCode KSPFETIDPGetInnerBDDC(KSP ksp, PC *pc)
163: {
164: PetscFunctionBegin;
166: PetscAssertPointer(pc, 2);
167: PetscUseMethod(ksp, "KSPFETIDPGetInnerBDDC_C", (KSP, PC *), (ksp, pc));
168: PetscFunctionReturn(PETSC_SUCCESS);
169: }
171: static PetscErrorCode KSPFETIDPSetInnerBDDC_FETIDP(KSP ksp, PC pc)
172: {
173: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
175: PetscFunctionBegin;
176: PetscCall(PetscObjectReference((PetscObject)pc));
177: PetscCall(PCDestroy(&fetidp->innerbddc));
178: fetidp->innerbddc = pc;
179: fetidp->userbddc = PETSC_TRUE;
180: PetscFunctionReturn(PETSC_SUCCESS);
181: }
183: /*@
184: KSPFETIDPSetInnerBDDC - Provides the `PCBDDC` preconditioner used to set up the `KSPFETIDP` matrix for the Lagrange multipliers
186: Collective
188: Input Parameters:
189: + ksp - the `KSPFETIDP` Krylov solver
190: - pc - the `PCBDDC` preconditioner
192: Level: advanced
194: Note:
195: A `PC` is automatically created for the `KSPFETIDP` and can be accessed to change options with `KSPFETIDPGetInnerBDDC()` hence this routine is rarely needed
197: .seealso: [](ch_ksp), `MATIS`, `PCBDDC`, `KSPFETIDPGetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`
198: @*/
199: PetscErrorCode KSPFETIDPSetInnerBDDC(KSP ksp, PC pc)
200: {
201: PetscBool isbddc;
203: PetscFunctionBegin;
206: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBDDC, &isbddc));
207: PetscCheck(isbddc, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "KSPFETIDPSetInnerBDDC need a PCBDDC preconditioner");
208: PetscTryMethod(ksp, "KSPFETIDPSetInnerBDDC_C", (KSP, PC), (ksp, pc));
209: PetscFunctionReturn(PETSC_SUCCESS);
210: }
212: static PetscErrorCode KSPBuildSolution_FETIDP(KSP ksp, Vec v, Vec *V)
213: {
214: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
215: Mat F;
216: Vec Xl;
218: PetscFunctionBegin;
219: PetscCall(KSPGetOperators(fetidp->innerksp, &F, NULL));
220: PetscCall(KSPBuildSolution(fetidp->innerksp, NULL, &Xl));
221: if (v) {
222: PetscCall(PCBDDCMatFETIDPGetSolution(F, Xl, v));
223: *V = v;
224: } else {
225: PetscCall(PCBDDCMatFETIDPGetSolution(F, Xl, *V));
226: }
227: PetscFunctionReturn(PETSC_SUCCESS);
228: }
230: static PetscErrorCode KSPMonitor_FETIDP(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx)
231: {
232: KSP_FETIDPMon *monctx = (KSP_FETIDPMon *)ctx;
234: PetscFunctionBegin;
235: PetscCall(KSPMonitor(monctx->parentksp, it, rnorm));
236: PetscFunctionReturn(PETSC_SUCCESS);
237: }
239: static PetscErrorCode KSPComputeEigenvalues_FETIDP(KSP ksp, PetscInt nmax, PetscReal *r, PetscReal *c, PetscInt *neig)
240: {
241: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
243: PetscFunctionBegin;
244: PetscCall(KSPComputeEigenvalues(fetidp->innerksp, nmax, r, c, neig));
245: PetscFunctionReturn(PETSC_SUCCESS);
246: }
248: static PetscErrorCode KSPComputeExtremeSingularValues_FETIDP(KSP ksp, PetscReal *emax, PetscReal *emin)
249: {
250: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
252: PetscFunctionBegin;
253: PetscCall(KSPComputeExtremeSingularValues(fetidp->innerksp, emax, emin));
254: PetscFunctionReturn(PETSC_SUCCESS);
255: }
257: static PetscErrorCode KSPFETIDPCheckOperators(KSP ksp, PetscViewer viewer)
258: {
259: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
260: PC_BDDC *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
261: PC_IS *pcis = (PC_IS *)fetidp->innerbddc->data;
262: Mat_IS *matis = (Mat_IS *)fetidp->innerbddc->pmat->data;
263: Mat F;
264: FETIDPMat_ctx fetidpmat_ctx;
265: Vec test_vec, test_vec_p = NULL, fetidp_global;
266: IS dirdofs, isvert;
267: MPI_Comm comm = PetscObjectComm((PetscObject)ksp);
268: PetscScalar sval, *array;
269: PetscReal val, rval;
270: const PetscInt *vertex_indices;
271: PetscInt i, n_vertices;
272: PetscBool isascii;
274: PetscFunctionBegin;
275: PetscCheckSameComm(ksp, 1, viewer, 2);
276: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
277: PetscCheck(isascii, comm, PETSC_ERR_SUP, "Unsupported viewer");
278: PetscCall(PetscViewerASCIIPrintf(viewer, "----------FETI-DP MAT --------------\n"));
279: PetscCall(PetscViewerASCIIAddTab(viewer, 2));
280: PetscCall(KSPGetOperators(fetidp->innerksp, &F, NULL));
281: PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO));
282: PetscCall(MatView(F, viewer));
283: PetscCall(PetscViewerPopFormat(viewer));
284: PetscCall(PetscViewerASCIISubtractTab(viewer, 2));
285: PetscCall(MatShellGetContext(F, &fetidpmat_ctx));
286: PetscCall(PetscViewerASCIIPrintf(viewer, "----------FETI-DP TESTS--------------\n"));
287: PetscCall(PetscViewerASCIIPrintf(viewer, "All tests should return zero!\n"));
288: PetscCall(PetscViewerASCIIPrintf(viewer, "FETIDP MAT context in the "));
289: if (fetidp->fully_redundant) {
290: PetscCall(PetscViewerASCIIPrintf(viewer, "fully redundant case for lagrange multipliers.\n"));
291: } else {
292: PetscCall(PetscViewerASCIIPrintf(viewer, "Non-fully redundant case for lagrange multiplier.\n"));
293: }
294: PetscCall(PetscViewerFlush(viewer));
296: /* Get Vertices used to define the BDDC */
297: PetscCall(PCBDDCGraphGetCandidatesIS(pcbddc->mat_graph, NULL, NULL, NULL, NULL, &isvert));
298: PetscCall(ISGetLocalSize(isvert, &n_vertices));
299: PetscCall(ISGetIndices(isvert, &vertex_indices));
301: /******************************************************************/
302: /* TEST A/B: Test numbering of global fetidp dofs */
303: /******************************************************************/
304: PetscCall(MatCreateVecs(F, &fetidp_global, NULL));
305: PetscCall(VecDuplicate(fetidpmat_ctx->lambda_local, &test_vec));
306: PetscCall(VecSet(fetidp_global, 1.0));
307: PetscCall(VecSet(test_vec, 1.));
308: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
309: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
310: if (fetidpmat_ctx->l2g_p) {
311: PetscCall(VecDuplicate(fetidpmat_ctx->vP, &test_vec_p));
312: PetscCall(VecSet(test_vec_p, 1.));
313: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_p, fetidp_global, fetidpmat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
314: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_p, fetidp_global, fetidpmat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
315: }
316: PetscCall(VecAXPY(test_vec, -1.0, fetidpmat_ctx->lambda_local));
317: PetscCall(VecNorm(test_vec, NORM_INFINITY, &val));
318: PetscCall(VecDestroy(&test_vec));
319: PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
320: PetscCall(PetscViewerASCIIPrintf(viewer, "A: CHECK glob to loc: % 1.14e\n", (double)rval));
322: if (fetidpmat_ctx->l2g_p) {
323: PetscCall(VecAXPY(test_vec_p, -1.0, fetidpmat_ctx->vP));
324: PetscCall(VecNorm(test_vec_p, NORM_INFINITY, &val));
325: PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
326: PetscCall(PetscViewerASCIIPrintf(viewer, "A: CHECK glob to loc (p): % 1.14e\n", (double)rval));
327: }
329: if (fetidp->fully_redundant) {
330: PetscCall(VecSet(fetidp_global, 0.0));
331: PetscCall(VecSet(fetidpmat_ctx->lambda_local, 0.5));
332: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
333: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
334: PetscCall(VecSum(fetidp_global, &sval));
335: val = PetscRealPart(sval) - fetidpmat_ctx->n_lambda;
336: PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
337: PetscCall(PetscViewerASCIIPrintf(viewer, "B: CHECK loc to glob: % 1.14e\n", (double)rval));
338: }
340: if (fetidpmat_ctx->l2g_p) {
341: PetscCall(VecSet(pcis->vec1_N, 1.0));
342: PetscCall(VecSet(pcis->vec1_global, 0.0));
343: PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
344: PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
346: PetscCall(VecSet(fetidp_global, 0.0));
347: PetscCall(VecSet(fetidpmat_ctx->vP, -1.0));
348: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
349: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
350: PetscCall(VecScatterBegin(fetidpmat_ctx->g2g_p, fetidp_global, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
351: PetscCall(VecScatterEnd(fetidpmat_ctx->g2g_p, fetidp_global, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
352: PetscCall(VecScatterBegin(fetidpmat_ctx->g2g_p, pcis->vec1_global, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
353: PetscCall(VecScatterEnd(fetidpmat_ctx->g2g_p, pcis->vec1_global, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
354: PetscCall(VecSum(fetidp_global, &sval));
355: val = PetscRealPart(sval);
356: PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
357: PetscCall(PetscViewerASCIIPrintf(viewer, "B: CHECK loc to glob (p): % 1.14e\n", (double)rval));
358: }
360: /******************************************************************/
361: /* TEST C: It should hold B_delta*w=0, w\in\widehat{W} */
362: /* This is the meaning of the B matrix */
363: /******************************************************************/
365: PetscCall(VecSetRandom(pcis->vec1_N, NULL));
366: PetscCall(VecSet(pcis->vec1_global, 0.0));
367: PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
368: PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
369: PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
370: PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
371: PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
372: PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
373: /* Action of B_delta */
374: PetscCall(MatMult(fetidpmat_ctx->B_delta, pcis->vec1_B, fetidpmat_ctx->lambda_local));
375: PetscCall(VecSet(fetidp_global, 0.0));
376: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
377: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
378: PetscCall(VecNorm(fetidp_global, NORM_INFINITY, &val));
379: PetscCall(PetscViewerASCIIPrintf(viewer, "C: CHECK infty norm of B_delta*w (w continuous): % 1.14e\n", (double)val));
381: /******************************************************************/
382: /* TEST D: It should hold E_Dw = w - P_Dw w\in\widetilde{W} */
383: /* E_D = R_D^TR */
384: /* P_D = B_{D,delta}^T B_{delta} */
385: /* eq.44 Mandel Tezaur and Dohrmann 2005 */
386: /******************************************************************/
388: /* compute a random vector in \widetilde{W} */
389: PetscCall(VecSetRandom(pcis->vec1_N, NULL));
390: /* set zero at vertices and essential dofs */
391: PetscCall(VecGetArray(pcis->vec1_N, &array));
392: for (i = 0; i < n_vertices; i++) array[vertex_indices[i]] = 0.0;
393: PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph, &dirdofs));
394: if (dirdofs) {
395: const PetscInt *idxs;
396: PetscInt ndir;
398: PetscCall(ISGetLocalSize(dirdofs, &ndir));
399: PetscCall(ISGetIndices(dirdofs, &idxs));
400: for (i = 0; i < ndir; i++) array[idxs[i]] = 0.0;
401: PetscCall(ISRestoreIndices(dirdofs, &idxs));
402: }
403: PetscCall(VecRestoreArray(pcis->vec1_N, &array));
404: /* store w for final comparison */
405: PetscCall(VecDuplicate(pcis->vec1_B, &test_vec));
406: PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, test_vec, INSERT_VALUES, SCATTER_FORWARD));
407: PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, test_vec, INSERT_VALUES, SCATTER_FORWARD));
409: /* Jump operator P_D : results stored in pcis->vec1_B */
410: /* Action of B_delta */
411: PetscCall(MatMult(fetidpmat_ctx->B_delta, test_vec, fetidpmat_ctx->lambda_local));
412: PetscCall(VecSet(fetidp_global, 0.0));
413: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
414: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
415: /* Action of B_Ddelta^T */
416: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
417: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
418: PetscCall(MatMultTranspose(fetidpmat_ctx->B_Ddelta, fetidpmat_ctx->lambda_local, pcis->vec1_B));
420: /* Average operator E_D : results stored in pcis->vec2_B */
421: PetscCall(PCBDDCScalingExtension(fetidpmat_ctx->pc, test_vec, pcis->vec1_global));
422: PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec2_B, INSERT_VALUES, SCATTER_FORWARD));
423: PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec2_B, INSERT_VALUES, SCATTER_FORWARD));
425: /* test E_D=I-P_D */
426: PetscCall(VecAXPY(pcis->vec1_B, 1.0, pcis->vec2_B));
427: PetscCall(VecAXPY(pcis->vec1_B, -1.0, test_vec));
428: PetscCall(VecNorm(pcis->vec1_B, NORM_INFINITY, &val));
429: PetscCall(VecDestroy(&test_vec));
430: PetscCallMPI(MPI_Reduce(&val, &rval, 1, MPIU_REAL, MPIU_MAX, 0, comm));
431: PetscCall(PetscViewerASCIIPrintf(viewer, "%d: CHECK infty norm of E_D + P_D - I: %1.14e\n", PetscGlobalRank, (double)val));
433: /******************************************************************/
434: /* TEST E: It should hold R_D^TP_Dw=0 w\in\widetilde{W} */
435: /* eq.48 Mandel Tezaur and Dohrmann 2005 */
436: /******************************************************************/
438: PetscCall(VecSetRandom(pcis->vec1_N, NULL));
439: /* set zero at vertices and essential dofs */
440: PetscCall(VecGetArray(pcis->vec1_N, &array));
441: for (i = 0; i < n_vertices; i++) array[vertex_indices[i]] = 0.0;
442: if (dirdofs) {
443: const PetscInt *idxs;
444: PetscInt ndir;
446: PetscCall(ISGetLocalSize(dirdofs, &ndir));
447: PetscCall(ISGetIndices(dirdofs, &idxs));
448: for (i = 0; i < ndir; i++) array[idxs[i]] = 0.0;
449: PetscCall(ISRestoreIndices(dirdofs, &idxs));
450: }
451: PetscCall(VecRestoreArray(pcis->vec1_N, &array));
453: /* Jump operator P_D : results stored in pcis->vec1_B */
455: PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
456: PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
457: /* Action of B_delta */
458: PetscCall(MatMult(fetidpmat_ctx->B_delta, pcis->vec1_B, fetidpmat_ctx->lambda_local));
459: PetscCall(VecSet(fetidp_global, 0.0));
460: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
461: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, fetidp_global, ADD_VALUES, SCATTER_FORWARD));
462: /* Action of B_Ddelta^T */
463: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
464: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
465: PetscCall(MatMultTranspose(fetidpmat_ctx->B_Ddelta, fetidpmat_ctx->lambda_local, pcis->vec1_B));
466: /* scaling */
467: PetscCall(PCBDDCScalingExtension(fetidpmat_ctx->pc, pcis->vec1_B, pcis->vec1_global));
468: PetscCall(VecNorm(pcis->vec1_global, NORM_INFINITY, &val));
469: PetscCall(PetscViewerASCIIPrintf(viewer, "E: CHECK infty norm of R^T_D P_D: % 1.14e\n", (double)val));
471: if (!fetidp->fully_redundant) {
472: /******************************************************************/
473: /* TEST F: It should holds B_{delta}B^T_{D,delta}=I */
474: /* Corollary thm 14 Mandel Tezaur and Dohrmann 2005 */
475: /******************************************************************/
476: PetscCall(VecDuplicate(fetidp_global, &test_vec));
477: PetscCall(VecSetRandom(fetidp_global, NULL));
478: if (fetidpmat_ctx->l2g_p) {
479: PetscCall(VecSet(fetidpmat_ctx->vP, 0.));
480: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
481: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_p, fetidpmat_ctx->vP, fetidp_global, INSERT_VALUES, SCATTER_FORWARD));
482: }
483: /* Action of B_Ddelta^T */
484: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
485: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidp_global, fetidpmat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
486: PetscCall(MatMultTranspose(fetidpmat_ctx->B_Ddelta, fetidpmat_ctx->lambda_local, pcis->vec1_B));
487: /* Action of B_delta */
488: PetscCall(MatMult(fetidpmat_ctx->B_delta, pcis->vec1_B, fetidpmat_ctx->lambda_local));
489: PetscCall(VecSet(test_vec, 0.0));
490: PetscCall(VecScatterBegin(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, test_vec, ADD_VALUES, SCATTER_FORWARD));
491: PetscCall(VecScatterEnd(fetidpmat_ctx->l2g_lambda, fetidpmat_ctx->lambda_local, test_vec, ADD_VALUES, SCATTER_FORWARD));
492: PetscCall(VecAXPY(fetidp_global, -1., test_vec));
493: PetscCall(VecNorm(fetidp_global, NORM_INFINITY, &val));
494: PetscCall(PetscViewerASCIIPrintf(viewer, "E: CHECK infty norm of P^T_D - I: % 1.14e\n", (double)val));
495: PetscCall(VecDestroy(&test_vec));
496: }
497: PetscCall(PetscViewerASCIIPrintf(viewer, "-------------------------------------\n"));
498: PetscCall(PetscViewerFlush(viewer));
499: PetscCall(VecDestroy(&test_vec_p));
500: PetscCall(ISDestroy(&dirdofs));
501: PetscCall(VecDestroy(&fetidp_global));
502: PetscCall(ISRestoreIndices(isvert, &vertex_indices));
503: PetscCall(PCBDDCGraphRestoreCandidatesIS(pcbddc->mat_graph, NULL, NULL, NULL, NULL, &isvert));
504: PetscFunctionReturn(PETSC_SUCCESS);
505: }
507: static PetscErrorCode KSPFETIDPSetUpOperators(KSP ksp)
508: {
509: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
510: PC_BDDC *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
511: Mat A, Ap;
512: PetscInt fid = -1;
513: PetscMPIInt size;
514: PetscBool ismatis, pisz = PETSC_FALSE, allp = PETSC_FALSE, schp = PETSC_FALSE;
515: PetscBool flip = PETSC_FALSE; /* Usually, Stokes is written (B = -\int_\Omega \nabla \cdot u q)
516: | A B'| | v | = | f |
517: | B 0 | | p | = | g |
518: If -ksp_fetidp_saddlepoint_flip is true, the code assumes it is written as
519: | A B'| | v | = | f |
520: |-B 0 | | p | = |-g |
521: */
522: PetscObjectState matstate, matnnzstate;
524: PetscFunctionBegin;
525: PetscOptionsBegin(PetscObjectComm((PetscObject)ksp), ((PetscObject)ksp)->prefix, "FETI-DP options", "PC");
526: PetscCall(PetscOptionsInt("-ksp_fetidp_pressure_field", "Field id for pressures for saddle-point problems", NULL, fid, &fid, NULL));
527: PetscCall(PetscOptionsBool("-ksp_fetidp_pressure_all", "Use the whole pressure set instead of just that at the interface", NULL, allp, &allp, NULL));
528: PetscCall(PetscOptionsBool("-ksp_fetidp_saddlepoint_flip", "Flip the sign of the pressure-velocity (lower-left) block", NULL, flip, &flip, NULL));
529: PetscCall(PetscOptionsBool("-ksp_fetidp_pressure_schur", "Use a BDDC solver for pressure", NULL, schp, &schp, NULL));
530: PetscOptionsEnd();
532: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)ksp), &size));
533: fetidp->saddlepoint = (fid >= 0 ? PETSC_TRUE : fetidp->saddlepoint);
534: if (size == 1) fetidp->saddlepoint = PETSC_FALSE;
536: PetscCall(KSPGetOperators(ksp, &A, &Ap));
537: PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis));
538: PetscCheck(ismatis, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Amat should be of type MATIS");
540: /* Quiet return if the matrix states are unchanged.
541: Needed only for the saddle point case since it uses MatZeroRows
542: on a matrix that may not have changed */
543: PetscCall(PetscObjectStateGet((PetscObject)A, &matstate));
544: PetscCall(MatGetNonzeroState(A, &matnnzstate));
545: if (matstate == fetidp->matstate && matnnzstate == fetidp->matnnzstate) PetscFunctionReturn(PETSC_SUCCESS);
546: fetidp->matstate = matstate;
547: fetidp->matnnzstate = matnnzstate;
548: fetidp->statechanged = fetidp->saddlepoint;
550: /* see if we have some fields attached */
551: if (!pcbddc->n_ISForDofsLocal && !pcbddc->n_ISForDofs) {
552: DM dm;
553: PetscContainer c;
555: PetscCall(KSPGetDM(ksp, &dm));
556: PetscCall(PetscObjectQuery((PetscObject)A, "_convert_nest_lfields", (PetscObject *)&c));
557: if (dm) {
558: IS *fields;
559: PetscInt nf, i;
561: PetscCall(DMCreateFieldDecomposition(dm, &nf, NULL, &fields, NULL));
562: PetscCall(PCBDDCSetDofsSplitting(fetidp->innerbddc, nf, fields));
563: for (i = 0; i < nf; i++) PetscCall(ISDestroy(&fields[i]));
564: PetscCall(PetscFree(fields));
565: } else if (c) {
566: MatISLocalFields lf;
568: PetscCall(PetscContainerGetPointer(c, (void **)&lf));
569: PetscCall(PCBDDCSetDofsSplittingLocal(fetidp->innerbddc, lf->nr, lf->rf));
570: }
571: }
573: if (!fetidp->saddlepoint) {
574: PetscCall(PCSetOperators(fetidp->innerbddc, A, A));
575: } else {
576: Mat nA, lA, PPmat;
577: MatNullSpace nnsp;
578: IS pP;
579: PetscInt totP;
581: PetscCall(MatISGetLocalMat(A, &lA));
582: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lA", (PetscObject)lA));
584: pP = fetidp->pP;
585: if (!pP) { /* first time, need to compute pressure dofs */
586: PC_IS *pcis = (PC_IS *)fetidp->innerbddc->data;
587: Mat_IS *matis = (Mat_IS *)A->data;
588: ISLocalToGlobalMapping l2g;
589: IS lP = NULL, II, pII, lPall, Pall, is1, is2;
590: const PetscInt *idxs;
591: PetscInt nl, ni, *widxs;
592: PetscInt i, j, n_neigh, *neigh, *n_shared, **shared, *count;
593: PetscInt rst, ren, n;
594: PetscBool ploc;
596: PetscCall(MatGetLocalSize(A, &nl, NULL));
597: PetscCall(MatGetOwnershipRange(A, &rst, &ren));
598: PetscCall(MatGetLocalSize(lA, &n, NULL));
599: PetscCall(MatISGetLocalToGlobalMapping(A, &l2g, NULL));
601: if (!pcis->is_I_local) { /* need to compute interior dofs */
602: PetscCall(PetscCalloc1(n, &count));
603: PetscCall(ISLocalToGlobalMappingGetInfo(l2g, &n_neigh, &neigh, &n_shared, &shared));
604: for (i = 1; i < n_neigh; i++)
605: for (j = 0; j < n_shared[i]; j++) count[shared[i][j]] += 1;
606: for (i = 0, j = 0; i < n; i++)
607: if (!count[i]) count[j++] = i;
608: PetscCall(ISLocalToGlobalMappingRestoreInfo(l2g, &n_neigh, &neigh, &n_shared, &shared));
609: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, j, count, PETSC_OWN_POINTER, &II));
610: } else {
611: PetscCall(PetscObjectReference((PetscObject)pcis->is_I_local));
612: II = pcis->is_I_local;
613: }
615: /* interior dofs in layout */
616: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
617: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
618: PetscCall(ISGetLocalSize(II, &ni));
619: PetscCall(ISGetIndices(II, &idxs));
620: for (i = 0; i < ni; i++) matis->sf_leafdata[idxs[i]] = 1;
621: PetscCall(ISRestoreIndices(II, &idxs));
622: PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
623: PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
624: PetscCall(PetscMalloc1(PetscMax(nl, n), &widxs));
625: for (i = 0, ni = 0; i < nl; i++)
626: if (matis->sf_rootdata[i]) widxs[ni++] = i + rst;
627: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &pII));
629: /* pressure dofs */
630: Pall = NULL;
631: lPall = NULL;
632: ploc = PETSC_FALSE;
633: if (fid < 0) { /* zero pressure block */
634: PetscInt np;
636: PetscCall(MatFindZeroDiagonals(A, &Pall));
637: PetscCall(ISGetSize(Pall, &np));
638: if (!np) { /* zero-block not found, defaults to last field (if set) */
639: fid = pcbddc->n_ISForDofsLocal ? pcbddc->n_ISForDofsLocal - 1 : pcbddc->n_ISForDofs - 1;
640: PetscCall(ISDestroy(&Pall));
641: } else if (!pcbddc->n_ISForDofsLocal && !pcbddc->n_ISForDofs) {
642: PetscCall(PCBDDCSetDofsSplitting(fetidp->innerbddc, 1, &Pall));
643: }
644: }
645: if (!Pall) { /* look for registered fields */
646: if (pcbddc->n_ISForDofsLocal) {
647: PetscInt np;
649: PetscCheck(fid >= 0 && fid < pcbddc->n_ISForDofsLocal, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Invalid field id for pressure %" PetscInt_FMT ", max %" PetscInt_FMT, fid, pcbddc->n_ISForDofsLocal);
650: /* need a sequential IS */
651: PetscCall(ISGetLocalSize(pcbddc->ISForDofsLocal[fid], &np));
652: PetscCall(ISGetIndices(pcbddc->ISForDofsLocal[fid], &idxs));
653: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, np, idxs, PETSC_COPY_VALUES, &lPall));
654: PetscCall(ISRestoreIndices(pcbddc->ISForDofsLocal[fid], &idxs));
655: ploc = PETSC_TRUE;
656: } else if (pcbddc->n_ISForDofs) {
657: PetscCheck(fid >= 0 && fid < pcbddc->n_ISForDofs, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Invalid field id for pressure %" PetscInt_FMT ", max %" PetscInt_FMT, fid, pcbddc->n_ISForDofs);
658: PetscCall(PetscObjectReference((PetscObject)pcbddc->ISForDofs[fid]));
659: Pall = pcbddc->ISForDofs[fid];
660: } else SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Cannot detect pressure field! Use KSPFETIDPGetInnerBDDC() + PCBDDCSetDofsSplitting or PCBDDCSetDofsSplittingLocal");
661: }
663: /* if the user requested the entire pressure,
664: remove the interior pressure dofs from II (or pII) */
665: if (allp) {
666: if (ploc) {
667: IS nII;
668: PetscCall(ISDifference(II, lPall, &nII));
669: PetscCall(ISDestroy(&II));
670: II = nII;
671: } else {
672: IS nII;
673: PetscCall(ISDifference(pII, Pall, &nII));
674: PetscCall(ISDestroy(&pII));
675: pII = nII;
676: }
677: }
678: if (ploc) {
679: PetscCall(ISDifference(lPall, II, &lP));
680: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lP", (PetscObject)lP));
681: } else {
682: PetscCall(ISDifference(Pall, pII, &pP));
683: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pP", (PetscObject)pP));
684: /* need all local pressure dofs */
685: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
686: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
687: PetscCall(ISGetLocalSize(Pall, &ni));
688: PetscCall(ISGetIndices(Pall, &idxs));
689: for (i = 0; i < ni; i++) matis->sf_rootdata[idxs[i] - rst] = 1;
690: PetscCall(ISRestoreIndices(Pall, &idxs));
691: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
692: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
693: for (i = 0, ni = 0; i < n; i++)
694: if (matis->sf_leafdata[i]) widxs[ni++] = i;
695: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ni, widxs, PETSC_COPY_VALUES, &lPall));
696: }
698: if (!Pall) {
699: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
700: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
701: PetscCall(ISGetLocalSize(lPall, &ni));
702: PetscCall(ISGetIndices(lPall, &idxs));
703: for (i = 0; i < ni; i++) matis->sf_leafdata[idxs[i]] = 1;
704: PetscCall(ISRestoreIndices(lPall, &idxs));
705: PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
706: PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
707: for (i = 0, ni = 0; i < nl; i++)
708: if (matis->sf_rootdata[i]) widxs[ni++] = i + rst;
709: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &Pall));
710: }
711: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject)Pall));
713: if (flip) {
714: PetscInt npl;
715: PetscCall(ISGetLocalSize(Pall, &npl));
716: PetscCall(ISGetIndices(Pall, &idxs));
717: PetscCall(MatCreateVecs(A, NULL, &fetidp->rhs_flip));
718: PetscCall(VecSet(fetidp->rhs_flip, 1.));
719: PetscCall(VecSetOption(fetidp->rhs_flip, VEC_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
720: for (i = 0; i < npl; i++) PetscCall(VecSetValue(fetidp->rhs_flip, idxs[i], -1., INSERT_VALUES));
721: PetscCall(VecAssemblyBegin(fetidp->rhs_flip));
722: PetscCall(VecAssemblyEnd(fetidp->rhs_flip));
723: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_flip", (PetscObject)fetidp->rhs_flip));
724: PetscCall(ISRestoreIndices(Pall, &idxs));
725: }
726: PetscCall(ISDestroy(&Pall));
727: PetscCall(ISDestroy(&pII));
729: /* local selected pressures in subdomain-wise and global ordering */
730: PetscCall(PetscArrayzero(matis->sf_leafdata, n));
731: PetscCall(PetscArrayzero(matis->sf_rootdata, nl));
732: if (!ploc) {
733: PetscInt *widxs2;
735: PetscCheck(pP, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Missing parallel pressure IS");
736: PetscCall(ISGetLocalSize(pP, &ni));
737: PetscCall(ISGetIndices(pP, &idxs));
738: for (i = 0; i < ni; i++) matis->sf_rootdata[idxs[i] - rst] = 1;
739: PetscCall(ISRestoreIndices(pP, &idxs));
740: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
741: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
742: for (i = 0, ni = 0; i < n; i++)
743: if (matis->sf_leafdata[i]) widxs[ni++] = i;
744: PetscCall(PetscMalloc1(ni, &widxs2));
745: PetscCall(ISLocalToGlobalMappingApply(l2g, ni, widxs, widxs2));
746: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ni, widxs, PETSC_COPY_VALUES, &lP));
747: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lP", (PetscObject)lP));
748: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs2, PETSC_OWN_POINTER, &is1));
749: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_gP", (PetscObject)is1));
750: PetscCall(ISDestroy(&is1));
751: } else {
752: PetscCheck(lP, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing sequential pressure IS");
753: PetscCall(ISGetLocalSize(lP, &ni));
754: PetscCall(ISGetIndices(lP, &idxs));
755: for (i = 0; i < ni; i++)
756: if (idxs[i] >= 0 && idxs[i] < n) matis->sf_leafdata[idxs[i]] = 1;
757: PetscCall(ISRestoreIndices(lP, &idxs));
758: PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
759: PetscCall(ISLocalToGlobalMappingApply(l2g, ni, idxs, widxs));
760: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &is1));
761: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_gP", (PetscObject)is1));
762: PetscCall(ISDestroy(&is1));
763: PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, matis->sf_leafdata, matis->sf_rootdata, MPI_REPLACE));
764: for (i = 0, ni = 0; i < nl; i++)
765: if (matis->sf_rootdata[i]) widxs[ni++] = i + rst;
766: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), ni, widxs, PETSC_COPY_VALUES, &pP));
767: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pP", (PetscObject)pP));
768: }
769: PetscCall(PetscFree(widxs));
771: /* If there's any "interior pressure",
772: we may want to use a discrete harmonic solver instead
773: of a Stokes harmonic for the Dirichlet preconditioner
774: Need to extract the interior velocity dofs in interior dofs ordering (iV)
775: and interior pressure dofs in local ordering (iP) */
776: if (!allp) {
777: ISLocalToGlobalMapping l2g_t;
779: PetscCall(ISDifference(lPall, lP, &is1));
780: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_iP", (PetscObject)is1));
781: PetscCall(ISDifference(II, is1, &is2));
782: PetscCall(ISDestroy(&is1));
783: PetscCall(ISLocalToGlobalMappingCreateIS(II, &l2g_t));
784: PetscCall(ISGlobalToLocalMappingApplyIS(l2g_t, IS_GTOLM_DROP, is2, &is1));
785: PetscCall(ISGetLocalSize(is1, &i));
786: PetscCall(ISGetLocalSize(is2, &j));
787: PetscCheck(i == j, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local sizes %" PetscInt_FMT " and %" PetscInt_FMT " for iV", i, j);
788: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_iV", (PetscObject)is1));
789: PetscCall(ISLocalToGlobalMappingDestroy(&l2g_t));
790: PetscCall(ISDestroy(&is1));
791: PetscCall(ISDestroy(&is2));
792: }
793: PetscCall(ISDestroy(&II));
795: /* exclude selected pressures from the inner BDDC */
796: if (pcbddc->DirichletBoundariesLocal) {
797: IS list[2], plP, isout;
798: PetscInt np;
800: /* need a parallel IS */
801: PetscCall(ISGetLocalSize(lP, &np));
802: PetscCall(ISGetIndices(lP, &idxs));
803: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), np, idxs, PETSC_USE_POINTER, &plP));
804: list[0] = plP;
805: list[1] = pcbddc->DirichletBoundariesLocal;
806: PetscCall(ISConcatenate(PetscObjectComm((PetscObject)ksp), 2, list, &isout));
807: PetscCall(ISSortRemoveDups(isout));
808: PetscCall(ISDestroy(&plP));
809: PetscCall(ISRestoreIndices(lP, &idxs));
810: PetscCall(PCBDDCSetDirichletBoundariesLocal(fetidp->innerbddc, isout));
811: PetscCall(ISDestroy(&isout));
812: } else if (pcbddc->DirichletBoundaries) {
813: IS list[2], isout;
815: list[0] = pP;
816: list[1] = pcbddc->DirichletBoundaries;
817: PetscCall(ISConcatenate(PetscObjectComm((PetscObject)ksp), 2, list, &isout));
818: PetscCall(ISSortRemoveDups(isout));
819: PetscCall(PCBDDCSetDirichletBoundaries(fetidp->innerbddc, isout));
820: PetscCall(ISDestroy(&isout));
821: } else {
822: IS plP;
823: PetscInt np;
825: /* need a parallel IS */
826: PetscCall(ISGetLocalSize(lP, &np));
827: PetscCall(ISGetIndices(lP, &idxs));
828: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)ksp), np, idxs, PETSC_COPY_VALUES, &plP));
829: PetscCall(PCBDDCSetDirichletBoundariesLocal(fetidp->innerbddc, plP));
830: PetscCall(ISDestroy(&plP));
831: PetscCall(ISRestoreIndices(lP, &idxs));
832: }
834: /* save CSR information for the pressure BDDC solver (if any) */
835: if (schp) {
836: PetscInt np, nt;
838: PetscCall(MatGetSize(matis->A, &nt, NULL));
839: PetscCall(ISGetLocalSize(lP, &np));
840: if (np) {
841: PetscInt *xadj = pcbddc->mat_graph->xadj;
842: PetscInt *adjn = pcbddc->mat_graph->adjncy;
843: PetscInt nv = pcbddc->mat_graph->nvtxs_csr;
845: if (nv && nv == nt) {
846: ISLocalToGlobalMapping pmap;
847: PetscInt *schp_csr, *schp_xadj, *schp_adjn, p;
848: PetscContainer c;
850: PetscCall(ISLocalToGlobalMappingCreateIS(lPall, &pmap));
851: PetscCall(ISGetIndices(lPall, &idxs));
852: for (p = 0, nv = 0; p < np; p++) {
853: PetscInt x, n = idxs[p];
855: PetscCall(ISGlobalToLocalMappingApply(pmap, IS_GTOLM_DROP, xadj[n + 1] - xadj[n], adjn + xadj[n], &x, NULL));
856: nv += x;
857: }
858: PetscCall(PetscMalloc1(np + 1 + nv, &schp_csr));
859: schp_xadj = schp_csr;
860: schp_adjn = schp_csr + np + 1;
861: for (p = 0, schp_xadj[0] = 0; p < np; p++) {
862: PetscInt x, n = idxs[p];
864: PetscCall(ISGlobalToLocalMappingApply(pmap, IS_GTOLM_DROP, xadj[n + 1] - xadj[n], adjn + xadj[n], &x, schp_adjn + schp_xadj[p]));
865: schp_xadj[p + 1] = schp_xadj[p] + x;
866: }
867: PetscCall(ISRestoreIndices(lPall, &idxs));
868: PetscCall(ISLocalToGlobalMappingDestroy(&pmap));
869: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &c));
870: PetscCall(PetscContainerSetPointer(c, schp_csr));
871: PetscCall(PetscContainerSetCtxDestroy(c, PetscCtxDestroyDefault));
872: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pCSR", (PetscObject)c));
873: PetscCall(PetscContainerDestroy(&c));
874: }
875: }
876: }
877: PetscCall(ISDestroy(&lPall));
878: PetscCall(ISDestroy(&lP));
879: fetidp->pP = pP;
880: }
882: /* total number of selected pressure dofs */
883: PetscCall(ISGetSize(fetidp->pP, &totP));
885: /* Set operator for inner BDDC */
886: if (totP || fetidp->rhs_flip) {
887: PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &nA));
888: } else {
889: PetscCall(PetscObjectReference((PetscObject)A));
890: nA = A;
891: }
892: if (fetidp->rhs_flip) {
893: PetscCall(MatDiagonalScale(nA, fetidp->rhs_flip, NULL));
894: if (totP) {
895: Mat lA2;
897: PetscCall(MatISGetLocalMat(nA, &lA));
898: PetscCall(MatDuplicate(lA, MAT_COPY_VALUES, &lA2));
899: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lA", (PetscObject)lA2));
900: PetscCall(MatDestroy(&lA2));
901: }
902: }
904: if (totP) {
905: PetscCall(MatSetOption(nA, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));
906: PetscCall(MatZeroRowsColumnsIS(nA, fetidp->pP, 1., NULL, NULL));
907: } else {
908: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_lA", NULL));
909: }
910: PetscCall(MatGetNearNullSpace(Ap, &nnsp));
911: if (!nnsp) PetscCall(MatGetNullSpace(Ap, &nnsp));
912: if (!nnsp) PetscCall(MatGetNearNullSpace(A, &nnsp));
913: if (!nnsp) PetscCall(MatGetNullSpace(A, &nnsp));
914: PetscCall(MatSetNearNullSpace(nA, nnsp));
915: PetscCall(PCSetOperators(fetidp->innerbddc, nA, nA));
916: PetscCall(MatDestroy(&nA));
918: /* non-zero rhs on interior dofs when applying the preconditioner */
919: if (totP) pcbddc->switch_static = PETSC_TRUE;
921: /* if there are no interface pressures, set inner bddc flag for benign saddle point */
922: if (!totP) {
923: pcbddc->benign_saddle_point = PETSC_TRUE;
924: pcbddc->compute_nonetflux = PETSC_TRUE;
925: }
927: /* Operators for pressure preconditioner */
928: if (totP) {
929: /* Extract pressure block if needed */
930: if (!pisz) {
931: Mat C;
932: IS nzrows = NULL;
934: PetscCall(MatCreateSubMatrix(A, fetidp->pP, fetidp->pP, MAT_INITIAL_MATRIX, &C));
935: PetscCall(MatFindNonzeroRows(C, &nzrows));
936: if (nzrows) {
937: PetscInt i;
939: PetscCall(ISGetSize(nzrows, &i));
940: PetscCall(ISDestroy(&nzrows));
941: if (!i) pisz = PETSC_TRUE;
942: }
943: if (!pisz) {
944: PetscCall(MatScale(C, -1.)); /* i.e. Almost Incompressible Elasticity, Stokes discretized with Q1xQ1_stabilized */
945: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_C", (PetscObject)C));
946: }
947: PetscCall(MatDestroy(&C));
948: }
949: /* Divergence mat */
950: if (!pcbddc->divudotp) {
951: Mat B;
952: IS P;
953: IS l2l = NULL;
954: PetscBool save;
956: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject *)&P));
957: if (!pisz) {
958: IS F, V;
959: PetscInt m, M;
961: PetscCall(MatGetOwnershipRange(A, &m, &M));
962: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), M - m, m, 1, &F));
963: PetscCall(ISComplement(P, m, M, &V));
964: PetscCall(MatCreateSubMatrix(A, P, V, MAT_INITIAL_MATRIX, &B));
965: {
966: Mat_IS *Bmatis = (Mat_IS *)B->data;
967: PetscCall(PetscObjectReference((PetscObject)Bmatis->getsub_cis));
968: l2l = Bmatis->getsub_cis;
969: }
970: PetscCall(ISDestroy(&V));
971: PetscCall(ISDestroy(&F));
972: } else {
973: PetscCall(MatCreateSubMatrix(A, P, NULL, MAT_INITIAL_MATRIX, &B));
974: }
975: save = pcbddc->compute_nonetflux; /* SetDivergenceMat activates nonetflux computation */
976: PetscCall(PCBDDCSetDivergenceMat(fetidp->innerbddc, B, PETSC_FALSE, l2l));
977: pcbddc->compute_nonetflux = save;
978: PetscCall(MatDestroy(&B));
979: PetscCall(ISDestroy(&l2l));
980: }
981: if (A != Ap) { /* user has provided a different Pmat, this always supersedes the setter (TODO: is it OK?) */
982: /* use monolithic operator, we restrict later */
983: PetscCall(KSPFETIDPSetPressureOperator(ksp, Ap));
984: }
985: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_PPmat", (PetscObject *)&PPmat));
987: /* PPmat not present, use some default choice */
988: if (!PPmat) {
989: Mat C;
991: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_C", (PetscObject *)&C));
992: if (!schp && C) { /* non-zero pressure block, most likely Almost Incompressible Elasticity */
993: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
994: } else if (!pisz && schp) { /* we need the whole pressure mass matrix to define the interface BDDC */
995: IS P;
997: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject *)&P));
998: PetscCall(MatCreateSubMatrix(A, P, P, MAT_INITIAL_MATRIX, &C));
999: PetscCall(MatScale(C, -1.));
1000: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
1001: PetscCall(MatDestroy(&C));
1002: } else { /* identity (need to be scaled properly by the user using e.g. a Richardson method */
1003: PetscInt nl;
1005: PetscCall(ISGetLocalSize(fetidp->pP, &nl));
1006: PetscCall(MatCreate(PetscObjectComm((PetscObject)ksp), &C));
1007: PetscCall(MatSetSizes(C, nl, nl, totP, totP));
1008: PetscCall(MatSetType(C, MATAIJ));
1009: PetscCall(MatMPIAIJSetPreallocation(C, 1, NULL, 0, NULL));
1010: PetscCall(MatSeqAIJSetPreallocation(C, 1, NULL));
1011: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
1012: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
1013: PetscCall(MatShift(C, 1.));
1014: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
1015: PetscCall(MatDestroy(&C));
1016: }
1017: }
1019: /* Preconditioned operator for the pressure block */
1020: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_PPmat", (PetscObject *)&PPmat));
1021: if (PPmat) {
1022: Mat C;
1023: IS Pall;
1024: PetscInt AM, PAM, PAN, pam, pan, am, an, pl, pIl, pAg, pIg;
1026: PetscCall(PetscObjectQuery((PetscObject)fetidp->innerbddc, "__KSPFETIDP_aP", (PetscObject *)&Pall));
1027: PetscCall(MatGetSize(A, &AM, NULL));
1028: PetscCall(MatGetSize(PPmat, &PAM, &PAN));
1029: PetscCall(ISGetSize(Pall, &pAg));
1030: PetscCall(ISGetSize(fetidp->pP, &pIg));
1031: PetscCall(MatGetLocalSize(PPmat, &pam, &pan));
1032: PetscCall(MatGetLocalSize(A, &am, &an));
1033: PetscCall(ISGetLocalSize(Pall, &pIl));
1034: PetscCall(ISGetLocalSize(fetidp->pP, &pl));
1035: PetscCheck(PAM == PAN, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Pressure matrix must be square, unsupported %" PetscInt_FMT " x %" PetscInt_FMT, PAM, PAN);
1036: PetscCheck(pam == pan, PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Local sizes of pressure matrix must be equal, unsupported %" PetscInt_FMT " x %" PetscInt_FMT, pam, pan);
1037: PetscCheck(pam == am || pam == pl || pam == pIl, PETSC_COMM_SELF, PETSC_ERR_USER, "Invalid number of local rows %" PetscInt_FMT " for pressure matrix! Supported are %" PetscInt_FMT ", %" PetscInt_FMT " or %" PetscInt_FMT, pam, am, pl, pIl);
1038: PetscCheck(pan == an || pan == pl || pan == pIl, PETSC_COMM_SELF, PETSC_ERR_USER, "Invalid number of local columns %" PetscInt_FMT " for pressure matrix! Supported are %" PetscInt_FMT ", %" PetscInt_FMT " or %" PetscInt_FMT, pan, an, pl, pIl);
1039: if (PAM == AM) { /* monolithic ordering, restrict to pressure */
1040: if (schp) {
1041: PetscCall(MatCreateSubMatrix(PPmat, Pall, Pall, MAT_INITIAL_MATRIX, &C));
1042: } else {
1043: PetscCall(MatCreateSubMatrix(PPmat, fetidp->pP, fetidp->pP, MAT_INITIAL_MATRIX, &C));
1044: }
1045: } else if (pAg == PAM) { /* global ordering for pressure only */
1046: if (!allp && !schp) { /* solving for interface pressure only */
1047: IS restr;
1049: PetscCall(ISRenumber(fetidp->pP, NULL, NULL, &restr));
1050: PetscCall(MatCreateSubMatrix(PPmat, restr, restr, MAT_INITIAL_MATRIX, &C));
1051: PetscCall(ISDestroy(&restr));
1052: } else {
1053: PetscCall(PetscObjectReference((PetscObject)PPmat));
1054: C = PPmat;
1055: }
1056: } else if (pIg == PAM) { /* global ordering for selected pressure only */
1057: PetscCheck(!schp, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Need the entire matrix");
1058: PetscCall(PetscObjectReference((PetscObject)PPmat));
1059: C = PPmat;
1060: } else SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_USER, "Unable to use the pressure matrix");
1062: PetscCall(KSPFETIDPSetPressureOperator(ksp, C));
1063: PetscCall(MatDestroy(&C));
1064: } else SETERRQ(PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Missing Pmat for pressure block");
1065: } else { /* totP == 0 */
1066: PetscCall(PetscObjectCompose((PetscObject)fetidp->innerbddc, "__KSPFETIDP_pP", NULL));
1067: }
1068: }
1069: PetscFunctionReturn(PETSC_SUCCESS);
1070: }
static PetscErrorCode KSPSetUp_FETIDP(KSP ksp)
{
  KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
  PC_BDDC    *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
  PetscBool   flg;

  PetscFunctionBegin;
  /* Build/update the operators for the inner BDDC (and the pressure-related
     objects in the saddle-point case) before setting up the solvers */
  PetscCall(KSPFETIDPSetUpOperators(ksp));
  /* set up BDDC */
  PetscCall(PCSetErrorIfFailure(fetidp->innerbddc, ksp->errorifnotconverged));
  PetscCall(PCSetUp(fetidp->innerbddc));
  /* FETI-DP as it is implemented needs an exact coarse solver */
  if (pcbddc->coarse_ksp) {
    PetscCall(KSPSetTolerances(pcbddc->coarse_ksp, PETSC_SMALL, PETSC_SMALL, PETSC_CURRENT, 1000));
    PetscCall(KSPSetNormType(pcbddc->coarse_ksp, KSP_NORM_DEFAULT));
  }
  /* FETI-DP as it is implemented needs exact local Neumann solvers */
  PetscCall(KSPSetTolerances(pcbddc->ksp_R, PETSC_SMALL, PETSC_SMALL, PETSC_CURRENT, 1000));
  PetscCall(KSPSetNormType(pcbddc->ksp_R, KSP_NORM_DEFAULT));

  /* setup FETI-DP operators
     If fetidp->statechanged is true, we need to update the operators
     needed in the saddle-point case. This should be replaced
     by a better logic when the FETI-DP matrix and preconditioner will
     have their own classes */
  if (pcbddc->new_primal_space || fetidp->statechanged) {
    Mat F; /* the FETI-DP matrix */
    PC  D; /* the FETI-DP preconditioner */

    /* the inner KSP is rebuilt from scratch: reset it, then attach freshly
       created FETI-DP operators; F and D are reference-counted, so the
       local handles are destroyed right after being handed to the inner KSP */
    PetscCall(KSPReset(fetidp->innerksp));
    PetscCall(PCBDDCCreateFETIDPOperators(fetidp->innerbddc, fetidp->fully_redundant, ((PetscObject)ksp)->prefix, &F, &D));
    PetscCall(KSPSetOperators(fetidp->innerksp, F, F));
    /* mirror the outer solver tolerances on the inner solve */
    PetscCall(KSPSetTolerances(fetidp->innerksp, ksp->rtol, ksp->abstol, ksp->divtol, ksp->max_it));
    PetscCall(KSPSetPC(fetidp->innerksp, D));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)D, (PetscObject)fetidp->innerksp, 0));
    PetscCall(KSPSetFromOptions(fetidp->innerksp));
    PetscCall(MatCreateVecs(F, &fetidp->innerksp->vec_rhs, &fetidp->innerksp->vec_sol));
    PetscCall(MatDestroy(&F));
    PetscCall(PCDestroy(&D));
    if (fetidp->check) { /* optional verbose checks of the FETI-DP operators */
      PetscViewer viewer;

      if (!pcbddc->dbg_viewer) {
        viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
      } else {
        viewer = pcbddc->dbg_viewer;
      }
      PetscCall(KSPFETIDPCheckOperators(ksp, viewer));
    }
  }
  /* operators are now up to date; clear the rebuild triggers */
  fetidp->statechanged     = PETSC_FALSE;
  pcbddc->new_primal_space = PETSC_FALSE;

  /* propagate settings to the inner solve */
  PetscCall(KSPGetComputeSingularValues(ksp, &flg));
  PetscCall(KSPSetComputeSingularValues(fetidp->innerksp, flg));
  if (ksp->res_hist) PetscCall(KSPSetResidualHistory(fetidp->innerksp, ksp->res_hist, ksp->res_hist_max, ksp->res_hist_reset));
  PetscCall(KSPSetErrorIfNotConverged(fetidp->innerksp, ksp->errorifnotconverged));
  PetscCall(KSPSetUp(fetidp->innerksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode KSPSolve_FETIDP(KSP ksp)
{
  Mat                F, A;
  MatNullSpace       nsp;
  Vec                X, B, Xl, Bl;
  KSP_FETIDP        *fetidp = (KSP_FETIDP *)ksp->data;
  PC_BDDC           *pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
  KSPConvergedReason reason;
  PC                 pc;
  PCFailedReason     pcreason;
  PetscInt           hist_len;
  int                flg;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));
  if (fetidp->saddlepoint) PetscCall(PetscCitationsRegister(citation2, &cited2));
  /* map the physical rhs B onto the Lagrange multiplier space (Bl) */
  PetscCall(KSPGetOperators(ksp, &A, NULL));
  PetscCall(KSPGetRhs(ksp, &B));
  PetscCall(KSPGetSolution(ksp, &X));
  PetscCall(KSPGetOperators(fetidp->innerksp, &F, NULL));
  PetscCall(KSPGetRhs(fetidp->innerksp, &Bl));
  PetscCall(KSPGetSolution(fetidp->innerksp, &Xl));
  PetscCall(PCBDDCMatFETIDPGetRHS(F, B, Bl));
  /* the actual iteration happens on the inner (multiplier-space) KSP */
  if (ksp->transpose_solve) {
    PetscCall(KSPSolveTranspose(fetidp->innerksp, Bl, Xl));
  } else {
    PetscCall(KSPSolve(fetidp->innerksp, Bl, Xl));
  }
  /* detect inner solve failure; running out of iterations is tolerated */
  PetscCall(KSPGetConvergedReason(fetidp->innerksp, &reason));
  PetscCall(KSPGetPC(fetidp->innerksp, &pc));
  PetscCall(PCGetFailedReason(pc, &pcreason));
  flg = (reason < 0 && reason != KSP_DIVERGED_ITS) || pcreason;
  PetscCall(VecFlag(Xl, flg)); /* invalidate the multiplier solution on failure */
  if (flg) {
    PetscInt its;

    PetscCall(KSPGetIterationNumber(fetidp->innerksp, &its));
    /* NOTE(review): ksp->reason is overwritten below with the inner converged
       reason; confirm whether KSP_DIVERGED_PC_FAILED should take precedence
       when only pcreason (and not reason < 0) triggered the failure */
    ksp->reason = KSP_DIVERGED_PC_FAILED;
    PetscCall(PetscInfo(ksp, "Inner KSP solve failed: %s %s at iteration %" PetscInt_FMT "\n", KSPConvergedReasons[reason], PCFailedReasons[pcreason], its));
  }
  /* map the multiplier solution Xl back to the physical solution X */
  PetscCall(PCBDDCMatFETIDPGetSolution(F, Xl, X));
  PetscCall(MatGetNullSpace(A, &nsp));
  if (nsp) PetscCall(MatNullSpaceRemove(nsp, X));
  /* update ksp with stats from inner ksp */
  PetscCall(KSPGetConvergedReason(fetidp->innerksp, &ksp->reason));
  PetscCall(KSPGetIterationNumber(fetidp->innerksp, &ksp->its));
  ksp->totalits += ksp->its;
  PetscCall(KSPGetResidualHistory(fetidp->innerksp, NULL, &hist_len));
  ksp->res_hist_len = (size_t)hist_len;
  /* restore defaults for inner BDDC (Pre/PostSolve flags) */
  pcbddc->temp_solution_used        = PETSC_FALSE;
  pcbddc->rhs_change                = PETSC_FALSE;
  pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1189: static PetscErrorCode KSPReset_FETIDP(KSP ksp)
1190: {
1191: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
1192: PC_BDDC *pcbddc;
1194: PetscFunctionBegin;
1195: PetscCall(ISDestroy(&fetidp->pP));
1196: PetscCall(VecDestroy(&fetidp->rhs_flip));
1197: /* avoid PCReset that does not take into account ref counting */
1198: PetscCall(PCDestroy(&fetidp->innerbddc));
1199: PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &fetidp->innerbddc));
1200: PetscCall(PCSetType(fetidp->innerbddc, PCBDDC));
1201: pcbddc = (PC_BDDC *)fetidp->innerbddc->data;
1202: pcbddc->symmetric_primal = PETSC_FALSE;
1203: PetscCall(KSPDestroy(&fetidp->innerksp));
1204: fetidp->saddlepoint = PETSC_FALSE;
1205: fetidp->matstate = -1;
1206: fetidp->matnnzstate = -1;
1207: fetidp->statechanged = PETSC_TRUE;
1208: PetscFunctionReturn(PETSC_SUCCESS);
1209: }
1211: static PetscErrorCode KSPDestroy_FETIDP(KSP ksp)
1212: {
1213: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
1215: PetscFunctionBegin;
1216: PetscCall(KSPReset_FETIDP(ksp));
1217: PetscCall(PCDestroy(&fetidp->innerbddc));
1218: PetscCall(KSPDestroy(&fetidp->innerksp));
1219: PetscCall(PetscFree(fetidp->monctx));
1220: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetInnerBDDC_C", NULL));
1221: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerBDDC_C", NULL));
1222: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerKSP_C", NULL));
1223: PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetPressureOperator_C", NULL));
1224: PetscCall(PetscFree(ksp->data));
1225: PetscFunctionReturn(PETSC_SUCCESS);
1226: }
1228: static PetscErrorCode KSPView_FETIDP(KSP ksp, PetscViewer viewer)
1229: {
1230: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
1231: PetscBool iascii;
1233: PetscFunctionBegin;
1234: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1235: if (iascii) {
1236: PetscCall(PetscViewerASCIIPrintf(viewer, " fully redundant: %d\n", fetidp->fully_redundant));
1237: PetscCall(PetscViewerASCIIPrintf(viewer, " saddle point: %d\n", fetidp->saddlepoint));
1238: PetscCall(PetscViewerASCIIPrintf(viewer, "Inner KSP solver details\n"));
1239: }
1240: PetscCall(PetscViewerASCIIPushTab(viewer));
1241: PetscCall(KSPView(fetidp->innerksp, viewer));
1242: PetscCall(PetscViewerASCIIPopTab(viewer));
1243: if (iascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Inner BDDC solver details\n"));
1244: PetscCall(PetscViewerASCIIPushTab(viewer));
1245: PetscCall(PCView(fetidp->innerbddc, viewer));
1246: PetscCall(PetscViewerASCIIPopTab(viewer));
1247: PetscFunctionReturn(PETSC_SUCCESS);
1248: }
1250: static PetscErrorCode KSPSetFromOptions_FETIDP(KSP ksp, PetscOptionItems PetscOptionsObject)
1251: {
1252: KSP_FETIDP *fetidp = (KSP_FETIDP *)ksp->data;
1254: PetscFunctionBegin;
1255: /* set options prefixes for the inner objects, since the parent prefix will be valid at this point */
1256: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fetidp->innerksp, ((PetscObject)ksp)->prefix));
1257: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)fetidp->innerksp, "fetidp_"));
1258: if (!fetidp->userbddc) {
1259: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fetidp->innerbddc, ((PetscObject)ksp)->prefix));
1260: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)fetidp->innerbddc, "fetidp_bddc_"));
1261: }
1262: PetscOptionsHeadBegin(PetscOptionsObject, "KSP FETIDP options");
1263: PetscCall(PetscOptionsBool("-ksp_fetidp_fullyredundant", "Use fully redundant multipliers", "none", fetidp->fully_redundant, &fetidp->fully_redundant, NULL));
1264: PetscCall(PetscOptionsBool("-ksp_fetidp_saddlepoint", "Activates support for saddle-point problems", NULL, fetidp->saddlepoint, &fetidp->saddlepoint, NULL));
1265: PetscCall(PetscOptionsBool("-ksp_fetidp_check", "Activates verbose debugging output FETI-DP operators", NULL, fetidp->check, &fetidp->check, NULL));
1266: PetscOptionsHeadEnd();
1267: PetscCall(PCSetFromOptions(fetidp->innerbddc));
1268: PetscFunctionReturn(PETSC_SUCCESS);
1269: }
1271: /*MC
1272: KSPFETIDP - The FETI-DP method {cite}`farhat2001feti`
1274: Options Database Keys:
1275: + -ksp_fetidp_fullyredundant <false> - use a fully redundant set of Lagrange multipliers
1276: . -ksp_fetidp_saddlepoint <false> - activates support for saddle point problems, see {cite}`tu2015feti`
1277: . -ksp_fetidp_saddlepoint_flip <false> - see note below
1278: . -ksp_fetidp_pressure_field <-1> - activates support for saddle point problems, and identifies the pressure field id.
1279: If this information is not provided, the pressure field is detected by using `MatFindZeroDiagonals()`.
1280: - -ksp_fetidp_pressure_all <false> - if false, uses the interface pressures, as described in [2]. If true, uses the entire pressure field.
   Level: advanced
1284: Notes:
1285: The matrix for the `KSP` must be of type `MATIS`.
1287: Usually, an incompressible Stokes problem is written as
1288: .vb
1289: | A B^T | | v | = | f |
1290: | B 0 | | p | = | g |
1291: .ve
1292: with B representing $ -\int_\Omega \nabla \cdot u q $. If -ksp_fetidp_saddlepoint_flip is true, the code assumes that the user provides it as
1293: .vb
1294: | A B^T | | v | = | f |
1295: |-B 0 | | p | = |-g |
1296: .ve
1298: The FETI-DP linear system (automatically generated constructing an internal `PCBDDC` object) is solved using an internal `KSP` object.
1300: Options for the inner `KSP` and for the customization of the `PCBDDC` object can be specified at command line by using the prefixes `-fetidp_` and `-fetidp_bddc_`. E.g.,
1301: .vb
1302: -fetidp_ksp_type gmres -fetidp_bddc_pc_bddc_symmetric false
1303: .ve
1304: will use `KSPGMRES` for the solution of the linear system on the Lagrange multipliers, generated using a non-symmetric `PCBDDC`.
1306: For saddle point problems with continuous pressures, the preconditioned operator for the pressure solver can be specified with `KSPFETIDPSetPressureOperator()`.
   Alternatively, the pressure operator is extracted from the preconditioning matrix (if it is different from the linear solver matrix).
1308: If none of the above, an identity matrix will be created; the user then needs to scale it through a Richardson solver.
   Options for the pressure solver can be prefixed with `-fetidp_fieldsplit_p_`, e.g.
1310: .vb
  -fetidp_fieldsplit_p_ksp_type preonly -fetidp_fieldsplit_p_pc_type lu -fetidp_fieldsplit_p_pc_factor_mat_solver_type mumps
1312: .ve
1313: In order to use the deluxe version of FETI-DP, you must customize the inner `PCBDDC` operator with -fetidp_bddc_pc_bddc_use_deluxe_scaling -fetidp_bddc_pc_bddc_deluxe_singlemat and use
1314: non-redundant multipliers, i.e. `-ksp_fetidp_fullyredundant false`. Options for the scaling solver are prefixed by `-fetidp_bddelta_`, E.g.
1315: .vb
1316: -fetidp_bddelta_pc_factor_mat_solver_type mumps -fetidp_bddelta_pc_type lu
1317: .ve
1319: Some of the basic options such as the maximum number of iterations and tolerances are automatically passed from this `KSP` to the inner `KSP` that actually performs the iterations.
1321: The converged reason and number of iterations computed are passed from the inner `KSP` to this `KSP` at the end of the solution.
1323: Developer Note:
1324: Even though this method does not directly use any norms, the user is allowed to set the `KSPNormType` to any value.
1325: This is so users do not have to change `KSPNormType` options when they switch from other `KSP` methods to this one.
1327: .seealso: [](ch_ksp), `MATIS`, `PCBDDC`, `KSPFETIDPSetInnerBDDC()`, `KSPFETIDPGetInnerBDDC()`, `KSPFETIDPGetInnerKSP()`
1328: M*/
PETSC_EXTERN PetscErrorCode KSPCreate_FETIDP(KSP ksp)
{
  KSP_FETIDP    *fetidp;
  KSP_FETIDPMon *monctx;
  PC_BDDC       *pcbddc;
  PC             pc;

  PetscFunctionBegin;
  /* declare the norm types this KSP accepts; the iteration itself is
     delegated to the inner KSP, so all norm types are supported */
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NONE, PC_LEFT, 3));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NONE, PC_RIGHT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_PRECONDITIONED, PC_LEFT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_PRECONDITIONED, PC_RIGHT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_LEFT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_UNPRECONDITIONED, PC_RIGHT, 2));
  PetscCall(KSPSetSupportedNorm(ksp, KSP_NORM_NATURAL, PC_LEFT, 2));

  PetscCall(PetscNew(&fetidp));
  /* matstate/matnnzstate cache the operator state; -1 forces a full
     operator setup on the first KSPSetUp */
  fetidp->matstate     = -1;
  fetidp->matnnzstate  = -1;
  fetidp->statechanged = PETSC_TRUE;

  /* install the FETI-DP implementations of the KSP operations */
  ksp->data                              = (void *)fetidp;
  ksp->ops->setup                        = KSPSetUp_FETIDP;
  ksp->ops->solve                        = KSPSolve_FETIDP;
  ksp->ops->destroy                      = KSPDestroy_FETIDP;
  ksp->ops->computeeigenvalues           = KSPComputeEigenvalues_FETIDP;
  ksp->ops->computeextremesingularvalues = KSPComputeExtremeSingularValues_FETIDP;
  ksp->ops->view                         = KSPView_FETIDP;
  ksp->ops->setfromoptions               = KSPSetFromOptions_FETIDP;
  ksp->ops->buildsolution                = KSPBuildSolution_FETIDP;
  ksp->ops->buildresidual                = KSPBuildResidualDefault;
  /* this KSP does not use its own PC */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCNONE));
  /* create the inner KSP for the Lagrange multipliers */
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksp), &fetidp->innerksp));
  PetscCall(KSPGetPC(fetidp->innerksp, &pc));
  PetscCall(PCSetType(pc, PCNONE));
  /* monitor: forwards inner-KSP monitoring through the parent KSP */
  PetscCall(PetscNew(&monctx));
  monctx->parentksp = ksp;
  fetidp->monctx    = monctx;
  PetscCall(KSPMonitorSet(fetidp->innerksp, KSPMonitor_FETIDP, fetidp->monctx, NULL));
  /* create the inner BDDC */
  PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &fetidp->innerbddc));
  PetscCall(PCSetType(fetidp->innerbddc, PCBDDC));
  /* make sure we always obtain a consistent FETI-DP matrix
     for symmetric problems, the user can always customize it through the command line */
  pcbddc                   = (PC_BDDC *)fetidp->innerbddc->data;
  pcbddc->symmetric_primal = PETSC_FALSE;
  /* composed functions: the public KSPFETIDPGet/Set* API dispatches to these */
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetInnerBDDC_C", KSPFETIDPSetInnerBDDC_FETIDP));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerBDDC_C", KSPFETIDPGetInnerBDDC_FETIDP));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPGetInnerKSP_C", KSPFETIDPGetInnerKSP_FETIDP));
  PetscCall(PetscObjectComposeFunction((PetscObject)ksp, "KSPFETIDPSetPressureOperator_C", KSPFETIDPSetPressureOperator_FETIDP));
  /* need to call KSPSetUp_FETIDP even with KSP_SETUP_NEWMATRIX */
  ksp->setupnewmatrix = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}