Actual source code: mg.c
1: /*
2: Defines the multigrid preconditioner interface.
3: */
4: #include <petsc/private/pcmgimpl.h>
5: #include <petsc/private/kspimpl.h>
6: #include <petscdm.h>
7: PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);
9: /*
10: Contains the list of registered coarse space construction routines
11: */
12: PetscFunctionList PCMGCoarseList = NULL;
14: PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
15: {
16: PC_MG *mg = (PC_MG *)pc->data;
17: PC_MG_Levels *mgc, *mglevels = *mglevelsin;
18: PetscInt cycles = (mglevels->level == 1) ? 1 : mglevels->cycles;
20: PetscFunctionBegin;
21: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
22: if (!transpose) {
23: if (matapp) {
24: PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
25: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
26: } else {
27: PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
28: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
29: }
30: } else {
31: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
32: PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
33: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
34: }
35: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
36: if (mglevels->level) { /* not the coarsest grid */
37: if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
38: if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
39: if (!transpose) {
40: if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
41: else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
42: } else {
43: if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
44: else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
45: }
46: if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));
48: /* if on finest level and have convergence criteria set */
49: if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
50: PetscReal rnorm;
51: PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
52: if (rnorm <= mg->ttol) {
53: if (rnorm < mg->abstol) {
54: *reason = PCRICHARDSON_CONVERGED_ATOL;
55: PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
56: } else {
57: *reason = PCRICHARDSON_CONVERGED_RTOL;
58: PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
59: }
60: PetscFunctionReturn(PETSC_SUCCESS);
61: }
62: }
64: mgc = *(mglevelsin - 1);
65: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
66: if (!transpose) {
67: if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
68: else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
69: } else {
70: if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
71: else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
72: }
73: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
74: if (matapp) {
75: if (!mgc->X) {
76: PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
77: } else {
78: PetscCall(MatZeroEntries(mgc->X));
79: }
80: } else {
81: PetscCall(VecZeroEntries(mgc->x));
82: }
83: while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
84: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
85: if (!transpose) {
86: if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
87: else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
88: } else {
89: PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
90: }
91: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
92: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
93: if (!transpose) {
94: if (matapp) {
95: PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
96: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
97: } else {
98: PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
99: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
100: }
101: } else {
102: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
103: PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
104: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
105: }
106: if (mglevels->cr) {
107: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
108: /* TODO Turn on copy and turn off noisy if we have an exact solution
109: PetscCall(VecCopy(mglevels->x, mglevels->crx));
110: PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
111: PetscCall(KSPSetNoisy_Private(mglevels->crx));
112: PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
113: PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
114: }
115: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
116: }
117: PetscFunctionReturn(PETSC_SUCCESS);
118: }
120: static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
121: {
122: PC_MG *mg = (PC_MG *)pc->data;
123: PC_MG_Levels **mglevels = mg->levels;
124: PC tpc;
125: PetscBool changeu, changed;
126: PetscInt levels = mglevels[0]->levels, i;
128: PetscFunctionBegin;
129: /* When the DM is supplying the matrix then it will not exist until here */
130: for (i = 0; i < levels; i++) {
131: if (!mglevels[i]->A) {
132: PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
133: PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
134: }
135: }
137: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
138: PetscCall(PCPreSolveChangeRHS(tpc, &changed));
139: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
140: PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
141: if (!changed && !changeu) {
142: PetscCall(VecDestroy(&mglevels[levels - 1]->b));
143: mglevels[levels - 1]->b = b;
144: } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
145: if (!mglevels[levels - 1]->b) {
146: Vec *vec;
148: PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
149: mglevels[levels - 1]->b = *vec;
150: PetscCall(PetscFree(vec));
151: }
152: PetscCall(VecCopy(b, mglevels[levels - 1]->b));
153: }
154: mglevels[levels - 1]->x = x;
156: mg->rtol = rtol;
157: mg->abstol = abstol;
158: mg->dtol = dtol;
159: if (rtol) {
160: /* compute initial residual norm for relative convergence test */
161: PetscReal rnorm;
162: if (zeroguess) {
163: PetscCall(VecNorm(b, NORM_2, &rnorm));
164: } else {
165: PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
166: PetscCall(VecNorm(w, NORM_2, &rnorm));
167: }
168: mg->ttol = PetscMax(rtol * rnorm, abstol);
169: } else if (abstol) mg->ttol = abstol;
170: else mg->ttol = 0.0;
172: /* since the smoother is applied to the full system, not just the residual, we need to make sure that the smoothers
173: do not stop prematurely due to a small residual */
174: for (i = 1; i < levels; i++) {
175: PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
176: if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
177: /* For Richardson the initial guess is nonzero since in each cycle it solves the original system, not just applies a preconditioner */
178: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
179: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
180: }
181: }
183: *reason = PCRICHARDSON_NOT_SET;
184: for (i = 0; i < its; i++) {
185: PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
186: if (*reason) break;
187: }
188: if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS;
189: *outits = i;
190: if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
191: PetscFunctionReturn(PETSC_SUCCESS);
192: }
194: PetscErrorCode PCReset_MG(PC pc)
195: {
196: PC_MG *mg = (PC_MG *)pc->data;
197: PC_MG_Levels **mglevels = mg->levels;
198: PetscInt i, n;
200: PetscFunctionBegin;
201: if (mglevels) {
202: n = mglevels[0]->levels;
203: for (i = 0; i < n - 1; i++) {
204: PetscCall(VecDestroy(&mglevels[i + 1]->r));
205: PetscCall(VecDestroy(&mglevels[i]->b));
206: PetscCall(VecDestroy(&mglevels[i]->x));
207: PetscCall(MatDestroy(&mglevels[i + 1]->R));
208: PetscCall(MatDestroy(&mglevels[i]->B));
209: PetscCall(MatDestroy(&mglevels[i]->X));
210: PetscCall(VecDestroy(&mglevels[i]->crx));
211: PetscCall(VecDestroy(&mglevels[i]->crb));
212: PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
213: PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
214: PetscCall(MatDestroy(&mglevels[i + 1]->inject));
215: PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
216: }
217: PetscCall(VecDestroy(&mglevels[n - 1]->crx));
218: PetscCall(VecDestroy(&mglevels[n - 1]->crb));
219: /* this is non-NULL only if the smoother on the finest level
220: changes the rhs during PreSolve */
221: PetscCall(VecDestroy(&mglevels[n - 1]->b));
222: PetscCall(MatDestroy(&mglevels[n - 1]->B));
224: for (i = 0; i < n; i++) {
225: PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
226: PetscCall(MatDestroy(&mglevels[i]->A));
227: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
228: PetscCall(KSPReset(mglevels[i]->smoothu));
229: if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
230: }
231: mg->Nc = 0;
232: }
233: PetscFunctionReturn(PETSC_SUCCESS);
234: }
236: /* Implementing CR
238: We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is
240: Inj^T (Inj Inj^T)^{-1} Inj
242: and if Inj is a VecScatter, as it is now in PETSc, we have
244: Inj^T Inj
246: and
248: S = I - Inj^T Inj
250: since
252: Inj S = Inj - (Inj Inj^T) Inj = 0.
254: Brannick suggests
256: A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S
258: but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use
260: M^{-1} A \to S M^{-1} A S
262: In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.
264: Check: || Inj P - I ||_F < tol
265: Check: In general, Inj Inj^T = I
266: */
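/* A small worked example of the projector above (illustration only), assuming Inj selects each
   coarse point exactly once from a fine vector of length 4:

       Inj = [ 1 0 0 0 ]          so that   Inj Inj^T = I  (2 x 2)
             [ 0 0 1 0 ]

   and therefore

       S = I - Inj^T Inj = diag(0, 1, 0, 1),

   i.e. S zeroes the fine entries the coarse grid sees and keeps the complementary (compatible
   relaxation) degrees of freedom, with Inj S = 0 as required.
*/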
268: typedef struct {
269: PC mg; /* The PCMG object */
270: PetscInt l; /* The multigrid level for this solver */
271: Mat Inj; /* The injection matrix */
272: Mat S; /* I - Inj^T Inj */
273: } CRContext;
275: static PetscErrorCode CRSetup_Private(PC pc)
276: {
277: CRContext *ctx;
278: Mat It;
280: PetscFunctionBeginUser;
281: PetscCall(PCShellGetContext(pc, &ctx));
282: PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
283: PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
284: PetscCall(MatCreateTranspose(It, &ctx->Inj));
285: PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
286: PetscCall(MatScale(ctx->S, -1.0));
287: PetscCall(MatShift(ctx->S, 1.0));
288: PetscFunctionReturn(PETSC_SUCCESS);
289: }
291: static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
292: {
293: CRContext *ctx;
295: PetscFunctionBeginUser;
296: PetscCall(PCShellGetContext(pc, &ctx));
297: PetscCall(MatMult(ctx->S, x, y));
298: PetscFunctionReturn(PETSC_SUCCESS);
299: }
301: static PetscErrorCode CRDestroy_Private(PC pc)
302: {
303: CRContext *ctx;
305: PetscFunctionBeginUser;
306: PetscCall(PCShellGetContext(pc, &ctx));
307: PetscCall(MatDestroy(&ctx->Inj));
308: PetscCall(MatDestroy(&ctx->S));
309: PetscCall(PetscFree(ctx));
310: PetscCall(PCShellSetContext(pc, NULL));
311: PetscFunctionReturn(PETSC_SUCCESS);
312: }
314: static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
315: {
316: CRContext *ctx;
318: PetscFunctionBeginUser;
319: PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
320: PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
321: PetscCall(PetscCalloc1(1, &ctx));
322: ctx->mg = pc;
323: ctx->l = l;
324: PetscCall(PCSetType(*cr, PCSHELL));
325: PetscCall(PCShellSetContext(*cr, ctx));
326: PetscCall(PCShellSetApply(*cr, CRApply_Private));
327: PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
328: PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
329: PetscFunctionReturn(PETSC_SUCCESS);
330: }
332: PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *);
334: PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
335: {
336: PC_MG *mg = (PC_MG *)pc->data;
337: MPI_Comm comm;
338: PC_MG_Levels **mglevels = mg->levels;
339: PCMGType mgtype = mg->am;
340: PetscInt mgctype = (PetscInt)PC_MG_CYCLE_V;
341: PetscInt i;
342: PetscMPIInt size;
343: const char *prefix;
344: PC ipc;
345: PetscInt n;
347: PetscFunctionBegin;
350: if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
351: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
352: if (mglevels) {
353: mgctype = mglevels[0]->cycles;
354: /* changing the number of levels so free up the previous stuff */
355: PetscCall(PCReset_MG(pc));
356: n = mglevels[0]->levels;
357: for (i = 0; i < n; i++) {
358: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
359: PetscCall(KSPDestroy(&mglevels[i]->smoothu));
360: PetscCall(KSPDestroy(&mglevels[i]->cr));
361: PetscCall(PetscFree(mglevels[i]));
362: }
363: PetscCall(PetscFree(mg->levels));
364: }
366: mg->nlevels = levels;
368: PetscCall(PetscMalloc1(levels, &mglevels));
370: PetscCall(PCGetOptionsPrefix(pc, &prefix));
372: mg->stageApply = 0;
373: for (i = 0; i < levels; i++) {
374: PetscCall(PetscNew(&mglevels[i]));
376: mglevels[i]->level = i;
377: mglevels[i]->levels = levels;
378: mglevels[i]->cycles = mgctype;
379: mg->default_smoothu = 2;
380: mg->default_smoothd = 2;
381: mglevels[i]->eventsmoothsetup = 0;
382: mglevels[i]->eventsmoothsolve = 0;
383: mglevels[i]->eventresidual = 0;
384: mglevels[i]->eventinterprestrict = 0;
386: if (comms) comm = comms[i];
387: if (comm != MPI_COMM_NULL) {
388: PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
389: PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel));
390: PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
391: PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
392: PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
393: PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
394: if (i == 0 && levels > 1) { // coarse grid
395: PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));
397: /* coarse solve is (redundant) LU by default; set an INBLOCKS shift to avoid an annoying zero pivot in the LU preconditioner */
398: PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
399: PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
400: PetscCallMPI(MPI_Comm_size(comm, &size));
401: if (size > 1) {
402: PetscCall(PCSetType(ipc, PCREDUNDANT));
403: } else {
404: PetscCall(PCSetType(ipc, PCLU));
405: }
406: PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
407: } else {
408: char tprefix[128];
410: PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
411: PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
412: PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
413: PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
414: PetscCall(PCSetType(ipc, PCSOR));
415: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
417: if (i == levels - 1 && levels > 1) { // on the finest level use the 'mg_fine_' prefix if the user supplied such options, otherwise 'mg_levels_<i>_'
418: PetscBool set;
419: PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set));
420: if (set) {
421: if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix));
422: else PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_"));
423: PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix));
424: } else {
425: PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
426: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
427: }
428: } else {
429: PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
430: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
431: }
432: }
433: }
434: mglevels[i]->smoothu = mglevels[i]->smoothd;
435: mg->rtol = 0.0;
436: mg->abstol = 0.0;
437: mg->dtol = 0.0;
438: mg->ttol = 0.0;
439: mg->cyclesperpcapply = 1;
440: }
441: mg->levels = mglevels;
442: PetscCall(PCMGSetType(pc, mgtype));
443: PetscFunctionReturn(PETSC_SUCCESS);
444: }
446: /*@C
447: PCMGSetLevels - Sets the number of levels to use with `PCMG`.
448: Must be called before any other `PCMG` routine.
450: Logically Collective
452: Input Parameters:
453: + pc - the preconditioner context
454: . levels - the number of levels
455: - comms - optional communicators for each level; this allows solving the coarser problems
456: on smaller sets of processes. For processes that are not included in the computation
457: you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes
458: should participate in each level of the problem.
460: Level: intermediate
462: Notes:
463: If the number of levels is one then the multigrid uses the `-mg_levels` prefix
464: for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix.
466: You can free the information in comms after this routine is called.
468: The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level
469: are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on
470: the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
471: of size 2 and 1, while rank 1 in the original communicator will pass in an array of 2 communicators,
472: the first of size 2 and the second of value `MPI_COMM_NULL`, since rank 1 does not participate
473: in the coarse grid solve.
475: Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
476: must take special care in providing the restriction and interpolation operations. We recommend
477: providing these as two-step operations: first perform a standard restriction or interpolation on
478: the full number of ranks for that level and then use an MPI call to copy the resulting vector
479: array entries (after calls to VecGetArray()) to the smaller or larger number of ranks. Note that in both
480: cases the MPI calls must be made on the larger of the two communicators. Traditional MPI sends and
481: receives or `MPI_Alltoallv()` could be used to do the reshuffling of the vector entries.
483: Fortran Notes:
484: Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM`
485: is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc.
487: .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()`
488: @*/
489: PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms)
490: {
491: PetscFunctionBegin;
493: if (comms) PetscAssertPointer(comms, 3);
494: PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms));
495: PetscFunctionReturn(PETSC_SUCCESS);
496: }
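/* A minimal usage sketch for PCMGSetLevels() (illustration only; it assumes a Mat A and Vec b, x
   already exist, that all ranks participate on every level (comms = NULL), and that the
   interpolation/restriction operators are supplied later, e.g. via PCMGSetInterpolation() or a DM
   attached to the KSP):

     KSP ksp;
     PC  pc;
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCMG));
     PetscCall(PCMGSetLevels(pc, 3, NULL));            // three-level hierarchy on all ranks
     PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));
*/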
498: PetscErrorCode PCDestroy_MG(PC pc)
499: {
500: PC_MG *mg = (PC_MG *)pc->data;
501: PC_MG_Levels **mglevels = mg->levels;
502: PetscInt i, n;
504: PetscFunctionBegin;
505: PetscCall(PCReset_MG(pc));
506: if (mglevels) {
507: n = mglevels[0]->levels;
508: for (i = 0; i < n; i++) {
509: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
510: PetscCall(KSPDestroy(&mglevels[i]->smoothu));
511: PetscCall(KSPDestroy(&mglevels[i]->cr));
512: PetscCall(PetscFree(mglevels[i]));
513: }
514: PetscCall(PetscFree(mg->levels));
515: }
516: PetscCall(PetscFree(pc->data));
517: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
518: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
519: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL));
520: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL));
521: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL));
522: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
523: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
524: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL));
525: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
526: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
527: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
528: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
529: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
530: PetscFunctionReturn(PETSC_SUCCESS);
531: }
533: /*
534: PCApply_MG - Runs either an additive, multiplicative, Kaskadic
535: or full cycle of multigrid.
537: Note:
538: A simple wrapper that calls PCMGMCycle_Private(), PCMGACycle_Private(), PCMGKCycle_Private(), or PCMGFCycle_Private().
539: */
540: static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
541: {
542: PC_MG *mg = (PC_MG *)pc->data;
543: PC_MG_Levels **mglevels = mg->levels;
544: PC tpc;
545: PetscInt levels = mglevels[0]->levels, i;
546: PetscBool changeu, changed, matapp;
548: PetscFunctionBegin;
549: matapp = (PetscBool)(B && X);
550: if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
551: /* When the DM is supplying the matrix then it will not exist until here */
552: for (i = 0; i < levels; i++) {
553: if (!mglevels[i]->A) {
554: PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
555: PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
556: }
557: }
559: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
560: PetscCall(PCPreSolveChangeRHS(tpc, &changed));
561: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
562: PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
563: if (!changeu && !changed) {
564: if (matapp) {
565: PetscCall(MatDestroy(&mglevels[levels - 1]->B));
566: mglevels[levels - 1]->B = B;
567: } else {
568: PetscCall(VecDestroy(&mglevels[levels - 1]->b));
569: mglevels[levels - 1]->b = b;
570: }
571: } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
572: if (matapp) {
573: if (mglevels[levels - 1]->B) {
574: PetscInt N1, N2;
575: PetscBool flg;
577: PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
578: PetscCall(MatGetSize(B, NULL, &N2));
579: PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
580: if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
581: }
582: if (!mglevels[levels - 1]->B) {
583: PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
584: } else {
585: PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
586: }
587: } else {
588: if (!mglevels[levels - 1]->b) {
589: Vec *vec;
591: PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
592: mglevels[levels - 1]->b = *vec;
593: PetscCall(PetscFree(vec));
594: }
595: PetscCall(VecCopy(b, mglevels[levels - 1]->b));
596: }
597: }
598: if (matapp) {
599: mglevels[levels - 1]->X = X;
600: } else {
601: mglevels[levels - 1]->x = x;
602: }
604: /* If coarser Xs are present, it means we have already block-applied the PC at least once.
605: Reset the operators if their sizes/types do not match */
606: if (matapp && levels > 1 && mglevels[levels - 2]->X) {
607: PetscInt Xc, Bc;
608: PetscBool flg;
610: PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
611: PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
612: PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
613: if (Xc != Bc || !flg) {
614: PetscCall(MatDestroy(&mglevels[levels - 1]->R));
615: for (i = 0; i < levels - 1; i++) {
616: PetscCall(MatDestroy(&mglevels[i]->R));
617: PetscCall(MatDestroy(&mglevels[i]->B));
618: PetscCall(MatDestroy(&mglevels[i]->X));
619: }
620: }
621: }
623: if (mg->am == PC_MG_MULTIPLICATIVE) {
624: if (matapp) PetscCall(MatZeroEntries(X));
625: else PetscCall(VecZeroEntries(x));
626: for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
627: } else if (mg->am == PC_MG_ADDITIVE) {
628: PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
629: } else if (mg->am == PC_MG_KASKADE) {
630: PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
631: } else {
632: PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
633: }
634: if (mg->stageApply) PetscCall(PetscLogStagePop());
635: if (!changeu && !changed) {
636: if (matapp) {
637: mglevels[levels - 1]->B = NULL;
638: } else {
639: mglevels[levels - 1]->b = NULL;
640: }
641: }
642: PetscFunctionReturn(PETSC_SUCCESS);
643: }
645: static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
646: {
647: PetscFunctionBegin;
648: PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
649: PetscFunctionReturn(PETSC_SUCCESS);
650: }
652: static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
653: {
654: PetscFunctionBegin;
655: PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
656: PetscFunctionReturn(PETSC_SUCCESS);
657: }
659: static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
660: {
661: PetscFunctionBegin;
662: PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
663: PetscFunctionReturn(PETSC_SUCCESS);
664: }
666: PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems *PetscOptionsObject)
667: {
668: PetscInt levels, cycles;
669: PetscBool flg, flg2;
670: PC_MG *mg = (PC_MG *)pc->data;
671: PC_MG_Levels **mglevels;
672: PCMGType mgtype;
673: PCMGCycleType mgctype;
674: PCMGGalerkinType gtype;
675: PCMGCoarseSpaceType coarseSpaceType;
677: PetscFunctionBegin;
678: levels = PetscMax(mg->nlevels, 1);
679: PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
680: PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
681: if (!flg && !mg->levels && pc->dm) {
682: PetscCall(DMGetRefineLevel(pc->dm, &levels));
683: levels++;
684: mg->usedmfornumberoflevels = PETSC_TRUE;
685: }
686: PetscCall(PCMGSetLevels(pc, levels, NULL));
687: mglevels = mg->levels;
689: mgctype = (PCMGCycleType)mglevels[0]->cycles;
690: PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V-cycle or W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg));
691: if (flg) PetscCall(PCMGSetCycleType(pc, mgctype));
692: gtype = mg->galerkin;
693: PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)gtype, (PetscEnum *)&gtype, &flg));
694: if (flg) PetscCall(PCMGSetGalerkin(pc, gtype));
695: coarseSpaceType = mg->coarseSpaceType;
696: PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg));
697: if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType));
698: PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg));
699: PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg));
700: flg2 = PETSC_FALSE;
701: PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg));
702: if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2));
703: flg = PETSC_FALSE;
704: PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL));
705: if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc));
706: mgtype = mg->am;
707: PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg));
708: if (flg) PetscCall(PCMGSetType(pc, mgtype));
709: if (mg->am == PC_MG_MULTIPLICATIVE) {
710: PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg));
711: if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles));
712: }
713: flg = PETSC_FALSE;
714: PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL));
715: if (flg) {
716: PetscInt i;
717: char eventname[128];
719: levels = mglevels[0]->levels;
720: for (i = 0; i < levels; i++) {
721: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i));
722: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup));
723: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i));
724: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve));
725: if (i) {
726: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i));
727: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual));
728: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i));
729: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict));
730: }
731: }
733: if (PetscDefined(USE_LOG)) {
734: const char sname[] = "MG Apply";
736: PetscCall(PetscLogStageGetId(sname, &mg->stageApply));
737: if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply));
738: }
739: }
740: PetscOptionsHeadEnd();
741: /* Check option consistency */
742: PetscCall(PCMGGetGalerkin(pc, &gtype));
743: PetscCall(PCMGGetAdaptInterpolation(pc, &flg));
744: PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator");
745: PetscFunctionReturn(PETSC_SUCCESS);
746: }
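/* A typical command line exercising the options parsed above, together with the per-level
   smoother prefixes created in PCMGSetLevels_MG() (illustration only; any option may be omitted):

     -pc_type mg -pc_mg_levels 4 -pc_mg_type multiplicative -pc_mg_cycle_type w
     -pc_mg_galerkin pmat -pc_mg_log
     -mg_levels_ksp_type chebyshev -mg_levels_pc_type sor
     -mg_coarse_ksp_type preonly -mg_coarse_pc_type lu
*/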
748: const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
749: const char *const PCMGCycleTypes[] = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
750: const char *const PCMGGalerkinTypes[] = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
751: const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};
753: #include <petscdraw.h>
754: PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
755: {
756: PC_MG *mg = (PC_MG *)pc->data;
757: PC_MG_Levels **mglevels = mg->levels;
758: PetscInt levels = mglevels ? mglevels[0]->levels : 0, i;
759: PetscBool iascii, isbinary, isdraw;
761: PetscFunctionBegin;
762: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
763: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
764: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
765: if (iascii) {
766: const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
767: PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
768: if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
769: if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
770: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n"));
771: } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
772: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n"));
773: } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
774: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n"));
775: } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
776: PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally computed Galerkin coarse grid matrices\n"));
777: } else {
778: PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n"));
779: }
780: if (mg->view) PetscCall((*mg->view)(pc, viewer));
781: for (i = 0; i < levels; i++) {
782: if (i) {
783: PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
784: } else {
785: PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
786: }
787: PetscCall(PetscViewerASCIIPushTab(viewer));
788: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
789: PetscCall(PetscViewerASCIIPopTab(viewer));
790: if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
791: PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
792: } else if (i) {
793: PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
794: PetscCall(PetscViewerASCIIPushTab(viewer));
795: PetscCall(KSPView(mglevels[i]->smoothu, viewer));
796: PetscCall(PetscViewerASCIIPopTab(viewer));
797: }
798: if (i && mglevels[i]->cr) {
799: PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
800: PetscCall(PetscViewerASCIIPushTab(viewer));
801: PetscCall(KSPView(mglevels[i]->cr, viewer));
802: PetscCall(PetscViewerASCIIPopTab(viewer));
803: }
804: }
805: } else if (isbinary) {
806: for (i = levels - 1; i >= 0; i--) {
807: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
808: if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
809: }
810: } else if (isdraw) {
811: PetscDraw draw;
812: PetscReal x, w, y, bottom, th;
813: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
814: PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
815: PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
816: bottom = y - th;
817: for (i = levels - 1; i >= 0; i--) {
818: if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
819: PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
820: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
821: PetscCall(PetscDrawPopCurrentPoint(draw));
822: } else {
823: w = 0.5 * PetscMin(1.0 - x, x);
824: PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
825: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
826: PetscCall(PetscDrawPopCurrentPoint(draw));
827: PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
828: PetscCall(KSPView(mglevels[i]->smoothu, viewer));
829: PetscCall(PetscDrawPopCurrentPoint(draw));
830: }
831: PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
832: bottom -= th;
833: }
834: }
835: PetscFunctionReturn(PETSC_SUCCESS);
836: }
838: #include <petsc/private/kspimpl.h>
840: /*
841: Calls setup for the KSP on each level
842: */
843: PetscErrorCode PCSetUp_MG(PC pc)
844: {
845: PC_MG *mg = (PC_MG *)pc->data;
846: PC_MG_Levels **mglevels = mg->levels;
847: PetscInt i, n;
848: PC cpc;
849: PetscBool dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
850: Mat dA, dB;
851: Vec tvec;
852: DM *dms;
853: PetscViewer viewer = NULL;
854: PetscBool dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
855: PetscBool adaptInterpolation = mg->adaptInterpolation;
857: PetscFunctionBegin;
858: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
859: n = mglevels[0]->levels;
860: /* FIX: Move this to PCSetFromOptions_MG? */
861: if (mg->usedmfornumberoflevels) {
862: PetscInt levels;
863: PetscCall(DMGetRefineLevel(pc->dm, &levels));
864: levels++;
865: if (levels > n) { /* the problem is now being solved on a finer grid */
866: PetscCall(PCMGSetLevels(pc, levels, NULL));
867: n = levels;
868: PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
869: mglevels = mg->levels;
870: }
871: }
872: PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc));
874: /* If the user did not provide fine grid operators OR the operator was not updated since the last global KSPSetOperators(), */
875: /* then use those from the global PC */
876: /* Is this what we always want? What if the user wants to keep the old one? */
877: PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
878: if (opsset) {
879: Mat mmat;
880: PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
881: if (mmat == pc->pmat) opsset = PETSC_FALSE;
882: }
884: /* Create CR solvers */
885: PetscCall(PCMGGetAdaptCR(pc, &doCR));
886: if (doCR) {
887: const char *prefix;
889: PetscCall(PCGetOptionsPrefix(pc, &prefix));
890: for (i = 1; i < n; ++i) {
891: PC ipc, cr;
892: char crprefix[128];
894: PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
895: PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
896: PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
897: PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
898: PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
899: PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
900: PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
901: PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
902: PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
903: PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));
905: PetscCall(PCSetType(ipc, PCCOMPOSITE));
906: PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
907: PetscCall(PCCompositeAddPCType(ipc, PCSOR));
908: PetscCall(CreateCR_Private(pc, i, &cr));
909: PetscCall(PCCompositeAddPC(ipc, cr));
910: PetscCall(PCDestroy(&cr));
912: PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
913: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
914: PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
915: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
916: }
917: }
919: if (!opsset) {
920: PetscCall(PCGetUseAmat(pc, &use_amat));
921: if (use_amat) {
922: PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
923: PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
924: } else {
925: PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
926: PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
927: }
928: }
930: for (i = n - 1; i > 0; i--) {
931: if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
932: missinginterpolate = PETSC_TRUE;
933: break;
934: }
935: }
937: PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
938: if (dA == dB) dAeqdB = PETSC_TRUE;
939: if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) {
940: needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
941: }
943: if (pc->dm && !pc->setupcalled) {
944: /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
945: PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
946: PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
947: if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
948: PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
949: PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
950: }
951: if (mglevels[n - 1]->cr) {
952: PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
953: PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
954: }
955: }
957: /*
958: Skipping if the user has provided all the interpolation/restriction needed (since the DM might not be able to produce them when coming from SNES/TS).
959: Skipping for an externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
960: */
961: if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
962: /* first see if we can compute a coarse space */
963: if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
964: for (i = n - 2; i > -1; i--) {
965: if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
966: PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
967: PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
968: }
969: }
970: } else { /* construct the interpolation from the DMs */
971: Mat p;
972: Vec rscale;
973: PetscCall(PetscMalloc1(n, &dms));
974: dms[n - 1] = pc->dm;
975: /* Separately create them so we do not get DMKSP interference between levels */
976: for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
977: for (i = n - 2; i > -1; i--) {
978: DMKSP kdm;
979: PetscBool dmhasrestrict, dmhasinject;
980: PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
981: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
982: if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
983: PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
984: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
985: }
986: if (mglevels[i]->cr) {
987: PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
988: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
989: }
990: PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
991: /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
992: * a bitwise OR of computing the matrix, RHS, and initial iterate. */
993: kdm->ops->computerhs = NULL;
994: kdm->rhsctx = NULL;
995: if (!mglevels[i + 1]->interpolate) {
996: PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
997: PetscCall(PCMGSetInterpolation(pc, i + 1, p));
998: if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
999: PetscCall(VecDestroy(&rscale));
1000: PetscCall(MatDestroy(&p));
1001: }
1002: PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
1003: if (dmhasrestrict && !mglevels[i + 1]->restrct) {
1004: PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
1005: PetscCall(PCMGSetRestriction(pc, i + 1, p));
1006: PetscCall(MatDestroy(&p));
1007: }
1008: PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
1009: if (dmhasinject && !mglevels[i + 1]->inject) {
1010: PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
1011: PetscCall(PCMGSetInjection(pc, i + 1, p));
1012: PetscCall(MatDestroy(&p));
1013: }
1014: }
1016: for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
1017: PetscCall(PetscFree(dms));
1018: }
1019: }
1021: if (mg->galerkin < PC_MG_GALERKIN_NONE) {
1022: Mat A, B;
1023: PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
1024: MatReuse reuse = MAT_INITIAL_MATRIX;
1026: if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
1027: if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
1028: if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
1029: for (i = n - 2; i > -1; i--) {
1030: PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
1031: if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
1032: if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
1033: if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
1034: if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
1035: if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
1036: /* the management of the PetscObjectReference() and PetscObjectDereference() below is rather delicate */
1037: if (!doA && dAeqdB) {
1038: if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
1039: A = B;
1040: } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
1041: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
1042: PetscCall(PetscObjectReference((PetscObject)A));
1043: }
1044: if (!doB && dAeqdB) {
1045: if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
1046: B = A;
1047: } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
1048: PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
1049: PetscCall(PetscObjectReference((PetscObject)B));
1050: }
1051: if (reuse == MAT_INITIAL_MATRIX) {
1052: PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
1053: PetscCall(PetscObjectDereference((PetscObject)A));
1054: PetscCall(PetscObjectDereference((PetscObject)B));
1055: }
1056: dA = A;
1057: dB = B;
1058: }
1059: }
1061: /* Adapt interpolation matrices */
1062: if (adaptInterpolation) {
1063: for (i = 0; i < n; ++i) {
1064: if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
1065: if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
1066: }
1067: for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
1068: }
1070: if (needRestricts && pc->dm) {
1071: for (i = n - 2; i >= 0; i--) {
1072: DM dmfine, dmcoarse;
1073: Mat Restrict, Inject;
1074: Vec rscale;
1075: PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
1076: PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
1077: PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
1078: PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
1079: PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
1080: PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
1081: }
1082: }
1084: if (!pc->setupcalled) {
1085: for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
1086: for (i = 1; i < n; i++) {
1087: if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
1088: if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
1089: }
1090: /* ensure that if either interpolation or restriction is set, the other one is set as well */
1091: for (i = 1; i < n; i++) {
1092: PetscCall(PCMGGetInterpolation(pc, i, NULL));
1093: PetscCall(PCMGGetRestriction(pc, i, NULL));
1094: }
1095: for (i = 0; i < n - 1; i++) {
1096: if (!mglevels[i]->b) {
1097: Vec *vec;
1098: PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
1099: PetscCall(PCMGSetRhs(pc, i, *vec));
1100: PetscCall(VecDestroy(vec));
1101: PetscCall(PetscFree(vec));
1102: }
1103: if (!mglevels[i]->r && i) {
1104: PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1105: PetscCall(PCMGSetR(pc, i, tvec));
1106: PetscCall(VecDestroy(&tvec));
1107: }
1108: if (!mglevels[i]->x) {
1109: PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1110: PetscCall(PCMGSetX(pc, i, tvec));
1111: PetscCall(VecDestroy(&tvec));
1112: }
1113: if (doCR) {
1114: PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
1115: PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
1116: PetscCall(VecZeroEntries(mglevels[i]->crb));
1117: }
1118: }
1119: if (n != 1 && !mglevels[n - 1]->r) {
1120: /* PCMGSetR() on the finest level if user did not supply it */
1121: Vec *vec;
1122: PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
1123: PetscCall(PCMGSetR(pc, n - 1, *vec));
1124: PetscCall(VecDestroy(vec));
1125: PetscCall(PetscFree(vec));
1126: }
1127: if (doCR) {
1128: PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
1129: PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
1130: PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
1131: }
1132: }
1134: if (pc->dm) {
1135: /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
1136: for (i = 0; i < n - 1; i++) {
1137: if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1138: }
1139: }
1140: // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
1141: // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
1142: if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1144: for (i = 1; i < n; i++) {
1145: if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
1146: /* if doing only down then initial guess is zero */
1147: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
1148: }
1149: if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1150: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1151: PetscCall(KSPSetUp(mglevels[i]->smoothd));
1152: if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1153: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1154: if (!mglevels[i]->residual) {
1155: Mat mat;
1156: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1157: PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
1158: }
1159: if (!mglevels[i]->residualtranspose) {
1160: Mat mat;
1161: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1162: PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
1163: }
1164: }
1165: for (i = 1; i < n; i++) {
1166: if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
1167: Mat downmat, downpmat;
1169: /* check if operators have been set for up, if not use down operators to set them */
1170: PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
1171: if (!opsset) {
1172: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1173: PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
1174: }
1176: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
1177: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1178: PetscCall(KSPSetUp(mglevels[i]->smoothu));
1179: if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
1180: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1181: }
1182: if (mglevels[i]->cr) {
1183: Mat downmat, downpmat;
1185: /* check if operators have been set for cr, if not use down operators to set them */
1186: PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
1187: if (!opsset) {
1188: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1189: PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
1190: }
1192: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1193: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1194: PetscCall(KSPSetUp(mglevels[i]->cr));
1195: if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
1196: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1197: }
1198: }
1200: if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1201: PetscCall(KSPSetUp(mglevels[0]->smoothd));
1202: if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1203: if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1205: /*
1206: Dump the interpolation/restriction matrices plus the
1207: Jacobian/stiffness on each level. This allows MATLAB users to
1208: easily check if the Galerkin condition A_c = R A_f P is satisfied.
1210: Only one of the two viewers (socket or binary) is supported at a time.
1211: */
1212: #if defined(PETSC_USE_SOCKET_VIEWER)
1213: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
1214: if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
1215: dump = PETSC_FALSE;
1216: #endif
1217: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
1218: if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));
1220: if (viewer) {
1221: for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
1222: for (i = 0; i < n; i++) {
1223: PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
1224: PetscCall(MatView(pc->mat, viewer));
1225: }
1226: }
1227: PetscFunctionReturn(PETSC_SUCCESS);
1228: }
1230: PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
1231: {
1232: PC_MG *mg = (PC_MG *)pc->data;
1234: PetscFunctionBegin;
1235: *levels = mg->nlevels;
1236: PetscFunctionReturn(PETSC_SUCCESS);
1237: }
1239: /*@
1240: PCMGGetLevels - Gets the number of levels to use with `PCMG`.
1242: Not Collective
1244: Input Parameter:
1245: . pc - the preconditioner context
1247: Output Parameter:
1248: . levels - the number of levels
1250: Level: advanced
1252: .seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
1253: @*/
1254: PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
1255: {
1256: PetscFunctionBegin;
1258: PetscAssertPointer(levels, 2);
1259: *levels = 0;
1260: PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
1261: PetscFunctionReturn(PETSC_SUCCESS);
1262: }
1264: /*@
1265: PCMGGetGridComplexity - computes the grid and operator complexity of the `PCMG` hierarchy
1267: Input Parameter:
1268: . pc - the preconditioner context
1270: Output Parameters:
1271: + gc - grid complexity = sum_i(n_i) / n_0
1272: - oc - operator complexity = sum_i(nnz_i) / nnz_0
1274: Level: advanced
1276: Note:
1277: This is often called the operator complexity in the multigrid literature
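  Example Usage:
  A minimal sketch, assuming `pc` is a fully set up `PCMG` (for example after `KSPSetUp()` on the outer solver); before setup both values are returned as zero:
.vb
  PetscReal gc, oc;

  PetscCall(PCMGGetGridComplexity(pc, &gc, &oc));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)pc), "grid complexity %g, operator complexity %g\n", (double)gc, (double)oc));
.ve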
1279: .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`
1280: @*/
1281: PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc)
1282: {
1283: PC_MG *mg = (PC_MG *)pc->data;
1284: PC_MG_Levels **mglevels = mg->levels;
1285: PetscInt lev, N;
1286: PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0;
1287: MatInfo info;
1289: PetscFunctionBegin;
1291: if (gc) PetscAssertPointer(gc, 2);
1292: if (oc) PetscAssertPointer(oc, 3);
1293: if (!pc->setupcalled) {
1294: if (gc) *gc = 0;
1295: if (oc) *oc = 0;
1296: PetscFunctionReturn(PETSC_SUCCESS);
1297: }
1298: PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels");
1299: for (lev = 0; lev < mg->nlevels; lev++) {
1300: Mat dB;
1301: PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB));
1302: PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */
1303: PetscCall(MatGetSize(dB, &N, NULL));
1304: sgc += N;
1305: soc += info.nz_used;
1306: if (lev == mg->nlevels - 1) {
1307: nnz0 = info.nz_used;
1308: n0 = N;
1309: }
1310: }
1311: PetscCheck(n0 > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of grid points on the finest level is not available");
1312: if (gc) *gc = (PetscReal)(sgc / n0);
1313: if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0);
1314: PetscFunctionReturn(PETSC_SUCCESS);
1315: }
1317: /*@
1318: PCMGSetType - Determines the form of multigrid to use:
1319: multiplicative, additive, full, or the Kaskade algorithm.
1321: Logically Collective
1323: Input Parameters:
1324: + pc - the preconditioner context
1325: - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`
1327: Options Database Key:
1328: . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade
1330: Level: advanced
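  Example Usage:
  A minimal sketch selecting full multigrid instead of the default multiplicative form, assuming `pc` has already been set to `PCMG`; the level count of 3 is only illustrative:
.vb
  PetscCall(PCMGSetLevels(pc, 3, NULL));
  PetscCall(PCMGSetType(pc, PC_MG_FULL)); /* equivalent to -pc_mg_type full */
.ve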
1332: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType`
1333: @*/
1334: PetscErrorCode PCMGSetType(PC pc, PCMGType form)
1335: {
1336: PC_MG *mg = (PC_MG *)pc->data;
1338: PetscFunctionBegin;
1341: mg->am = form;
1342: if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG;
1343: else pc->ops->applyrichardson = NULL;
1344: PetscFunctionReturn(PETSC_SUCCESS);
1345: }
1347: /*@
1348: PCMGGetType - Finds the form of multigrid the `PCMG` is using: multiplicative, additive, full, or the Kaskade algorithm.
1350: Logically Collective
1352: Input Parameter:
1353: . pc - the preconditioner context
1355: Output Parameter:
1356: . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, or `PC_MG_KASKADE`
1358: Level: advanced
1360: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()`
1361: @*/
1362: PetscErrorCode PCMGGetType(PC pc, PCMGType *type)
1363: {
1364: PC_MG *mg = (PC_MG *)pc->data;
1366: PetscFunctionBegin;
1368: *type = mg->am;
1369: PetscFunctionReturn(PETSC_SUCCESS);
1370: }
1372: /*@
1373: PCMGSetCycleType - Sets the type of cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more
1374: complicated cycling.
1376: Logically Collective
1378: Input Parameters:
1379: + pc - the multigrid context
1380: - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W`
1382: Options Database Key:
1383: . -pc_mg_cycle_type <v,w> - provide the cycle desired
1385: Level: advanced
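  Example Usage:
  A minimal sketch switching every level to W-cycles, assuming the levels have already been created with `PCMGSetLevels()`:
.vb
  PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_W)); /* equivalent to -pc_mg_cycle_type w */
.ve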
1387: .seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
1388: @*/
1389: PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
1390: {
1391: PC_MG *mg = (PC_MG *)pc->data;
1392: PC_MG_Levels **mglevels = mg->levels;
1393: PetscInt i, levels;
1395: PetscFunctionBegin;
1398: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1399: levels = mglevels[0]->levels;
1400: for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
1401: PetscFunctionReturn(PETSC_SUCCESS);
1402: }
1404: /*@
1405: PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
1406: of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`
1408: Logically Collective
1410: Input Parameters:
1411: + pc - the multigrid context
1412: - n - number of cycles (default is 1)
1414: Options Database Key:
1415: . -pc_mg_multiplicative_cycles <n> - set the number of cycles
1417: Level: advanced
1419: Note:
1420: This is not associated with setting a V or W cycle; that is set with `PCMGSetCycleType()`
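  Example Usage:
  A minimal sketch contrasting the two settings, assuming `pc` is a `PCMG` with its levels already set; the cycle shape and the number of cycles per preconditioner application are chosen independently:
.vb
  PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_V));  /* each cycle is a V-cycle */
  PetscCall(PCMGMultiplicativeSetCycles(pc, 2));   /* two such cycles per preconditioner application */
.ve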
1422: .seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
1423: @*/
1424: PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
1425: {
1426: PC_MG *mg = (PC_MG *)pc->data;
1428: PetscFunctionBegin;
1431: mg->cyclesperpcapply = n;
1432: PetscFunctionReturn(PETSC_SUCCESS);
1433: }
1435: static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
1436: {
1437: PC_MG *mg = (PC_MG *)pc->data;
1439: PetscFunctionBegin;
1440: mg->galerkin = use;
1441: PetscFunctionReturn(PETSC_SUCCESS);
1442: }
1444: /*@
1445: PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
1446: finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i
1448: Logically Collective
1450: Input Parameters:
1451: + pc - the multigrid context
1452: - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`
1454: Options Database Key:
1455: . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process
1457: Level: intermediate
1459: Note:
1460: Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not
1461: use the `PCMG` construction of the coarser grids.
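  Example Usage:
  A minimal sketch in which the user supplies the interpolation on each level and asks `PCMG` to build the coarser matrices by the Galerkin product; `P1` stands for a user-assembled interpolation `Mat` and is only illustrative:
.vb
  PetscCall(PCMGSetLevels(pc, 2, NULL));
  PetscCall(PCMGSetInterpolation(pc, 1, P1));
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT)); /* coarse pmat computed as R * A * P */
.ve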
1463: .seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
1464: @*/
1465: PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
1466: {
1467: PetscFunctionBegin;
1469: PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
1470: PetscFunctionReturn(PETSC_SUCCESS);
1471: }
1473: /*@
1474: PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i
1476: Not Collective
1478: Input Parameter:
1479: . pc - the multigrid context
1481: Output Parameter:
1482: . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`
1484: Level: intermediate
1486: .seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
1487: @*/
1488: PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
1489: {
1490: PC_MG *mg = (PC_MG *)pc->data;
1492: PetscFunctionBegin;
1494: *galerkin = mg->galerkin;
1495: PetscFunctionReturn(PETSC_SUCCESS);
1496: }
1498: static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
1499: {
1500: PC_MG *mg = (PC_MG *)pc->data;
1502: PetscFunctionBegin;
1503: mg->adaptInterpolation = adapt;
1504: PetscFunctionReturn(PETSC_SUCCESS);
1505: }
1507: static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
1508: {
1509: PC_MG *mg = (PC_MG *)pc->data;
1511: PetscFunctionBegin;
1512: *adapt = mg->adaptInterpolation;
1513: PetscFunctionReturn(PETSC_SUCCESS);
1514: }
1516: static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
1517: {
1518: PC_MG *mg = (PC_MG *)pc->data;
1520: PetscFunctionBegin;
1521: mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
1522: mg->coarseSpaceType = ctype;
1523: PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
1524: PetscFunctionReturn(PETSC_SUCCESS);
1525: }
1527: static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
1528: {
1529: PC_MG *mg = (PC_MG *)pc->data;
1531: PetscFunctionBegin;
1532: *ctype = mg->coarseSpaceType;
1533: PetscFunctionReturn(PETSC_SUCCESS);
1534: }
1536: static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
1537: {
1538: PC_MG *mg = (PC_MG *)pc->data;
1540: PetscFunctionBegin;
1541: mg->compatibleRelaxation = cr;
1542: PetscFunctionReturn(PETSC_SUCCESS);
1543: }
1545: static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
1546: {
1547: PC_MG *mg = (PC_MG *)pc->data;
1549: PetscFunctionBegin;
1550: *cr = mg->compatibleRelaxation;
1551: PetscFunctionReturn(PETSC_SUCCESS);
1552: }
1554: /*@
1555: PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.
1557: Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1559: Logically Collective
1561: Input Parameters:
1562: + pc - the multigrid context
1563: - ctype - the type of coarse space
1565: Options Database Keys:
1566: + -pc_mg_adapt_interp_n <int> - The number of modes to use
1567: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw`
1569: Level: intermediate
1571: Note:
1572: Requires a `DM` with specific functionality be attached to the `PC`.
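  Example Usage:
  A minimal sketch enabling an adaptively constructed coarse space; the enum value `PCMG_ADAPT_HARMONIC` is assumed here to mirror the `harmonic` option string listed above, and `dm` is a `DM` with the required functionality:
.vb
  PetscCall(PCSetDM(pc, dm));
  PetscCall(PCMGSetAdaptCoarseSpaceType(pc, PCMG_ADAPT_HARMONIC)); /* equivalent to -pc_mg_adapt_interp_coarse_space harmonic */
.ve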
1574: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
1575: @*/
1576: PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
1577: {
1578: PetscFunctionBegin;
1581: PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
1582: PetscFunctionReturn(PETSC_SUCCESS);
1583: }
1585: /*@
1586: PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.
1588: Not Collective
1590: Input Parameter:
1591: . pc - the multigrid context
1593: Output Parameter:
1594: . ctype - the type of coarse space
1596: Level: intermediate
1598: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
1599: @*/
1600: PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
1601: {
1602: PetscFunctionBegin;
1604: PetscAssertPointer(ctype, 2);
1605: PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
1606: PetscFunctionReturn(PETSC_SUCCESS);
1607: }
1609: /* MATT: REMOVE? */
1610: /*@
1611: PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1613: Logically Collective
1615: Input Parameters:
1616: + pc - the multigrid context
1617: - adapt - flag for adaptation of the interpolator
1619: Options Database Keys:
1620: + -pc_mg_adapt_interp - Turn on adaptation
1621: . -pc_mg_adapt_interp_n <int> - The number of modes to use, which should be divisible by the dimension
1622: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector
1624: Level: intermediate
1626: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1627: @*/
1628: PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
1629: {
1630: PetscFunctionBegin;
1632: PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
1633: PetscFunctionReturn(PETSC_SUCCESS);
1634: }
1636: /*@
1637: PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
1638: and thus accurately interpolated.
1640: Not Collective
1642: Input Parameter:
1643: . pc - the multigrid context
1645: Output Parameter:
1646: . adapt - flag for adaptation of the interpolator
1648: Level: intermediate
1650: .seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1651: @*/
1652: PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
1653: {
1654: PetscFunctionBegin;
1656: PetscAssertPointer(adapt, 2);
1657: PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
1658: PetscFunctionReturn(PETSC_SUCCESS);
1659: }
1661: /*@
1662: PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.
1664: Logically Collective
1666: Input Parameters:
1667: + pc - the multigrid context
1668: - cr - flag for compatible relaxation
1670: Options Database Key:
1671: . -pc_mg_adapt_cr - Turn on compatible relaxation
1673: Level: intermediate
1675: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1676: @*/
1677: PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
1678: {
1679: PetscFunctionBegin;
1681: PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
1682: PetscFunctionReturn(PETSC_SUCCESS);
1683: }
1685: /*@
1686: PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.
1688: Not Collective
1690: Input Parameter:
1691: . pc - the multigrid context
1693: Output Parameter:
1694: . cr - flag for compatible relaxation
1696: Level: intermediate
1698: .seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1699: @*/
1700: PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
1701: {
1702: PetscFunctionBegin;
1704: PetscAssertPointer(cr, 2);
1705: PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
1706: PetscFunctionReturn(PETSC_SUCCESS);
1707: }
1709: /*@
1710: PCMGSetNumberSmooth - Sets the number of pre- and post-smoothing steps to use
1711: on all levels. Use `PCMGSetDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
1712: pre- and post-smoothing steps.
1714: Logically Collective
1716: Input Parameters:
1717: + pc - the multigrid context
1718: - n - the number of smoothing steps
1720: Options Database Key:
1721: . -mg_levels_ksp_max_it <n> - Sets the number of pre- and post-smoothing steps
1723: Level: advanced
1725: Note:
1726: This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
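  Example Usage:
  A minimal sketch, assuming the levels already exist; three pre- and three post-smoothing sweeps are applied on every level except the coarsest:
.vb
  PetscCall(PCMGSetNumberSmooth(pc, 3)); /* equivalent to -mg_levels_ksp_max_it 3 */
.ve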
1728: .seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()`
1729: @*/
1730: PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
1731: {
1732: PC_MG *mg = (PC_MG *)pc->data;
1733: PC_MG_Levels **mglevels = mg->levels;
1734: PetscInt i, levels;
1736: PetscFunctionBegin;
1739: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1740: levels = mglevels[0]->levels;
1742: for (i = 1; i < levels; i++) {
1743: PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1744: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1745: mg->default_smoothu = n;
1746: mg->default_smoothd = n;
1747: }
1748: PetscFunctionReturn(PETSC_SUCCESS);
1749: }
1751: /*@
1752: PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
1753: and appends `up_` to its options prefix
1755: Logically Collective
1757: Input Parameter:
1758: . pc - the preconditioner context
1760: Options Database Key:
1761: . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects
1763: Level: advanced
1765: Note:
1766: This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
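  Example Usage:
  A minimal sketch, assuming the levels already exist; after this call the up smoothers can be configured separately through options whose prefix ends in `up_`:
.vb
  PetscCall(PCMGSetDistinctSmoothUp(pc)); /* equivalent to -pc_mg_distinct_smoothup */
.ve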
1768: .seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
1769: @*/
1770: PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
1771: {
1772: PC_MG *mg = (PC_MG *)pc->data;
1773: PC_MG_Levels **mglevels = mg->levels;
1774: PetscInt i, levels;
1775: KSP subksp;
1777: PetscFunctionBegin;
1779: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1780: levels = mglevels[0]->levels;
1782: for (i = 1; i < levels; i++) {
1783: const char *prefix = NULL;
1784: /* make sure smoother up and down are different */
1785: PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
1786: PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
1787: PetscCall(KSPSetOptionsPrefix(subksp, prefix));
1788: PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
1789: }
1790: PetscFunctionReturn(PETSC_SUCCESS);
1791: }
1793: /* No new matrices are created; the returned interpolation matrices are references to the original ones */
1794: static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
1795: {
1796: PC_MG *mg = (PC_MG *)pc->data;
1797: PC_MG_Levels **mglevels = mg->levels;
1798: Mat *mat;
1799: PetscInt l;
1801: PetscFunctionBegin;
1802: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1803: PetscCall(PetscMalloc1(mg->nlevels, &mat));
1804: for (l = 1; l < mg->nlevels; l++) {
1805: mat[l - 1] = mglevels[l]->interpolate;
1806: PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
1807: }
1808: *num_levels = mg->nlevels;
1809: *interpolations = mat;
1810: PetscFunctionReturn(PETSC_SUCCESS);
1811: }
1813: /* No new matrices are created; the returned coarse operator matrices are references to the original ones */
1814: static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1815: {
1816: PC_MG *mg = (PC_MG *)pc->data;
1817: PC_MG_Levels **mglevels = mg->levels;
1818: PetscInt l;
1819: Mat *mat;
1821: PetscFunctionBegin;
1822: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1823: PetscCall(PetscMalloc1(mg->nlevels, &mat));
1824: for (l = 0; l < mg->nlevels - 1; l++) {
1825: PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
1826: PetscCall(PetscObjectReference((PetscObject)mat[l]));
1827: }
1828: *num_levels = mg->nlevels;
1829: *coarseOperators = mat;
1830: PetscFunctionReturn(PETSC_SUCCESS);
1831: }
1833: /*@C
1834: PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction.
1836: Not Collective, No Fortran Support
1838: Input Parameters:
1839: + name - name of the constructor
1840: - function - constructor routine
1842: Calling sequence of `function`:
1843: + pc - The `PC` object
1844: . l - The multigrid level, 0 is the coarse level
1845: . dm - The `DM` for this level
1846: . smooth - The level smoother
1847: . Nc - The size of the coarse space
1848: . initGuess - Basis for an initial guess for the space
1849: - coarseSp - A basis for the computed coarse space
1851: Level: advanced
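  Example Usage:
  A minimal sketch registering a user-supplied constructor; `MyCoarseSpace` and the name "mycoarse" are hypothetical placeholders:
.vb
  static PetscErrorCode MyCoarseSpace(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp)
  {
    PetscFunctionBegin;
    /* fill *coarseSp with Nc basis vectors describing the desired coarse space on level l */
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PCMGRegisterCoarseSpaceConstructor("mycoarse", MyCoarseSpace));
.ve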
1853: Developer Notes:
1854: How come this is not used by `PCGAMG`?
1856: .seealso: [](ch_ksp), `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
1857: @*/
1858: PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorCode (*function)(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp))
1859: {
1860: PetscFunctionBegin;
1861: PetscCall(PCInitializePackage());
1862: PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
1863: PetscFunctionReturn(PETSC_SUCCESS);
1864: }
1866: /*@C
1867: PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.
1869: Not Collective, No Fortran Support
1871: Input Parameter:
1872: . name - name of the constructor
1874: Output Parameter:
1875: . function - constructor routine
1877: Level: advanced
1879: .seealso: [](ch_ksp), `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
1880: @*/
1881: PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode (**function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *))
1882: {
1883: PetscFunctionBegin;
1884: PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
1885: PetscFunctionReturn(PETSC_SUCCESS);
1886: }
1888: /*MC
1889: PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional
1890: information about the coarser grid matrices and restriction/interpolation operators.
1892: Options Database Keys:
1893: + -pc_mg_levels <nlevels> - number of levels including finest
1894: . -pc_mg_cycle_type <v,w> - provide the cycle desired
1895: . -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
1896: . -pc_mg_log - log information about time spent on each level of the solver
1897: . -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
1898: . -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
1899: . -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1)
1900: . -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices
1901: to a `PETSCVIEWERSOCKET` for reading from MATLAB.
1902: - -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices
1903: to the binary output file called binaryoutput
1905: Level: intermediate
1907: Notes:
1908: The Krylov solver (if any) and preconditioner (smoother) and their parameters are controlled from the options database with the standard
1909: options database keywords prefixed with `-mg_levels_` to affect all the levels but the coarsest, which is controlled with `-mg_coarse_`,
1910: and the finest where `-mg_fine_` can override `-mg_levels_`. One can set different preconditioners etc. on specific levels with the prefix
1911: `-mg_levels_n_`, where `n` is the level number (zero being the coarsest level). For example
1912: .vb
1913: -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
1914: .ve
1915: These options also work for controlling the smoothers etc. inside `PCGAMG`
1917: If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method
1919: When run with a single level, the smoother options are used on that level, NOT the coarse grid solver options
1921: When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
1922: is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
1923: (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
1924: residual is computed at the end of each cycle.
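   Example Usage:
   A minimal sketch of configuring a two-level hierarchy from the API rather than the options database, assuming `pc` was obtained with `KSPGetPC()`; `P1` stands for a user-assembled interpolation matrix and is only illustrative:
.vb
   PetscCall(PCSetType(pc, PCMG));
   PetscCall(PCMGSetLevels(pc, 2, NULL));
   PetscCall(PCMGSetInterpolation(pc, 1, P1));
   PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
   PetscCall(PCMGSetNumberSmooth(pc, 2));
.ve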
1926: .seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
1927: `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
1928: `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
1929: `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
1930: `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
1931: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1932: M*/
1934: PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
1935: {
1936: PC_MG *mg;
1938: PetscFunctionBegin;
1939: PetscCall(PetscNew(&mg));
1940: pc->data = mg;
1941: mg->nlevels = -1;
1942: mg->am = PC_MG_MULTIPLICATIVE;
1943: mg->galerkin = PC_MG_GALERKIN_NONE;
1944: mg->adaptInterpolation = PETSC_FALSE;
1945: mg->Nc = -1;
1946: mg->eigenvalue = -1;
1948: pc->useAmat = PETSC_TRUE;
1950: pc->ops->apply = PCApply_MG;
1951: pc->ops->applytranspose = PCApplyTranspose_MG;
1952: pc->ops->matapply = PCMatApply_MG;
1953: pc->ops->setup = PCSetUp_MG;
1954: pc->ops->reset = PCReset_MG;
1955: pc->ops->destroy = PCDestroy_MG;
1956: pc->ops->setfromoptions = PCSetFromOptions_MG;
1957: pc->ops->view = PCView_MG;
1959: PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
1960: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
1961: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
1962: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
1963: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
1964: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
1965: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
1966: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
1967: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
1968: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
1969: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
1970: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
1971: PetscFunctionReturn(PETSC_SUCCESS);
1972: }