Actual source code: mg.c
1: /*
2: Defines the multigrid preconditioner interface.
3: */
4: #include <petsc/private/pcmgimpl.h>
5: #include <petsc/private/kspimpl.h>
6: #include <petscdm.h>
7: PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);
9: /*
10: Contains the list of registered coarse space construction routines
11: */
12: PetscFunctionList PCMGCoarseList = NULL;
14: PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
15: {
16: PC_MG *mg = (PC_MG *)pc->data;
17: PC_MG_Levels *mgc, *mglevels = *mglevelsin;
18: PetscInt cycles = (mglevels->level == 1) ? 1 : mglevels->cycles;
20: PetscFunctionBegin;
21: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
22: if (!transpose) {
23: if (matapp) {
24: PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
25: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
26: } else {
27: PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
28: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
29: }
30: } else {
31: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
32: PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
33: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
34: }
35: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
36: if (mglevels->level) { /* not the coarsest grid */
37: if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
38: if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
39: if (!transpose) {
40: if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
41: else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
42: } else {
43: if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
44: else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
45: }
46: if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));
48: /* if on finest level and have convergence criteria set */
49: if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
50: PetscReal rnorm;
51: PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
52: if (rnorm <= mg->ttol) {
53: if (rnorm < mg->abstol) {
54: *reason = PCRICHARDSON_CONVERGED_ATOL;
55: PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
56: } else {
57: *reason = PCRICHARDSON_CONVERGED_RTOL;
58: PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
59: }
60: PetscFunctionReturn(PETSC_SUCCESS);
61: }
62: }
64: mgc = *(mglevelsin - 1);
65: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
66: if (!transpose) {
67: if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
68: else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
69: } else {
70: if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
71: else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
72: }
73: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
74: if (matapp) {
75: if (!mgc->X) {
76: PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
77: } else {
78: PetscCall(MatZeroEntries(mgc->X));
79: }
80: } else {
81: PetscCall(VecZeroEntries(mgc->x));
82: }
83: while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
84: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
85: if (!transpose) {
86: if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
87: else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
88: } else {
89: PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
90: }
91: if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
92: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
93: if (!transpose) {
94: if (matapp) {
95: PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
96: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
97: } else {
98: PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
99: PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
100: }
101: } else {
102: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
103: PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
104: PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
105: }
106: if (mglevels->cr) {
107: Mat crA;
109: PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
110: /* TODO Turn on copy and turn off noisy if we have an exact solution
111: PetscCall(VecCopy(mglevels->x, mglevels->crx));
112: PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
113: PetscCall(KSPGetOperators(mglevels->cr, &crA, NULL));
114: PetscCall(KSPSetNoisy_Private(crA, mglevels->crx));
115: PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
116: PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
117: }
118: if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
119: }
120: PetscFunctionReturn(PETSC_SUCCESS);
121: }
123: static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
124: {
125: PC_MG *mg = (PC_MG *)pc->data;
126: PC_MG_Levels **mglevels = mg->levels;
127: PC tpc;
128: PetscBool changeu, changed;
129: PetscInt levels = mglevels[0]->levels, i;
131: PetscFunctionBegin;
132: /* When the DM is supplying the matrix it will not exist until this point */
133: for (i = 0; i < levels; i++) {
134: if (!mglevels[i]->A) {
135: PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
136: PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
137: }
138: }
140: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
141: PetscCall(PCPreSolveChangeRHS(tpc, &changed));
142: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
143: PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
144: if (!changed && !changeu) {
145: PetscCall(VecDestroy(&mglevels[levels - 1]->b));
146: mglevels[levels - 1]->b = b;
147: } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
148: if (!mglevels[levels - 1]->b) {
149: Vec *vec;
151: PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
152: mglevels[levels - 1]->b = *vec;
153: PetscCall(PetscFree(vec));
154: }
155: PetscCall(VecCopy(b, mglevels[levels - 1]->b));
156: }
157: mglevels[levels - 1]->x = x;
159: mg->rtol = rtol;
160: mg->abstol = abstol;
161: mg->dtol = dtol;
162: if (rtol) {
163: /* compute initial residual norm for relative convergence test */
164: PetscReal rnorm;
165: if (zeroguess) {
166: PetscCall(VecNorm(b, NORM_2, &rnorm));
167: } else {
168: PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
169: PetscCall(VecNorm(w, NORM_2, &rnorm));
170: }
171: mg->ttol = PetscMax(rtol * rnorm, abstol);
172: } else if (abstol) mg->ttol = abstol;
173: else mg->ttol = 0.0;
175: /* since the smoother is applied to the full system, not just the residual, we need to make sure that the smoothers don't
176: stop prematurely due to a small residual */
177: for (i = 1; i < levels; i++) {
178: PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
179: if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
180: /* For Richardson the initial guess is nonzero since each cycle solves the original system rather than just applying the preconditioner */
181: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
182: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
183: }
184: }
186: *reason = PCRICHARDSON_NOT_SET;
187: for (i = 0; i < its; i++) {
188: PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
189: if (*reason) break;
190: }
191: if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS;
192: *outits = i;
193: if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
194: PetscFunctionReturn(PETSC_SUCCESS);
195: }
197: PetscErrorCode PCReset_MG(PC pc)
198: {
199: PC_MG *mg = (PC_MG *)pc->data;
200: PC_MG_Levels **mglevels = mg->levels;
201: PetscInt i, n;
203: PetscFunctionBegin;
204: if (mglevels) {
205: n = mglevels[0]->levels;
206: for (i = 0; i < n - 1; i++) {
207: PetscCall(VecDestroy(&mglevels[i + 1]->r));
208: PetscCall(VecDestroy(&mglevels[i]->b));
209: PetscCall(VecDestroy(&mglevels[i]->x));
210: PetscCall(MatDestroy(&mglevels[i + 1]->R));
211: PetscCall(MatDestroy(&mglevels[i]->B));
212: PetscCall(MatDestroy(&mglevels[i]->X));
213: PetscCall(VecDestroy(&mglevels[i]->crx));
214: PetscCall(VecDestroy(&mglevels[i]->crb));
215: PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
216: PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
217: PetscCall(MatDestroy(&mglevels[i + 1]->inject));
218: PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
219: }
220: PetscCall(VecDestroy(&mglevels[n - 1]->crx));
221: PetscCall(VecDestroy(&mglevels[n - 1]->crb));
222: /* this is not null only if the smoother on the finest level
223: changes the rhs during PreSolve */
224: PetscCall(VecDestroy(&mglevels[n - 1]->b));
225: PetscCall(MatDestroy(&mglevels[n - 1]->B));
227: for (i = 0; i < n; i++) {
228: PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
229: PetscCall(MatDestroy(&mglevels[i]->A));
230: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
231: PetscCall(KSPReset(mglevels[i]->smoothu));
232: if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
233: }
234: mg->Nc = 0;
235: }
236: PetscFunctionReturn(PETSC_SUCCESS);
237: }
239: /* Implementing CR
241: We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is
243: Inj^T (Inj Inj^T)^{-1} Inj
245: and if Inj is a VecScatter, as it is now in PETSc, then Inj Inj^T = I and the projector reduces to
247: Inj^T Inj
249: and
251: S = I - Inj^T Inj
253: since
255: Inj S = Inj - (Inj Inj^T) Inj = 0.
257: Brannick suggests
259: A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S
261: but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use
263: M^{-1} A \to S M^{-1} A S
265: In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.
267: Check: || Inj P - I ||_F < tol
268: Check: In general, Inj Inj^T = I
269: */
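/*
   Editor's note (illustrative sketch, not part of PETSc's mg.c): the first check suggested above
   can be coded directly with existing PETSc calls. The helper below, with the hypothetical name
   CRCheckInjection, assumes Inj maps fine to coarse and P (the interpolation) maps coarse to fine,
   so Inj*P is a square coarse-level matrix and || Inj P - I ||_F measures how far injection is
   from being a left inverse of interpolation.
*/
#if 0 /* sketch only, not compiled */
static PetscErrorCode CRCheckInjection(Mat Inj, Mat P, PetscReal tol)
{
  Mat       IP;
  PetscReal nrm;

  PetscFunctionBegin;
  PetscCall(MatMatMult(Inj, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &IP)); /* coarse x coarse product Inj P */
  PetscCall(MatShift(IP, -1.0));                                         /* Inj P - I */
  PetscCall(MatNorm(IP, NORM_FROBENIUS, &nrm));
  PetscCall(MatDestroy(&IP));
  PetscCheck(nrm < tol, PetscObjectComm((PetscObject)Inj), PETSC_ERR_PLIB, "|| Inj P - I ||_F = %g exceeds tolerance %g", (double)nrm, (double)tol);
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif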
271: typedef struct {
272: PC mg; /* The PCMG object */
273: PetscInt l; /* The multigrid level for this solver */
274: Mat Inj; /* The injection matrix */
275: Mat S; /* I - Inj^T Inj */
276: } CRContext;
278: static PetscErrorCode CRSetup_Private(PC pc)
279: {
280: CRContext *ctx;
281: Mat It;
283: PetscFunctionBeginUser;
284: PetscCall(PCShellGetContext(pc, &ctx));
285: PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
286: PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
287: PetscCall(MatCreateTranspose(It, &ctx->Inj));
288: PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
289: PetscCall(MatScale(ctx->S, -1.0));
290: PetscCall(MatShift(ctx->S, 1.0));
291: PetscFunctionReturn(PETSC_SUCCESS);
292: }
294: static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
295: {
296: CRContext *ctx;
298: PetscFunctionBeginUser;
299: PetscCall(PCShellGetContext(pc, &ctx));
300: PetscCall(MatMult(ctx->S, x, y));
301: PetscFunctionReturn(PETSC_SUCCESS);
302: }
304: static PetscErrorCode CRDestroy_Private(PC pc)
305: {
306: CRContext *ctx;
308: PetscFunctionBeginUser;
309: PetscCall(PCShellGetContext(pc, &ctx));
310: PetscCall(MatDestroy(&ctx->Inj));
311: PetscCall(MatDestroy(&ctx->S));
312: PetscCall(PetscFree(ctx));
313: PetscCall(PCShellSetContext(pc, NULL));
314: PetscFunctionReturn(PETSC_SUCCESS);
315: }
317: static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
318: {
319: CRContext *ctx;
321: PetscFunctionBeginUser;
322: PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
323: PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
324: PetscCall(PetscCalloc1(1, &ctx));
325: ctx->mg = pc;
326: ctx->l = l;
327: PetscCall(PCSetType(*cr, PCSHELL));
328: PetscCall(PCShellSetContext(*cr, ctx));
329: PetscCall(PCShellSetApply(*cr, CRApply_Private));
330: PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
331: PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
332: PetscFunctionReturn(PETSC_SUCCESS);
333: }
335: PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *);
337: PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
338: {
339: PC_MG *mg = (PC_MG *)pc->data;
340: MPI_Comm comm;
341: PC_MG_Levels **mglevels = mg->levels;
342: PCMGType mgtype = mg->am;
343: PetscInt mgctype = (PetscInt)PC_MG_CYCLE_V;
344: PetscInt i;
345: PetscMPIInt size;
346: const char *prefix;
347: PC ipc;
348: PetscInt n;
350: PetscFunctionBegin;
353: if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
354: PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
355: if (mglevels) {
356: mgctype = mglevels[0]->cycles;
357: /* changing the number of levels so free up the previous stuff */
358: PetscCall(PCReset_MG(pc));
359: n = mglevels[0]->levels;
360: for (i = 0; i < n; i++) {
361: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
362: PetscCall(KSPDestroy(&mglevels[i]->smoothu));
363: PetscCall(KSPDestroy(&mglevels[i]->cr));
364: PetscCall(PetscFree(mglevels[i]));
365: }
366: PetscCall(PetscFree(mg->levels));
367: }
369: mg->nlevels = levels;
371: PetscCall(PetscMalloc1(levels, &mglevels));
373: PetscCall(PCGetOptionsPrefix(pc, &prefix));
375: mg->stageApply = 0;
376: for (i = 0; i < levels; i++) {
377: PetscCall(PetscNew(&mglevels[i]));
379: mglevels[i]->level = i;
380: mglevels[i]->levels = levels;
381: mglevels[i]->cycles = mgctype;
382: mg->default_smoothu = 2;
383: mg->default_smoothd = 2;
384: mglevels[i]->eventsmoothsetup = 0;
385: mglevels[i]->eventsmoothsolve = 0;
386: mglevels[i]->eventresidual = 0;
387: mglevels[i]->eventinterprestrict = 0;
389: if (comms) comm = comms[i];
390: if (comm != MPI_COMM_NULL) {
391: PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
392: PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel));
393: PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
394: PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
395: PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
396: PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
397: if (i == 0 && levels > 1) { // coarse grid
398: PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));
400: /* coarse solve is (redundant) LU by default; set a factorization shift (MAT_SHIFT_INBLOCKS) to avoid annoying zero pivots in the LU preconditioner */
401: PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
402: PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
403: PetscCallMPI(MPI_Comm_size(comm, &size));
404: if (size > 1) {
405: PetscCall(PCSetType(ipc, PCREDUNDANT));
406: } else {
407: PetscCall(PCSetType(ipc, PCLU));
408: }
409: PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
410: } else {
411: char tprefix[128];
413: PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
414: PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
415: PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
416: PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
417: PetscCall(PCSetType(ipc, PCSOR));
418: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
420: if (i == levels - 1 && levels > 1) { // on the finest level use the 'mg_fine_' prefix if such options were given, otherwise fall back to 'mg_levels_<i>_'
421: PetscBool set;
422: PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set));
423: if (set) {
424: if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix));
425: else PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_"));
426: PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix));
427: } else {
428: PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
429: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
430: }
431: } else {
432: PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
433: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
434: }
435: }
436: }
437: mglevels[i]->smoothu = mglevels[i]->smoothd;
438: mg->rtol = 0.0;
439: mg->abstol = 0.0;
440: mg->dtol = 0.0;
441: mg->ttol = 0.0;
442: mg->cyclesperpcapply = 1;
443: }
444: mg->levels = mglevels;
445: PetscCall(PCMGSetType(pc, mgtype));
446: PetscFunctionReturn(PETSC_SUCCESS);
447: }
449: /*@C
450: PCMGSetLevels - Sets the number of levels to use with `PCMG`.
451: Must be called before any other `PCMG` routine.
453: Logically Collective
455: Input Parameters:
456: + pc - the preconditioner context
457: . levels - the number of levels
458: - comms - optional communicators for each level; this is to allow solving the coarser problems
459: on smaller sets of processes. For processes that are not included in the computation
460: you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes
461: should participate in each level of the problem.
463: Level: intermediate
465: Notes:
466: If the number of levels is one then the multigrid uses the `-mg_levels` prefix
467: for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix.
469: You can free the information in comms after this routine is called.
471: The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level
472: are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on
473: the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
474: of size 2 and 1, while rank 1 in the original communicator will pass in an array of 2 communicators,
475: the first of size 2 and the second of value `MPI_COMM_NULL` since rank 1 does not participate
476: in the coarse grid solve.
478: Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
479: must take special care in providing the restriction and interpolation operations. We recommend
480: providing these as two-step operations: first perform a standard restriction or interpolation on
481: the full number of ranks for that level and then use an MPI call to copy the resulting vector
482: array entries (after calls to VecGetArray()) to the smaller or larger number of ranks; note that in both
483: cases the MPI calls must be made on the larger of the two communicators. Traditional MPI sends and
484: receives or `MPI_AlltoAllv()` could be used to do the reshuffling of the vector entries.
486: Fortran Notes:
487: Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM`
488: is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc.
490: .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()`
491: @*/
492: PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms)
493: {
494: PetscFunctionBegin;
496: if (comms) PetscAssertPointer(comms, 3);
497: PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms));
498: PetscFunctionReturn(PETSC_SUCCESS);
499: }
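/*
   Editor's note (illustrative sketch, not part of PETSc's mg.c): a minimal calling sequence for
   PCMGSetLevels() when all ranks participate on every level (comms == NULL). The helper name
   ExampleSetUpMG and the choice of three levels are assumptions for illustration only.
*/
#if 0 /* sketch only, not compiled */
static PetscErrorCode ExampleSetUpMG(Mat A, KSP *ksp)
{
  PC pc;

  PetscFunctionBegin;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)A), ksp));
  PetscCall(KSPSetOperators(*ksp, A, A));
  PetscCall(KSPGetPC(*ksp, &pc));
  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 3, NULL)); /* must precede other PCMG calls; NULL -> every rank participates on every level */
  PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
  PetscCall(KSPSetFromOptions(*ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif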
501: PetscErrorCode PCDestroy_MG(PC pc)
502: {
503: PC_MG *mg = (PC_MG *)pc->data;
504: PC_MG_Levels **mglevels = mg->levels;
505: PetscInt i, n;
507: PetscFunctionBegin;
508: PetscCall(PCReset_MG(pc));
509: if (mglevels) {
510: n = mglevels[0]->levels;
511: for (i = 0; i < n; i++) {
512: if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
513: PetscCall(KSPDestroy(&mglevels[i]->smoothu));
514: PetscCall(KSPDestroy(&mglevels[i]->cr));
515: PetscCall(PetscFree(mglevels[i]));
516: }
517: PetscCall(PetscFree(mg->levels));
518: }
519: PetscCall(PetscFree(pc->data));
520: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
521: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
522: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL));
523: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL));
524: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL));
525: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
526: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
527: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL));
528: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
529: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
530: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
531: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
532: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
533: PetscFunctionReturn(PETSC_SUCCESS);
534: }
536: /*
537: PCApply_MG - Runs either an additive, multiplicative, Kaskadic
538: or full cycle of multigrid.
540: Note:
541: A simple wrapper which calls PCMGMCycle(), PCMGACycle(), or PCMGFCycle().
542: */
543: static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
544: {
545: PC_MG *mg = (PC_MG *)pc->data;
546: PC_MG_Levels **mglevels = mg->levels;
547: PC tpc;
548: PetscInt levels = mglevels[0]->levels, i;
549: PetscBool changeu, changed, matapp;
551: PetscFunctionBegin;
552: matapp = (PetscBool)(B && X);
553: if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
554: /* When the DM is supplying the matrix it will not exist until this point */
555: for (i = 0; i < levels; i++) {
556: if (!mglevels[i]->A) {
557: PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
558: PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
559: }
560: }
562: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
563: PetscCall(PCPreSolveChangeRHS(tpc, &changed));
564: PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
565: PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
566: if (!changeu && !changed) {
567: if (matapp) {
568: PetscCall(MatDestroy(&mglevels[levels - 1]->B));
569: mglevels[levels - 1]->B = B;
570: } else {
571: PetscCall(VecDestroy(&mglevels[levels - 1]->b));
572: mglevels[levels - 1]->b = b;
573: }
574: } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
575: if (matapp) {
576: if (mglevels[levels - 1]->B) {
577: PetscInt N1, N2;
578: PetscBool flg;
580: PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
581: PetscCall(MatGetSize(B, NULL, &N2));
582: PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
583: if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
584: }
585: if (!mglevels[levels - 1]->B) {
586: PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
587: } else {
588: PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
589: }
590: } else {
591: if (!mglevels[levels - 1]->b) {
592: Vec *vec;
594: PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
595: mglevels[levels - 1]->b = *vec;
596: PetscCall(PetscFree(vec));
597: }
598: PetscCall(VecCopy(b, mglevels[levels - 1]->b));
599: }
600: }
601: if (matapp) {
602: mglevels[levels - 1]->X = X;
603: } else {
604: mglevels[levels - 1]->x = x;
605: }
607: /* If coarser Xs are present, it means we have already block-applied the PC at least once;
608: reset the operators if their sizes/types do not match */
609: if (matapp && levels > 1 && mglevels[levels - 2]->X) {
610: PetscInt Xc, Bc;
611: PetscBool flg;
613: PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
614: PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
615: PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
616: if (Xc != Bc || !flg) {
617: PetscCall(MatDestroy(&mglevels[levels - 1]->R));
618: for (i = 0; i < levels - 1; i++) {
619: PetscCall(MatDestroy(&mglevels[i]->R));
620: PetscCall(MatDestroy(&mglevels[i]->B));
621: PetscCall(MatDestroy(&mglevels[i]->X));
622: }
623: }
624: }
626: if (mg->am == PC_MG_MULTIPLICATIVE) {
627: if (matapp) PetscCall(MatZeroEntries(X));
628: else PetscCall(VecZeroEntries(x));
629: for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
630: } else if (mg->am == PC_MG_ADDITIVE) {
631: PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
632: } else if (mg->am == PC_MG_KASKADE) {
633: PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
634: } else {
635: PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
636: }
637: if (mg->stageApply) PetscCall(PetscLogStagePop());
638: if (!changeu && !changed) {
639: if (matapp) {
640: mglevels[levels - 1]->B = NULL;
641: } else {
642: mglevels[levels - 1]->b = NULL;
643: }
644: }
645: PetscFunctionReturn(PETSC_SUCCESS);
646: }
648: static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
649: {
650: PetscFunctionBegin;
651: PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
652: PetscFunctionReturn(PETSC_SUCCESS);
653: }
655: static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
656: {
657: PetscFunctionBegin;
658: PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
659: PetscFunctionReturn(PETSC_SUCCESS);
660: }
662: static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
663: {
664: PetscFunctionBegin;
665: PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
666: PetscFunctionReturn(PETSC_SUCCESS);
667: }
669: static PetscErrorCode PCMatApplyTranspose_MG(PC pc, Mat b, Mat x)
670: {
671: PetscFunctionBegin;
672: PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_TRUE));
673: PetscFunctionReturn(PETSC_SUCCESS);
674: }
676: PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems PetscOptionsObject)
677: {
678: PetscInt levels, cycles;
679: PetscBool flg, flg2;
680: PC_MG *mg = (PC_MG *)pc->data;
681: PC_MG_Levels **mglevels;
682: PCMGType mgtype;
683: PCMGCycleType mgctype;
684: PCMGGalerkinType gtype;
685: PCMGCoarseSpaceType coarseSpaceType;
687: PetscFunctionBegin;
688: levels = PetscMax(mg->nlevels, 1);
689: PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
690: PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
691: if (!flg && !mg->levels && pc->dm) {
692: PetscCall(DMGetRefineLevel(pc->dm, &levels));
693: levels++;
694: mg->usedmfornumberoflevels = PETSC_TRUE;
695: }
696: PetscCall(PCMGSetLevels(pc, levels, NULL));
697: mglevels = mg->levels;
699: mgctype = (PCMGCycleType)mglevels[0]->cycles;
700: PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg));
701: if (flg) PetscCall(PCMGSetCycleType(pc, mgctype));
702: coarseSpaceType = mg->coarseSpaceType;
703: PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg));
704: if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType));
705: PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg));
706: PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg));
707: flg2 = PETSC_FALSE;
708: PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg));
709: if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2));
710: flg = PETSC_FALSE;
711: PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL));
712: if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc));
713: PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)mg->galerkin, (PetscEnum *)>ype, &flg));
714: if (flg) PetscCall(PCMGSetGalerkin(pc, gtype));
715: mgtype = mg->am;
716: PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg));
717: if (flg) PetscCall(PCMGSetType(pc, mgtype));
718: if (mg->am == PC_MG_MULTIPLICATIVE) {
719: PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg));
720: if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles));
721: }
722: flg = PETSC_FALSE;
723: PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL));
724: if (flg) {
725: PetscInt i;
726: char eventname[128];
728: levels = mglevels[0]->levels;
729: for (i = 0; i < levels; i++) {
730: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i));
731: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup));
732: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i));
733: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve));
734: if (i) {
735: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i));
736: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual));
737: PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i));
738: PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict));
739: }
740: }
742: if (PetscDefined(USE_LOG)) {
743: const char sname[] = "MG Apply";
745: PetscCall(PetscLogStageGetId(sname, &mg->stageApply));
746: if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply));
747: }
748: }
749: PetscOptionsHeadEnd();
750: /* Check option consistency */
751: PetscCall(PCMGGetGalerkin(pc, &gtype));
752: PetscCall(PCMGGetAdaptInterpolation(pc, &flg));
753: PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator");
754: PetscFunctionReturn(PETSC_SUCCESS);
755: }
757: const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
758: const char *const PCMGCycleTypes[] = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
759: const char *const PCMGGalerkinTypes[] = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
760: const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};
762: #include <petscdraw.h>
763: PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
764: {
765: PC_MG *mg = (PC_MG *)pc->data;
766: PC_MG_Levels **mglevels = mg->levels;
767: PetscInt levels = mglevels ? mglevels[0]->levels : 0, i;
768: PetscBool isascii, isbinary, isdraw;
770: PetscFunctionBegin;
771: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
772: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
773: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
774: if (isascii) {
775: const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
776: PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
777: if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
778: if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
779: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n"));
780: } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
781: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n"));
782: } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
783: PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n"));
784: } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
785: PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n"));
786: } else {
787: PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n"));
788: }
789: if (mg->view) PetscCall((*mg->view)(pc, viewer));
790: for (i = 0; i < levels; i++) {
791: if (i) {
792: PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
793: } else {
794: PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
795: }
796: PetscCall(PetscViewerASCIIPushTab(viewer));
797: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
798: PetscCall(PetscViewerASCIIPopTab(viewer));
799: if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
800: PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
801: } else if (i) {
802: PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
803: PetscCall(PetscViewerASCIIPushTab(viewer));
804: PetscCall(KSPView(mglevels[i]->smoothu, viewer));
805: PetscCall(PetscViewerASCIIPopTab(viewer));
806: }
807: if (i && mglevels[i]->cr) {
808: PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
809: PetscCall(PetscViewerASCIIPushTab(viewer));
810: PetscCall(KSPView(mglevels[i]->cr, viewer));
811: PetscCall(PetscViewerASCIIPopTab(viewer));
812: }
813: }
814: } else if (isbinary) {
815: for (i = levels - 1; i >= 0; i--) {
816: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
817: if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
818: }
819: } else if (isdraw) {
820: PetscDraw draw;
821: PetscReal x, w, y, bottom, th;
822: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
823: PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
824: PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
825: bottom = y - th;
826: for (i = levels - 1; i >= 0; i--) {
827: if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
828: PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
829: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
830: PetscCall(PetscDrawPopCurrentPoint(draw));
831: } else {
832: w = 0.5 * PetscMin(1.0 - x, x);
833: PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
834: PetscCall(KSPView(mglevels[i]->smoothd, viewer));
835: PetscCall(PetscDrawPopCurrentPoint(draw));
836: PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
837: PetscCall(KSPView(mglevels[i]->smoothu, viewer));
838: PetscCall(PetscDrawPopCurrentPoint(draw));
839: }
840: PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
841: bottom -= th;
842: }
843: }
844: PetscFunctionReturn(PETSC_SUCCESS);
845: }
847: #include <petsc/private/kspimpl.h>
849: /*
850: Calls setup for the KSP on each level
851: */
852: PetscErrorCode PCSetUp_MG(PC pc)
853: {
854: PC_MG *mg = (PC_MG *)pc->data;
855: PC_MG_Levels **mglevels = mg->levels;
856: PetscInt i, n;
857: PC cpc;
858: PetscBool dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
859: Mat dA, dB;
860: Vec tvec;
861: DM *dms;
862: PetscViewer viewer = NULL;
863: PetscBool dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
864: PetscBool adaptInterpolation = mg->adaptInterpolation;
866: PetscFunctionBegin;
867: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
868: n = mglevels[0]->levels;
869: /* FIX: Move this to PCSetFromOptions_MG? */
870: if (mg->usedmfornumberoflevels) {
871: PetscInt levels;
872: PetscCall(DMGetRefineLevel(pc->dm, &levels));
873: levels++;
874: if (levels > n) { /* the problem is now being solved on a finer grid */
875: PetscCall(PCMGSetLevels(pc, levels, NULL));
876: n = levels;
877: PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
878: mglevels = mg->levels;
879: }
880: }
881: PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc));
883: /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
884: /* so use those from global PC */
885: /* Is this what we always want? What if user wants to keep old one? */
886: PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
887: if (opsset) {
888: Mat mmat;
889: PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
890: if (mmat == pc->pmat) opsset = PETSC_FALSE;
891: }
893: /* Create CR solvers */
894: PetscCall(PCMGGetAdaptCR(pc, &doCR));
895: if (doCR) {
896: const char *prefix;
898: PetscCall(PCGetOptionsPrefix(pc, &prefix));
899: for (i = 1; i < n; ++i) {
900: PC ipc, cr;
901: char crprefix[128];
903: PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
904: PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
905: PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
906: PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
907: PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
908: PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
909: PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
910: PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
911: PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
912: PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));
914: PetscCall(PCSetType(ipc, PCCOMPOSITE));
915: PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
916: PetscCall(PCCompositeAddPCType(ipc, PCSOR));
917: PetscCall(CreateCR_Private(pc, i, &cr));
918: PetscCall(PCCompositeAddPC(ipc, cr));
919: PetscCall(PCDestroy(&cr));
921: PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
922: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
923: PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
924: PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
925: }
926: }
928: if (!opsset) {
929: PetscCall(PCGetUseAmat(pc, &use_amat));
930: if (use_amat) {
931: PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
932: PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
933: } else {
934: PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
935: PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
936: }
937: }
939: for (i = n - 1; i > 0; i--) {
940: if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
941: missinginterpolate = PETSC_TRUE;
942: break;
943: }
944: }
946: PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
947: if (dA == dB) dAeqdB = PETSC_TRUE;
948: if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
950: if (pc->dm && !pc->setupcalled) {
951: /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
952: PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
953: PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
954: if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
955: PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
956: PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
957: }
958: if (mglevels[n - 1]->cr) {
959: PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
960: PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
961: }
962: }
964: /*
965: Skip if the user has provided all interpolation/restriction needed (since the DM might not be able to produce them, e.g. when coming from SNES/TS).
966: Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
967: */
968: if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
969: /* first see if we can compute a coarse space */
970: if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
971: for (i = n - 2; i > -1; i--) {
972: if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
973: PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
974: PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
975: }
976: }
977: } else { /* construct the interpolation from the DMs */
978: Mat p;
979: Vec rscale;
980: PetscCall(PetscMalloc1(n, &dms));
981: dms[n - 1] = pc->dm;
982: /* Separately create them so we do not get DMKSP interference between levels */
983: for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
984: for (i = n - 2; i > -1; i--) {
985: DMKSP kdm;
986: PetscBool dmhasrestrict, dmhasinject;
987: PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
988: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
989: if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
990: PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
991: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
992: }
993: if (mglevels[i]->cr) {
994: PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
995: if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
996: }
997: PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
998: /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
999: * a bitwise OR of computing the matrix, RHS, and initial iterate. */
1000: kdm->ops->computerhs = NULL;
1001: kdm->rhsctx = NULL;
1002: if (!mglevels[i + 1]->interpolate) {
1003: PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
1004: PetscCall(PCMGSetInterpolation(pc, i + 1, p));
1005: if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
1006: PetscCall(VecDestroy(&rscale));
1007: PetscCall(MatDestroy(&p));
1008: }
1009: PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
1010: if (dmhasrestrict && !mglevels[i + 1]->restrct) {
1011: PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
1012: PetscCall(PCMGSetRestriction(pc, i + 1, p));
1013: PetscCall(MatDestroy(&p));
1014: }
1015: PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
1016: if (dmhasinject && !mglevels[i + 1]->inject) {
1017: PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
1018: PetscCall(PCMGSetInjection(pc, i + 1, p));
1019: PetscCall(MatDestroy(&p));
1020: }
1021: }
1023: for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
1024: PetscCall(PetscFree(dms));
1025: }
1026: }
1028: if (mg->galerkin < PC_MG_GALERKIN_NONE) {
1029: Mat A, B;
1030: PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
1031: MatReuse reuse = MAT_INITIAL_MATRIX;
1033: if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
1034: if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
1035: if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
1036: for (i = n - 2; i > -1; i--) {
1037: PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
1038: if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
1039: if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
1040: if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
1041: if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
1042: if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
1043: /* the management of the PetscObjectReference() and PetscObjectDereference() below is rather delicate */
1044: if (!doA && dAeqdB) {
1045: if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
1046: A = B;
1047: } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
1048: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
1049: PetscCall(PetscObjectReference((PetscObject)A));
1050: }
1051: if (!doB && dAeqdB) {
1052: if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
1053: B = A;
1054: } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
1055: PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
1056: PetscCall(PetscObjectReference((PetscObject)B));
1057: }
1058: if (reuse == MAT_INITIAL_MATRIX) {
1059: PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
1060: PetscCall(PetscObjectDereference((PetscObject)A));
1061: PetscCall(PetscObjectDereference((PetscObject)B));
1062: }
1063: dA = A;
1064: dB = B;
1065: }
1066: }
1068: /* Adapt interpolation matrices */
1069: if (adaptInterpolation) {
1070: for (i = 0; i < n; ++i) {
1071: if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
1072: if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
1073: }
1074: for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
1075: }
1077: if (needRestricts && pc->dm) {
1078: for (i = n - 2; i >= 0; i--) {
1079: DM dmfine, dmcoarse;
1080: Mat Restrict, Inject;
1081: Vec rscale;
1082: PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
1083: PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
1084: PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
1085: PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
1086: PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
1087: PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
1088: }
1089: }
1091: if (!pc->setupcalled) {
1092: for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
1093: for (i = 1; i < n; i++) {
1094: if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
1095: if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
1096: }
1097: /* ensure that if either interpolation or restriction is set the other one is also set */
1098: for (i = 1; i < n; i++) {
1099: PetscCall(PCMGGetInterpolation(pc, i, NULL));
1100: PetscCall(PCMGGetRestriction(pc, i, NULL));
1101: }
1102: for (i = 0; i < n - 1; i++) {
1103: if (!mglevels[i]->b) {
1104: Vec *vec;
1105: PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
1106: PetscCall(PCMGSetRhs(pc, i, *vec));
1107: PetscCall(VecDestroy(vec));
1108: PetscCall(PetscFree(vec));
1109: }
1110: if (!mglevels[i]->r && i) {
1111: PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1112: PetscCall(PCMGSetR(pc, i, tvec));
1113: PetscCall(VecDestroy(&tvec));
1114: }
1115: if (!mglevels[i]->x) {
1116: PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1117: PetscCall(PCMGSetX(pc, i, tvec));
1118: PetscCall(VecDestroy(&tvec));
1119: }
1120: if (doCR) {
1121: PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
1122: PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
1123: PetscCall(VecZeroEntries(mglevels[i]->crb));
1124: }
1125: }
1126: if (n != 1 && !mglevels[n - 1]->r) {
1127: /* PCMGSetR() on the finest level if user did not supply it */
1128: Vec *vec;
1129: PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
1130: PetscCall(PCMGSetR(pc, n - 1, *vec));
1131: PetscCall(VecDestroy(vec));
1132: PetscCall(PetscFree(vec));
1133: }
1134: if (doCR) {
1135: PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
1136: PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
1137: PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
1138: }
1139: }
1141: if (pc->dm) {
1142: /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
1143: for (i = 0; i < n - 1; i++) {
1144: if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1145: }
1146: }
1147: // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
1148: // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
1149: if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1151: for (i = 1; i < n; i++) {
1152: if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
1153: /* if doing only down then initial guess is zero */
1154: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
1155: }
1156: if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1157: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1158: PetscCall(KSPSetUp(mglevels[i]->smoothd));
1159: if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1160: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1161: if (!mglevels[i]->residual) {
1162: Mat mat;
1163: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1164: PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
1165: }
1166: if (!mglevels[i]->residualtranspose) {
1167: Mat mat;
1168: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1169: PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
1170: }
1171: }
1172: for (i = 1; i < n; i++) {
1173: if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
1174: Mat downmat, downpmat;
1176: /* check if operators have been set for up, if not use down operators to set them */
1177: PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
1178: if (!opsset) {
1179: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1180: PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
1181: }
1183: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
1184: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1185: PetscCall(KSPSetUp(mglevels[i]->smoothu));
1186: if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
1187: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1188: }
1189: if (mglevels[i]->cr) {
1190: Mat downmat, downpmat;
1192: /* check if operators have been set for the CR solver, if not use the down smoother's operators to set them */
1193: PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
1194: if (!opsset) {
1195: PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1196: PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
1197: }
1199: PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1200: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1201: PetscCall(KSPSetUp(mglevels[i]->cr));
1202: if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
1203: if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1204: }
1205: }
1207: if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1208: PetscCall(KSPSetUp(mglevels[0]->smoothd));
1209: if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1210: if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1212: /*
1213: Dump the interpolation/restriction matrices plus the
1214: Jacobian/stiffness on each level. This allows MATLAB users to
1215: easily check if the Galerkin condition A_c = R A_f R^T is satisfied.
1217: Only one of the two dump formats (socket or binary) is supported at a time.
1218: */
1219: #if defined(PETSC_USE_SOCKET_VIEWER)
1220: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
1221: if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
1222: dump = PETSC_FALSE;
1223: #endif
1224: PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
1225: if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));
1227: if (viewer) {
1228: for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
1229: for (i = 0; i < n; i++) {
1230: PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
1231: PetscCall(MatView(pc->mat, viewer));
1232: }
1233: }
1234: PetscFunctionReturn(PETSC_SUCCESS);
1235: }
1237: PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
1238: {
1239: PC_MG *mg = (PC_MG *)pc->data;
1241: PetscFunctionBegin;
1242: *levels = mg->nlevels;
1243: PetscFunctionReturn(PETSC_SUCCESS);
1244: }
1246: /*@
1247: PCMGGetLevels - Gets the number of levels to use with `PCMG`.
1249: Not Collective
1251: Input Parameter:
1252: . pc - the preconditioner context
1254: Output Parameter:
1255: . levels - the number of levels
1257: Level: advanced
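  Example Usage:
  A minimal sketch, assuming `pc` is a `PC` of type `PCMG` whose levels have already been set:
.vb
  PetscInt nlevels, l;
  KSP      smoother;

  PetscCall(PCMGGetLevels(pc, &nlevels));
  for (l = 0; l < nlevels; l++) PetscCall(PCMGGetSmoother(pc, l, &smoother));
.ve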
1259: .seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
1260: @*/
1261: PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
1262: {
1263: PetscFunctionBegin;
1265: PetscAssertPointer(levels, 2);
1266: *levels = 0;
1267: PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
1268: PetscFunctionReturn(PETSC_SUCCESS);
1269: }
1271: /*@
1272:   PCMGGetGridComplexity - Computes the grid and operator complexities of the `PCMG` hierarchy
1274: Input Parameter:
1275: . pc - the preconditioner context
1277: Output Parameters:
1278: + gc - grid complexity = sum_i(n_i) / n_0
1279: - oc - operator complexity = sum_i(nnz_i) / nnz_0
1281: Level: advanced
1283: Note:
1284:   The latter ratio is often called the operator complexity in the multigrid literature
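  Example Usage:
  A minimal sketch, assuming `pc` is a `PCMG` preconditioner that has already been set up (for example after `KSPSetUp()`):
.vb
  PetscReal gc, oc;

  PetscCall(PCMGGetGridComplexity(pc, &gc, &oc));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "grid complexity %g  operator complexity %g\n", (double)gc, (double)oc));
.ve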
1286: .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`
1287: @*/
1288: PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc)
1289: {
1290: PC_MG *mg = (PC_MG *)pc->data;
1291: PC_MG_Levels **mglevels = mg->levels;
1292: PetscInt lev, N;
1293: PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0;
1294: MatInfo info;
1296: PetscFunctionBegin;
1298: if (gc) PetscAssertPointer(gc, 2);
1299: if (oc) PetscAssertPointer(oc, 3);
1300: if (!pc->setupcalled) {
1301: if (gc) *gc = 0;
1302: if (oc) *oc = 0;
1303: PetscFunctionReturn(PETSC_SUCCESS);
1304: }
1305: PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels");
1306: for (lev = 0; lev < mg->nlevels; lev++) {
1307: Mat dB;
1308: PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB));
1309: PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */
1310: PetscCall(MatGetSize(dB, &N, NULL));
1311: sgc += N;
1312: soc += info.nz_used;
1313: if (lev == mg->nlevels - 1) {
1314: nnz0 = info.nz_used;
1315: n0 = N;
1316: }
1317: }
1318:   PetscCheck(n0 > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of grid points on finest level is not available");
1319:   if (gc) *gc = (PetscReal)(sgc / n0);
1320: if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0);
1321: PetscFunctionReturn(PETSC_SUCCESS);
1322: }
1324: /*@
1325: PCMGSetType - Determines the form of multigrid to use, either
1326: multiplicative, additive, full, or the Kaskade algorithm.
1328: Logically Collective
1330: Input Parameters:
1331: + pc - the preconditioner context
1332: - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`
1334: Options Database Key:
1335: . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade
1337: Level: advanced
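  Example Usage:
  A minimal sketch selecting full multigrid; `pc` is assumed to already be of type `PCMG`:
.vb
  PetscCall(PCMGSetType(pc, PC_MG_FULL));
.ve
  The equivalent runtime choice is -pc_mg_type full.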
1339: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType`
1340: @*/
1341: PetscErrorCode PCMGSetType(PC pc, PCMGType form)
1342: {
1343: PC_MG *mg = (PC_MG *)pc->data;
1345: PetscFunctionBegin;
1348: mg->am = form;
1349: if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG;
1350: else pc->ops->applyrichardson = NULL;
1351: PetscFunctionReturn(PETSC_SUCCESS);
1352: }
1354: /*@
1355:   PCMGGetType - Finds the form of multigrid the `PCMG` is using: multiplicative, additive, full, or the Kaskade algorithm.
1357: Logically Collective
1359: Input Parameter:
1360: . pc - the preconditioner context
1362: Output Parameter:
1363: . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, or `PC_MG_KASKADE`
1365: Level: advanced
1367: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()`
1368: @*/
1369: PetscErrorCode PCMGGetType(PC pc, PCMGType *type)
1370: {
1371: PC_MG *mg = (PC_MG *)pc->data;
1373: PetscFunctionBegin;
1375: *type = mg->am;
1376: PetscFunctionReturn(PETSC_SUCCESS);
1377: }
1379: /*@
1380:   PCMGSetCycleType - Sets the type of cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more
1381: complicated cycling.
1383: Logically Collective
1385: Input Parameters:
1386: + pc - the multigrid context
1387: - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W`
1389: Options Database Key:
1390: . -pc_mg_cycle_type <v,w> - provide the cycle desired
1392: Level: advanced
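  Example Usage:
  A minimal sketch selecting W-cycles on every level; `pc` is assumed to be a `PCMG` whose levels were set with `PCMGSetLevels()`:
.vb
  PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_W));
.ve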
1394: .seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
1395: @*/
1396: PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
1397: {
1398: PC_MG *mg = (PC_MG *)pc->data;
1399: PC_MG_Levels **mglevels = mg->levels;
1400: PetscInt i, levels;
1402: PetscFunctionBegin;
1405: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1406: levels = mglevels[0]->levels;
1407: for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
1408: PetscFunctionReturn(PETSC_SUCCESS);
1409: }
1411: /*@
1412: PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
1413: of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`
1415: Logically Collective
1417: Input Parameters:
1418: + pc - the multigrid context
1419: - n - number of cycles (default is 1)
1421: Options Database Key:
1422: . -pc_mg_multiplicative_cycles <n> - set the number of cycles
1424: Level: advanced
1426: Note:
1427:   This is not associated with setting a V or W cycle; that is set with `PCMGSetCycleType()`
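  Example Usage:
  A minimal sketch applying two multiplicative V-cycles per preconditioner application; `pc` is assumed to be a `PCMG`:
.vb
  PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
  PetscCall(PCMGMultiplicativeSetCycles(pc, 2));
.ve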
1429: .seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
1430: @*/
1431: PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
1432: {
1433: PC_MG *mg = (PC_MG *)pc->data;
1435: PetscFunctionBegin;
1438: mg->cyclesperpcapply = n;
1439: PetscFunctionReturn(PETSC_SUCCESS);
1440: }
1442: static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
1443: {
1444: PC_MG *mg = (PC_MG *)pc->data;
1446: PetscFunctionBegin;
1447: mg->galerkin = use;
1448: PetscFunctionReturn(PETSC_SUCCESS);
1449: }
1451: /*@
1452: PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
1453: finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i
1455: Logically Collective
1457: Input Parameters:
1458: + pc - the multigrid context
1459: - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`
1461: Options Database Key:
1462: . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process
1464: Level: intermediate
1466: Note:
1467: Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not
1468: use the `PCMG` construction of the coarser grids.
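  Example Usage:
  A minimal sketch where only the finest-level operators and the interpolations are provided and the coarser preconditioning
  matrices are formed by the Galerkin product; `pc` is assumed to be a `PCMG` with levels and interpolations already set:
.vb
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
.ve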
1470: .seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
1471: @*/
1472: PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
1473: {
1474: PetscFunctionBegin;
1476: PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
1477: PetscFunctionReturn(PETSC_SUCCESS);
1478: }
1480: /*@
1481: PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i
1483: Not Collective
1485: Input Parameter:
1486: . pc - the multigrid context
1488: Output Parameter:
1489: . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`
1491: Level: intermediate
1493: .seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
1494: @*/
1495: PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
1496: {
1497: PC_MG *mg = (PC_MG *)pc->data;
1499: PetscFunctionBegin;
1501: *galerkin = mg->galerkin;
1502: PetscFunctionReturn(PETSC_SUCCESS);
1503: }
1505: static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
1506: {
1507: PC_MG *mg = (PC_MG *)pc->data;
1509: PetscFunctionBegin;
1510: mg->adaptInterpolation = adapt;
1511: PetscFunctionReturn(PETSC_SUCCESS);
1512: }
1514: static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
1515: {
1516: PC_MG *mg = (PC_MG *)pc->data;
1518: PetscFunctionBegin;
1519: *adapt = mg->adaptInterpolation;
1520: PetscFunctionReturn(PETSC_SUCCESS);
1521: }
1523: static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
1524: {
1525: PC_MG *mg = (PC_MG *)pc->data;
1527: PetscFunctionBegin;
1528: mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
1529: mg->coarseSpaceType = ctype;
1530: PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
1531: PetscFunctionReturn(PETSC_SUCCESS);
1532: }
1534: static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
1535: {
1536: PC_MG *mg = (PC_MG *)pc->data;
1538: PetscFunctionBegin;
1539: *ctype = mg->coarseSpaceType;
1540: PetscFunctionReturn(PETSC_SUCCESS);
1541: }
1543: static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
1544: {
1545: PC_MG *mg = (PC_MG *)pc->data;
1547: PetscFunctionBegin;
1548: mg->compatibleRelaxation = cr;
1549: PetscFunctionReturn(PETSC_SUCCESS);
1550: }
1552: static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
1553: {
1554: PC_MG *mg = (PC_MG *)pc->data;
1556: PetscFunctionBegin;
1557: *cr = mg->compatibleRelaxation;
1558: PetscFunctionReturn(PETSC_SUCCESS);
1559: }
1561: /*@
1562: PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.
1564: Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1566: Logically Collective
1568: Input Parameters:
1569: + pc - the multigrid context
1570: - ctype - the type of coarse space
1572: Options Database Keys:
1573: + -pc_mg_adapt_interp_n <int> - The number of modes to use
1574: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: `none`, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw`
1576: Level: intermediate
1578: Note:
1579:   Requires that a `DM` with specific functionality be attached to the `PC`.
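  Example Usage:
  A minimal sketch requesting a harmonically enriched coarse space; `pc` is assumed to be a `PCMG` with a suitable `DM` attached,
  and the enumeration value is assumed to mirror the `harmonic` option listed above:
.vb
  PetscCall(PCMGSetAdaptCoarseSpaceType(pc, PCMG_ADAPT_HARMONIC));
.ve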
1581: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
1582: @*/
1583: PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
1584: {
1585: PetscFunctionBegin;
1588: PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
1589: PetscFunctionReturn(PETSC_SUCCESS);
1590: }
1592: /*@
1593: PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.
1595: Not Collective
1597: Input Parameter:
1598: . pc - the multigrid context
1600: Output Parameter:
1601: . ctype - the type of coarse space
1603: Level: intermediate
1605: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
1606: @*/
1607: PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
1608: {
1609: PetscFunctionBegin;
1611: PetscAssertPointer(ctype, 2);
1612: PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
1613: PetscFunctionReturn(PETSC_SUCCESS);
1614: }
1616: /* MATT: REMOVE? */
1617: /*@
1618: PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.
1620: Logically Collective
1622: Input Parameters:
1623: + pc - the multigrid context
1624: - adapt - flag for adaptation of the interpolator
1626: Options Database Keys:
1627: + -pc_mg_adapt_interp - Turn on adaptation
1628: . -pc_mg_adapt_interp_n <int> - The number of modes to use, should be divisible by dimension
1629: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector
1631: Level: intermediate
1633: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1634: @*/
1635: PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
1636: {
1637: PetscFunctionBegin;
1639: PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
1640: PetscFunctionReturn(PETSC_SUCCESS);
1641: }
1643: /*@
1644: PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
1645: and thus accurately interpolated.
1647: Not Collective
1649: Input Parameter:
1650: . pc - the multigrid context
1652: Output Parameter:
1653: . adapt - flag for adaptation of the interpolator
1655: Level: intermediate
1657: .seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1658: @*/
1659: PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
1660: {
1661: PetscFunctionBegin;
1663: PetscAssertPointer(adapt, 2);
1664: PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
1665: PetscFunctionReturn(PETSC_SUCCESS);
1666: }
1668: /*@
1669: PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.
1671: Logically Collective
1673: Input Parameters:
1674: + pc - the multigrid context
1675: - cr - flag for compatible relaxation
1677: Options Database Key:
1678: . -pc_mg_adapt_cr - Turn on compatible relaxation
1680: Level: intermediate
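  Example Usage:
  A minimal sketch turning on the compatible relaxation monitor; `pc` is assumed to be a `PCMG`:
.vb
  PetscCall(PCMGSetAdaptCR(pc, PETSC_TRUE));
.ve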
1682: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1683: @*/
1684: PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
1685: {
1686: PetscFunctionBegin;
1688: PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
1689: PetscFunctionReturn(PETSC_SUCCESS);
1690: }
1692: /*@
1693: PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.
1695: Not Collective
1697: Input Parameter:
1698: . pc - the multigrid context
1700: Output Parameter:
1701: . cr - flag for compatible relaxation
1703: Level: intermediate
1705: .seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1706: @*/
1707: PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
1708: {
1709: PetscFunctionBegin;
1711: PetscAssertPointer(cr, 2);
1712: PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
1713: PetscFunctionReturn(PETSC_SUCCESS);
1714: }
1716: /*@
1717:   PCMGSetNumberSmooth - Sets the number of pre- and post-smoothing steps to use
1718:   on all levels. Use `PCMGSetDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
1719: pre- and post-smoothing steps.
1721: Logically Collective
1723: Input Parameters:
1724: + pc - the multigrid context
1725: - n - the number of smoothing steps
1727: Options Database Key:
1728: . -mg_levels_ksp_max_it <n> - Sets the number of pre- and post-smoothing steps
1730: Level: advanced
1732: Note:
1733: This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
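  Example Usage:
  A minimal sketch using three pre- and post-smoothing steps on every level but the coarsest; `pc` is assumed to be a `PCMG` with levels set:
.vb
  PetscCall(PCMGSetNumberSmooth(pc, 3));
.ve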
1735: .seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()`
1736: @*/
1737: PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
1738: {
1739: PC_MG *mg = (PC_MG *)pc->data;
1740: PC_MG_Levels **mglevels = mg->levels;
1741: PetscInt i, levels;
1743: PetscFunctionBegin;
1746: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1747: levels = mglevels[0]->levels;
1749: for (i = 1; i < levels; i++) {
1750: PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1751: PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1752: mg->default_smoothu = n;
1753: mg->default_smoothd = n;
1754: }
1755: PetscFunctionReturn(PETSC_SUCCESS);
1756: }
1758: /*@
1759:   PCMGSetDistinctSmoothUp - Sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
1760:   and appends `up_` to its options prefix
1762: Logically Collective
1764: Input Parameter:
1765: . pc - the preconditioner context
1767: Options Database Key:
1768: . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects
1770: Level: advanced
1772: Note:
1773: This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
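  Example Usage:
  A minimal sketch giving each post-smoother its own options prefix; `pc` is assumed to be a `PCMG` with levels set, and the
  resulting level-1 prefix is assumed to take the form shown in the comment:
.vb
  PetscCall(PCMGSetDistinctSmoothUp(pc));
  /* the level-1 post-smoother can now be controlled with, e.g., -mg_levels_1_up_ksp_type richardson */
.ve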
1775: .seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
1776: @*/
1777: PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
1778: {
1779: PC_MG *mg = (PC_MG *)pc->data;
1780: PC_MG_Levels **mglevels = mg->levels;
1781: PetscInt i, levels;
1782: KSP subksp;
1784: PetscFunctionBegin;
1786: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1787: levels = mglevels[0]->levels;
1789: for (i = 1; i < levels; i++) {
1790: const char *prefix = NULL;
1791: /* make sure smoother up and down are different */
1792: PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
1793: PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
1794: PetscCall(KSPSetOptionsPrefix(subksp, prefix));
1795: PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
1796: }
1797: PetscFunctionReturn(PETSC_SUCCESS);
1798: }
1800: /* No new matrices are created; the returned interpolation matrices are references to the original ones */
1801: static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
1802: {
1803: PC_MG *mg = (PC_MG *)pc->data;
1804: PC_MG_Levels **mglevels = mg->levels;
1805: Mat *mat;
1806: PetscInt l;
1808: PetscFunctionBegin;
1809: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1810: PetscCall(PetscMalloc1(mg->nlevels, &mat));
1811: for (l = 1; l < mg->nlevels; l++) {
1812: mat[l - 1] = mglevels[l]->interpolate;
1813: PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
1814: }
1815: *num_levels = mg->nlevels;
1816: *interpolations = mat;
1817: PetscFunctionReturn(PETSC_SUCCESS);
1818: }
1820: /* No new matrices are created; the returned coarse operator matrices are references to the original ones */
1821: static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1822: {
1823: PC_MG *mg = (PC_MG *)pc->data;
1824: PC_MG_Levels **mglevels = mg->levels;
1825: PetscInt l;
1826: Mat *mat;
1828: PetscFunctionBegin;
1829: PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1830: PetscCall(PetscMalloc1(mg->nlevels, &mat));
1831: for (l = 0; l < mg->nlevels - 1; l++) {
1832: PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
1833: PetscCall(PetscObjectReference((PetscObject)mat[l]));
1834: }
1835: *num_levels = mg->nlevels;
1836: *coarseOperators = mat;
1837: PetscFunctionReturn(PETSC_SUCCESS);
1838: }
1840: /*@C
1841: PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction.
1843: Not Collective, No Fortran Support
1845: Input Parameters:
1846: + name - name of the constructor
1847: - function - constructor routine, see `PCMGCoarseSpaceConstructorFn`
1849: Level: advanced
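  Example Usage:
  A minimal registration sketch; `MyCoarseSpace` is a hypothetical user routine assumed to have the `PCMGCoarseSpaceConstructorFn` calling sequence:
.vb
  PetscCall(PCMGRegisterCoarseSpaceConstructor("mycoarse", MyCoarseSpace));
.ve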
1851: Developer Notes:
1852: This does not appear to be used anywhere
1854: .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
1855: @*/
1856: PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn *function)
1857: {
1858: PetscFunctionBegin;
1859: PetscCall(PCInitializePackage());
1860: PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
1861: PetscFunctionReturn(PETSC_SUCCESS);
1862: }
1864: /*@C
1865: PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method.
1867: Not Collective, No Fortran Support
1869: Input Parameter:
1870: . name - name of the constructor
1872: Output Parameter:
1873: . function - constructor routine
1875: Level: advanced
1877: .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
1878: @*/
1879: PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn **function)
1880: {
1881: PetscFunctionBegin;
1882: PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
1883: PetscFunctionReturn(PETSC_SUCCESS);
1884: }
1886: /*MC
1887: PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional
1888: information about the coarser grid matrices and restriction/interpolation operators.
1890: Options Database Keys:
1891: + -pc_mg_levels <nlevels> - number of levels including finest
1892: . -pc_mg_cycle_type <v,w> - provide the cycle desired
1893: . -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
1894: . -pc_mg_log - log information about time spent on each level of the solver
1895: . -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
1896: . -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
1897: . -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1)
1898: . -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices
1899: to a `PETSCVIEWERSOCKET` for reading from MATLAB.
1900: - -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices
1901: to the binary output file called binaryoutput
1903: Level: intermediate
1905: Notes:
1906:   The Krylov solver (if any) and the preconditioner (smoother) on each level, together with their parameters, are controlled from the options database with the standard
1907:   options database keywords prefixed with `-mg_levels_`, which affect all the levels but the coarsest; the coarsest level is controlled with `-mg_coarse_`,
1908:   and on the finest level `-mg_fine_` can override `-mg_levels_`. One can set different preconditioners etc. on specific levels with the prefix
1909:   `-mg_levels_n_` where `n` is the level number (zero being the coarsest level). For example
1910: .vb
1911: -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
1912: .ve
1913: These options also work for controlling the smoothers etc inside `PCGAMG`
1915:   If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother, then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method
1917:   When run with a single level, the smoother options are used on that level, NOT the coarse grid solver options
1919:   When run with `KSPRICHARDSON`, the convergence test changes slightly if a monitor is turned on, so the iteration count may differ slightly. This
1920:   is because, without monitoring, the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
1921:   (the residual has just been computed for the multigrid algorithm and is hence available for free), while with monitoring the
1922:   residual is computed at the end of each cycle.
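   Example Usage:
   A minimal two-level programmatic setup sketch; the fine-grid matrix A, the interpolation matrix P between the two grids,
   the vectors b and x, and the solver ksp are assumed to have been created elsewhere:
.vb
   PC pc;

   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCMG));
   PetscCall(PCMGSetLevels(pc, 2, NULL));
   PetscCall(PCMGSetInterpolation(pc, 1, P));
   PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
   PetscCall(KSPSetOperators(ksp, A, A));
   PetscCall(KSPSolve(ksp, b, x));
.ve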
1924: .seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
1925: `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
1926: `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
1927: `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
1928: `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
1929: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1930: M*/
1932: PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
1933: {
1934: PC_MG *mg;
1936: PetscFunctionBegin;
1937: PetscCall(PetscNew(&mg));
1938: pc->data = mg;
1939: mg->nlevels = -1;
1940: mg->am = PC_MG_MULTIPLICATIVE;
1941: mg->galerkin = PC_MG_GALERKIN_NONE;
1942: mg->adaptInterpolation = PETSC_FALSE;
1943: mg->Nc = -1;
1944: mg->eigenvalue = -1;
1946: pc->useAmat = PETSC_TRUE;
1948: pc->ops->apply = PCApply_MG;
1949: pc->ops->applytranspose = PCApplyTranspose_MG;
1950: pc->ops->matapply = PCMatApply_MG;
1951: pc->ops->matapplytranspose = PCMatApplyTranspose_MG;
1952: pc->ops->setup = PCSetUp_MG;
1953: pc->ops->reset = PCReset_MG;
1954: pc->ops->destroy = PCDestroy_MG;
1955: pc->ops->setfromoptions = PCSetFromOptions_MG;
1956: pc->ops->view = PCView_MG;
1958: PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
1959: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
1960: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
1961: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
1962: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
1963: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
1964: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
1965: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
1966: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
1967: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
1968: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
1969: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
1970: PetscFunctionReturn(PETSC_SUCCESS);
1971: }