Actual source code: mg.c

  1: /*
  2:     Defines the multigrid preconditioner interface.
  3: */
  4: #include <petsc/private/pcmgimpl.h>
  5: #include <petsc/private/kspimpl.h>
  6: #include <petscdm.h>
  7: PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);

  9: /*
 10:    Contains the list of registered coarse space construction routines
 11: */
 12: PetscFunctionList PCMGCoarseList = NULL;

 14: PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
 15: {
 16:   PC_MG        *mg = (PC_MG *)pc->data;
 17:   PC_MG_Levels *mgc, *mglevels = *mglevelsin;
 18:   PetscInt      cycles = (mglevels->level == 1) ? 1 : mglevels->cycles;

 20:   PetscFunctionBegin;
 21:   if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
 22:   if (!transpose) {
 23:     if (matapp) {
 24:       PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
 25:       PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
 26:     } else {
 27:       PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
 28:       PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
 29:     }
 30:   } else {
 31:     PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
 32:     PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
 33:     PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
 34:   }
 35:   if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
 36:   if (mglevels->level) { /* not the coarsest grid */
 37:     if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
 38:     if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
 39:     if (!transpose) {
 40:       if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
 41:       else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
 42:     } else {
 43:       if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
 44:       else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
 45:     }
 46:     if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));

 48:     /* if on finest level and have convergence criteria set */
 49:     if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
 50:       PetscReal rnorm;

 52:       PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
 53:       if (rnorm <= mg->ttol) {
 54:         if (rnorm < mg->abstol) {
 55:           *reason = PCRICHARDSON_CONVERGED_ATOL;
 56:           PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
 57:         } else {
 58:           *reason = PCRICHARDSON_CONVERGED_RTOL;
 59:           PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
 60:         }
 61:         PetscFunctionReturn(PETSC_SUCCESS);
 62:       }
 63:     }

 65:     mgc = *(mglevelsin - 1);
 66:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
 67:     if (!transpose) {
 68:       if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
 69:       else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
 70:     } else {
 71:       if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
 72:       else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
 73:     }
 74:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
 75:     if (matapp) {
 76:       if (!mgc->X) {
 77:         PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
 78:       } else {
 79:         PetscCall(MatZeroEntries(mgc->X));
 80:       }
 81:     } else {
 82:       PetscCall(VecZeroEntries(mgc->x));
 83:     }
 84:     while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
 85:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
 86:     if (!transpose) {
 87:       if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
 88:       else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
 89:     } else {
 90:       PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
 91:     }
 92:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
 93:     if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
 94:     if (!transpose) {
 95:       if (matapp) {
 96:         PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
 97:         PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
 98:       } else {
 99:         PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
100:         PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
101:       }
102:     } else {
103:       PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
104:       PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
105:       PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
106:     }
107:     if (mglevels->cr) {
108:       Mat crA;

110:       PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
111:       /* TODO Turn on copy and turn off noisy if we have an exact solution
112:       PetscCall(VecCopy(mglevels->x, mglevels->crx));
113:       PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
114:       PetscCall(KSPGetOperators(mglevels->cr, &crA, NULL));
115:       PetscCall(KSPSetNoisy_Private(crA, mglevels->crx));
116:       PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
117:       PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
118:     }
119:     if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
120:   }
121:   PetscFunctionReturn(PETSC_SUCCESS);
122: }
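
The `cycles` count drives the recursion above: one recursive visit per level gives a V-cycle, two gives a W-cycle. A minimal sketch of how a user would select this (assuming `pc` is a PC already configured as PCMG; this snippet is illustrative and not taken from this file):

  PetscCall(PCMGSetLevels(pc, 4, NULL));
  PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_W));           /* two recursive visits into each coarser level */
  PetscCall(PCMGSetCycleTypeOnLevel(pc, 2, PC_MG_CYCLE_V)); /* optionally override the cycle type on one level */

The same choice is available through the -pc_mg_cycle_type option handled in PCSetFromOptions_MG() below.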

124: static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
125: {
126:   PC_MG         *mg       = (PC_MG *)pc->data;
127:   PC_MG_Levels **mglevels = mg->levels;
128:   PC             tpc;
129:   PetscBool      changeu, changed;
130:   PetscInt       levels = mglevels[0]->levels, i;

132:   PetscFunctionBegin;
 133:   /* When the DM is supplying the matrix, it will not exist until this point */
134:   for (i = 0; i < levels; i++) {
135:     if (!mglevels[i]->A) {
136:       PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
137:       PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
138:     }
139:   }

141:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
142:   PetscCall(PCPreSolveChangeRHS(tpc, &changed));
143:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
144:   PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
145:   if (!changed && !changeu) {
146:     PetscCall(VecDestroy(&mglevels[levels - 1]->b));
147:     mglevels[levels - 1]->b = b;
148:   } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
149:     if (!mglevels[levels - 1]->b) {
150:       Vec *vec;

152:       PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
153:       mglevels[levels - 1]->b = *vec;
154:       PetscCall(PetscFree(vec));
155:     }
156:     PetscCall(VecCopy(b, mglevels[levels - 1]->b));
157:   }
158:   mglevels[levels - 1]->x = x;

160:   mg->rtol   = rtol;
161:   mg->abstol = abstol;
162:   mg->dtol   = dtol;
163:   if (rtol) {
164:     /* compute initial residual norm for relative convergence test */
165:     PetscReal rnorm;

167:     if (zeroguess) {
168:       PetscCall(VecNorm(b, NORM_2, &rnorm));
169:     } else {
170:       PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
171:       PetscCall(VecNorm(w, NORM_2, &rnorm));
172:     }
173:     mg->ttol = PetscMax(rtol * rnorm, abstol);
174:   } else if (abstol) mg->ttol = abstol;
175:   else mg->ttol = 0.0;

 177:   /* since the smoother is applied to the full system, not just the residual, we need to make sure that the smoothers don't
 178:      stop prematurely due to a small residual */
179:   for (i = 1; i < levels; i++) {
180:     PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
181:     if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
 182:       /* For Richardson the initial guess is nonzero since each cycle solves the original system, rather than just applying a preconditioner */
183:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
184:       PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
185:     }
186:   }

188:   *reason = PCRICHARDSON_NOT_SET;
189:   for (i = 0; i < its; i++) {
190:     PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
191:     if (*reason) break;
192:   }
193:   if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS;
194:   *outits = i;
195:   if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
196:   PetscFunctionReturn(PETSC_SUCCESS);
197: }
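
PCApplyRichardson_MG() above is typically reached when PCMG is the preconditioner of a KSPRICHARDSON solver, in which case the multigrid cycles themselves act as the iteration and the rtol/abstol passed in are tested against the multigrid residual. A minimal sketch of that configuration (the Mat `A` and Vecs `b`, `x` are assumed to already exist):

  KSP ksp;
  PC  pc;

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetType(ksp, KSPRICHARDSON));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 3, NULL));
  PetscCall(KSPSetTolerances(ksp, 1.e-8, PETSC_CURRENT, PETSC_CURRENT, 50));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPDestroy(&ksp));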

199: PetscErrorCode PCReset_MG(PC pc)
200: {
201:   PC_MG         *mg       = (PC_MG *)pc->data;
202:   PC_MG_Levels **mglevels = mg->levels;
203:   PetscInt       i, n;

205:   PetscFunctionBegin;
206:   if (mglevels) {
207:     n = mglevels[0]->levels;
208:     for (i = 0; i < n - 1; i++) {
209:       PetscCall(VecDestroy(&mglevels[i + 1]->r));
210:       PetscCall(VecDestroy(&mglevels[i]->b));
211:       PetscCall(VecDestroy(&mglevels[i]->x));
212:       PetscCall(MatDestroy(&mglevels[i + 1]->R));
213:       PetscCall(MatDestroy(&mglevels[i]->B));
214:       PetscCall(MatDestroy(&mglevels[i]->X));
215:       PetscCall(VecDestroy(&mglevels[i]->crx));
216:       PetscCall(VecDestroy(&mglevels[i]->crb));
217:       PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
218:       PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
219:       PetscCall(MatDestroy(&mglevels[i + 1]->inject));
220:       PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
221:     }
222:     PetscCall(VecDestroy(&mglevels[n - 1]->crx));
223:     PetscCall(VecDestroy(&mglevels[n - 1]->crb));
224:     /* this is not null only if the smoother on the finest level
225:        changes the rhs during PreSolve */
226:     PetscCall(VecDestroy(&mglevels[n - 1]->b));
227:     PetscCall(MatDestroy(&mglevels[n - 1]->B));

229:     for (i = 0; i < n; i++) {
230:       PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
231:       PetscCall(MatDestroy(&mglevels[i]->A));
232:       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
233:       PetscCall(KSPReset(mglevels[i]->smoothu));
234:       if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
235:     }
236:     mg->Nc = 0;
237:   }
238:   PetscFunctionReturn(PETSC_SUCCESS);
239: }

241: /* Implementing CR

243: We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is

245:   Inj^T (Inj Inj^T)^{-1} Inj

247: and if Inj is a VecScatter, as it is now in PETSc, we have

249:   Inj^T Inj

251: and

253:   S = I - Inj^T Inj

255: since

257:   Inj S = Inj - (Inj Inj^T) Inj = 0.

259: Brannick suggests

261:   A \to S^T A S  \qquad\mathrm{and}\qquad M \to S^T M S

263: but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use

265:   M^{-1} A \to S M^{-1} A S

267: In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.

269:   Check: || Inj P - I ||_F < tol
270:   Check: In general, Inj Inj^T = I
271: */
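
The projector property claimed above can be checked numerically. A minimal sketch (not part of the preconditioner), assuming `It` is the Mat returned by PCMGGetInjection() for some level; S is built exactly as in CRSetup_Private() below and applied twice to a random vector:

  Mat       Inj, S;
  Vec       x, y, z;
  PetscReal norm;

  PetscCall(MatCreateTranspose(It, &Inj));
  PetscCall(MatCreateNormal(Inj, &S));  /* behaves as Inj^T Inj */
  PetscCall(MatScale(S, -1.0));
  PetscCall(MatShift(S, 1.0));          /* S = I - Inj^T Inj */
  PetscCall(MatCreateVecs(S, &x, &y));
  PetscCall(VecDuplicate(y, &z));
  PetscCall(VecSetRandom(x, NULL));
  PetscCall(MatMult(S, x, y));          /* y = S x   */
  PetscCall(MatMult(S, y, z));          /* z = S^2 x */
  PetscCall(VecAXPY(z, -1.0, y));
  PetscCall(VecNorm(z, NORM_2, &norm)); /* ~0 when S is a projector */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(VecDestroy(&z));
  PetscCall(MatDestroy(&S));
  PetscCall(MatDestroy(&Inj));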

273: typedef struct {
274:   PC       mg;  /* The PCMG object */
275:   PetscInt l;   /* The multigrid level for this solver */
276:   Mat      Inj; /* The injection matrix */
277:   Mat      S;   /* I - Inj^T Inj */
278: } CRContext;

280: static PetscErrorCode CRSetup_Private(PC pc)
281: {
282:   CRContext *ctx;
283:   Mat        It;

285:   PetscFunctionBeginUser;
286:   PetscCall(PCShellGetContext(pc, &ctx));
287:   PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
288:   PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
289:   PetscCall(MatCreateTranspose(It, &ctx->Inj));
290:   PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
291:   PetscCall(MatScale(ctx->S, -1.0));
292:   PetscCall(MatShift(ctx->S, 1.0));
293:   PetscFunctionReturn(PETSC_SUCCESS);
294: }

296: static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
297: {
298:   CRContext *ctx;

300:   PetscFunctionBeginUser;
301:   PetscCall(PCShellGetContext(pc, &ctx));
302:   PetscCall(MatMult(ctx->S, x, y));
303:   PetscFunctionReturn(PETSC_SUCCESS);
304: }

306: static PetscErrorCode CRDestroy_Private(PC pc)
307: {
308:   CRContext *ctx;

310:   PetscFunctionBeginUser;
311:   PetscCall(PCShellGetContext(pc, &ctx));
312:   PetscCall(MatDestroy(&ctx->Inj));
313:   PetscCall(MatDestroy(&ctx->S));
314:   PetscCall(PetscFree(ctx));
315:   PetscCall(PCShellSetContext(pc, NULL));
316:   PetscFunctionReturn(PETSC_SUCCESS);
317: }

319: static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
320: {
321:   CRContext *ctx;

323:   PetscFunctionBeginUser;
324:   PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
325:   PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
326:   PetscCall(PetscCalloc1(1, &ctx));
327:   ctx->mg = pc;
328:   ctx->l  = l;
329:   PetscCall(PCSetType(*cr, PCSHELL));
330:   PetscCall(PCShellSetContext(*cr, ctx));
331:   PetscCall(PCShellSetApply(*cr, CRApply_Private));
332:   PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
333:   PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
334:   PetscFunctionReturn(PETSC_SUCCESS);
335: }

337: PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *);

339: PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
340: {
341:   PC_MG         *mg = (PC_MG *)pc->data;
342:   MPI_Comm       comm;
343:   PC_MG_Levels **mglevels = mg->levels;
344:   PCMGType       mgtype   = mg->am;
345:   PetscInt       mgctype  = (PetscInt)PC_MG_CYCLE_V;
346:   PetscInt       i;
347:   PetscMPIInt    size;
348:   const char    *prefix;
349:   PC             ipc;
350:   PetscInt       n;

352:   PetscFunctionBegin;
355:   if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
356:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
357:   if (mglevels) {
358:     mgctype = mglevels[0]->cycles;
359:     /* changing the number of levels so free up the previous stuff */
360:     PetscCall(PCReset_MG(pc));
361:     n = mglevels[0]->levels;
362:     for (i = 0; i < n; i++) {
363:       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
364:       PetscCall(KSPDestroy(&mglevels[i]->smoothu));
365:       PetscCall(KSPDestroy(&mglevels[i]->cr));
366:       PetscCall(PetscFree(mglevels[i]));
367:     }
368:     PetscCall(PetscFree(mg->levels));
369:   }

371:   mg->nlevels = levels;

373:   PetscCall(PetscMalloc1(levels, &mglevels));

375:   PetscCall(PCGetOptionsPrefix(pc, &prefix));

377:   mg->stageApply = 0;
378:   for (i = 0; i < levels; i++) {
379:     PetscCall(PetscNew(&mglevels[i]));

381:     mglevels[i]->level               = i;
382:     mglevels[i]->levels              = levels;
383:     mglevels[i]->cycles              = mgctype;
384:     mg->default_smoothu              = 2;
385:     mg->default_smoothd              = 2;
386:     mglevels[i]->eventsmoothsetup    = 0;
387:     mglevels[i]->eventsmoothsolve    = 0;
388:     mglevels[i]->eventresidual       = 0;
389:     mglevels[i]->eventinterprestrict = 0;

391:     if (comms) comm = comms[i];
392:     if (comm != MPI_COMM_NULL) {
393:       PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
394:       PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel));
395:       PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
396:       PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
397:       PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
398:       PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
399:       if (i == 0 && levels > 1) { // coarse grid
400:         PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));

 402:         /* coarse solve is (redundant) LU by default; set a factor shift (MAT_SHIFT_INBLOCKS) to avoid an annoying zero pivot in the LU preconditioner */
403:         PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
404:         PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
405:         PetscCallMPI(MPI_Comm_size(comm, &size));
406:         if (size > 1) {
407:           PetscCall(PCSetType(ipc, PCREDUNDANT));
408:         } else {
409:           PetscCall(PCSetType(ipc, PCLU));
410:         }
411:         PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
412:       } else {
413:         char tprefix[128];

415:         PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
416:         PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
417:         PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
418:         PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
419:         PetscCall(PCSetType(ipc, PCSOR));
420:         PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));

 422:         if (i == levels - 1 && levels > 1) { // finest level: use the 'mg_fine_' prefix if such options were given, otherwise fall back to 'mg_levels_<i>_'
423:           PetscBool set;

425:           PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set));
426:           if (set) {
427:             if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix));
428:             else PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_"));
429:             PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix));
430:           } else {
431:             PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
432:             PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
433:           }
434:         } else {
435:           PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
436:           PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
437:         }
438:       }
439:     }
440:     mglevels[i]->smoothu = mglevels[i]->smoothd;
441:     mg->rtol             = 0.0;
442:     mg->abstol           = 0.0;
443:     mg->dtol             = 0.0;
444:     mg->ttol             = 0.0;
445:     mg->cyclesperpcapply = 1;
446:   }
447:   mg->levels = mglevels;
448:   PetscCall(PCMGSetType(pc, mgtype));
449:   PetscFunctionReturn(PETSC_SUCCESS);
450: }
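
The defaults installed above (a redundant/LU coarse solve and Chebyshev+SOR level smoothers) can be replaced programmatically once the levels exist. A minimal sketch, assuming `pc` is a PCMG for which PCMGSetLevels() has already been called with at least two levels:

  KSP coarse, smoother;
  PC  spc;

  PetscCall(PCMGGetCoarseSolve(pc, &coarse));   /* the level-0 solver */
  PetscCall(KSPGetPC(coarse, &spc));
  PetscCall(PCSetType(spc, PCCHOLESKY));        /* e.g. Cholesky instead of the LU default */
  PetscCall(PCMGGetSmoother(pc, 1, &smoother)); /* down and up smoother on level 1 (the same KSP by default) */
  PetscCall(KSPSetType(smoother, KSPRICHARDSON));
  PetscCall(KSPSetTolerances(smoother, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, 4));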

452: /*@C
453:   PCMGSetLevels - Sets the number of levels to use with `PCMG`.
454:   Must be called before any other `PCMG` routine.

456:   Logically Collective

458:   Input Parameters:
459: + pc     - the preconditioner context
460: . levels - the number of levels
461: - comms  - optional communicators for each level; this is to allow solving the coarser problems
462:            on smaller sets of processes. For processes that are not included in the computation
463:            you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes
 464:            should participate in each level of the problem.

466:   Options Database Key:
467: . -pc_mg_levels <levels> - set the number of levels to use

469:   Level: intermediate

471:   Notes:
472:   If the number of levels is one then the multigrid uses the `-mg_levels` prefix
473:   for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix.

475:   You can free the information in comms after this routine is called.

 477:   The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at a given level
 478:   are not participating in that level's solve. Since level 0 is the coarsest, comms[0] is the coarse-grid
 479:   communicator. For example, with 2 levels, 1 rank on the coarse level and 2 ranks on the fine level,
 480:   rank 0 in the original communicator passes an array of 2 communicators of size 1 and 2, while
 481:   rank 1 passes `MPI_COMM_NULL` as the first entry, since rank 1 does not participate in the coarse
 482:   grid solve, and the communicator of size 2 as the second entry.

 484:   Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
 485:   must take special care in providing the restriction and interpolation operations. We recommend
 486:   providing these as two-step operations: first perform a standard restriction or interpolation on
 487:   the full number of ranks for that level and then use an MPI call to copy the resulting vector
 488:   array entries (after calls to VecGetArray()) to the smaller or larger number of ranks; note that in both
 489:   cases the MPI calls must be made on the larger of the two communicators. Traditional MPI sends and
 490:   receives or `MPI_Alltoallv()` could be used to do the reshuffling of the vector entries.
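
  Example Usage:
  A minimal sketch of a typical call sequence; the operator `A`, the vectors `b` and `x`, and the interpolation
  matrices `P1` and `P2` are assumed to be created by the application and are not part of this interface.
.vb
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetOperators(ksp, A, A);
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCMG);
  PCMGSetLevels(pc, 3, NULL);               // three levels, all ranks participate on every level
  PCMGSetInterpolation(pc, 1, P1);          // interpolation from level 0 (coarsest) to level 1
  PCMGSetInterpolation(pc, 2, P2);          // interpolation from level 1 to level 2 (finest)
  PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH); // build the coarse operators by the Galerkin process
  KSPSolve(ksp, b, x);
.ve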

492:   Fortran Notes:
493:   Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM`
494:   is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc.

496: .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()`
497: @*/
498: PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms)
499: {
500:   PetscFunctionBegin;
502:   if (comms) PetscAssertPointer(comms, 3);
503:   PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms));
504:   PetscFunctionReturn(PETSC_SUCCESS);
505: }

507: PetscErrorCode PCDestroy_MG(PC pc)
508: {
509:   PC_MG         *mg       = (PC_MG *)pc->data;
510:   PC_MG_Levels **mglevels = mg->levels;
511:   PetscInt       i, n;

513:   PetscFunctionBegin;
514:   PetscCall(PCReset_MG(pc));
515:   if (mglevels) {
516:     n = mglevels[0]->levels;
517:     for (i = 0; i < n; i++) {
518:       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
519:       PetscCall(KSPDestroy(&mglevels[i]->smoothu));
520:       PetscCall(KSPDestroy(&mglevels[i]->cr));
521:       PetscCall(PetscFree(mglevels[i]));
522:     }
523:     PetscCall(PetscFree(mg->levels));
524:   }
525:   PetscCall(PetscFree(pc->data));
526:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
527:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
528:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL));
529:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", NULL));
530:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL));
531:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL));
532:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
533:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
534:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL));
535:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
536:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
537:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
538:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
539:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
540:   PetscFunctionReturn(PETSC_SUCCESS);
541: }

543: /*
544:    PCApply_MG - Runs either an additive, multiplicative, Kaskadic
545:              or full cycle of multigrid.

547:   Note:
 548:   A simple wrapper which calls PCMGMCycle(), PCMGACycle(), or PCMGFCycle().
549: */
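
Which cycle runs is selected with PCMGSetType() (option -pc_mg_type), and, for the multiplicative type, the number of cycles per application with PCMGMultiplicativeSetCycles(). A minimal sketch, assuming `pc` is a PCMG:

  PetscCall(PCMGSetType(pc, PC_MG_FULL));        /* full multigrid cycle, handled by PCMGFCycle_Private() */
  /* or */
  PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
  PetscCall(PCMGMultiplicativeSetCycles(pc, 2)); /* two multiplicative cycles per PCApply() */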
550: static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
551: {
552:   PC_MG         *mg       = (PC_MG *)pc->data;
553:   PC_MG_Levels **mglevels = mg->levels;
554:   PC             tpc;
555:   PetscInt       levels = mglevels[0]->levels, i;
556:   PetscBool      changeu, changed, matapp;

558:   PetscFunctionBegin;
559:   matapp = (PetscBool)(B && X);
560:   if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
 561:   /* When the DM is supplying the matrix, it will not exist until this point */
562:   for (i = 0; i < levels; i++) {
563:     if (!mglevels[i]->A) {
564:       PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
565:       PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
566:     }
567:   }

569:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
570:   PetscCall(PCPreSolveChangeRHS(tpc, &changed));
571:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
572:   PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
573:   if (!changeu && !changed) {
574:     if (matapp) {
575:       PetscCall(MatDestroy(&mglevels[levels - 1]->B));
576:       mglevels[levels - 1]->B = B;
577:     } else {
578:       PetscCall(VecDestroy(&mglevels[levels - 1]->b));
579:       mglevels[levels - 1]->b = b;
580:     }
581:   } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
582:     if (matapp) {
583:       if (mglevels[levels - 1]->B) {
584:         PetscInt  N1, N2;
585:         PetscBool flg;

587:         PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
588:         PetscCall(MatGetSize(B, NULL, &N2));
589:         PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
590:         if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
591:       }
592:       if (!mglevels[levels - 1]->B) {
593:         PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
594:       } else {
595:         PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
596:       }
597:     } else {
598:       if (!mglevels[levels - 1]->b) {
599:         Vec *vec;

601:         PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
602:         mglevels[levels - 1]->b = *vec;
603:         PetscCall(PetscFree(vec));
604:       }
605:       PetscCall(VecCopy(b, mglevels[levels - 1]->b));
606:     }
607:   }
608:   if (matapp) {
609:     mglevels[levels - 1]->X = X;
610:   } else {
611:     mglevels[levels - 1]->x = x;
612:   }

 614:   /* If coarser Xs are present, it means we have already block-applied the PC at least once;
 615:      reset the operators if the sizes/types do not match */
616:   if (matapp && levels > 1 && mglevels[levels - 2]->X) {
617:     PetscInt  Xc, Bc;
618:     PetscBool flg;

620:     PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
621:     PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
622:     PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
623:     if (Xc != Bc || !flg) {
624:       PetscCall(MatDestroy(&mglevels[levels - 1]->R));
625:       for (i = 0; i < levels - 1; i++) {
626:         PetscCall(MatDestroy(&mglevels[i]->R));
627:         PetscCall(MatDestroy(&mglevels[i]->B));
628:         PetscCall(MatDestroy(&mglevels[i]->X));
629:       }
630:     }
631:   }

633:   if (mg->am == PC_MG_MULTIPLICATIVE) {
634:     if (matapp) PetscCall(MatZeroEntries(X));
635:     else PetscCall(VecZeroEntries(x));
636:     for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
637:   } else if (mg->am == PC_MG_ADDITIVE) {
638:     PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
639:   } else if (mg->am == PC_MG_KASKADE) {
640:     PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
641:   } else {
642:     PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
643:   }
644:   if (mg->stageApply) PetscCall(PetscLogStagePop());
645:   if (!changeu && !changed) {
646:     if (matapp) {
647:       mglevels[levels - 1]->B = NULL;
648:     } else {
649:       mglevels[levels - 1]->b = NULL;
650:     }
651:   }
652:   PetscFunctionReturn(PETSC_SUCCESS);
653: }

655: static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
656: {
657:   PetscFunctionBegin;
658:   PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
659:   PetscFunctionReturn(PETSC_SUCCESS);
660: }

662: static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
663: {
664:   PetscFunctionBegin;
665:   PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
666:   PetscFunctionReturn(PETSC_SUCCESS);
667: }

669: static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
670: {
671:   PetscFunctionBegin;
672:   PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
673:   PetscFunctionReturn(PETSC_SUCCESS);
674: }

676: static PetscErrorCode PCMatApplyTranspose_MG(PC pc, Mat b, Mat x)
677: {
678:   PetscFunctionBegin;
679:   PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_TRUE));
680:   PetscFunctionReturn(PETSC_SUCCESS);
681: }

683: PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems PetscOptionsObject)
684: {
685:   PetscInt            levels, cycles;
686:   PetscBool           flg, flg2;
687:   PC_MG              *mg = (PC_MG *)pc->data;
688:   PC_MG_Levels      **mglevels;
689:   PCMGType            mgtype;
690:   PCMGCycleType       mgctype;
691:   PCMGGalerkinType    gtype;
692:   PCMGCoarseSpaceType coarseSpaceType;

694:   PetscFunctionBegin;
695:   levels = PetscMax(mg->nlevels, 1);
696:   PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
697:   PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
698:   if (!flg && !mg->levels && pc->dm) {
699:     PetscCall(DMGetRefineLevel(pc->dm, &levels));
700:     levels++;
701:     mg->usedmfornumberoflevels = PETSC_TRUE;
702:   }
703:   PetscCall(PCMGSetLevels(pc, levels, NULL));
704:   mglevels = mg->levels;

706:   mgctype = (PCMGCycleType)mglevels[0]->cycles;
 707:   PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "Use V-cycle or W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg));
708:   if (flg) PetscCall(PCMGSetCycleType(pc, mgctype));
709:   coarseSpaceType = mg->coarseSpaceType;
710:   PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg));
711:   if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType));
712:   PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg));
713:   PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg));
714:   flg2 = PETSC_FALSE;
715:   PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg));
716:   if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2));
717:   flg = PETSC_FALSE;
718:   PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL));
719:   if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc));
720:   PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)mg->galerkin, (PetscEnum *)&gtype, &flg));
721:   if (flg) PetscCall(PCMGSetGalerkin(pc, gtype));
722:   mgtype = mg->am;
723:   PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg));
724:   if (flg) PetscCall(PCMGSetType(pc, mgtype));
725:   if (mg->am == PC_MG_MULTIPLICATIVE) {
726:     PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg));
727:     if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles));
728:   }
729:   flg = PETSC_FALSE;
730:   PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL));
731:   if (flg) {
732:     PetscInt i;
733:     char     eventname[128];

735:     levels = mglevels[0]->levels;
736:     for (i = 0; i < levels; i++) {
737:       PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i));
738:       PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup));
739:       PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i));
740:       PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve));
741:       if (i) {
742:         PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i));
743:         PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual));
744:         PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i));
745:         PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict));
746:       }
747:     }

749:     if (PetscDefined(USE_LOG)) {
750:       const char sname[] = "MG Apply";

752:       PetscCall(PetscLogStageGetId(sname, &mg->stageApply));
753:       if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply));
754:     }
755:   }
756:   PetscOptionsHeadEnd();
757:   /* Check option consistency */
758:   PetscCall(PCMGGetGalerkin(pc, &gtype));
759:   PetscCall(PCMGGetAdaptInterpolation(pc, &flg));
760:   PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator");
761:   PetscFunctionReturn(PETSC_SUCCESS);
762: }

764: const char *const PCMGTypes[]            = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
765: const char *const PCMGCycleTypes[]       = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
766: const char *const PCMGGalerkinTypes[]    = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
767: const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};

769: #include <petscdraw.h>
770: PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
771: {
772:   PC_MG         *mg       = (PC_MG *)pc->data;
773:   PC_MG_Levels **mglevels = mg->levels;
774:   PetscInt       levels   = mglevels ? mglevels[0]->levels : 0, i;
775:   PetscBool      isascii, isbinary, isdraw;

777:   PetscFunctionBegin;
778:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
779:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
780:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
781:   if (isascii) {
782:     const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";

784:     if (levels == 1) PetscCall(PetscViewerASCIIPrintf(viewer, "  WARNING: Multigrid is being run with only a single level!\n"));
785:     PetscCall(PetscViewerASCIIPrintf(viewer, "  type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
786:     if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
787:     if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
788:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices\n"));
789:     } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
790:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for pmat\n"));
791:     } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
792:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for mat\n"));
793:     } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
 794:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using externally computed Galerkin coarse grid matrices\n"));
795:     } else {
796:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using Galerkin computed coarse grid matrices\n"));
797:     }
798:     if (mg->view) PetscCall((*mg->view)(pc, viewer));
799:     for (i = 0; i < levels; i++) {
800:       if (i) {
801:         PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
802:       } else {
803:         PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
804:       }
805:       PetscCall(PetscViewerASCIIPushTab(viewer));
806:       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
807:       PetscCall(PetscViewerASCIIPopTab(viewer));
808:       if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
809:         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
810:       } else if (i) {
811:         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
812:         PetscCall(PetscViewerASCIIPushTab(viewer));
813:         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
814:         PetscCall(PetscViewerASCIIPopTab(viewer));
815:       }
816:       if (i && mglevels[i]->cr) {
817:         PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
818:         PetscCall(PetscViewerASCIIPushTab(viewer));
819:         PetscCall(KSPView(mglevels[i]->cr, viewer));
820:         PetscCall(PetscViewerASCIIPopTab(viewer));
821:       }
822:     }
823:   } else if (isbinary) {
824:     for (i = levels - 1; i >= 0; i--) {
825:       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
826:       if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
827:     }
828:   } else if (isdraw) {
829:     PetscDraw draw;
830:     PetscReal x, w, y, bottom, th;
831:     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
832:     PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
833:     PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
834:     bottom = y - th;
835:     for (i = levels - 1; i >= 0; i--) {
836:       if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
837:         PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
838:         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
839:         PetscCall(PetscDrawPopCurrentPoint(draw));
840:       } else {
841:         w = 0.5 * PetscMin(1.0 - x, x);
842:         PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
843:         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
844:         PetscCall(PetscDrawPopCurrentPoint(draw));
845:         PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
846:         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
847:         PetscCall(PetscDrawPopCurrentPoint(draw));
848:       }
849:       PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
850:       bottom -= th;
851:     }
852:   }
853:   PetscFunctionReturn(PETSC_SUCCESS);
854: }

856: #include <petsc/private/kspimpl.h>

858: /*
859:     Calls setup for the KSP on each level
860: */
861: PetscErrorCode PCSetUp_MG(PC pc)
862: {
863:   PC_MG         *mg       = (PC_MG *)pc->data;
864:   PC_MG_Levels **mglevels = mg->levels;
865:   PetscInt       i, n;
866:   PC             cpc;
867:   PetscBool      dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
868:   Mat            dA, dB;
869:   Vec            tvec;
870:   DM            *dms;
871:   PetscViewer    viewer = NULL;
872:   PetscBool      dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
873:   PetscBool      adaptInterpolation = mg->adaptInterpolation;

875:   PetscFunctionBegin;
876:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
877:   n = mglevels[0]->levels;
878:   /* FIX: Move this to PCSetFromOptions_MG? */
879:   if (mg->usedmfornumberoflevels) {
880:     PetscInt levels;

882:     PetscCall(DMGetRefineLevel(pc->dm, &levels));
883:     levels++;
884:     if (levels > n) { /* the problem is now being solved on a finer grid */
885:       PetscCall(PCMGSetLevels(pc, levels, NULL));
886:       n = levels;
887:       PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
888:       mglevels = mg->levels;
889:     }
890:   }

 892:   /* If the user did not provide fine grid operators, OR the operator was not updated since the last global KSPSetOperators(), */
 893:   /* then use those from the global PC */
 894:   /* Is this what we always want? What if the user wants to keep the old one? */
895:   PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
896:   if (opsset) {
897:     Mat mmat;
898:     PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
899:     if (mmat == pc->pmat) opsset = PETSC_FALSE;
900:   }
901:   /* fine grid smoother inherits the reuse-pc flag */
902:   PetscCall(KSPGetPC(mglevels[n - 1]->smoothd, &cpc));
903:   cpc->reusepreconditioner = pc->reusepreconditioner;
904:   PetscCall(KSPGetPC(mglevels[n - 1]->smoothu, &cpc));
905:   cpc->reusepreconditioner = pc->reusepreconditioner;

907:   /* Create CR solvers */
908:   PetscCall(PCMGGetAdaptCR(pc, &doCR));
909:   if (doCR) {
910:     const char *prefix;

912:     PetscCall(PCGetOptionsPrefix(pc, &prefix));
913:     for (i = 1; i < n; ++i) {
914:       PC   ipc, cr;
915:       char crprefix[128];

917:       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
918:       PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
919:       PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
920:       PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
921:       PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
922:       PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
923:       PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
924:       PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
925:       PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
926:       PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));

928:       PetscCall(PCSetType(ipc, PCCOMPOSITE));
929:       PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
930:       PetscCall(PCCompositeAddPCType(ipc, PCSOR));
931:       PetscCall(CreateCR_Private(pc, i, &cr));
932:       PetscCall(PCCompositeAddPC(ipc, cr));
933:       PetscCall(PCDestroy(&cr));

935:       PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
936:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
937:       PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
938:       PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
939:     }
940:   }

942:   if (!opsset) {
943:     PetscCall(PCGetUseAmat(pc, &use_amat));
944:     if (use_amat) {
945:       PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
946:       PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
947:     } else {
948:       PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
949:       PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
950:     }
951:   }

953:   for (i = n - 1; i > 0; i--) {
954:     if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
955:       missinginterpolate = PETSC_TRUE;
956:       break;
957:     }
958:   }

960:   PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
961:   if (dA == dB) dAeqdB = PETSC_TRUE;
962:   if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */

964:   if (pc->dm && !pc->setupcalled) {
965:     /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
966:     PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
967:     PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
968:     if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
969:       PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
970:       PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
971:     }
972:     if (mglevels[n - 1]->cr) {
973:       PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
974:       PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
975:     }
976:   }

978:   /*
 979:    Skipping if the user has provided all the interpolation/restriction needed (since the DM might not be able to produce them, e.g., when coming from SNES/TS).
 980:    Skipping for an externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
981:   */
982:   if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
983:     /* first see if we can compute a coarse space */
984:     if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
985:       for (i = n - 2; i > -1; i--) {
986:         if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
987:           PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
988:           PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
989:         }
990:       }
991:     } else { /* construct the interpolation from the DMs */
992:       Mat p;
993:       Vec rscale;

995:       PetscCheck(n == 1 || pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PC lacks a DM so cannot automatically construct a multigrid hierarchy. Number of levels requested %" PetscInt_FMT, n);
996:       PetscCall(PetscMalloc1(n, &dms));
997:       dms[n - 1] = pc->dm;
998:       /* Separately create them so we do not get DMKSP interference between levels */
999:       for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
1000:       for (i = n - 2; i > -1; i--) {
1001:         DMKSP     kdm;
1002:         PetscBool dmhasrestrict, dmhasinject;

1004:         PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
1005:         if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
1006:         if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
1007:           PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
1008:           if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
1009:         }
1010:         if (mglevels[i]->cr) {
1011:           PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
1012:           if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
1013:         }
1014:         PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
1015:         /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
1016:          * a bitwise OR of computing the matrix, RHS, and initial iterate. */
1017:         kdm->ops->computerhs = NULL;
1018:         kdm->rhsctx          = NULL;
1019:         if (!mglevels[i + 1]->interpolate) {
1020:           PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
1021:           PetscCall(PCMGSetInterpolation(pc, i + 1, p));
1022:           if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
1023:           PetscCall(VecDestroy(&rscale));
1024:           PetscCall(MatDestroy(&p));
1025:         }
1026:         PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
1027:         if (dmhasrestrict && !mglevels[i + 1]->restrct) {
1028:           PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
1029:           PetscCall(PCMGSetRestriction(pc, i + 1, p));
1030:           PetscCall(MatDestroy(&p));
1031:         }
1032:         PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
1033:         if (dmhasinject && !mglevels[i + 1]->inject) {
1034:           PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
1035:           PetscCall(PCMGSetInjection(pc, i + 1, p));
1036:           PetscCall(MatDestroy(&p));
1037:         }
1038:       }

1040:       for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
1041:       PetscCall(PetscFree(dms));
1042:     }
1043:   }

1045:   if (mg->galerkin < PC_MG_GALERKIN_NONE) {
1046:     Mat       A, B;
1047:     PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
1048:     MatReuse  reuse = MAT_INITIAL_MATRIX;

1050:     if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
1051:     if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
1052:     if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
1053:     for (i = n - 2; i > -1; i--) {
1054:       PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
1055:       if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
1056:       if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
1057:       if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
1058:       if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
1059:       if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
1060:       /* the management of the PetscObjectReference() and PetscObjectDereference() below is rather delicate */
1061:       if (!doA && dAeqdB) {
1062:         if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
1063:         A = B;
1064:       } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
1065:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
1066:         PetscCall(PetscObjectReference((PetscObject)A));
1067:       }
1068:       if (!doB && dAeqdB) {
1069:         if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
1070:         B = A;
1071:       } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
1072:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
1073:         PetscCall(PetscObjectReference((PetscObject)B));
1074:       }
1075:       if (reuse == MAT_INITIAL_MATRIX) {
1076:         PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
1077:         PetscCall(PetscObjectDereference((PetscObject)A));
1078:         PetscCall(PetscObjectDereference((PetscObject)B));
1079:       }
1080:       dA = A;
1081:       dB = B;
1082:     }
1083:   }

1085:   /* Adapt interpolation matrices */
1086:   if (adaptInterpolation) {
1087:     for (i = 0; i < n; ++i) {
1088:       if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
1089:       if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
1090:     }
1091:     for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
1092:   }

1094:   if (needRestricts && pc->dm) {
1095:     for (i = n - 2; i >= 0; i--) {
1096:       DM  dmfine, dmcoarse;
1097:       Mat Restrict, Inject;
1098:       Vec rscale;

1100:       PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
1101:       PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
1102:       PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
1103:       PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
1104:       PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
1105:       PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
1106:     }
1107:   }

1109:   if (!pc->setupcalled) {
1110:     for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
1111:     for (i = 1; i < n; i++) {
1112:       if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
1113:       if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
1114:     }
1115:     /* ensure that if either interpolation or restriction is set the other one is also set */
1116:     for (i = 1; i < n; i++) {
1117:       PetscCall(PCMGGetInterpolation(pc, i, NULL));
1118:       PetscCall(PCMGGetRestriction(pc, i, NULL));
1119:     }
1120:     for (i = 0; i < n - 1; i++) {
1121:       if (!mglevels[i]->b) {
1122:         Vec *vec;
1123:         PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
1124:         PetscCall(PCMGSetRhs(pc, i, *vec));
1125:         PetscCall(VecDestroy(vec));
1126:         PetscCall(PetscFree(vec));
1127:       }
1128:       if (!mglevels[i]->r && i) {
1129:         PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1130:         PetscCall(PCMGSetR(pc, i, tvec));
1131:         PetscCall(VecDestroy(&tvec));
1132:       }
1133:       if (!mglevels[i]->x) {
1134:         PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1135:         PetscCall(PCMGSetX(pc, i, tvec));
1136:         PetscCall(VecDestroy(&tvec));
1137:       }
1138:       if (doCR) {
1139:         PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
1140:         PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
1141:         PetscCall(VecZeroEntries(mglevels[i]->crb));
1142:       }
1143:     }
1144:     if (n != 1 && !mglevels[n - 1]->r) {
1145:       /* PCMGSetR() on the finest level if user did not supply it */
1146:       Vec *vec;

1148:       PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
1149:       PetscCall(PCMGSetR(pc, n - 1, *vec));
1150:       PetscCall(VecDestroy(vec));
1151:       PetscCall(PetscFree(vec));
1152:     }
1153:     if (doCR) {
1154:       PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
1155:       PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
1156:       PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
1157:     }
1158:   }

1160:   if (pc->dm) {
1161:     /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
1162:     for (i = 0; i < n - 1; i++) {
1163:       if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1164:     }
1165:   }
1166:   // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
1167:   // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
1168:   if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;

1170:   for (i = 1; i < n; i++) {
1171:     if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
1172:       /* the initial guess is zero only when doing just a down smooth; in these cases the down smoother may start from a nonzero guess */
1173:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
1174:     }
1175:     if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1176:     if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1177:     PetscCall(KSPSetUp(mglevels[i]->smoothd));
1178:     if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1179:     if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1180:     if (!mglevels[i]->residual) {
1181:       Mat mat;

1183:       PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1184:       PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
1185:     }
1186:     if (!mglevels[i]->residualtranspose) {
1187:       Mat mat;

1189:       PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1190:       PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
1191:     }
1192:   }
1193:   for (i = 1; i < n; i++) {
1194:     if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
1195:       Mat downmat, downpmat;

1197:       /* check if operators have been set for the up smoother; if not, use the down smoother's operators */
1198:       PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
1199:       if (!opsset) {
1200:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1201:         PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
1202:       }

1204:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
1205:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1206:       PetscCall(KSPSetUp(mglevels[i]->smoothu));
1207:       if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
1208:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1209:     }
1210:     if (mglevels[i]->cr) {
1211:       Mat downmat, downpmat;

1213:       /* check if operators have been set for the compatible relaxation (cr) KSP; if not, use the down smoother's operators */
1214:       PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
1215:       if (!opsset) {
1216:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1217:         PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
1218:       }

1220:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1221:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1222:       PetscCall(KSPSetUp(mglevels[i]->cr));
1223:       if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
1224:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1225:     }
1226:   }

1228:   if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1229:   PetscCall(KSPSetUp(mglevels[0]->smoothd));
1230:   if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1231:   if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));

1233:   /*
1234:      Dump the interpolation/restriction matrices plus the
1235:      Jacobian/stiffness on each level. This allows MATLAB users to
1236:      easily check whether the Galerkin condition A_c = R A_f R^T is satisfied.

1238:      Only one of the two dump formats (MATLAB socket or binary) is supported at a time.
1239:   */
1240: #if defined(PETSC_USE_SOCKET_VIEWER)
1241:   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
1242:   if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
1243:   dump = PETSC_FALSE;
1244: #endif
1245:   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
1246:   if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));

1248:   if (viewer) {
1249:     for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
1250:     for (i = 0; i < n; i++) {
1251:       PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
1252:       PetscCall(MatView(pc->mat, viewer));
1253:     }
1254:   }
1255:   PetscFunctionReturn(PETSC_SUCCESS);
1256: }

1258: PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
1259: {
1260:   PC_MG *mg = (PC_MG *)pc->data;

1262:   PetscFunctionBegin;
1263:   *levels = mg->nlevels;
1264:   PetscFunctionReturn(PETSC_SUCCESS);
1265: }

1267: /*@
1268:   PCMGGetLevels - Gets the number of levels to use with `PCMG`.

1270:   Not Collective

1272:   Input Parameter:
1273: . pc - the preconditioner context

1275:   Output Parameter:
1276: . levels - the number of levels

1278:   Level: advanced

1280: .seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
1281: @*/
1282: PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
1283: {
1284:   PetscFunctionBegin;
1286:   PetscAssertPointer(levels, 2);
1287:   *levels = 0;
1288:   PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
1289:   PetscFunctionReturn(PETSC_SUCCESS);
1290: }
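/*
   Editor's note: a minimal usage sketch (not part of the original source) showing how an application
   might configure a PCMG hierarchy and then query the number of levels; the KSP 'ksp' is assumed to
   have been created and given operators elsewhere.

     PC       pc;
     PetscInt nlevels;

     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCMG));
     PetscCall(PCMGSetLevels(pc, 3, NULL));   // three levels, all on the PC's communicator
     PetscCall(PCMGGetLevels(pc, &nlevels));  // nlevels is now 3
*/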

1292: /*@
1293:   PCMGGetGridComplexity - Computes the grid and operator complexity of the `PCMG` hierarchy

1295:   Input Parameter:
1296: . pc - the preconditioner context

1298:   Output Parameters:
1299: + gc - grid complexity $= \sum_i(n_i) / n_0$
1300: - oc - operator complexity $= \sum_i(nnz_i) / nnz_0$

1302:   Level: advanced

1304:   Note:
1305:   The quantity `oc` is often called the operator complexity in the multigrid literature

1307: .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`
1308: @*/
1309: PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc)
1310: {
1311:   PC_MG         *mg       = (PC_MG *)pc->data;
1312:   PC_MG_Levels **mglevels = mg->levels;
1313:   PetscInt       lev, N;
1314:   PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0;
1315:   MatInfo        info;

1317:   PetscFunctionBegin;
1319:   if (gc) PetscAssertPointer(gc, 2);
1320:   if (oc) PetscAssertPointer(oc, 3);
1321:   if (!pc->setupcalled) {
1322:     if (gc) *gc = 0;
1323:     if (oc) *oc = 0;
1324:     PetscFunctionReturn(PETSC_SUCCESS);
1325:   }
1326:   PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels");
1327:   for (lev = 0; lev < mg->nlevels; lev++) {
1328:     Mat dB;
1329:     PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB));
1330:     PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */
1331:     PetscCall(MatGetSize(dB, &N, NULL));
1332:     sgc += N;
1333:     soc += info.nz_used;
1334:     if (lev == mg->nlevels - 1) {
1335:       nnz0 = info.nz_used;
1336:       n0   = N;
1337:     }
1338:   }
1339:   PetscCheck(n0 > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of grid points on finest level is not available");
1340:   if (gc) *gc = (PetscReal)(sgc / n0);
1341:   if (oc && nnz0 > 0) *oc = (PetscReal)(soc / nnz0);
1342:   PetscFunctionReturn(PETSC_SUCCESS);
1343: }
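/*
   Editor's note: a hedged usage sketch (not from the original source) of PCMGGetGridComplexity(); it is
   only meaningful after PCSetUp() has been called, otherwise both outputs are returned as zero. 'pc' is
   assumed to be a fully set up PCMG (or PCGAMG) preconditioner.

     PetscReal gc, oc;

     PetscCall(PCSetUp(pc));
     PetscCall(PCMGGetGridComplexity(pc, &gc, &oc));
     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "grid complexity %g, operator complexity %g\n", (double)gc, (double)oc));
*/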

1345: /*@
1346:   PCMGSetType - Sets the form of multigrid to use, either
1347:   multiplicative, additive, full, or the Kaskade algorithm.

1349:   Logically Collective

1351:   Input Parameters:
1352: + pc   - the preconditioner context
1353: - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`

1355:   Options Database Key:
1356: . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade

1358:   Level: advanced

1360: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType`
1361: @*/
1362: PetscErrorCode PCMGSetType(PC pc, PCMGType form)
1363: {
1364:   PC_MG *mg = (PC_MG *)pc->data;

1366:   PetscFunctionBegin;
1369:   mg->am = form;
1370:   if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG;
1371:   else pc->ops->applyrichardson = NULL;
1372:   PetscFunctionReturn(PETSC_SUCCESS);
1373: }
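/*
   Editor's note: an illustrative sketch (not part of the original source) showing the two equivalent ways
   to select the multigrid form, programmatically or from the options database; 'pc' is assumed to be of
   type PCMG already.

     PetscCall(PCMGSetType(pc, PC_MG_FULL));   // in code

   or, equivalently, on the command line:

     -pc_mg_type full
*/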

1375: /*@
1376:   PCMGGetType - Gets the form of multigrid the `PCMG` is using: multiplicative, additive, full, or the Kaskade algorithm.

1378:   Logically Collective

1380:   Input Parameter:
1381: . pc - the preconditioner context

1383:   Output Parameter:
1384: . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`

1386:   Level: advanced

1388: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()`
1389: @*/
1390: PetscErrorCode PCMGGetType(PC pc, PCMGType *type)
1391: {
1392:   PC_MG *mg = (PC_MG *)pc->data;

1394:   PetscFunctionBegin;
1396:   *type = mg->am;
1397:   PetscFunctionReturn(PETSC_SUCCESS);
1398: }

1400: /*@
1401:   PCMGSetCycleType - Sets the type of cycles to use.  Use `PCMGSetCycleTypeOnLevel()` for more
1402:   complicated cycling.

1404:   Logically Collective

1406:   Input Parameters:
1407: + pc - the multigrid context
1408: - n  - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W`

1410:   Options Database Key:
1411: . -pc_mg_cycle_type <v,w> - provide the cycle desired

1413:   Level: advanced

1415: .seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
1416: @*/
1417: PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
1418: {
1419:   PC_MG         *mg       = (PC_MG *)pc->data;
1420:   PC_MG_Levels **mglevels = mg->levels;
1421:   PetscInt       i, levels;

1423:   PetscFunctionBegin;
1426:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1427:   levels = mglevels[0]->levels;
1428:   for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
1429:   PetscFunctionReturn(PETSC_SUCCESS);
1430: }
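/*
   Editor's note: a small sketch (not from the original source) selecting a W-cycle on every level;
   PCMGSetLevels() must have been called first, as the PetscCheck above enforces.

     PetscCall(PCMGSetLevels(pc, 4, NULL));
     PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_W));   // same as -pc_mg_cycle_type w
*/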

1432: /*@
1433:   PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
1434:   of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`

1436:   Logically Collective

1438:   Input Parameters:
1439: + pc - the multigrid context
1440: - n  - number of cycles (default is 1)

1442:   Options Database Key:
1443: . -pc_mg_multiplicative_cycles n - set the number of cycles

1445:   Level: advanced

1447:   Note:
1448:   This is not associated with setting a V or W cycle; that is set with `PCMGSetCycleType()`

1450: .seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
1451: @*/
1452: PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
1453: {
1454:   PC_MG *mg = (PC_MG *)pc->data;

1456:   PetscFunctionBegin;
1459:   mg->cyclesperpcapply = n;
1460:   PetscFunctionReturn(PETSC_SUCCESS);
1461: }
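/*
   Editor's note: an illustrative sketch (not part of the original source); with PC_MG_MULTIPLICATIVE,
   asking for two cycles per preconditioner application is independent of the V/W choice made with
   PCMGSetCycleType().

     PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
     PetscCall(PCMGMultiplicativeSetCycles(pc, 2));   // same as -pc_mg_multiplicative_cycles 2
*/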

1463: /*
1464:    Since the finest level KSP shares the original matrix (of the entire system), its preconditioner
1465:    should not be updated if the whole PC is supposed to reuse the preconditioner
1466: */
1467: static PetscErrorCode PCSetReusePreconditioner_MG(PC pc, PetscBool flag)
1468: {
1469:   PC_MG         *mg       = (PC_MG *)pc->data;
1470:   PC_MG_Levels **mglevels = mg->levels;
1471:   PetscInt       levels;
1472:   PC             tpc;

1474:   PetscFunctionBegin;
1477:   if (mglevels) {
1478:     levels = mglevels[0]->levels;
1479:     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
1480:     tpc->reusepreconditioner = flag;
1481:     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
1482:     tpc->reusepreconditioner = flag;
1483:   }
1484:   PetscFunctionReturn(PETSC_SUCCESS);
1485: }

1487: static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
1488: {
1489:   PC_MG *mg = (PC_MG *)pc->data;

1491:   PetscFunctionBegin;
1492:   mg->galerkin = use;
1493:   PetscFunctionReturn(PETSC_SUCCESS);
1494: }

1496: /*@
1497:   PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
1498:   finest grid via the Galerkin process: $A_{i-1} = r_i A_i p_i$

1500:   Logically Collective

1502:   Input Parameters:
1503: + pc  - the multigrid context
1504: - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`

1506:   Options Database Key:
1507: . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process

1509:   Level: intermediate

1511:   Note:
1512:   Some codes that use `PCMG`, such as `PCGAMG`, use Galerkin internally while constructing the hierarchy and thus do not
1513:   use the `PCMG` construction of the coarser grids.

1515: .seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
1516: @*/
1517: PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
1518: {
1519:   PetscFunctionBegin;
1521:   PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
1522:   PetscFunctionReturn(PETSC_SUCCESS);
1523: }
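/*
   Editor's note: a hedged usage sketch (not from the original source); here only the fine-grid operators
   and the interpolations are supplied, and the coarse preconditioning matrices are formed by the Galerkin
   product $A_{i-1} = r_i A_i p_i$. 'P1' is a hypothetical interpolation matrix provided by the caller.

     PetscCall(PCMGSetLevels(pc, 2, NULL));
     PetscCall(PCMGSetInterpolation(pc, 1, P1));           // level 1 interpolation (coarse -> fine)
     PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));  // same as -pc_mg_galerkin pmat
*/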

1525: /*@
1526:   PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. $A_{i-1} = r_i A_i p_i$

1528:   Not Collective

1530:   Input Parameter:
1531: . pc - the multigrid context

1533:   Output Parameter:
1534: . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`

1536:   Level: intermediate

1538: .seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
1539: @*/
1540: PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
1541: {
1542:   PC_MG *mg = (PC_MG *)pc->data;

1544:   PetscFunctionBegin;
1546:   *galerkin = mg->galerkin;
1547:   PetscFunctionReturn(PETSC_SUCCESS);
1548: }

1550: static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
1551: {
1552:   PC_MG *mg = (PC_MG *)pc->data;

1554:   PetscFunctionBegin;
1555:   mg->adaptInterpolation = adapt;
1556:   PetscFunctionReturn(PETSC_SUCCESS);
1557: }

1559: static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
1560: {
1561:   PC_MG *mg = (PC_MG *)pc->data;

1563:   PetscFunctionBegin;
1564:   *adapt = mg->adaptInterpolation;
1565:   PetscFunctionReturn(PETSC_SUCCESS);
1566: }

1568: static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
1569: {
1570:   PC_MG *mg = (PC_MG *)pc->data;

1572:   PetscFunctionBegin;
1573:   mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
1574:   mg->coarseSpaceType    = ctype;
1575:   PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
1576:   PetscFunctionReturn(PETSC_SUCCESS);
1577: }

1579: static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
1580: {
1581:   PC_MG *mg = (PC_MG *)pc->data;

1583:   PetscFunctionBegin;
1584:   *ctype = mg->coarseSpaceType;
1585:   PetscFunctionReturn(PETSC_SUCCESS);
1586: }

1588: static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
1589: {
1590:   PC_MG *mg = (PC_MG *)pc->data;

1592:   PetscFunctionBegin;
1593:   mg->compatibleRelaxation = cr;
1594:   PetscFunctionReturn(PETSC_SUCCESS);
1595: }

1597: static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
1598: {
1599:   PC_MG *mg = (PC_MG *)pc->data;

1601:   PetscFunctionBegin;
1602:   *cr = mg->compatibleRelaxation;
1603:   PetscFunctionReturn(PETSC_SUCCESS);
1604: }

1606: /*@
1607:   PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.

1609:   Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

1611:   Logically Collective

1613:   Input Parameters:
1614: + pc    - the multigrid context
1615: - ctype - the type of coarse space

1617:   Options Database Keys:
1618: + -pc_mg_adapt_interp_n <int>             - The number of modes to use
1619: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: `none`, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw`

1621:   Level: intermediate

1623:   Note:
1624:   Requires that a `DM` with specific functionality be attached to the `PC`.

1626: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
1627: @*/
1628: PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
1629: {
1630:   PetscFunctionBegin;
1633:   PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
1634:   PetscFunctionReturn(PETSC_SUCCESS);
1635: }
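/*
   Editor's note: a minimal sketch (not part of the original source); selecting an adaptive coarse space
   type also turns on interpolation adaptation and forces Galerkin coarse operators, as the setter above
   shows. A DM with the required functionality is assumed to be attached to the PC, and PCMG_ADAPT_HARMONIC
   is assumed to be the enum value matching the 'harmonic' option.

     PetscCall(PCMGSetAdaptCoarseSpaceType(pc, PCMG_ADAPT_HARMONIC));
     // or: -pc_mg_adapt_interp_coarse_space harmonic
*/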

1637: /*@
1638:   PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.

1640:   Not Collective

1642:   Input Parameter:
1643: . pc - the multigrid context

1645:   Output Parameter:
1646: . ctype - the type of coarse space

1648:   Level: intermediate

1650: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
1651: @*/
1652: PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
1653: {
1654:   PetscFunctionBegin;
1656:   PetscAssertPointer(ctype, 2);
1657:   PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
1658:   PetscFunctionReturn(PETSC_SUCCESS);
1659: }

1661: /* MATT: REMOVE? */
1662: /*@
1663:   PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

1665:   Logically Collective

1667:   Input Parameters:
1668: + pc    - the multigrid context
1669: - adapt - flag for adaptation of the interpolator

1671:   Options Database Keys:
1672: + -pc_mg_adapt_interp                     - Turn on adaptation
1673: . -pc_mg_adapt_interp_n <int>             - The number of modes to use; it should be divisible by the dimension
1674: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector

1676:   Level: intermediate

1678: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1679: @*/
1680: PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
1681: {
1682:   PetscFunctionBegin;
1684:   PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
1685:   PetscFunctionReturn(PETSC_SUCCESS);
1686: }

1688: /*@
1689:   PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
1690:   and thus accurately interpolated.

1692:   Not Collective

1694:   Input Parameter:
1695: . pc - the multigrid context

1697:   Output Parameter:
1698: . adapt - flag for adaptation of the interpolator

1700:   Level: intermediate

1702: .seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1703: @*/
1704: PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
1705: {
1706:   PetscFunctionBegin;
1708:   PetscAssertPointer(adapt, 2);
1709:   PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
1710:   PetscFunctionReturn(PETSC_SUCCESS);
1711: }

1713: /*@
1714:   PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.

1716:   Logically Collective

1718:   Input Parameters:
1719: + pc - the multigrid context
1720: - cr - flag for compatible relaxation

1722:   Options Database Key:
1723: . -pc_mg_adapt_cr - Turn on compatible relaxation

1725:   Level: intermediate

1727: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1728: @*/
1729: PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
1730: {
1731:   PetscFunctionBegin;
1733:   PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
1734:   PetscFunctionReturn(PETSC_SUCCESS);
1735: }

1737: /*@
1738:   PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.

1740:   Not Collective

1742:   Input Parameter:
1743: . pc - the multigrid context

1745:   Output Parameter:
1746: . cr - flag for compatible relaxation

1748:   Level: intermediate

1750: .seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1751: @*/
1752: PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
1753: {
1754:   PetscFunctionBegin;
1756:   PetscAssertPointer(cr, 2);
1757:   PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
1758:   PetscFunctionReturn(PETSC_SUCCESS);
1759: }

1761: /*@
1762:   PCMGSetNumberSmooth - Sets the number of pre- and post-smoothing steps to use
1763:   on all levels.  Use `PCMGSetDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
1764:   pre- and post-smoothing steps.

1766:   Logically Collective

1768:   Input Parameters:
1769: + pc - the multigrid context
1770: - n  - the number of smoothing steps

1772:   Options Database Key:
1773: . -mg_levels_ksp_max_it <n> - Sets the number of pre- and post-smoothing steps

1775:   Level: advanced

1777:   Note:
1778:   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

1780: .seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()`
1781: @*/
1782: PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
1783: {
1784:   PC_MG         *mg       = (PC_MG *)pc->data;
1785:   PC_MG_Levels **mglevels = mg->levels;
1786:   PetscInt       i, levels;

1788:   PetscFunctionBegin;
1791:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1792:   levels = mglevels[0]->levels;

1794:   for (i = 1; i < levels; i++) {
1795:     PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1796:     PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1797:     mg->default_smoothu = n;
1798:     mg->default_smoothd = n;
1799:   }
1800:   PetscFunctionReturn(PETSC_SUCCESS);
1801: }
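/*
   Editor's note: an illustrative sketch (not from the original source); this sets the maximum KSP
   iterations of every level's smoother (except the coarsest) to 3, which is equivalent to
   -mg_levels_ksp_max_it 3.

     PetscCall(PCMGSetLevels(pc, 4, NULL));
     PetscCall(PCMGSetNumberSmooth(pc, 3));
*/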

1803: /*@
1804:   PCMGSetDistinctSmoothUp - Sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
1805:   and appends `up_` to its options prefix

1807:   Logically Collective

1809:   Input Parameter:
1810: . pc - the preconditioner context

1812:   Options Database Key:
1813: . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects

1815:   Level: advanced

1817:   Note:
1818:   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

1820: .seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
1821: @*/
1822: PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
1823: {
1824:   PC_MG         *mg       = (PC_MG *)pc->data;
1825:   PC_MG_Levels **mglevels = mg->levels;
1826:   PetscInt       i, levels;
1827:   KSP            subksp;

1829:   PetscFunctionBegin;
1831:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1832:   levels = mglevels[0]->levels;

1834:   for (i = 1; i < levels; i++) {
1835:     const char *prefix = NULL;
1836:     /* make sure the up and down smoothers are different */
1837:     PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
1838:     PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
1839:     PetscCall(KSPSetOptionsPrefix(subksp, prefix));
1840:     PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
1841:   }
1842:   PetscFunctionReturn(PETSC_SUCCESS);
1843: }
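/*
   Editor's note: a hedged sketch (not part of the original source). After this call each level's
   post-smoother is a distinct KSP whose options prefix is the down smoother's prefix with "up_"
   appended, so it can be configured independently of the pre-smoother.

     PetscCall(PCMGSetLevels(pc, 3, NULL));
     PetscCall(PCMGSetDistinctSmoothUp(pc));   // same as -pc_mg_distinct_smoothup
*/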

1845: /* No new matrices are created; the returned interpolation matrices are references to the original ones */
1846: static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
1847: {
1848:   PC_MG         *mg       = (PC_MG *)pc->data;
1849:   PC_MG_Levels **mglevels = mg->levels;
1850:   Mat           *mat;
1851:   PetscInt       l;

1853:   PetscFunctionBegin;
1854:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1855:   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1856:   for (l = 1; l < mg->nlevels; l++) {
1857:     mat[l - 1] = mglevels[l]->interpolate;
1858:     PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
1859:   }
1860:   *num_levels     = mg->nlevels;
1861:   *interpolations = mat;
1862:   PetscFunctionReturn(PETSC_SUCCESS);
1863: }

1865: /* No new matrices are created; the returned coarse operator matrices are references to the original ones */
1866: static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1867: {
1868:   PC_MG         *mg       = (PC_MG *)pc->data;
1869:   PC_MG_Levels **mglevels = mg->levels;
1870:   PetscInt       l;
1871:   Mat           *mat;

1873:   PetscFunctionBegin;
1874:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1875:   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1876:   for (l = 0; l < mg->nlevels - 1; l++) {
1877:     PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
1878:     PetscCall(PetscObjectReference((PetscObject)mat[l]));
1879:   }
1880:   *num_levels      = mg->nlevels;
1881:   *coarseOperators = mat;
1882:   PetscFunctionReturn(PETSC_SUCCESS);
1883: }

1885: /*@C
1886:   PCMGRegisterCoarseSpaceConstructor -  Adds a method to the `PCMG` package for coarse space construction.

1888:   Not Collective, No Fortran Support

1890:   Input Parameters:
1891: + name     - name of the constructor
1892: - function - constructor routine, see `PCMGCoarseSpaceConstructorFn`

1894:   Level: advanced

1896:   Developer Notes:
1897:   This does not appear to be used anywhere

1899: .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
1900: @*/
1901: PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn *function)
1902: {
1903:   PetscFunctionBegin;
1904:   PetscCall(PCInitializePackage());
1905:   PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
1906:   PetscFunctionReturn(PETSC_SUCCESS);
1907: }
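/*
   Editor's note: a hedged registration sketch (not part of the original source). 'MyCoarseSpace' is a
   hypothetical user routine matching PCMGCoarseSpaceConstructorFn; it is added to PCMGCoarseList and can
   later be looked up by name.

     PetscCall(PCMGRegisterCoarseSpaceConstructor("mycoarse", MyCoarseSpace));

     PCMGCoarseSpaceConstructorFn *ctor;
     PetscCall(PCMGGetCoarseSpaceConstructor("mycoarse", &ctor));
*/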

1909: /*@C
1910:   PCMGGetCoarseSpaceConstructor -  Returns the given coarse space construction method.

1912:   Not Collective, No Fortran Support

1914:   Input Parameter:
1915: . name - name of the constructor

1917:   Output Parameter:
1918: . function - constructor routine

1920:   Level: advanced

1922: .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
1923: @*/
1924: PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn **function)
1925: {
1926:   PetscFunctionBegin;
1927:   PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
1928:   PetscFunctionReturn(PETSC_SUCCESS);
1929: }

1931: /*MC
1932:    PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional information about the restriction/interpolation
1933:    operators using `PCMGSetInterpolation()` and/or `PCMGSetRestriction()`(and possibly the coarser grid matrices) or a `DM` that can provide such information.

1935:    Options Database Keys:
1936: +  -pc_mg_levels <nlevels>                            - number of levels including finest
1937: .  -pc_mg_cycle_type <v,w>                            - provide the cycle desired
1938: .  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
1939: .  -pc_mg_log                                         - log information about time spent on each level of the solver
1940: .  -pc_mg_distinct_smoothup                           - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
1941: .  -pc_mg_galerkin <both,pmat,mat,none>               - use the Galerkin process to compute coarser operators, i.e., $A_{coarse} = R A_{fine} R^T$
1942: .  -pc_mg_multiplicative_cycles                        - number of cycles to use as the preconditioner (defaults to 1)
1943: .  -pc_mg_dump_matlab                                  - dumps the matrices for each level and the restriction/interpolation matrices
1944:                                                          to a `PETSCVIEWERSOCKET` for reading from MATLAB.
1945: -  -pc_mg_dump_binary                                  - dumps the matrices for each level and the restriction/interpolation matrices
1946:                                                         to the binary output file called binaryoutput

1948:    Level: intermediate

1950:    Notes:
1951:    `PCMG` provides a general framework for implementing multigrid methods. Use `PCGAMG` for PETSc's algebraic multigrid preconditioner, `PCHYPRE` for hypre's
1952:    BoomerAMG algebraic multigrid, and `PCML` for Trilinos's ML preconditioner. `PCAMGX` provides access to NVIDIA's AmgX algebraic multigrid.

1954:    If you use `KSPSetDM()` (or `SNESSetDM()` or `TSSetDM()`) with an appropriate `DM`, such as `DMDA`, then `PCMG` will use the geometric information
1955:    from the `DM` to generate appropriate restriction and interpolation information and construct a geometric multigrid.

1957:    If you do not provide an appropriate `DM` and do not provide restriction or interpolation operators with `PCMGSetInterpolation()` and/or `PCMGSetRestriction()`,
1958:    then `PCMG` will run multigrid with only a single level (so not really multigrid).

1960:    The Krylov solver (if any) and preconditioner (smoother) and their parameters are controlled from the options database with the standard
1961:    options database keywords prefixed with `-mg_levels_` to affect all the levels but the coarsest, which is controlled with `-mg_coarse_`,
1962:    and the finest where `-mg_fine_` can override `-mg_levels_`.  One can set different preconditioners etc on specific levels with the prefix
1963:    `-mg_levels_n_` where `n` is the level number (zero being the coarse level. For example
1964: .vb
1965:    -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
1966: .ve
1967:    These options also work for controlling the smoothers, etc., inside `PCGAMG`

1969:    If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother, then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method

1971:    When run with a single level, the smoother options are used on that level, NOT the coarse grid solver options

1973:    When run with `KSPRICHARDSON`, the convergence test changes slightly if a monitor is turned on, and the iteration count may change slightly. This
1974:    is because, without monitoring, the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
1975:    (the residual has just been computed for the multigrid algorithm and is hence available for free), while with monitoring the
1976:    residual is computed at the end of each cycle.

1978: .seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`,
1979:           `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
1980:           `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
1981:           `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
1982:           `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
1983:           `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1984: M*/
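/*
   Editor's note: an illustrative command line (not part of the original source) for a DMDA-based code
   that calls KSPSetDM(), exercising the options described in the manual page above; 'ex45' stands in for
   any such example program.

     ./ex45 -da_refine 3 -pc_type mg -pc_mg_levels 4 -pc_mg_type full -pc_mg_cycle_type w \
            -mg_levels_ksp_type chebyshev -mg_levels_pc_type jacobi -mg_coarse_pc_type lu
*/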

1986: PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
1987: {
1988:   PC_MG *mg;

1990:   PetscFunctionBegin;
1991:   PetscCall(PetscNew(&mg));
1992:   pc->data               = mg;
1993:   mg->nlevels            = -1;
1994:   mg->am                 = PC_MG_MULTIPLICATIVE;
1995:   mg->galerkin           = PC_MG_GALERKIN_NONE;
1996:   mg->adaptInterpolation = PETSC_FALSE;
1997:   mg->Nc                 = -1;
1998:   mg->eigenvalue         = -1;

2000:   pc->useAmat = PETSC_TRUE;

2002:   pc->ops->apply             = PCApply_MG;
2003:   pc->ops->applytranspose    = PCApplyTranspose_MG;
2004:   pc->ops->matapply          = PCMatApply_MG;
2005:   pc->ops->matapplytranspose = PCMatApplyTranspose_MG;
2006:   pc->ops->setup             = PCSetUp_MG;
2007:   pc->ops->reset             = PCReset_MG;
2008:   pc->ops->destroy           = PCDestroy_MG;
2009:   pc->ops->setfromoptions    = PCSetFromOptions_MG;
2010:   pc->ops->view              = PCView_MG;

2012:   PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
2013:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
2014:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", PCSetReusePreconditioner_MG));
2015:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
2016:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
2017:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
2018:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
2019:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
2020:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
2021:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
2022:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
2023:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
2024:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
2025:   PetscFunctionReturn(PETSC_SUCCESS);
2026: }