Actual source code: mg.c

  1: /*
  2:     Defines the multigrid preconditioner interface.
  3: */
  4: #include <petsc/private/pcmgimpl.h>
  5: #include <petsc/private/kspimpl.h>
  6: #include <petscdm.h>
  7: PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);

  9: /*
 10:    Contains the list of registered coarse space construction routines
 11: */
 12: PetscFunctionList PCMGCoarseList = NULL;

 14: PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
 15: {
 16:   PC_MG        *mg = (PC_MG *)pc->data;
 17:   PC_MG_Levels *mgc, *mglevels = *mglevelsin;
 18:   PetscInt      cycles = (mglevels->level == 1) ? 1 : mglevels->cycles;

 20:   PetscFunctionBegin;
 21:   if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
 22:   if (!transpose) {
 23:     if (matapp) {
 24:       PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
 25:       PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
 26:     } else {
 27:       PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
 28:       PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
 29:     }
 30:   } else {
 31:     PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
 32:     PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
 33:     PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
 34:   }
 35:   if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
 36:   if (mglevels->level) { /* not the coarsest grid */
 37:     if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
 38:     if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
 39:     if (!transpose) {
 40:       if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
 41:       else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
 42:     } else {
 43:       if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
 44:       else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
 45:     }
 46:     if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));

 48:     /* if on finest level and have convergence criteria set */
 49:     if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
 50:       PetscReal rnorm;
 51:       PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
 52:       if (rnorm <= mg->ttol) {
 53:         if (rnorm < mg->abstol) {
 54:           *reason = PCRICHARDSON_CONVERGED_ATOL;
 55:           PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
 56:         } else {
 57:           *reason = PCRICHARDSON_CONVERGED_RTOL;
 58:           PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
 59:         }
 60:         PetscFunctionReturn(PETSC_SUCCESS);
 61:       }
 62:     }

 64:     mgc = *(mglevelsin - 1);
 65:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
 66:     if (!transpose) {
 67:       if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
 68:       else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
 69:     } else {
 70:       if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
 71:       else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
 72:     }
 73:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
 74:     if (matapp) {
 75:       if (!mgc->X) {
 76:         PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
 77:       } else {
 78:         PetscCall(MatZeroEntries(mgc->X));
 79:       }
 80:     } else {
 81:       PetscCall(VecZeroEntries(mgc->x));
 82:     }
 83:     while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
 84:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
 85:     if (!transpose) {
 86:       if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
 87:       else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
 88:     } else {
 89:       PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
 90:     }
 91:     if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
 92:     if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
 93:     if (!transpose) {
 94:       if (matapp) {
 95:         PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
 96:         PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
 97:       } else {
 98:         PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
 99:         PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
100:       }
101:     } else {
102:       PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
103:       PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
104:       PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
105:     }
106:     if (mglevels->cr) {
107:       Mat crA;

109:       PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
110:       /* TODO Turn on copy and turn off noisy if we have an exact solution
111:       PetscCall(VecCopy(mglevels->x, mglevels->crx));
112:       PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
113:       PetscCall(KSPGetOperators(mglevels->cr, &crA, NULL));
114:       PetscCall(KSPSetNoisy_Private(crA, mglevels->crx));
115:       PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
116:       PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
117:     }
118:     if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
119:   }
120:   PetscFunctionReturn(PETSC_SUCCESS);
121: }

123: static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
124: {
125:   PC_MG         *mg       = (PC_MG *)pc->data;
126:   PC_MG_Levels **mglevels = mg->levels;
127:   PC             tpc;
128:   PetscBool      changeu, changed;
129:   PetscInt       levels = mglevels[0]->levels, i;

131:   PetscFunctionBegin;
132:   /* When the DM supplies the matrix, it will not exist until this point */
133:   for (i = 0; i < levels; i++) {
134:     if (!mglevels[i]->A) {
135:       PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
136:       PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
137:     }
138:   }

140:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
141:   PetscCall(PCPreSolveChangeRHS(tpc, &changed));
142:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
143:   PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
144:   if (!changed && !changeu) {
145:     PetscCall(VecDestroy(&mglevels[levels - 1]->b));
146:     mglevels[levels - 1]->b = b;
147:   } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
148:     if (!mglevels[levels - 1]->b) {
149:       Vec *vec;

151:       PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
152:       mglevels[levels - 1]->b = *vec;
153:       PetscCall(PetscFree(vec));
154:     }
155:     PetscCall(VecCopy(b, mglevels[levels - 1]->b));
156:   }
157:   mglevels[levels - 1]->x = x;

159:   mg->rtol   = rtol;
160:   mg->abstol = abstol;
161:   mg->dtol   = dtol;
162:   if (rtol) {
163:     /* compute initial residual norm for relative convergence test */
164:     PetscReal rnorm;
165:     if (zeroguess) {
166:       PetscCall(VecNorm(b, NORM_2, &rnorm));
167:     } else {
168:       PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
169:       PetscCall(VecNorm(w, NORM_2, &rnorm));
170:     }
171:     mg->ttol = PetscMax(rtol * rnorm, abstol);
172:   } else if (abstol) mg->ttol = abstol;
173:   else mg->ttol = 0.0;

175:   /* since the smoother is applied to the full system, not just the residual, we need to make sure that the smoothers
176:      do not stop prematurely due to a small residual */
177:   for (i = 1; i < levels; i++) {
178:     PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
179:     if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
180:       /* For Richardson the initial guess is nonzero since each cycle solves the original system rather than just applying the preconditioner */
181:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
182:       PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
183:     }
184:   }

186:   *reason = PCRICHARDSON_NOT_SET;
187:   for (i = 0; i < its; i++) {
188:     PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
189:     if (*reason) break;
190:   }
191:   if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS;
192:   *outits = i;
193:   if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
194:   PetscFunctionReturn(PETSC_SUCCESS);
195: }
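
A minimal usage sketch (not part of mg.c) of the Richardson path above: with default Richardson settings, a `KSPRICHARDSON` outer solver hands the iteration to `PCApplyRichardson_MG()`, so the rtol/abstol/its arguments seen above typically come from the outer KSP's tolerances. The helper name and the assumption that the caller has already attached operators to `ksp` are hypothetical.

#include <petscksp.h>

/* Sketch: drive PCMG through KSPRICHARDSON so that the PC's own Richardson
   application performs the multigrid cycles. Assumes KSPSetOperators() has
   already been called on ksp by the caller. */
static PetscErrorCode UseMGAsRichardsonSolver(KSP ksp)
{
  PC pc;

  PetscFunctionBeginUser;
  PetscCall(KSPSetType(ksp, KSPRICHARDSON));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCMG));
  /* with the default Richardson settings these become the rtol/abstol/its passed to PCApplyRichardson_MG() */
  PetscCall(KSPSetTolerances(ksp, 1.e-8, 1.e-50, PETSC_CURRENT, 50));
  PetscCall(KSPSetFromOptions(ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}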

197: PetscErrorCode PCReset_MG(PC pc)
198: {
199:   PC_MG         *mg       = (PC_MG *)pc->data;
200:   PC_MG_Levels **mglevels = mg->levels;
201:   PetscInt       i, n;

203:   PetscFunctionBegin;
204:   if (mglevels) {
205:     n = mglevels[0]->levels;
206:     for (i = 0; i < n - 1; i++) {
207:       PetscCall(VecDestroy(&mglevels[i + 1]->r));
208:       PetscCall(VecDestroy(&mglevels[i]->b));
209:       PetscCall(VecDestroy(&mglevels[i]->x));
210:       PetscCall(MatDestroy(&mglevels[i + 1]->R));
211:       PetscCall(MatDestroy(&mglevels[i]->B));
212:       PetscCall(MatDestroy(&mglevels[i]->X));
213:       PetscCall(VecDestroy(&mglevels[i]->crx));
214:       PetscCall(VecDestroy(&mglevels[i]->crb));
215:       PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
216:       PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
217:       PetscCall(MatDestroy(&mglevels[i + 1]->inject));
218:       PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
219:     }
220:     PetscCall(VecDestroy(&mglevels[n - 1]->crx));
221:     PetscCall(VecDestroy(&mglevels[n - 1]->crb));
222:     /* this is non-NULL only if the smoother on the finest level
223:        changes the rhs during PreSolve */
224:     PetscCall(VecDestroy(&mglevels[n - 1]->b));
225:     PetscCall(MatDestroy(&mglevels[n - 1]->B));

227:     for (i = 0; i < n; i++) {
228:       PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
229:       PetscCall(MatDestroy(&mglevels[i]->A));
230:       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
231:       PetscCall(KSPReset(mglevels[i]->smoothu));
232:       if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
233:     }
234:     mg->Nc = 0;
235:   }
236:   PetscFunctionReturn(PETSC_SUCCESS);
237: }

239: /* Implementing CR

241: We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is

243:   Inj^T (Inj Inj^T)^{-1} Inj

245: and if Inj is a VecScatter, as it is now in PETSc, we have

247:   Inj^T Inj

249: and

251:   S = I - Inj^T Inj

253: since

255:   Inj S = Inj - (Inj Inj^T) Inj = 0.

257: Brannick suggests

259:   A \to S^T A S  \qquad\mathrm{and}\qquad M \to S^T M S

261: but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use

263:   M^{-1} A \to S M^{-1} A S

265: In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.

267:   Check: || Inj P - I ||_F < tol
268:   Check: In general, Inj Inj^T = I
269: */
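
The two checks listed at the end of the comment above can be verified directly. Below is a minimal sketch (not part of mg.c) that forms Inj P - I and Inj Inj^T - I and measures their Frobenius norms; the function name, the tol parameter, and the assumption that Inj and P are assembled Mats for the same level pair (and of types that support these products) are all hypothetical.

#include <petscksp.h>

/* Sketch: verify || Inj P - I ||_F < tol and Inj Inj^T = I for a given level pair. */
static PetscErrorCode CheckInjectionCompatibility(Mat Inj, Mat P, PetscReal tol)
{
  Mat       InjP, InjInjT;
  PetscReal norm;

  PetscFunctionBeginUser;
  /* Check 1: Inj P should be the identity on the coarse space */
  PetscCall(MatMatMult(Inj, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &InjP));
  PetscCall(MatShift(InjP, -1.0)); /* Inj P - I */
  PetscCall(MatNorm(InjP, NORM_FROBENIUS, &norm));
  PetscCheck(norm < tol, PetscObjectComm((PetscObject)Inj), PETSC_ERR_PLIB, "|| Inj P - I ||_F = %g exceeds tolerance", (double)norm);
  /* Check 2: Inj Inj^T should be the identity (Inj is a selection of fine points) */
  PetscCall(MatMatTransposeMult(Inj, Inj, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &InjInjT));
  PetscCall(MatShift(InjInjT, -1.0)); /* Inj Inj^T - I */
  PetscCall(MatNorm(InjInjT, NORM_FROBENIUS, &norm));
  PetscCheck(norm < tol, PetscObjectComm((PetscObject)Inj), PETSC_ERR_PLIB, "|| Inj Inj^T - I ||_F = %g exceeds tolerance", (double)norm);
  PetscCall(MatDestroy(&InjP));
  PetscCall(MatDestroy(&InjInjT));
  PetscFunctionReturn(PETSC_SUCCESS);
}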

271: typedef struct {
272:   PC       mg;  /* The PCMG object */
273:   PetscInt l;   /* The multigrid level for this solver */
274:   Mat      Inj; /* The injection matrix */
275:   Mat      S;   /* I - Inj^T Inj */
276: } CRContext;

278: static PetscErrorCode CRSetup_Private(PC pc)
279: {
280:   CRContext *ctx;
281:   Mat        It;

283:   PetscFunctionBeginUser;
284:   PetscCall(PCShellGetContext(pc, &ctx));
285:   PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
286:   PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
287:   PetscCall(MatCreateTranspose(It, &ctx->Inj));
288:   PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
289:   PetscCall(MatScale(ctx->S, -1.0));
290:   PetscCall(MatShift(ctx->S, 1.0));
291:   PetscFunctionReturn(PETSC_SUCCESS);
292: }

294: static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
295: {
296:   CRContext *ctx;

298:   PetscFunctionBeginUser;
299:   PetscCall(PCShellGetContext(pc, &ctx));
300:   PetscCall(MatMult(ctx->S, x, y));
301:   PetscFunctionReturn(PETSC_SUCCESS);
302: }

304: static PetscErrorCode CRDestroy_Private(PC pc)
305: {
306:   CRContext *ctx;

308:   PetscFunctionBeginUser;
309:   PetscCall(PCShellGetContext(pc, &ctx));
310:   PetscCall(MatDestroy(&ctx->Inj));
311:   PetscCall(MatDestroy(&ctx->S));
312:   PetscCall(PetscFree(ctx));
313:   PetscCall(PCShellSetContext(pc, NULL));
314:   PetscFunctionReturn(PETSC_SUCCESS);
315: }

317: static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
318: {
319:   CRContext *ctx;

321:   PetscFunctionBeginUser;
322:   PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
323:   PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
324:   PetscCall(PetscCalloc1(1, &ctx));
325:   ctx->mg = pc;
326:   ctx->l  = l;
327:   PetscCall(PCSetType(*cr, PCSHELL));
328:   PetscCall(PCShellSetContext(*cr, ctx));
329:   PetscCall(PCShellSetApply(*cr, CRApply_Private));
330:   PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
331:   PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
332:   PetscFunctionReturn(PETSC_SUCCESS);
333: }
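
A minimal sketch (not part of mg.c) for turning on the compatible-relaxation monitor built by CreateCR_Private() above. The helper name is hypothetical; PCMGSetInjection() and PCMGSetAdaptCR() are existing PCMG entry points, and CRSetup_Private() above errors if no injection is available for the level.

#include <petscksp.h>

/* Sketch: enable the CR monitor on a PCMG that already has its levels defined.
   Inj is assumed to be an injection matrix for level l supplied by the caller
   (or obtainable from the DM, in which case PCSetUp_MG() sets it automatically). */
static PetscErrorCode EnableCRMonitor(PC pc, PetscInt l, Mat Inj)
{
  PetscFunctionBeginUser;
  PetscCall(PCMGSetInjection(pc, l, Inj));   /* required by CRSetup_Private() */
  PetscCall(PCMGSetAdaptCR(pc, PETSC_TRUE)); /* same effect as -pc_mg_adapt_cr */
  PetscFunctionReturn(PETSC_SUCCESS);
}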

335: PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *);

337: PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
338: {
339:   PC_MG         *mg = (PC_MG *)pc->data;
340:   MPI_Comm       comm;
341:   PC_MG_Levels **mglevels = mg->levels;
342:   PCMGType       mgtype   = mg->am;
343:   PetscInt       mgctype  = (PetscInt)PC_MG_CYCLE_V;
344:   PetscInt       i;
345:   PetscMPIInt    size;
346:   const char    *prefix;
347:   PC             ipc;
348:   PetscInt       n;

350:   PetscFunctionBegin;
353:   if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
354:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
355:   if (mglevels) {
356:     mgctype = mglevels[0]->cycles;
357:     /* changing the number of levels so free up the previous stuff */
358:     PetscCall(PCReset_MG(pc));
359:     n = mglevels[0]->levels;
360:     for (i = 0; i < n; i++) {
361:       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
362:       PetscCall(KSPDestroy(&mglevels[i]->smoothu));
363:       PetscCall(KSPDestroy(&mglevels[i]->cr));
364:       PetscCall(PetscFree(mglevels[i]));
365:     }
366:     PetscCall(PetscFree(mg->levels));
367:   }

369:   mg->nlevels = levels;

371:   PetscCall(PetscMalloc1(levels, &mglevels));

373:   PetscCall(PCGetOptionsPrefix(pc, &prefix));

375:   mg->stageApply = 0;
376:   for (i = 0; i < levels; i++) {
377:     PetscCall(PetscNew(&mglevels[i]));

379:     mglevels[i]->level               = i;
380:     mglevels[i]->levels              = levels;
381:     mglevels[i]->cycles              = mgctype;
382:     mg->default_smoothu              = 2;
383:     mg->default_smoothd              = 2;
384:     mglevels[i]->eventsmoothsetup    = 0;
385:     mglevels[i]->eventsmoothsolve    = 0;
386:     mglevels[i]->eventresidual       = 0;
387:     mglevels[i]->eventinterprestrict = 0;

389:     if (comms) comm = comms[i];
390:     if (comm != MPI_COMM_NULL) {
391:       PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
392:       PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel));
393:       PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
394:       PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
395:       PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
396:       PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
397:       if (i == 0 && levels > 1) { // coarse grid
398:         PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));

400:         /* coarse solve is (redundant) LU by default; set shift type INBLOCKS to avoid annoying zero pivots in the LU factorization */
401:         PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
402:         PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
403:         PetscCallMPI(MPI_Comm_size(comm, &size));
404:         if (size > 1) {
405:           PetscCall(PCSetType(ipc, PCREDUNDANT));
406:         } else {
407:           PetscCall(PCSetType(ipc, PCLU));
408:         }
409:         PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
410:       } else {
411:         char tprefix[128];

413:         PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
414:         PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
415:         PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
416:         PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
417:         PetscCall(PCSetType(ipc, PCSOR));
418:         PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));

420:         if (i == levels - 1 && levels > 1) { // finest level: use the 'mg_fine_' prefix if any -mg_fine_ options were given, otherwise fall back to 'mg_levels_<level>_'
421:           PetscBool set;
422:           PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set));
423:           if (set) {
424:             if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix));
425:             else PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_"));
426:             PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix));
427:           } else {
428:             PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
429:             PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
430:           }
431:         } else {
432:           PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
433:           PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
434:         }
435:       }
436:     }
437:     mglevels[i]->smoothu = mglevels[i]->smoothd;
438:     mg->rtol             = 0.0;
439:     mg->abstol           = 0.0;
440:     mg->dtol             = 0.0;
441:     mg->ttol             = 0.0;
442:     mg->cyclesperpcapply = 1;
443:   }
444:   mg->levels = mglevels;
445:   PetscCall(PCMGSetType(pc, mgtype));
446:   PetscFunctionReturn(PETSC_SUCCESS);
447: }

449: /*@C
450:   PCMGSetLevels - Sets the number of levels to use with `PCMG`.
451:   Must be called before any other `PCMG` routine.

453:   Logically Collective

455:   Input Parameters:
456: + pc     - the preconditioner context
457: . levels - the number of levels
458: - comms  - optional communicators for each level; this is to allow solving the coarser problems
459:            on smaller sets of processes. For processes that are not included in the computation
460:            you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes
461:            should participate in each level of the problem.

463:   Level: intermediate

465:   Notes:
466:   If the number of levels is one then the multigrid uses the `-mg_levels` prefix
467:   for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix.

469:   You can free the information in comms after this routine is called.

471:   The array of MPI communicators must contain `MPI_COMM_NULL` for each rank that does not participate
472:   in a given level's solve. For example, with 2 levels and 1 and 2 ranks on
473:   the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
474:   of size 2 and 1, while rank 1 in the original communicator will pass in an array of 2 communicators,
475:   the first of size 2 and the second equal to `MPI_COMM_NULL`, since rank 1 does not participate
476:   in the coarse grid solve.

478:   Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
479:   must take special care in providing the restriction and interpolation operators. We recommend
480:   providing these as two-step operations: first perform a standard restriction or interpolation on
481:   the full number of ranks for that level and then use an MPI call to copy the resulting vector
482:   array entries (after calls to VecGetArray()) to the smaller or larger number of ranks; in both
483:   cases the MPI calls must be made on the larger of the two communicators. Traditional MPI sends and
484:   receives or `MPI_Alltoallv()` could be used to do the reshuffling of the vector entries.

486:   Fortran Notes:
487:   Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM`
488:   is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc.

490: .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()`
491: @*/
492: PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms)
493: {
494:   PetscFunctionBegin;
496:   if (comms) PetscAssertPointer(comms, 3);
497:   PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms));
498:   PetscFunctionReturn(PETSC_SUCCESS);
499: }
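
A minimal two-level setup sketch (not part of mg.c). The function name SolveWithTwoLevelMG and the assumption that the caller provides the fine-grid matrix Afine and a prolongation P from the coarse to the fine level are hypothetical; PCMGSetLevels() is the routine defined above, and PCMGSetInterpolation() and PCMGSetGalerkin() are existing PCMG API calls.

#include <petscksp.h>

/* Sketch: two-level multigrid where level 1 is the fine grid and level 0 the coarse grid.
   Afine, P, b, and x are assumed to be supplied by the caller. */
static PetscErrorCode SolveWithTwoLevelMG(Mat Afine, Mat P, Vec b, Vec x)
{
  KSP ksp;
  PC  pc;

  PetscFunctionBeginUser;
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)Afine), &ksp));
  PetscCall(KSPSetOperators(ksp, Afine, Afine));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 2, NULL));               /* all ranks participate on both levels */
  PetscCall(PCMGSetInterpolation(pc, 1, P));           /* if restriction is not set, PCSetUp_MG() reuses this matrix for it */
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH)); /* coarse operator computed by Galerkin projection in PCSetUp_MG() */
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPDestroy(&ksp));
  PetscFunctionReturn(PETSC_SUCCESS);
}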

501: PetscErrorCode PCDestroy_MG(PC pc)
502: {
503:   PC_MG         *mg       = (PC_MG *)pc->data;
504:   PC_MG_Levels **mglevels = mg->levels;
505:   PetscInt       i, n;

507:   PetscFunctionBegin;
508:   PetscCall(PCReset_MG(pc));
509:   if (mglevels) {
510:     n = mglevels[0]->levels;
511:     for (i = 0; i < n; i++) {
512:       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
513:       PetscCall(KSPDestroy(&mglevels[i]->smoothu));
514:       PetscCall(KSPDestroy(&mglevels[i]->cr));
515:       PetscCall(PetscFree(mglevels[i]));
516:     }
517:     PetscCall(PetscFree(mg->levels));
518:   }
519:   PetscCall(PetscFree(pc->data));
520:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
521:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
522:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL));
523:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", NULL));
524:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL));
525:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL));
526:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
527:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
528:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL));
529:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
530:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
531:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
532:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
533:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
534:   PetscFunctionReturn(PETSC_SUCCESS);
535: }

537: /*
538:    PCApply_MG - Runs either an additive, multiplicative, Kaskadic
539:              or full cycle of multigrid.

541:   Note:
542:   A simple wrapper that calls PCMGMCycle_Private(), PCMGACycle_Private(), PCMGKCycle_Private(), or PCMGFCycle_Private().
543: */
544: static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
545: {
546:   PC_MG         *mg       = (PC_MG *)pc->data;
547:   PC_MG_Levels **mglevels = mg->levels;
548:   PC             tpc;
549:   PetscInt       levels = mglevels[0]->levels, i;
550:   PetscBool      changeu, changed, matapp;

552:   PetscFunctionBegin;
553:   matapp = (PetscBool)(B && X);
554:   if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
555:   /* When the DM supplies the matrix, it will not exist until this point */
556:   for (i = 0; i < levels; i++) {
557:     if (!mglevels[i]->A) {
558:       PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
559:       PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
560:     }
561:   }

563:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
564:   PetscCall(PCPreSolveChangeRHS(tpc, &changed));
565:   PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
566:   PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
567:   if (!changeu && !changed) {
568:     if (matapp) {
569:       PetscCall(MatDestroy(&mglevels[levels - 1]->B));
570:       mglevels[levels - 1]->B = B;
571:     } else {
572:       PetscCall(VecDestroy(&mglevels[levels - 1]->b));
573:       mglevels[levels - 1]->b = b;
574:     }
575:   } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
576:     if (matapp) {
577:       if (mglevels[levels - 1]->B) {
578:         PetscInt  N1, N2;
579:         PetscBool flg;

581:         PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
582:         PetscCall(MatGetSize(B, NULL, &N2));
583:         PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
584:         if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
585:       }
586:       if (!mglevels[levels - 1]->B) {
587:         PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
588:       } else {
589:         PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
590:       }
591:     } else {
592:       if (!mglevels[levels - 1]->b) {
593:         Vec *vec;

595:         PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
596:         mglevels[levels - 1]->b = *vec;
597:         PetscCall(PetscFree(vec));
598:       }
599:       PetscCall(VecCopy(b, mglevels[levels - 1]->b));
600:     }
601:   }
602:   if (matapp) {
603:     mglevels[levels - 1]->X = X;
604:   } else {
605:     mglevels[levels - 1]->x = x;
606:   }

608:   /* If coarser Xs are present, it means we have already block-applied the PC at least once.
609:      Reset operators if sizes/types do not match */
610:   if (matapp && levels > 1 && mglevels[levels - 2]->X) {
611:     PetscInt  Xc, Bc;
612:     PetscBool flg;

614:     PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
615:     PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
616:     PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
617:     if (Xc != Bc || !flg) {
618:       PetscCall(MatDestroy(&mglevels[levels - 1]->R));
619:       for (i = 0; i < levels - 1; i++) {
620:         PetscCall(MatDestroy(&mglevels[i]->R));
621:         PetscCall(MatDestroy(&mglevels[i]->B));
622:         PetscCall(MatDestroy(&mglevels[i]->X));
623:       }
624:     }
625:   }

627:   if (mg->am == PC_MG_MULTIPLICATIVE) {
628:     if (matapp) PetscCall(MatZeroEntries(X));
629:     else PetscCall(VecZeroEntries(x));
630:     for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
631:   } else if (mg->am == PC_MG_ADDITIVE) {
632:     PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
633:   } else if (mg->am == PC_MG_KASKADE) {
634:     PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
635:   } else {
636:     PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
637:   }
638:   if (mg->stageApply) PetscCall(PetscLogStagePop());
639:   if (!changeu && !changed) {
640:     if (matapp) {
641:       mglevels[levels - 1]->B = NULL;
642:     } else {
643:       mglevels[levels - 1]->b = NULL;
644:     }
645:   }
646:   PetscFunctionReturn(PETSC_SUCCESS);
647: }

649: static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
650: {
651:   PetscFunctionBegin;
652:   PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
653:   PetscFunctionReturn(PETSC_SUCCESS);
654: }

656: static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
657: {
658:   PetscFunctionBegin;
659:   PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
660:   PetscFunctionReturn(PETSC_SUCCESS);
661: }

663: static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
664: {
665:   PetscFunctionBegin;
666:   PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
667:   PetscFunctionReturn(PETSC_SUCCESS);
668: }

670: static PetscErrorCode PCMatApplyTranspose_MG(PC pc, Mat b, Mat x)
671: {
672:   PetscFunctionBegin;
673:   PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_TRUE));
674:   PetscFunctionReturn(PETSC_SUCCESS);
675: }

677: PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems PetscOptionsObject)
678: {
679:   PetscInt            levels, cycles;
680:   PetscBool           flg, flg2;
681:   PC_MG              *mg = (PC_MG *)pc->data;
682:   PC_MG_Levels      **mglevels;
683:   PCMGType            mgtype;
684:   PCMGCycleType       mgctype;
685:   PCMGGalerkinType    gtype;
686:   PCMGCoarseSpaceType coarseSpaceType;

688:   PetscFunctionBegin;
689:   levels = PetscMax(mg->nlevels, 1);
690:   PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
691:   PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
692:   if (!flg && !mg->levels && pc->dm) {
693:     PetscCall(DMGetRefineLevel(pc->dm, &levels));
694:     levels++;
695:     mg->usedmfornumberoflevels = PETSC_TRUE;
696:   }
697:   PetscCall(PCMGSetLevels(pc, levels, NULL));
698:   mglevels = mg->levels;

700:   mgctype = (PCMGCycleType)mglevels[0]->cycles;
701:   PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "Use a V-cycle or a W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg));
702:   if (flg) PetscCall(PCMGSetCycleType(pc, mgctype));
703:   coarseSpaceType = mg->coarseSpaceType;
704:   PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg));
705:   if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType));
706:   PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg));
707:   PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg));
708:   flg2 = PETSC_FALSE;
709:   PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg));
710:   if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2));
711:   flg = PETSC_FALSE;
712:   PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL));
713:   if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc));
714:   PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)mg->galerkin, (PetscEnum *)&gtype, &flg));
715:   if (flg) PetscCall(PCMGSetGalerkin(pc, gtype));
716:   mgtype = mg->am;
717:   PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg));
718:   if (flg) PetscCall(PCMGSetType(pc, mgtype));
719:   if (mg->am == PC_MG_MULTIPLICATIVE) {
720:     PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg));
721:     if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles));
722:   }
723:   flg = PETSC_FALSE;
724:   PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL));
725:   if (flg) {
726:     PetscInt i;
727:     char     eventname[128];

729:     levels = mglevels[0]->levels;
730:     for (i = 0; i < levels; i++) {
731:       PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i));
732:       PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup));
733:       PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i));
734:       PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve));
735:       if (i) {
736:         PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i));
737:         PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual));
738:         PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i));
739:         PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict));
740:       }
741:     }

743:     if (PetscDefined(USE_LOG)) {
744:       const char sname[] = "MG Apply";

746:       PetscCall(PetscLogStageGetId(sname, &mg->stageApply));
747:       if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply));
748:     }
749:   }
750:   PetscOptionsHeadEnd();
751:   /* Check option consistency */
752:   PetscCall(PCMGGetGalerkin(pc, &gtype));
753:   PetscCall(PCMGGetAdaptInterpolation(pc, &flg));
754:   PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator");
755:   PetscFunctionReturn(PETSC_SUCCESS);
756: }
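
A sketch (not part of mg.c) of the same options parsed above, preloaded programmatically with PetscOptionsSetValue() before PCSetFromOptions_MG() runs; passing them on the command line is equivalent. The helper name and the particular option values are only illustrative.

#include <petscksp.h>

/* Sketch: preload typical PCMG options into the default options database. */
static PetscErrorCode SetTypicalMGOptions(void)
{
  PetscFunctionBeginUser;
  PetscCall(PetscOptionsSetValue(NULL, "-pc_type", "mg"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_mg_levels", "3"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_mg_type", "multiplicative"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_mg_cycle_type", "w"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_mg_galerkin", "both"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_mg_log", NULL)); /* registers the per-level events shown above */
  PetscFunctionReturn(PETSC_SUCCESS);
}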

758: const char *const PCMGTypes[]            = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
759: const char *const PCMGCycleTypes[]       = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
760: const char *const PCMGGalerkinTypes[]    = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
761: const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};

763: #include <petscdraw.h>
764: PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
765: {
766:   PC_MG         *mg       = (PC_MG *)pc->data;
767:   PC_MG_Levels **mglevels = mg->levels;
768:   PetscInt       levels   = mglevels ? mglevels[0]->levels : 0, i;
769:   PetscBool      isascii, isbinary, isdraw;

771:   PetscFunctionBegin;
772:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
773:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
774:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
775:   if (isascii) {
776:     const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
777:     PetscCall(PetscViewerASCIIPrintf(viewer, "  type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
778:     if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
779:     if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
780:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices\n"));
781:     } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
782:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for pmat\n"));
783:     } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
784:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for mat\n"));
785:     } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
786:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using externally computed Galerkin coarse grid matrices\n"));
787:     } else {
788:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using Galerkin computed coarse grid matrices\n"));
789:     }
790:     if (mg->view) PetscCall((*mg->view)(pc, viewer));
791:     for (i = 0; i < levels; i++) {
792:       if (i) {
793:         PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
794:       } else {
795:         PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
796:       }
797:       PetscCall(PetscViewerASCIIPushTab(viewer));
798:       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
799:       PetscCall(PetscViewerASCIIPopTab(viewer));
800:       if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
801:         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
802:       } else if (i) {
803:         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
804:         PetscCall(PetscViewerASCIIPushTab(viewer));
805:         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
806:         PetscCall(PetscViewerASCIIPopTab(viewer));
807:       }
808:       if (i && mglevels[i]->cr) {
809:         PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
810:         PetscCall(PetscViewerASCIIPushTab(viewer));
811:         PetscCall(KSPView(mglevels[i]->cr, viewer));
812:         PetscCall(PetscViewerASCIIPopTab(viewer));
813:       }
814:     }
815:   } else if (isbinary) {
816:     for (i = levels - 1; i >= 0; i--) {
817:       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
818:       if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
819:     }
820:   } else if (isdraw) {
821:     PetscDraw draw;
822:     PetscReal x, w, y, bottom, th;
823:     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
824:     PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
825:     PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
826:     bottom = y - th;
827:     for (i = levels - 1; i >= 0; i--) {
828:       if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
829:         PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
830:         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
831:         PetscCall(PetscDrawPopCurrentPoint(draw));
832:       } else {
833:         w = 0.5 * PetscMin(1.0 - x, x);
834:         PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
835:         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
836:         PetscCall(PetscDrawPopCurrentPoint(draw));
837:         PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
838:         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
839:         PetscCall(PetscDrawPopCurrentPoint(draw));
840:       }
841:       PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
842:       bottom -= th;
843:     }
844:   }
845:   PetscFunctionReturn(PETSC_SUCCESS);
846: }

848: #include <petsc/private/kspimpl.h>

850: /*
851:     Calls setup for the KSP on each level
852: */
853: PetscErrorCode PCSetUp_MG(PC pc)
854: {
855:   PC_MG         *mg       = (PC_MG *)pc->data;
856:   PC_MG_Levels **mglevels = mg->levels;
857:   PetscInt       i, n;
858:   PC             cpc;
859:   PetscBool      dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
860:   Mat            dA, dB;
861:   Vec            tvec;
862:   DM            *dms;
863:   PetscViewer    viewer = NULL;
864:   PetscBool      dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
865:   PetscBool      adaptInterpolation = mg->adaptInterpolation;

867:   PetscFunctionBegin;
868:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
869:   n = mglevels[0]->levels;
870:   /* FIX: Move this to PCSetFromOptions_MG? */
871:   if (mg->usedmfornumberoflevels) {
872:     PetscInt levels;
873:     PetscCall(DMGetRefineLevel(pc->dm, &levels));
874:     levels++;
875:     if (levels > n) { /* the problem is now being solved on a finer grid */
876:       PetscCall(PCMGSetLevels(pc, levels, NULL));
877:       n = levels;
878:       PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
879:       mglevels = mg->levels;
880:     }
881:   }

883:   /* If the user did not provide fine grid operators, OR the operator was not updated since the last global KSPSetOperators(), */
884:   /* then use those from the global PC */
885:   /* Is this what we always want? What if the user wants to keep the old one? */
886:   PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
887:   if (opsset) {
888:     Mat mmat;
889:     PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
890:     if (mmat == pc->pmat) opsset = PETSC_FALSE;
891:   }
892:   /* fine grid smoother inherits the reuse-pc flag */
893:   PetscCall(KSPGetPC(mglevels[n - 1]->smoothd, &cpc));
894:   cpc->reusepreconditioner = pc->reusepreconditioner;
895:   PetscCall(KSPGetPC(mglevels[n - 1]->smoothu, &cpc));
896:   cpc->reusepreconditioner = pc->reusepreconditioner;

898:   /* Create CR solvers */
899:   PetscCall(PCMGGetAdaptCR(pc, &doCR));
900:   if (doCR) {
901:     const char *prefix;

903:     PetscCall(PCGetOptionsPrefix(pc, &prefix));
904:     for (i = 1; i < n; ++i) {
905:       PC   ipc, cr;
906:       char crprefix[128];

908:       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
909:       PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
910:       PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
911:       PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
912:       PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
913:       PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
914:       PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
915:       PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
916:       PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
917:       PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));

919:       PetscCall(PCSetType(ipc, PCCOMPOSITE));
920:       PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
921:       PetscCall(PCCompositeAddPCType(ipc, PCSOR));
922:       PetscCall(CreateCR_Private(pc, i, &cr));
923:       PetscCall(PCCompositeAddPC(ipc, cr));
924:       PetscCall(PCDestroy(&cr));

926:       PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
927:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
928:       PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
929:       PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
930:     }
931:   }

933:   if (!opsset) {
934:     PetscCall(PCGetUseAmat(pc, &use_amat));
935:     if (use_amat) {
936:       PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
937:       PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
938:     } else {
939:       PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
940:       PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
941:     }
942:   }

944:   for (i = n - 1; i > 0; i--) {
945:     if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
946:       missinginterpolate = PETSC_TRUE;
947:       break;
948:     }
949:   }

951:   PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
952:   if (dA == dB) dAeqdB = PETSC_TRUE;
953:   if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */

955:   if (pc->dm && !pc->setupcalled) {
956:     /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
957:     PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
958:     PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
959:     if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
960:       PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
961:       PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
962:     }
963:     if (mglevels[n - 1]->cr) {
964:       PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
965:       PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
966:     }
967:   }

969:   /*
970:    Skip if the user has provided all the interpolation/restriction needed (since the DM might not be able to produce them, e.g. when coming from SNES/TS).
971:    Skip for an externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
972:   */
973:   if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
974:     /* first see if we can compute a coarse space */
975:     if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
976:       for (i = n - 2; i > -1; i--) {
977:         if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
978:           PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
979:           PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
980:         }
981:       }
982:     } else { /* construct the interpolation from the DMs */
983:       Mat p;
984:       Vec rscale;
985:       PetscCall(PetscMalloc1(n, &dms));
986:       dms[n - 1] = pc->dm;
987:       /* Separately create them so we do not get DMKSP interference between levels */
988:       for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
989:       for (i = n - 2; i > -1; i--) {
990:         DMKSP     kdm;
991:         PetscBool dmhasrestrict, dmhasinject;
992:         PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
993:         if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
994:         if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
995:           PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
996:           if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
997:         }
998:         if (mglevels[i]->cr) {
999:           PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
1000:           if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
1001:         }
1002:         PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
1003:         /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
1004:          * a bitwise OR of computing the matrix, RHS, and initial iterate. */
1005:         kdm->ops->computerhs = NULL;
1006:         kdm->rhsctx          = NULL;
1007:         if (!mglevels[i + 1]->interpolate) {
1008:           PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
1009:           PetscCall(PCMGSetInterpolation(pc, i + 1, p));
1010:           if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
1011:           PetscCall(VecDestroy(&rscale));
1012:           PetscCall(MatDestroy(&p));
1013:         }
1014:         PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
1015:         if (dmhasrestrict && !mglevels[i + 1]->restrct) {
1016:           PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
1017:           PetscCall(PCMGSetRestriction(pc, i + 1, p));
1018:           PetscCall(MatDestroy(&p));
1019:         }
1020:         PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
1021:         if (dmhasinject && !mglevels[i + 1]->inject) {
1022:           PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
1023:           PetscCall(PCMGSetInjection(pc, i + 1, p));
1024:           PetscCall(MatDestroy(&p));
1025:         }
1026:       }

1028:       for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
1029:       PetscCall(PetscFree(dms));
1030:     }
1031:   }

1033:   if (mg->galerkin < PC_MG_GALERKIN_NONE) {
1034:     Mat       A, B;
1035:     PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
1036:     MatReuse  reuse = MAT_INITIAL_MATRIX;

1038:     if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
1039:     if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
1040:     if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
1041:     for (i = n - 2; i > -1; i--) {
1042:       PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
1043:       if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
1044:       if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
1045:       if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
1046:       if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
1047:       if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
1048:       /* the management of the PetscObjectReference() and PetscObjectDereference() below is rather delicate */
1049:       if (!doA && dAeqdB) {
1050:         if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
1051:         A = B;
1052:       } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
1053:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
1054:         PetscCall(PetscObjectReference((PetscObject)A));
1055:       }
1056:       if (!doB && dAeqdB) {
1057:         if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
1058:         B = A;
1059:       } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
1060:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
1061:         PetscCall(PetscObjectReference((PetscObject)B));
1062:       }
1063:       if (reuse == MAT_INITIAL_MATRIX) {
1064:         PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
1065:         PetscCall(PetscObjectDereference((PetscObject)A));
1066:         PetscCall(PetscObjectDereference((PetscObject)B));
1067:       }
1068:       dA = A;
1069:       dB = B;
1070:     }
1071:   }

1073:   /* Adapt interpolation matrices */
1074:   if (adaptInterpolation) {
1075:     for (i = 0; i < n; ++i) {
1076:       if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
1077:       if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
1078:     }
1079:     for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
1080:   }

1082:   if (needRestricts && pc->dm) {
1083:     for (i = n - 2; i >= 0; i--) {
1084:       DM  dmfine, dmcoarse;
1085:       Mat Restrict, Inject;
1086:       Vec rscale;
1087:       PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
1088:       PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
1089:       PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
1090:       PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
1091:       PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
1092:       PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
1093:     }
1094:   }

1096:   if (!pc->setupcalled) {
1097:     for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
1098:     for (i = 1; i < n; i++) {
1099:       if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
1100:       if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
1101:     }
1102:     /* ensure that if either interpolation or restriction is set, the other one is also set */
1103:     for (i = 1; i < n; i++) {
1104:       PetscCall(PCMGGetInterpolation(pc, i, NULL));
1105:       PetscCall(PCMGGetRestriction(pc, i, NULL));
1106:     }
1107:     for (i = 0; i < n - 1; i++) {
1108:       if (!mglevels[i]->b) {
1109:         Vec *vec;
1110:         PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
1111:         PetscCall(PCMGSetRhs(pc, i, *vec));
1112:         PetscCall(VecDestroy(vec));
1113:         PetscCall(PetscFree(vec));
1114:       }
1115:       if (!mglevels[i]->r && i) {
1116:         PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1117:         PetscCall(PCMGSetR(pc, i, tvec));
1118:         PetscCall(VecDestroy(&tvec));
1119:       }
1120:       if (!mglevels[i]->x) {
1121:         PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1122:         PetscCall(PCMGSetX(pc, i, tvec));
1123:         PetscCall(VecDestroy(&tvec));
1124:       }
1125:       if (doCR) {
1126:         PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
1127:         PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
1128:         PetscCall(VecZeroEntries(mglevels[i]->crb));
1129:       }
1130:     }
1131:     if (n != 1 && !mglevels[n - 1]->r) {
1132:       /* PCMGSetR() on the finest level if user did not supply it */
1133:       Vec *vec;
1134:       PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
1135:       PetscCall(PCMGSetR(pc, n - 1, *vec));
1136:       PetscCall(VecDestroy(vec));
1137:       PetscCall(PetscFree(vec));
1138:     }
1139:     if (doCR) {
1140:       PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
1141:       PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
1142:       PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
1143:     }
1144:   }

1146:   if (pc->dm) {
1147:     /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
1148:     for (i = 0; i < n - 1; i++) {
1149:       if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1150:     }
1151:   }
1152:   // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
1153:   // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
1154:   if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;

1156:   for (i = 1; i < n; i++) {
1157:     if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
1158:       /* the down smoother is also used for the post-smooth, or this level is revisited within one PCApply, so it must accept a nonzero initial guess */
1159:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
1160:     }
1161:     if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1162:     if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1163:     PetscCall(KSPSetUp(mglevels[i]->smoothd));
1164:     if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1165:     if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1166:     if (!mglevels[i]->residual) {
1167:       Mat mat;
1168:       PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1169:       PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
1170:     }
1171:     if (!mglevels[i]->residualtranspose) {
1172:       Mat mat;
1173:       PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1174:       PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
1175:     }
1176:   }
1177:   for (i = 1; i < n; i++) {
1178:     if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
1179:       Mat downmat, downpmat;

1181:       /* check if operators have been set for up, if not use down operators to set them */
1182:       PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
1183:       if (!opsset) {
1184:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1185:         PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
1186:       }

1188:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
1189:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1190:       PetscCall(KSPSetUp(mglevels[i]->smoothu));
1191:       if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
1192:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1193:     }
1194:     if (mglevels[i]->cr) {
1195:       Mat downmat, downpmat;

1197:       /* check if operators have been set for the compatible relaxation (cr) KSP, if not use the down smoother's operators to set them */
1198:       PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
1199:       if (!opsset) {
1200:         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1201:         PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
1202:       }

1204:       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1205:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1206:       PetscCall(KSPSetUp(mglevels[i]->cr));
1207:       if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
1208:       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1209:     }
1210:   }

1212:   if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1213:   PetscCall(KSPSetUp(mglevels[0]->smoothd));
1214:   if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1215:   if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));

1217:   /*
1218:      Dump the interpolation/restriction matrices plus the
1219:      Jacobian/stiffness on each level. This allows MATLAB users to
1220:      easily check whether the Galerkin condition A_c = R A_f R^T is satisfied.

1222:      Only one of the MATLAB (socket) dump and the binary dump is supported at a time.
1223:   */
1224: #if defined(PETSC_USE_SOCKET_VIEWER)
1225:   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
1226:   if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
1227:   dump = PETSC_FALSE;
1228: #endif
1229:   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
1230:   if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));

1232:   if (viewer) {
1233:     for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
1234:     for (i = 0; i < n; i++) {
1235:       PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
1236:       PetscCall(MatView(pc->mat, viewer));
1237:     }
1238:   }
1239:   PetscFunctionReturn(PETSC_SUCCESS);
1240: }

1242: PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
1243: {
1244:   PC_MG *mg = (PC_MG *)pc->data;

1246:   PetscFunctionBegin;
1247:   *levels = mg->nlevels;
1248:   PetscFunctionReturn(PETSC_SUCCESS);
1249: }

1251: /*@
1252:   PCMGGetLevels - Gets the number of levels to use with `PCMG`.

1254:   Not Collective

1256:   Input Parameter:
1257: . pc - the preconditioner context

1259:   Output Parameter:
1260: . levels - the number of levels

1262:   Level: advanced

1264: .seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
1265: @*/
1266: PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
1267: {
1268:   PetscFunctionBegin;
1270:   PetscAssertPointer(levels, 2);
1271:   *levels = 0;
1272:   PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
1273:   PetscFunctionReturn(PETSC_SUCCESS);
1274: }
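
/*
   A minimal usage sketch (illustrative, not from the original source): querying the number of
   levels after a PCMG preconditioner has been configured. All variable names are placeholders.

     PC       pc;
     PetscInt nlevels;

     PetscCall(PCCreate(PETSC_COMM_WORLD, &pc));
     PetscCall(PCSetType(pc, PCMG));
     PetscCall(PCMGSetLevels(pc, 3, NULL));  // three levels, default communicators
     PetscCall(PCMGGetLevels(pc, &nlevels)); // nlevels is now 3
     PetscCall(PCDestroy(&pc));
*/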

1276: /*@
1277:   PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy

1279:   Input Parameter:
1280: . pc - the preconditioner context

1282:   Output Parameters:
1283: + gc - grid complexity = sum_i(n_i) / n_0
1284: - oc - operator complexity = sum_i(nnz_i) / nnz_0

1286:   Level: advanced

1288:   Note:
1289:   This is often called the operator complexity in the multigrid literature

1291: .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`
1292: @*/
1293: PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc)
1294: {
1295:   PC_MG         *mg       = (PC_MG *)pc->data;
1296:   PC_MG_Levels **mglevels = mg->levels;
1297:   PetscInt       lev, N;
1298:   PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0;
1299:   MatInfo        info;

1301:   PetscFunctionBegin;
1303:   if (gc) PetscAssertPointer(gc, 2);
1304:   if (oc) PetscAssertPointer(oc, 3);
1305:   if (!pc->setupcalled) {
1306:     if (gc) *gc = 0;
1307:     if (oc) *oc = 0;
1308:     PetscFunctionReturn(PETSC_SUCCESS);
1309:   }
1310:   PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels");
1311:   for (lev = 0; lev < mg->nlevels; lev++) {
1312:     Mat dB;
1313:     PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB));
1314:     PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */
1315:     PetscCall(MatGetSize(dB, &N, NULL));
1316:     sgc += N;
1317:     soc += info.nz_used;
1318:     if (lev == mg->nlevels - 1) {
1319:       nnz0 = info.nz_used;
1320:       n0   = N;
1321:     }
1322:   }
1323:   PetscCheck(n0 > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of grid points on finest level is not available");
1324:   if (gc) *gc = (PetscReal)(sgc / n0);
1325:   if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0);
1326:   PetscFunctionReturn(PETSC_SUCCESS);
1327: }
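
/*
   A hedged usage sketch (not from the original source): reporting grid and operator complexity.
   The values are only meaningful after KSPSetUp()/PCSetUp() has run; `ksp` is assumed to be a
   fully configured KSP whose preconditioner is a PCMG.

     PC        pc;
     PetscReal gc, oc;

     PetscCall(KSPSetUp(ksp));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCMGGetGridComplexity(pc, &gc, &oc));
     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "grid complexity %g  operator complexity %g\n", (double)gc, (double)oc));
*/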

1329: /*@
1330:   PCMGSetType - Determines the form of multigrid to use, either
1331:   multiplicative, additive, full, or the Kaskade algorithm.

1333:   Logically Collective

1335:   Input Parameters:
1336: + pc   - the preconditioner context
1337: - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`

1339:   Options Database Key:
1340: . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade

1342:   Level: advanced

1344: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType`
1345: @*/
1346: PetscErrorCode PCMGSetType(PC pc, PCMGType form)
1347: {
1348:   PC_MG *mg = (PC_MG *)pc->data;

1350:   PetscFunctionBegin;
1353:   mg->am = form;
1354:   if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG;
1355:   else pc->ops->applyrichardson = NULL;
1356:   PetscFunctionReturn(PETSC_SUCCESS);
1357: }
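
/*
   Illustrative sketch: selecting full multigrid programmatically, which is equivalent to the
   command-line option -pc_mg_type full. `pc` is assumed to already be of type PCMG.

     PetscCall(PCMGSetType(pc, PC_MG_FULL));
*/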

1359: /*@
1360:   PCMGGetType - Finds the form of multigrid the `PCMG` is using: multiplicative, additive, full, or the Kaskade algorithm.

1362:   Not Collective

1364:   Input Parameter:
1365: . pc - the preconditioner context

1367:   Output Parameter:
1368: . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, or `PC_MG_KASKADE`

1370:   Level: advanced

1372: .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()`
1373: @*/
1374: PetscErrorCode PCMGGetType(PC pc, PCMGType *type)
1375: {
1376:   PC_MG *mg = (PC_MG *)pc->data;

1378:   PetscFunctionBegin;
1380:   *type = mg->am;
1381:   PetscFunctionReturn(PETSC_SUCCESS);
1382: }

1384: /*@
1385:   PCMGSetCycleType - Sets the type of cycles to use.  Use `PCMGSetCycleTypeOnLevel()` for more
1386:   complicated cycling.

1388:   Logically Collective

1390:   Input Parameters:
1391: + pc - the multigrid context
1392: - n  - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W`

1394:   Options Database Key:
1395: . -pc_mg_cycle_type <v,w> - provide the cycle desired

1397:   Level: advanced

1399: .seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
1400: @*/
1401: PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
1402: {
1403:   PC_MG         *mg       = (PC_MG *)pc->data;
1404:   PC_MG_Levels **mglevels = mg->levels;
1405:   PetscInt       i, levels;

1407:   PetscFunctionBegin;
1410:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1411:   levels = mglevels[0]->levels;
1412:   for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
1413:   PetscFunctionReturn(PETSC_SUCCESS);
1414: }
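
/*
   Illustrative sketch: requesting W-cycles on every level, equivalent to -pc_mg_cycle_type w.
   As checked above, PCMGSetLevels() must have been called first; `pc` and `nlevels` are
   placeholders.

     PetscCall(PCMGSetLevels(pc, nlevels, NULL));
     PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_W));
*/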

1416: /*@
1417:   PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
1418:   of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`

1420:   Logically Collective

1422:   Input Parameters:
1423: + pc - the multigrid context
1424: - n  - number of cycles (default is 1)

1426:   Options Database Key:
1427: . -pc_mg_multiplicative_cycles n - set the number of cycles

1429:   Level: advanced

1431:   Note:
1432:   This is not associated with setting a V or W cycle; that is set with `PCMGSetCycleType()`

1434: .seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
1435: @*/
1436: PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
1437: {
1438:   PC_MG *mg = (PC_MG *)pc->data;

1440:   PetscFunctionBegin;
1443:   mg->cyclesperpcapply = n;
1444:   PetscFunctionReturn(PETSC_SUCCESS);
1445: }

1447: /*
1448:    Since the finest-level KSP shares the original matrix (of the entire system), its preconditioner
1449:    should not be updated if the whole PC is supposed to reuse the preconditioner
1450: */
1451: static PetscErrorCode PCSetReusePreconditioner_MG(PC pc, PetscBool flag)
1452: {
1453:   PC_MG         *mg       = (PC_MG *)pc->data;
1454:   PC_MG_Levels **mglevels = mg->levels;
1455:   PetscInt       levels;
1456:   PC             tpc;

1458:   PetscFunctionBegin;
1461:   if (mglevels) {
1462:     levels = mglevels[0]->levels;
1463:     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
1464:     tpc->reusepreconditioner = flag;
1465:     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
1466:     tpc->reusepreconditioner = flag;
1467:   }
1468:   PetscFunctionReturn(PETSC_SUCCESS);
1469: }

1471: static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
1472: {
1473:   PC_MG *mg = (PC_MG *)pc->data;

1475:   PetscFunctionBegin;
1476:   mg->galerkin = use;
1477:   PetscFunctionReturn(PETSC_SUCCESS);
1478: }

1480: /*@
1481:   PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
1482:   finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i

1484:   Logically Collective

1486:   Input Parameters:
1487: + pc  - the multigrid context
1488: - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`

1490:   Options Database Key:
1491: . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process

1493:   Level: intermediate

1495:   Note:
1496:   Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not
1497:   use the `PCMG` construction of the coarser grids.

1499: .seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
1500: @*/
1501: PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
1502: {
1503:   PetscFunctionBegin;
1505:   PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use));
1506:   PetscFunctionReturn(PETSC_SUCCESS);
1507: }
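
/*
   Illustrative sketch: with Galerkin coarsening only the finest-level operator and the
   interpolations need to be supplied, and the coarser preconditioning matrices are then formed
   as R A P. The call below is equivalent to -pc_mg_galerkin pmat.

     PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
*/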

1509: /*@
1510:   PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i

1512:   Not Collective

1514:   Input Parameter:
1515: . pc - the multigrid context

1517:   Output Parameter:
1518: . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`

1520:   Level: intermediate

1522: .seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
1523: @*/
1524: PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
1525: {
1526:   PC_MG *mg = (PC_MG *)pc->data;

1528:   PetscFunctionBegin;
1530:   *galerkin = mg->galerkin;
1531:   PetscFunctionReturn(PETSC_SUCCESS);
1532: }

1534: static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
1535: {
1536:   PC_MG *mg = (PC_MG *)pc->data;

1538:   PetscFunctionBegin;
1539:   mg->adaptInterpolation = adapt;
1540:   PetscFunctionReturn(PETSC_SUCCESS);
1541: }

1543: static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
1544: {
1545:   PC_MG *mg = (PC_MG *)pc->data;

1547:   PetscFunctionBegin;
1548:   *adapt = mg->adaptInterpolation;
1549:   PetscFunctionReturn(PETSC_SUCCESS);
1550: }

1552: static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
1553: {
1554:   PC_MG *mg = (PC_MG *)pc->data;

1556:   PetscFunctionBegin;
1557:   mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
1558:   mg->coarseSpaceType    = ctype;
1559:   PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
1560:   PetscFunctionReturn(PETSC_SUCCESS);
1561: }

1563: static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
1564: {
1565:   PC_MG *mg = (PC_MG *)pc->data;

1567:   PetscFunctionBegin;
1568:   *ctype = mg->coarseSpaceType;
1569:   PetscFunctionReturn(PETSC_SUCCESS);
1570: }

1572: static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
1573: {
1574:   PC_MG *mg = (PC_MG *)pc->data;

1576:   PetscFunctionBegin;
1577:   mg->compatibleRelaxation = cr;
1578:   PetscFunctionReturn(PETSC_SUCCESS);
1579: }

1581: static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
1582: {
1583:   PC_MG *mg = (PC_MG *)pc->data;

1585:   PetscFunctionBegin;
1586:   *cr = mg->compatibleRelaxation;
1587:   PetscFunctionReturn(PETSC_SUCCESS);
1588: }

1590: /*@
1591:   PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.

1593:   Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

1595:   Logically Collective

1597:   Input Parameters:
1598: + pc    - the multigrid context
1599: - ctype - the type of coarse space

1601:   Options Database Keys:
1602: + -pc_mg_adapt_interp_n <int>             - The number of modes to use
1603: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw`

1605:   Level: intermediate

1607:   Note:
1608:   Requires a `DM` with specific functionality be attached to the `PC`.

1610: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
1611: @*/
1612: PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
1613: {
1614:   PetscFunctionBegin;
1617:   PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype));
1618:   PetscFunctionReturn(PETSC_SUCCESS);
1619: }
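
/*
   A hedged sketch: the options-database form below is taken from the documentation above, while
   the enum value PCMG_ADAPT_HARMONIC is assumed to correspond to the "harmonic" choice and
   should be checked against the PETSc headers.

     PetscCall(PCMGSetAdaptCoarseSpaceType(pc, PCMG_ADAPT_HARMONIC)); // enum name assumed

   or, equivalently, from the command line:

     -pc_mg_adapt_interp_coarse_space harmonic -pc_mg_adapt_interp_n 6
*/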

1621: /*@
1622:   PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.

1624:   Not Collective

1626:   Input Parameter:
1627: . pc - the multigrid context

1629:   Output Parameter:
1630: . ctype - the type of coarse space

1632:   Level: intermediate

1634: .seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
1635: @*/
1636: PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
1637: {
1638:   PetscFunctionBegin;
1640:   PetscAssertPointer(ctype, 2);
1641:   PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype));
1642:   PetscFunctionReturn(PETSC_SUCCESS);
1643: }

1645: /* MATT: REMOVE? */
1646: /*@
1647:   PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

1649:   Logically Collective

1651:   Input Parameters:
1652: + pc    - the multigrid context
1653: - adapt - flag for adaptation of the interpolator

1655:   Options Database Keys:
1656: + -pc_mg_adapt_interp                     - Turn on adaptation
1657: . -pc_mg_adapt_interp_n <int>             - The number of modes to use, should be divisible by dimension
1658: - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector

1660:   Level: intermediate

1662: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1663: @*/
1664: PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
1665: {
1666:   PetscFunctionBegin;
1668:   PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt));
1669:   PetscFunctionReturn(PETSC_SUCCESS);
1670: }

1672: /*@
1673:   PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
1674:   and thus accurately interpolated.

1676:   Not Collective

1678:   Input Parameter:
1679: . pc - the multigrid context

1681:   Output Parameter:
1682: . adapt - flag for adaptation of the interpolator

1684:   Level: intermediate

1686: .seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1687: @*/
1688: PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
1689: {
1690:   PetscFunctionBegin;
1692:   PetscAssertPointer(adapt, 2);
1693:   PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt));
1694:   PetscFunctionReturn(PETSC_SUCCESS);
1695: }

1697: /*@
1698:   PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.

1700:   Logically Collective

1702:   Input Parameters:
1703: + pc - the multigrid context
1704: - cr - flag for compatible relaxation

1706:   Options Database Key:
1707: . -pc_mg_adapt_cr - Turn on compatible relaxation

1709:   Level: intermediate

1711: .seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1712: @*/
1713: PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
1714: {
1715:   PetscFunctionBegin;
1717:   PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr));
1718:   PetscFunctionReturn(PETSC_SUCCESS);
1719: }

1721: /*@
1722:   PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.

1724:   Not Collective

1726:   Input Parameter:
1727: . pc - the multigrid context

1729:   Output Parameter:
1730: . cr - flag for compatible relaxation

1732:   Level: intermediate

1734: .seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1735: @*/
1736: PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
1737: {
1738:   PetscFunctionBegin;
1740:   PetscAssertPointer(cr, 2);
1741:   PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr));
1742:   PetscFunctionReturn(PETSC_SUCCESS);
1743: }

1745: /*@
1746:   PCMGSetNumberSmooth - Sets the number of pre- and post-smoothing steps to use
1747:   on all levels.  Use `PCMGSetDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
1748:   pre- and post-smoothing steps.

1750:   Logically Collective

1752:   Input Parameters:
1753: + pc - the multigrid context
1754: - n  - the number of smoothing steps

1756:   Options Database Key:
1757: . -mg_levels_ksp_max_it <n> - Sets the number of pre- and post-smoothing steps

1759:   Level: advanced

1761:   Note:
1762:   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

1764: .seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()`
1765: @*/
1766: PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
1767: {
1768:   PC_MG         *mg       = (PC_MG *)pc->data;
1769:   PC_MG_Levels **mglevels = mg->levels;
1770:   PetscInt       i, levels;

1772:   PetscFunctionBegin;
1775:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1776:   levels = mglevels[0]->levels;

1778:   for (i = 1; i < levels; i++) {
1779:     PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1780:     PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1781:     mg->default_smoothu = n;
1782:     mg->default_smoothd = n;
1783:   }
1784:   PetscFunctionReturn(PETSC_SUCCESS);
1785: }
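
/*
   Illustrative sketch: use three pre- and post-smoothing sweeps on every level except the
   coarsest, equivalent to -mg_levels_ksp_max_it 3. PCMGSetLevels() must already have been
   called on `pc`.

     PetscCall(PCMGSetNumberSmooth(pc, 3));
*/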

1787: /*@
1788:   PCMGSetDistinctSmoothUp - Sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
1789:   and appends the suffix `up_` to its options prefix

1791:   Logically Collective

1793:   Input Parameter:
1794: . pc - the preconditioner context

1796:   Options Database Key:
1797: . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects

1799:   Level: advanced

1801:   Note:
1802:   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.

1804: .seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
1805: @*/
1806: PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
1807: {
1808:   PC_MG         *mg       = (PC_MG *)pc->data;
1809:   PC_MG_Levels **mglevels = mg->levels;
1810:   PetscInt       i, levels;
1811:   KSP            subksp;

1813:   PetscFunctionBegin;
1815:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1816:   levels = mglevels[0]->levels;

1818:   for (i = 1; i < levels; i++) {
1819:     const char *prefix = NULL;
1820:     /* make sure the up and down smoothers are distinct KSP objects */
1821:     PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
1822:     PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
1823:     PetscCall(KSPSetOptionsPrefix(subksp, prefix));
1824:     PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
1825:   }
1826:   PetscFunctionReturn(PETSC_SUCCESS);
1827: }
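
/*
   Illustrative sketch: once the up and down smoothers are split, each up smoother is addressed
   through its own options prefix (the down smoother's prefix with "up_" appended, as done
   above). The level-numbered prefix shown assumes the default prefixes.

     PetscCall(PCMGSetLevels(pc, nlevels, NULL));
     PetscCall(PCMGSetDistinctSmoothUp(pc));

   after which, for example, the level-1 up smoother can be limited to a single sweep with

     -mg_levels_1_up_ksp_max_it 1
*/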

1829: /* No new matrices are created; the returned interpolation matrices are references to the original ones */
1830: static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
1831: {
1832:   PC_MG         *mg       = (PC_MG *)pc->data;
1833:   PC_MG_Levels **mglevels = mg->levels;
1834:   Mat           *mat;
1835:   PetscInt       l;

1837:   PetscFunctionBegin;
1838:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1839:   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1840:   for (l = 1; l < mg->nlevels; l++) {
1841:     mat[l - 1] = mglevels[l]->interpolate;
1842:     PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
1843:   }
1844:   *num_levels     = mg->nlevels;
1845:   *interpolations = mat;
1846:   PetscFunctionReturn(PETSC_SUCCESS);
1847: }

1849: /* No new matrices are created; the returned coarse operator matrices are references to the original ones */
1850: static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1851: {
1852:   PC_MG         *mg       = (PC_MG *)pc->data;
1853:   PC_MG_Levels **mglevels = mg->levels;
1854:   PetscInt       l;
1855:   Mat           *mat;

1857:   PetscFunctionBegin;
1858:   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1859:   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1860:   for (l = 0; l < mg->nlevels - 1; l++) {
1861:     PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
1862:     PetscCall(PetscObjectReference((PetscObject)mat[l]));
1863:   }
1864:   *num_levels      = mg->nlevels;
1865:   *coarseOperators = mat;
1866:   PetscFunctionReturn(PETSC_SUCCESS);
1867: }

1869: /*@C
1870:   PCMGRegisterCoarseSpaceConstructor -  Adds a method to the `PCMG` package for coarse space construction.

1872:   Not Collective, No Fortran Support

1874:   Input Parameters:
1875: + name     - name of the constructor
1876: - function - constructor routine, see `PCMGCoarseSpaceConstructorFn`

1878:   Level: advanced

1880:   Developer Notes:
1881:   This does not appear to be used anywhere

1883: .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
1884: @*/
1885: PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn *function)
1886: {
1887:   PetscFunctionBegin;
1888:   PetscCall(PCInitializePackage());
1889:   PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
1890:   PetscFunctionReturn(PETSC_SUCCESS);
1891: }

1893: /*@C
1894:   PCMGGetCoarseSpaceConstructor -  Returns the given coarse space construction method.

1896:   Not Collective, No Fortran Support

1898:   Input Parameter:
1899: . name - name of the constructor

1901:   Output Parameter:
1902: . function - constructor routine

1904:   Level: advanced

1906: .seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
1907: @*/
1908: PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn **function)
1909: {
1910:   PetscFunctionBegin;
1911:   PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
1912:   PetscFunctionReturn(PETSC_SUCCESS);
1913: }

1915: /*MC
1916:    PCMG - Use multigrid preconditioning. This preconditioner requires that you provide additional
1917:     information about the coarser grid matrices and restriction/interpolation operators.

1919:    Options Database Keys:
1920: +  -pc_mg_levels <nlevels>                            - number of levels including finest
1921: .  -pc_mg_cycle_type <v,w>                            - provide the cycle desired
1922: .  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
1923: .  -pc_mg_log                                         - log information about time spent on each level of the solver
1924: .  -pc_mg_distinct_smoothup                           - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
1925: .  -pc_mg_galerkin <both,pmat,mat,none>               - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R'
1926: .  -pc_mg_multiplicative_cycles                        - number of cycles to use as the preconditioner (defaults to 1)
1927: .  -pc_mg_dump_matlab                                  - dumps the matrices for each level and the restriction/interpolation matrices
1928:                                                          to a `PETSCVIEWERSOCKET` for reading from MATLAB.
1929: -  -pc_mg_dump_binary                                  - dumps the matrices for each level and the restriction/interpolation matrices
1930:                                                         to the binary output file called binaryoutput

1932:    Level: intermediate

1934:    Notes:
1935:    The Krylov solver (if any) and preconditioner (smoother) and their parameters are controlled from the options database with the standard
1936:    options database keywords prefixed with `-mg_levels_` to affect all the levels but the coarsest, which is controlled with `-mg_coarse_`,
1937:    and the finest where `-mg_fine_` can override `-mg_levels_`.  One can set different preconditioners etc on specific levels with the prefix
1938:    `-mg_levels_n_` where `n` is the level number (zero being the coarsest level). For example
1939: .vb
1940:    -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
1941: .ve
1942:    These options also work for controlling the smoothers etc inside `PCGAMG`

1944:    If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother, then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method

1946:    When run with a single level, the smoother options are used on that level, NOT the coarse grid solver options

1948:    When run with `KSPRICHARDSON` the convergence test changes slightly if a monitor is turned on, so the iteration count may differ slightly. This
1949:    is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
1950:    (because the residual has just been computed for the multigrid algorithm and is hence available for free), while with monitoring the
1951:    residual is computed at the end of each cycle.

1953: .seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
1954:           `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
1955:           `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
1956:           `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
1957:           `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
1958:           `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1959: M*/
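
/*
   A minimal end-to-end sketch (not from the original source) of driving PCMG from a KSP for a
   two-level hierarchy. `A` is the fine-grid system matrix, `P1` a user-assembled interpolation
   from the coarse level to the fine level, and `b`, `x` the right-hand side and solution; all
   are hypothetical placeholders.

     KSP ksp;
     PC  pc;

     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCMG));
     PetscCall(PCMGSetLevels(pc, 2, NULL));
     PetscCall(PCMGSetInterpolation(pc, 1, P1));
     PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT)); // coarse operator formed as R A P
     PetscCall(KSPSetFromOptions(ksp));                   // e.g. -pc_mg_cycle_type w -mg_levels_pc_type sor
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));
*/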

1961: PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
1962: {
1963:   PC_MG *mg;

1965:   PetscFunctionBegin;
1966:   PetscCall(PetscNew(&mg));
1967:   pc->data               = mg;
1968:   mg->nlevels            = -1;
1969:   mg->am                 = PC_MG_MULTIPLICATIVE;
1970:   mg->galerkin           = PC_MG_GALERKIN_NONE;
1971:   mg->adaptInterpolation = PETSC_FALSE;
1972:   mg->Nc                 = -1;
1973:   mg->eigenvalue         = -1;

1975:   pc->useAmat = PETSC_TRUE;

1977:   pc->ops->apply             = PCApply_MG;
1978:   pc->ops->applytranspose    = PCApplyTranspose_MG;
1979:   pc->ops->matapply          = PCMatApply_MG;
1980:   pc->ops->matapplytranspose = PCMatApplyTranspose_MG;
1981:   pc->ops->setup             = PCSetUp_MG;
1982:   pc->ops->reset             = PCReset_MG;
1983:   pc->ops->destroy           = PCDestroy_MG;
1984:   pc->ops->setfromoptions    = PCSetFromOptions_MG;
1985:   pc->ops->view              = PCView_MG;

1987:   PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
1988:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
1989:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", PCSetReusePreconditioner_MG));
1990:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
1991:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
1992:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
1993:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
1994:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
1995:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
1996:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
1997:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
1998:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
1999:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
2000:   PetscFunctionReturn(PETSC_SUCCESS);
2001: }