Actual source code: hmg.c

  1: #include <petscdm.h>
  2: #include <petsc/private/hashmapi.h>
  3: #include <petsc/private/matimpl.h>
  4: #include <petsc/private/pcmgimpl.h>
  5: #include <petsc/private/pcimpl.h>

  7: typedef struct {
  8:   PC        innerpc;       /* An inner MG PC (PCHYPRE or PCGAMG) used to set up the interpolations and coarse operators */
  9:   char     *innerpctype;   /* PCGAMG or PCHYPRE */
 10:   PetscBool reuseinterp;   /* Whether to reuse the interpolations */
 11:   PetscBool subcoarsening; /* Whether to use a subspace-based coarsening algorithm */
 12:   PetscBool usematmaij;    /* Whether to use MatMAIJ to save memory */
 13:   PetscInt  component;     /* Which component (subspace) is used for the subspace-based coarsening algorithm */
 14: } PC_HMG;

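/* Extract the submatrix of a single component from an interlaced multi-component matrix:
   a strided index set selects every blocksize-th row starting at rstart + component, and the
   same index set is used for the columns */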
 16: static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat, Mat *submat, MatReuse reuse, PetscInt component, PetscInt blocksize)
 17: {
 18:   IS       isrow;
 19:   PetscInt rstart, rend;
 20:   MPI_Comm comm;

 22:   PetscFunctionBegin;
 23:   PetscCall(PetscObjectGetComm((PetscObject)pmat, &comm));
 24:   PetscCheck(component < blocksize, comm, PETSC_ERR_ARG_INCOMP, "Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ", component, blocksize);
 25:   PetscCall(MatGetOwnershipRange(pmat, &rstart, &rend));
 26:   PetscCheck((rend - rstart) % blocksize == 0, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ", blocksize, rstart, rend);
 27:   PetscCall(ISCreateStride(comm, (rend - rstart) / blocksize, rstart + component, blocksize, &isrow));
 28:   PetscCall(MatCreateSubMatrix(pmat, isrow, isrow, reuse, submat));
 29:   PetscCall(ISDestroy(&isrow));
 30:   PetscFunctionReturn(PETSC_SUCCESS);
 31: }

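/* Expand a single-component (subspace) interpolation into an interpolation for the full
   multi-component space: each nonzero of subinterp is replicated once per component, at row
   subrow*blocksize + i and column idx*blocksize + i, so every component shares the same
   interpolation weights */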
 33: static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize)
 34: {
 35:   PetscInt           subrstart, subrend, subrowsize, subcolsize, subcstart, subcend, rowsize, colsize;
 36:   PetscInt           subrow, row, nz, *d_nnz, *o_nnz, i, j, dnz, onz, max_nz, *indices;
 37:   const PetscInt    *idx;
 38:   const PetscScalar *values;
 39:   MPI_Comm           comm;

 41:   PetscFunctionBegin;
 42:   PetscCall(PetscObjectGetComm((PetscObject)subinterp, &comm));
 43:   PetscCall(MatGetOwnershipRange(subinterp, &subrstart, &subrend));
 44:   subrowsize = subrend - subrstart;
 45:   rowsize    = subrowsize * blocksize;
 46:   PetscCall(PetscCalloc2(rowsize, &d_nnz, rowsize, &o_nnz));
 47:   PetscCall(MatGetOwnershipRangeColumn(subinterp, &subcstart, &subcend));
 48:   subcolsize = subcend - subcstart;
 49:   colsize    = subcolsize * blocksize;
 50:   max_nz     = 0;
 51:   for (subrow = subrstart; subrow < subrend; subrow++) {
 52:     PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, NULL));
 53:     if (max_nz < nz) max_nz = nz;
 54:     dnz = 0;
 55:     onz = 0;
 56:     for (i = 0; i < nz; i++) {
 57:       if (idx[i] >= subcstart && idx[i] < subcend) dnz++;
 58:       else onz++;
 59:     }
 60:     for (i = 0; i < blocksize; i++) {
 61:       d_nnz[(subrow - subrstart) * blocksize + i] = dnz;
 62:       o_nnz[(subrow - subrstart) * blocksize + i] = onz;
 63:     }
 64:     PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, NULL));
 65:   }
 66:   PetscCall(MatCreateAIJ(comm, rowsize, colsize, PETSC_DETERMINE, PETSC_DETERMINE, 0, d_nnz, 0, o_nnz, interp));
 67:   PetscCall(MatSetOption(*interp, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
 68:   PetscCall(MatSetOption(*interp, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE));
 69:   PetscCall(MatSetOption(*interp, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
 70:   PetscCall(MatSetFromOptions(*interp));

 72:   PetscCall(MatSetUp(*interp));
 73:   PetscCall(PetscFree2(d_nnz, o_nnz));
 74:   PetscCall(PetscMalloc1(max_nz, &indices));
 75:   for (subrow = subrstart; subrow < subrend; subrow++) {
 76:     PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, &values));
 77:     for (i = 0; i < blocksize; i++) {
 78:       row = subrow * blocksize + i;
 79:       for (j = 0; j < nz; j++) indices[j] = idx[j] * blocksize + i;
 80:       PetscCall(MatSetValues(*interp, 1, &row, nz, indices, values, INSERT_VALUES));
 81:     }
 82:     PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, &values));
 83:   }
 84:   PetscCall(PetscFree(indices));
 85:   PetscCall(MatAssemblyBegin(*interp, MAT_FINAL_ASSEMBLY));
 86:   PetscCall(MatAssemblyEnd(*interp, MAT_FINAL_ASSEMBLY));
 87:   PetscFunctionReturn(PETSC_SUCCESS);
 88: }

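/* Build the multigrid hierarchy: create an inner AMG preconditioner (PCGAMG or PCHYPRE),
   optionally on the submatrix of a single component, let it coarsen the problem, then transfer
   its interpolations (and, for full-space coarsening, its coarse operators) to the PCMG levels
   of this PC. With subspace coarsening, the coarse operators are instead formed by Galerkin
   projection of the full matrix */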
 90: static PetscErrorCode PCSetUp_HMG(PC pc)
 91: {
 92:   Mat              PA, submat;
 93:   PC_MG           *mg  = (PC_MG *)pc->data;
 94:   PC_HMG          *hmg = (PC_HMG *)mg->innerctx;
 95:   MPI_Comm         comm;
 96:   PetscInt         level;
 97:   PetscInt         num_levels;
 98:   Mat             *operators, *interpolations;
 99:   PetscInt         blocksize;
100:   const char      *prefix;
101:   PCMGGalerkinType galerkin;

103:   PetscFunctionBegin;
104:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
105:   if (pc->setupcalled) {
106:     if (hmg->reuseinterp) {
107:       /* If we did not use Galerkin in the last call or we have a different sparsity pattern now,
108:        * we have to build from scratch
109:        */
110:       PetscCall(PCMGGetGalerkin(pc, &galerkin));
111:       if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
112:       PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
113:       PetscCall(PCSetUp_MG(pc));
114:       PetscFunctionReturn(PETSC_SUCCESS);
115:     } else {
116:       PetscCall(PCReset_MG(pc));
117:       pc->setupcalled = PETSC_FALSE;
118:     }
119:   }

121:   /* Create an inner PC (GAMG or HYPRE) */
122:   if (!hmg->innerpc) {
123:     PetscCall(PCCreate(comm, &hmg->innerpc));
124:     /* If users do not set an inner pc type, we need to set a default value */
125:     if (!hmg->innerpctype) {
126:       /* If hypre is available, use hypre, otherwise, use gamg */
127: #if PetscDefined(HAVE_HYPRE)
128:       PetscCall(PetscStrallocpy(PCHYPRE, &hmg->innerpctype));
129: #else
130:       PetscCall(PetscStrallocpy(PCGAMG, &hmg->innerpctype));
131: #endif
132:     }
133:     PetscCall(PCSetType(hmg->innerpc, hmg->innerpctype));
134:   }
135:   PetscCall(PCGetOperators(pc, NULL, &PA));
136:   /* Users need to correctly set the block size of the matrix in order to use subspace coarsening */
137:   PetscCall(MatGetBlockSize(PA, &blocksize));
138:   if (blocksize <= 1) hmg->subcoarsening = PETSC_FALSE;
139:   /* Extract a submatrix for constructing subinterpolations */
140:   if (hmg->subcoarsening) {
141:     PetscCall(PCHMGExtractSubMatrix_Private(PA, &submat, MAT_INITIAL_MATRIX, hmg->component, blocksize));
142:     PA = submat;
143:   }
144:   PetscCall(PCSetOperators(hmg->innerpc, PA, PA));
145:   if (hmg->subcoarsening) PetscCall(MatDestroy(&PA));
146:   /* Set up the inner PC. During this step, the matrix will be coarsened */
147:   PetscCall(PCSetUseAmat(hmg->innerpc, PETSC_FALSE));
148:   PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
149:   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc, prefix));
150:   PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc, "hmg_inner_"));
151:   PetscCall(PCSetFromOptions(hmg->innerpc));
152:   PetscCall(PCSetUp(hmg->innerpc));

154:   /* Obtain interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
155:   PetscCall(PCGetInterpolations(hmg->innerpc, &num_levels, &interpolations));
156:   /* We can reuse the coarse operators when we do the full space coarsening */
157:   if (!hmg->subcoarsening) PetscCall(PCGetCoarseOperators(hmg->innerpc, &num_levels, &operators));

159:   PetscCall(PCDestroy(&hmg->innerpc));
160:   hmg->innerpc = NULL;
161:   PetscCall(PCMGSetLevels_MG(pc, num_levels, NULL));
162:   /* Pass the coarse matrices and interpolations to PCMG */
163:   for (level = num_levels - 1; level > 0; level--) {
164:     Mat P = NULL, pmat = NULL;
165:     Vec b, x, r;
166:     if (hmg->subcoarsening) {
167:       if (hmg->usematmaij) {
168:         PetscCall(MatCreateMAIJ(interpolations[level - 1], blocksize, &P));
169:         PetscCall(MatDestroy(&interpolations[level - 1]));
170:       } else {
171:         /* Grow interpolation. In the future, we should use MAIJ */
172:         PetscCall(PCHMGExpandInterpolation_Private(interpolations[level - 1], &P, blocksize));
173:         PetscCall(MatDestroy(&interpolations[level - 1]));
174:       }
175:     } else {
176:       P = interpolations[level - 1];
177:     }
178:     PetscCall(MatCreateVecs(P, &b, &r));
179:     PetscCall(PCMGSetInterpolation(pc, level, P));
180:     PetscCall(PCMGSetRestriction(pc, level, P));
181:     PetscCall(MatDestroy(&P));
182:     /* We reuse the matrices when we do not do subspace coarsening */
183:     if ((level - 1) >= 0 && !hmg->subcoarsening) {
184:       pmat = operators[level - 1];
185:       PetscCall(PCMGSetOperators(pc, level - 1, pmat, pmat));
186:       PetscCall(MatDestroy(&pmat));
187:     }
188:     PetscCall(PCMGSetRhs(pc, level - 1, b));

190:     PetscCall(PCMGSetR(pc, level, r));
191:     PetscCall(VecDestroy(&r));

193:     PetscCall(VecDuplicate(b, &x));
194:     PetscCall(PCMGSetX(pc, level - 1, x));
195:     PetscCall(VecDestroy(&x));
196:     PetscCall(VecDestroy(&b));
197:   }
198:   PetscCall(PetscFree(interpolations));
199:   if (!hmg->subcoarsening) PetscCall(PetscFree(operators));
200:   /* Turn Galerkin off when we already have coarse operators */
201:   PetscCall(PCMGSetGalerkin(pc, hmg->subcoarsening ? PC_MG_GALERKIN_PMAT : PC_MG_GALERKIN_NONE));
202:   PetscCall(PCSetDM(pc, NULL));
203:   PetscCall(PCSetUseAmat(pc, PETSC_FALSE));
204:   PetscObjectOptionsBegin((PetscObject)pc);
205:   PetscCall(PCSetFromOptions_MG(pc, PetscOptionsObject)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
206:   PetscOptionsEnd();
207:   PetscCall(PCSetUp_MG(pc));
208:   PetscFunctionReturn(PETSC_SUCCESS);
209: }

211: static PetscErrorCode PCDestroy_HMG(PC pc)
212: {
213:   PC_MG  *mg  = (PC_MG *)pc->data;
214:   PC_HMG *hmg = (PC_HMG *)mg->innerctx;

216:   PetscFunctionBegin;
217:   PetscCall(PCDestroy(&hmg->innerpc));
218:   PetscCall(PetscFree(hmg->innerpctype));
219:   PetscCall(PetscFree(hmg));
220:   PetscCall(PCDestroy_MG(pc));

222:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", NULL));
223:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", NULL));
224:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", NULL));
225:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", NULL));
226:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", NULL));
227:   PetscFunctionReturn(PETSC_SUCCESS);
228: }

230: static PetscErrorCode PCView_HMG(PC pc, PetscViewer viewer)
231: {
232:   PC_MG    *mg  = (PC_MG *)pc->data;
233:   PC_HMG   *hmg = (PC_HMG *)mg->innerctx;
234:   PetscBool isascii;

236:   PetscFunctionBegin;
237:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
238:   if (isascii) {
239:     PetscCall(PetscViewerASCIIPrintf(viewer, " Reuse interpolation: %s\n", hmg->reuseinterp ? "true" : "false"));
240:     PetscCall(PetscViewerASCIIPrintf(viewer, " Use subspace coarsening: %s\n", hmg->subcoarsening ? "true" : "false"));
241:     PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening component: %" PetscInt_FMT " \n", hmg->component));
242:     PetscCall(PetscViewerASCIIPrintf(viewer, " Use MatMAIJ: %s \n", hmg->usematmaij ? "true" : "false"));
243:     PetscCall(PetscViewerASCIIPrintf(viewer, " Inner PC type: %s \n", hmg->innerpctype));
244:   }
245:   PetscCall(PCView_MG(pc, viewer));
246:   PetscFunctionReturn(PETSC_SUCCESS);
247: }

249: static PetscErrorCode PCSetFromOptions_HMG(PC pc, PetscOptionItems PetscOptionsObject)
250: {
251:   PC_MG  *mg  = (PC_MG *)pc->data;
252:   PC_HMG *hmg = (PC_HMG *)mg->innerctx;

254:   PetscFunctionBegin;
255:   PetscOptionsHeadBegin(PetscOptionsObject, "HMG");
256:   PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation", "Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)", "PCHMGSetReuseInterpolation", hmg->reuseinterp, &hmg->reuseinterp, NULL));
257:   PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening", "Use the subspace coarsening to compute the interpolations", "PCHMGSetUseSubspaceCoarsening", hmg->subcoarsening, &hmg->subcoarsening, NULL));
258:   PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij", "Use MatMAIJ to store the interpolations to save memory", "PCHMGUseMatMAIJ", hmg->usematmaij, &hmg->usematmaij, NULL));
259:   PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component", "Which component is chosen for the subspace-based coarsening algorithm", "PCHMGSetCoarseningComponent", hmg->component, &hmg->component, NULL));
260:   PetscOptionsHeadEnd();
261:   PetscFunctionReturn(PETSC_SUCCESS);
262: }

264: static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse)
265: {
266:   PC_MG  *mg  = (PC_MG *)pc->data;
267:   PC_HMG *hmg = (PC_HMG *)mg->innerctx;

269:   PetscFunctionBegin;
270:   hmg->reuseinterp = reuse;
271:   PetscFunctionReturn(PETSC_SUCCESS);
272: }

274: /*@
275:   PCHMGSetReuseInterpolation - Reuse the interpolation matrices in `PCHMG` after the numerical values of the matrix change

277:   Logically Collective

279:   Input Parameters:
280: + pc    - the `PCHMG` context
281: - reuse - `PETSC_TRUE` indicates that `PCHMG` will reuse the interpolations

283:   Options Database Key:
284: . -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it can potentially save compute time.

286:   Level: beginner

288:   Note:
289:   This decreases the setup time of the `PC` significantly but may slow the convergence of the `KSP` iterative method that is using the `PCHMG`

291: .seealso: [](ch_ksp), `PCHMG`, `PCGAMG`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
292: @*/
293: PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
294: {
295:   PetscFunctionBegin;
297:   PetscUseMethod(pc, "PCHMGSetReuseInterpolation_C", (PC, PetscBool), (pc, reuse));
298:   PetscFunctionReturn(PETSC_SUCCESS);
299: }
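/*
  Example usage (a minimal sketch, not part of the library source): keep the existing hierarchy
  across repeated solves where only the matrix values change. The `ksp` object and its operators
  are assumed to be created elsewhere.

    PC pc;
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCHMG));
    PetscCall(PCHMGSetReuseInterpolation(pc, PETSC_TRUE));

  or, equivalently, on the command line: -pc_type hmg -pc_hmg_reuse_interpolation true
*/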

301: static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
302: {
303:   PC_MG  *mg  = (PC_MG *)pc->data;
304:   PC_HMG *hmg = (PC_HMG *)mg->innerctx;

306:   PetscFunctionBegin;
307:   hmg->subcoarsening = subspace;
308:   PetscFunctionReturn(PETSC_SUCCESS);
309: }

311: /*@
312:   PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in `PCHMG`

314:   Logically Collective

316:   Input Parameters:
317: + pc       - the `PCHMG` context
318: - subspace - `PETSC_TRUE` indicates that `PCHMG` will use the subspace coarsening

320:   Options Database Key:
321: . -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

323:   Level: beginner

325: .seealso: [](ch_ksp), `PCHMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
326: @*/
327: PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
328: {
329:   PetscFunctionBegin;
331:   PetscUseMethod(pc, "PCHMGSetUseSubspaceCoarsening_C", (PC, PetscBool), (pc, subspace));
332:   PetscFunctionReturn(PETSC_SUCCESS);
333: }
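/*
  Example usage (sketch; the multi-component matrix `A` and the `PCHMG` object `pc` are assumed):
  subspace coarsening requires the matrix block size to equal the number of PDE components.

    PetscCall(MatSetBlockSize(A, 3));                         // e.g. 3 components per mesh point
    PetscCall(PCHMGSetUseSubspaceCoarsening(pc, PETSC_TRUE));

  or on the command line: -pc_type hmg -pc_hmg_use_subspace_coarsening true
*/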

335: static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
336: {
337:   PC_MG  *mg  = (PC_MG *)pc->data;
338:   PC_HMG *hmg = (PC_HMG *)mg->innerctx;

340:   PetscFunctionBegin;
341:   PetscCall(PetscStrallocpy(type, &hmg->innerpctype));
342:   PetscFunctionReturn(PETSC_SUCCESS);
343: }

345: /*@
346:   PCHMGSetInnerPCType - Set the inner `PC` type used in the `PCHMG` preconditioner, that is, the method used to compute
347:   the hierarchy of restriction operators.

349:   Logically Collective

351:   Input Parameters:
352: + pc   - the `PCHMG` context
353: - type - `PCHYPRE` or `PCGAMG` coarsening algorithm

355:   Options Database Key:
356: . -hmg_inner_pc_type <hypre, gamg> - The method used to coarsen the matrix

358:   Level: beginner

360: .seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`
361: @*/
362: PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
363: {
364:   PetscFunctionBegin;
366:   PetscUseMethod(pc, "PCHMGSetInnerPCType_C", (PC, PCType), (pc, type));
367:   PetscFunctionReturn(PETSC_SUCCESS);
368: }
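/*
  Example usage (sketch; `pc` is assumed to already be of type PCHMG): choose the coarsening
  engine. The inner PC reads options with the prefix "hmg_inner_", so its own options can be
  set from the command line as well.

    PetscCall(PCHMGSetInnerPCType(pc, PCGAMG));

  or, for example, on the command line:
    -pc_type hmg -hmg_inner_pc_type hypre -hmg_inner_pc_hypre_boomeramg_strong_threshold 0.7
*/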

370: static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
371: {
372:   PC_MG  *mg  = (PC_MG *)pc->data;
373:   PC_HMG *hmg = (PC_HMG *)mg->innerctx;

375:   PetscFunctionBegin;
376:   hmg->component = component;
377:   PetscFunctionReturn(PETSC_SUCCESS);
378: }

380: /*@
381:   PCHMGSetCoarseningComponent - Set which component of the PDE is used for the subspace-based coarsening algorithm in the preconditioner `PCHMG`

383:   Logically Collective

385:   Input Parameters:
386: + pc        - the `PCHMG` context
387: - component - which component the `PC` will coarsen

389:   Options Database Key:
390: . -pc_hmg_coarsening_component <i> - Which component is chosen for the subspace-based coarsening algorithm

392:   Level: beginner

394:   Note:
395:   By default it uses the first component

397: .seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`
398: @*/
399: PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
400: {
401:   PetscFunctionBegin;
403:   PetscUseMethod(pc, "PCHMGSetCoarseningComponent_C", (PC, PetscInt), (pc, component));
404:   PetscFunctionReturn(PETSC_SUCCESS);
405: }
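/*
  Example usage (sketch, assuming an interlaced 4-component system, e.g. three velocities and a
  temperature, with the matrix block size set to 4): coarsen based on the last component.

    PetscCall(PCHMGSetUseSubspaceCoarsening(pc, PETSC_TRUE));
    PetscCall(PCHMGSetCoarseningComponent(pc, 3));

  or on the command line: -pc_hmg_use_subspace_coarsening true -pc_hmg_coarsening_component 3
*/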

407: static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
408: {
409:   PC_MG  *mg  = (PC_MG *)pc->data;
410:   PC_HMG *hmg = (PC_HMG *)mg->innerctx;

412:   PetscFunctionBegin;
413:   hmg->usematmaij = usematmaij;
414:   PetscFunctionReturn(PETSC_SUCCESS);
415: }

417: /*@
418:   PCHMGUseMatMAIJ - Set whether to use `MATMAIJ` for the interpolation matrices to save memory

420:   Logically Collective

422:   Input Parameters:
423: + pc         - the `PCHMG` context
424: - usematmaij - `PETSC_TRUE` (default) to use `MATMAIJ` for interpolations.

426:   Options Database Key:
427: . -pc_hmg_use_matmaij <true | false> - Whether or not to use `MATMAIJ` for the interpolation matrices to save memory

429:   Level: beginner

431: .seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`
432: @*/
433: PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
434: {
435:   PetscFunctionBegin;
437:   PetscUseMethod(pc, "PCHMGUseMatMAIJ_C", (PC, PetscBool), (pc, usematmaij));
438:   PetscFunctionReturn(PETSC_SUCCESS);
439: }
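/*
  Example usage (sketch; only relevant when subspace coarsening is enabled): store the expanded
  interpolations as ordinary AIJ matrices instead of the memory-saving MATMAIJ wrapper.

    PetscCall(PCHMGUseMatMAIJ(pc, PETSC_FALSE));

  or on the command line: -pc_hmg_use_matmaij false
*/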

441: /*MC
442:    PCHMG - Preconditioner for multiple component PDE problems that constructs a hierarchy of restriction operators to coarse grid problems using the submatrix of
443:    a single component with either `PCHYPRE` or `PCGAMG`. The same restriction operators are then used for each of the components of the PDE within the `PCMG`
444:    multigrid preconditioner. This results in a preconditioner that is much more efficient to build and apply than using `PCGAMG` on the entire system {cite}`kong2020highly`.

446:    Options Database Keys:
447: +  -pc_hmg_reuse_interpolation <true | false>      - Whether or not to reuse the interpolations for new matrix values or rebuild the interpolation. This can save compute time.
448: .  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix, or coarsen on the full matrix).
449: .  -hmg_inner_pc_type <hypre, gamg, ...>           - What method to use to generate the hierarchy of restriction operators
450: -  -pc_hmg_use_matmaij <true | false>              - Whether or not to use `MATMAIJ` for multicomponent problems for saving memory

452:    Level: intermediate

454:    Note:
455:    `MatSetBlockSize()` must be called on the linear system matrix to set the number of components of the PDE.

457: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMG`, `PCHYPRE`, `PCHMG`, `PCGetCoarseOperators()`, `PCGetInterpolations()`,
458:           `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`, `PCGAMG`
459: M*/
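/*
  A minimal end-to-end sketch of using PCHMG from application code. The names `A`, `b`, `x` and
  the 3-component layout are illustrative assumptions; the matrix assembly is not shown.

    Mat A;          // interlaced multi-component operator, assembled elsewhere
    Vec b, x;
    KSP ksp;
    PC  pc;

    PetscCall(MatSetBlockSize(A, 3));                         // number of PDE components
    PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCHMG));
    PetscCall(PCHMGSetUseSubspaceCoarsening(pc, PETSC_TRUE));
    PetscCall(PCHMGSetInnerPCType(pc, PCGAMG));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSolve(ksp, b, x));

  or entirely from the command line:
    -pc_type hmg -pc_hmg_use_subspace_coarsening true -hmg_inner_pc_type gamg
*/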
460: PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
461: {
462:   PC_HMG *hmg;
463:   PC_MG  *mg;

465:   PetscFunctionBegin;
466:   /* if the type was previously mg, we must manually destroy it because the call to PCSetType(pc,PCMG) will not destroy it */
467:   PetscTryTypeMethod(pc, destroy);
468:   pc->data = NULL;
469:   PetscCall(PetscFree(((PetscObject)pc)->type_name));

471:   PetscCall(PCSetType(pc, PCMG));
472:   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
473:   PetscCall(PetscNew(&hmg));

475:   mg                 = (PC_MG *)pc->data;
476:   mg->innerctx       = hmg;
477:   hmg->reuseinterp   = PETSC_FALSE;
478:   hmg->subcoarsening = PETSC_FALSE;
479:   hmg->usematmaij    = PETSC_TRUE;
480:   hmg->component     = 0;
481:   hmg->innerpc       = NULL;

483:   pc->ops->setfromoptions = PCSetFromOptions_HMG;
484:   pc->ops->view           = PCView_HMG;
485:   pc->ops->destroy        = PCDestroy_HMG;
486:   pc->ops->setup          = PCSetUp_HMG;

488:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", PCHMGSetReuseInterpolation_HMG));
489:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", PCHMGSetUseSubspaceCoarsening_HMG));
490:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", PCHMGSetInnerPCType_HMG));
491:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", PCHMGSetCoarseningComponent_HMG));
492:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", PCHMGUseMatMAIJ_HMG));
493:   PetscFunctionReturn(PETSC_SUCCESS);
494: }