Actual source code: hmg.c
#include <petscdm.h>
#include <petsc/private/hashmapi.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/pcmgimpl.h>
#include <petsc/private/pcimpl.h>

typedef struct {
  PC        innerpc;       /* An inner MG PC (PCHYPRE or PCGAMG) used to set up the interpolations and coarse operators */
  char     *innerpctype;   /* PCGAMG or PCHYPRE */
  PetscBool reuseinterp;   /* A flag indicating whether or not to reuse the interpolations */
  PetscBool subcoarsening; /* Whether or not to use a subspace-based coarsening algorithm */
  PetscBool usematmaij;    /* Whether or not to use MatMAIJ to save memory */
  PetscInt  component;     /* Which component is used for the subspace-based coarsening algorithm */
} PC_HMG;
static PetscErrorCode PCSetFromOptions_HMG(PC, PetscOptionItems *);
PetscErrorCode PCReset_MG(PC);

static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat, Mat *submat, MatReuse reuse, PetscInt component, PetscInt blocksize)
{
  IS       isrow;
  PetscInt rstart, rend;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pmat, &comm));
  PetscCheck(component < blocksize, comm, PETSC_ERR_ARG_INCOMP, "Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ", component, blocksize);
  PetscCall(MatGetOwnershipRange(pmat, &rstart, &rend));
  PetscCheck((rend - rstart) % blocksize == 0, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ", blocksize, rstart, rend);
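  /* Select the rows of the chosen component: a strided index set starting at rstart + component with
     stride blocksize, e.g. for blocksize 3 and component 1 this picks local rows rstart+1, rstart+4, ... */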
  PetscCall(ISCreateStride(comm, (rend - rstart) / blocksize, rstart + component, blocksize, &isrow));
  PetscCall(MatCreateSubMatrix(pmat, isrow, isrow, reuse, submat));
  PetscCall(ISDestroy(&isrow));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize)
{
  PetscInt           subrstart, subrend, subrowsize, subcolsize, subcstart, subcend, rowsize, colsize;
  PetscInt           subrow, row, nz, *d_nnz, *o_nnz, i, j, dnz, onz, max_nz, *indices;
  const PetscInt    *idx;
  const PetscScalar *values;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)subinterp, &comm));
  PetscCall(MatGetOwnershipRange(subinterp, &subrstart, &subrend));
  subrowsize = subrend - subrstart;
  rowsize    = subrowsize * blocksize;
  PetscCall(PetscCalloc2(rowsize, &d_nnz, rowsize, &o_nnz));
  PetscCall(MatGetOwnershipRangeColumn(subinterp, &subcstart, &subcend));
  subcolsize = subcend - subcstart;
  colsize    = subcolsize * blocksize;
  max_nz     = 0;
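  /* First pass: for each row of the subspace interpolation, count its diagonal (dnz) and off-diagonal (onz)
     nonzeros; every one of the blocksize expanded rows generated from that row gets the same preallocation */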
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, NULL));
    if (max_nz < nz) max_nz = nz;
    dnz = 0;
    onz = 0;
    for (i = 0; i < nz; i++) {
      if (idx[i] >= subcstart && idx[i] < subcend) dnz++;
      else onz++;
    }
    for (i = 0; i < blocksize; i++) {
      d_nnz[(subrow - subrstart) * blocksize + i] = dnz;
      o_nnz[(subrow - subrstart) * blocksize + i] = onz;
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, NULL));
  }
  PetscCall(MatCreateAIJ(comm, rowsize, colsize, PETSC_DETERMINE, PETSC_DETERMINE, 0, d_nnz, 0, o_nnz, interp));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE));
  PetscCall(MatSetOption(*interp, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscCall(MatSetFromOptions(*interp));

  PetscCall(MatSetUp(*interp));
  PetscCall(PetscFree2(d_nnz, o_nnz));
  PetscCall(PetscMalloc1(max_nz, &indices));
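  /* Second pass: replicate each subspace row blocksize times; an entry in subspace column idx[j] lands in
     expanded column idx[j] * blocksize + i for component i, so the components stay interleaved and decoupled */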
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, &values));
    for (i = 0; i < blocksize; i++) {
      row = subrow * blocksize + i;
      for (j = 0; j < nz; j++) indices[j] = idx[j] * blocksize + i;
      PetscCall(MatSetValues(*interp, 1, &row, nz, indices, values, INSERT_VALUES));
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, &values));
  }
  PetscCall(PetscFree(indices));
  PetscCall(MatAssemblyBegin(*interp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*interp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_HMG(PC pc)
{
  Mat              PA, submat;
  PC_MG           *mg  = (PC_MG *)pc->data;
  PC_HMG          *hmg = (PC_HMG *)mg->innerctx;
  MPI_Comm         comm;
  PetscInt         level;
  PetscInt         num_levels;
  Mat             *operators, *interpolations;
  PetscInt         blocksize;
  const char      *prefix;
  PCMGGalerkinType galerkin;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (pc->setupcalled) {
    if (hmg->reuseinterp) {
      /* If we did not use Galerkin in the last call or we have a different sparsity pattern now,
       * we have to build from scratch */
      PetscCall(PCMGGetGalerkin(pc, &galerkin));
      if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
      PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
      PetscCall(PCSetUp_MG(pc));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else {
      PetscCall(PCReset_MG(pc));
      pc->setupcalled = PETSC_FALSE;
    }
  }

  /* Create an inner PC (GAMG or HYPRE) */
  if (!hmg->innerpc) {
    PetscCall(PCCreate(comm, &hmg->innerpc));
    /* If users do not set an inner pc type, we need to set a default value */
    if (!hmg->innerpctype) {
      /* If hypre is available, use hypre, otherwise use gamg */
#if PetscDefined(HAVE_HYPRE)
      PetscCall(PetscStrallocpy(PCHYPRE, &hmg->innerpctype));
#else
      PetscCall(PetscStrallocpy(PCGAMG, &hmg->innerpctype));
#endif
    }
    PetscCall(PCSetType(hmg->innerpc, hmg->innerpctype));
  }
  PetscCall(PCGetOperators(pc, NULL, &PA));
  /* Users need to correctly set the block size of the matrix in order to use subspace coarsening */
  PetscCall(MatGetBlockSize(PA, &blocksize));
  if (blocksize <= 1) hmg->subcoarsening = PETSC_FALSE;
  /* Extract a submatrix for constructing subinterpolations */
  if (hmg->subcoarsening) {
    PetscCall(PCHMGExtractSubMatrix_Private(PA, &submat, MAT_INITIAL_MATRIX, hmg->component, blocksize));
    PA = submat;
  }
  PetscCall(PCSetOperators(hmg->innerpc, PA, PA));
  if (hmg->subcoarsening) PetscCall(MatDestroy(&PA));
  /* Set up the inner PC correctly. During this step, the matrix will be coarsened */
  PetscCall(PCSetUseAmat(hmg->innerpc, PETSC_FALSE));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc, prefix));
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc, "hmg_inner_"));
  PetscCall(PCSetFromOptions(hmg->innerpc));
  PetscCall(PCSetUp(hmg->innerpc));

  /* Obtain the interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
  PetscCall(PCGetInterpolations(hmg->innerpc, &num_levels, &interpolations));
  /* We can reuse the coarse operators only when we do full-space coarsening */
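  /* With subspace coarsening, the inner PC coarsened only the single-component submatrix, so its coarse
     operators are not reused; full-space coarse operators are instead formed Galerkin-style below (PC_MG_GALERKIN_PMAT) */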
  if (!hmg->subcoarsening) PetscCall(PCGetCoarseOperators(hmg->innerpc, &num_levels, &operators));

  PetscCall(PCDestroy(&hmg->innerpc));
  hmg->innerpc = NULL;
  PetscCall(PCMGSetLevels_MG(pc, num_levels, NULL));
  /* Set coarse matrices and interpolations to PCMG */
  for (level = num_levels - 1; level > 0; level--) {
    Mat P = NULL, pmat = NULL;
    Vec b, x, r;
    if (hmg->subcoarsening) {
      if (hmg->usematmaij) {
        PetscCall(MatCreateMAIJ(interpolations[level - 1], blocksize, &P));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      } else {
        /* Grow interpolation. In the future, we should use MAIJ */
        PetscCall(PCHMGExpandInterpolation_Private(interpolations[level - 1], &P, blocksize));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      }
    } else {
      P = interpolations[level - 1];
    }
    PetscCall(MatCreateVecs(P, &b, &r));
    PetscCall(PCMGSetInterpolation(pc, level, P));
    PetscCall(PCMGSetRestriction(pc, level, P));
    PetscCall(MatDestroy(&P));
    /* We reuse the matrices when we do not do subspace coarsening */
    if ((level - 1) >= 0 && !hmg->subcoarsening) {
      pmat = operators[level - 1];
      PetscCall(PCMGSetOperators(pc, level - 1, pmat, pmat));
      PetscCall(MatDestroy(&pmat));
    }
    PetscCall(PCMGSetRhs(pc, level - 1, b));

    PetscCall(PCMGSetR(pc, level, r));
    PetscCall(VecDestroy(&r));

    PetscCall(VecDuplicate(b, &x));
    PetscCall(PCMGSetX(pc, level - 1, x));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(PetscFree(interpolations));
  if (!hmg->subcoarsening) PetscCall(PetscFree(operators));
  /* Turn Galerkin off when we already have coarse operators */
  PetscCall(PCMGSetGalerkin(pc, hmg->subcoarsening ? PC_MG_GALERKIN_PMAT : PC_MG_GALERKIN_NONE));
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUseAmat(pc, PETSC_FALSE));
  PetscObjectOptionsBegin((PetscObject)pc);
  PetscCall(PCSetFromOptions_MG(pc, PetscOptionsObject)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
  PetscOptionsEnd();
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_HMG(PC pc)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PCDestroy(&hmg->innerpc));
  PetscCall(PetscFree(hmg->innerpctype));
  PetscCall(PetscFree(hmg));
  PetscCall(PCDestroy_MG(pc));

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_HMG(PC pc, PetscViewer viewer)
{
  PC_MG    *mg  = (PC_MG *)pc->data;
  PC_HMG   *hmg = (PC_HMG *)mg->innerctx;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Reuse interpolation: %s\n", hmg->reuseinterp ? "true" : "false"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use subspace coarsening: %s\n", hmg->subcoarsening ? "true" : "false"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening component: %" PetscInt_FMT "\n", hmg->component));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use MatMAIJ: %s\n", hmg->usematmaij ? "true" : "false"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Inner PC type: %s\n", hmg->innerpctype));
  }
  PetscCall(PCView_MG(pc, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_HMG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "HMG");
  PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation", "Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)", "PCHMGSetReuseInterpolation", hmg->reuseinterp, &hmg->reuseinterp, NULL));
  PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening", "Use subspace coarsening to compute the interpolations", "PCHMGSetUseSubspaceCoarsening", hmg->subcoarsening, &hmg->subcoarsening, NULL));
  PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij", "Use MatMAIJ to store the interpolations to save memory", "PCHMGUseMatMAIJ", hmg->usematmaij, &hmg->usematmaij, NULL));
  PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component", "Which component is chosen for the subspace-based coarsening algorithm", "PCHMGSetCoarseningComponent", hmg->component, &hmg->component, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->reuseinterp = reuse;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGSetReuseInterpolation - Reuse the interpolation matrices in `PCHMG` after the numerical values of the matrix have changed

  Logically Collective

  Input Parameters:
+ pc    - the `PCHMG` context
- reuse - `PETSC_TRUE` indicates that `PCHMG` will reuse the interpolations

  Options Database Key:
. -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it can potentially save compute time.

  Level: beginner

  Note:
  This decreases the setup time of the `PC` significantly but may slow the convergence of the iterative method, `KSP`, that is using the `PCHMG`

.seealso: [](ch_ksp), `PCHMG`, `PCGAMG`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
@*/
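/* A minimal usage sketch (assumed context: a KSP named ksp with its operators already set; names are illustrative):
     KSPGetPC(ksp, &pc);
     PCSetType(pc, PCHMG);
     PCHMGSetReuseInterpolation(pc, PETSC_TRUE);   // keep the hierarchy across changes of the matrix values
   Reuse only pays off when the nonzero pattern stays the same; otherwise the hierarchy is rebuilt (see PCSetUp_HMG above). */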
PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCHMGSetReuseInterpolation_C", (PC, PetscBool), (pc, reuse));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->subcoarsening = subspace;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in `PCHMG`

  Logically Collective

  Input Parameters:
+ pc       - the `PCHMG` context
- subspace - `PETSC_TRUE` indicates that `PCHMG` will use subspace coarsening

  Options Database Key:
. -pc_hmg_use_subspace_coarsening <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCHMGSetUseSubspaceCoarsening_C", (PC, PetscBool), (pc, subspace));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscStrallocpy(type, &hmg->innerpctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGSetInnerPCType - Set the inner `PC` type used in the `PCHMG` preconditioner, that is, the method used to compute
  the hierarchy of restriction operators.

  Logically Collective

  Input Parameters:
+ pc   - the `PCHMG` context
- type - `PCHYPRE` or `PCGAMG` coarsening algorithm

  Options Database Key:
. -hmg_inner_pc_type <hypre, gamg> - Which method is used to coarsen the matrix

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`
@*/
PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCHMGSetInnerPCType_C", (PC, PCType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->component = component;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGSetCoarseningComponent - Set which component of the PDE is used for the subspace-based coarsening algorithm in the preconditioner `PCHMG`

  Logically Collective

  Input Parameters:
+ pc        - the `PCHMG` context
- component - which component the `PC` will coarsen

  Options Database Key:
. -pc_hmg_coarsening_component <i> - Which component is chosen for the subspace-based coarsening algorithm

  Level: beginner

  Note:
  By default the first component (component 0) is used

.seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`
@*/
PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCHMGSetCoarseningComponent_C", (PC, PetscInt), (pc, component));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
{
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  hmg->usematmaij = usematmaij;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHMGUseMatMAIJ - Set a flag that indicates whether or not to use `MATMAIJ` for the interpolation matrices to save memory

  Logically Collective

  Input Parameters:
+ pc         - the `PCHMG` context
- usematmaij - `PETSC_TRUE` (default) to use `MATMAIJ` for the interpolations

  Options Database Key:
. -pc_hmg_use_matmaij <true | false> - Whether or not to use `MATMAIJ` for the interpolations

  Level: beginner

.seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`
@*/
PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
{
  PetscFunctionBegin;
  PetscUseMethod(pc, "PCHMGUseMatMAIJ_C", (PC, PetscBool), (pc, usematmaij));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCHMG - Preconditioner for multiple-component PDE problems that constructs a hierarchy of restriction operators to coarse grid problems using the submatrix of
   a single component with either `PCHYPRE` or `PCGAMG`. The same restriction operators are then used for each of the components of the PDE within the `PCMG`
   multigrid preconditioner. This results in a preconditioner that is much more efficient to build and apply than using `PCGAMG` on the entire system {cite}`kong2020highly`.

   Options Database Keys:
+  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations for new matrix values or rebuild them. Reusing can save compute time.
.  -pc_hmg_use_subspace_coarsening <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix rather than the full matrix).
.  -hmg_inner_pc_type <hypre, gamg, ...> - Which method to use to generate the hierarchy of restriction operators
-  -pc_hmg_use_matmaij <true | false> - Whether or not to use `MATMAIJ` for multicomponent problems to save memory

   Level: intermediate

   Note:
   `MatSetBlockSize()` must be called on the linear system matrix to set the number of components of the PDE.

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMG`, `PCHYPRE`, `PCHMG`, `PCGetCoarseOperators()`, `PCGetInterpolations()`,
          `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`, `PCGAMG`
M*/
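/* A minimal usage sketch for PCHMG (names ksp, A, and dof are illustrative; assumes a linear system with
   dof components per grid point has already been assembled into A):

     MatSetBlockSize(A, dof);                        // number of PDE components; required for subspace coarsening
     KSPSetOperators(ksp, A, A);
     KSPGetPC(ksp, &pc);
     PCSetType(pc, PCHMG);
     PCHMGSetUseSubspaceCoarsening(pc, PETSC_TRUE);  // coarsen one component and reuse its interpolation for all
     PCHMGSetCoarseningComponent(pc, 0);             // which component to coarsen
     PCHMGSetInnerPCType(pc, PCGAMG);                // or PCHYPRE when hypre is available

   Roughly the same configuration from the command line:
     -pc_type hmg -pc_hmg_use_subspace_coarsening -pc_hmg_coarsening_component 0 -hmg_inner_pc_type gamg */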
PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
{
  PC_HMG *hmg;
  PC_MG  *mg;

  PetscFunctionBegin;
  /* If the type was previously mg, we must manually destroy it because the call to PCSetType(pc, PCMG) will not destroy it */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;
  PetscCall(PetscFree(((PetscObject)pc)->type_name));

  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
  PetscCall(PetscNew(&hmg));

  mg                 = (PC_MG *)pc->data;
  mg->innerctx       = hmg;
  hmg->reuseinterp   = PETSC_FALSE;
  hmg->subcoarsening = PETSC_FALSE;
  hmg->usematmaij    = PETSC_TRUE;
  hmg->component     = 0;
  hmg->innerpc       = NULL;

  pc->ops->setfromoptions = PCSetFromOptions_HMG;
  pc->ops->view           = PCView_HMG;
  pc->ops->destroy        = PCDestroy_HMG;
  pc->ops->setup          = PCSetUp_HMG;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", PCHMGSetReuseInterpolation_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", PCHMGSetUseSubspaceCoarsening_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", PCHMGSetInnerPCType_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", PCHMGSetCoarseningComponent_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", PCHMGUseMatMAIJ_HMG));
  PetscFunctionReturn(PETSC_SUCCESS);
}