/* hmg.c: hierarchical/hybrid multigrid (PCHMG) preconditioner implementation */
1: #include <petscdm.h>
2: #include <petsc/private/hashmapi.h>
3: #include <petsc/private/matimpl.h>
4: #include <petsc/private/pcmgimpl.h>
5: #include <petsc/private/pcimpl.h>
/* Private context for the PCHMG preconditioner: wraps an inner AMG setup
   (PCHYPRE or PCGAMG) used only to build interpolations/coarse operators,
   plus flags controlling subspace coarsening and interpolation reuse. */
typedef struct {
  PC innerpc; /* A MG inner PC (Hypre or PCGAMG) to setup interpolations and coarse operators */
  char *innerpctype; /* PCGAMG or PCHYPRE */
  PetscBool reuseinterp; /* A flag indicates if or not to reuse the interpolations */
  PetscBool subcoarsening; /* If or not to use a subspace-based coarsening algorithm */
  PetscBool usematmaij; /* If or not to use MatMAIJ for saving memory */
  PetscInt component; /* Which subspace is used for the subspace-based coarsening algorithm? */
} PC_HMG;
16: static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat, Mat *submat, MatReuse reuse, PetscInt component, PetscInt blocksize)
17: {
18: IS isrow;
19: PetscInt rstart, rend;
20: MPI_Comm comm;
22: PetscFunctionBegin;
23: PetscCall(PetscObjectGetComm((PetscObject)pmat, &comm));
24: PetscCheck(component < blocksize, comm, PETSC_ERR_ARG_INCOMP, "Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ", component, blocksize);
25: PetscCall(MatGetOwnershipRange(pmat, &rstart, &rend));
26: PetscCheck((rend - rstart) % blocksize == 0, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ", blocksize, rstart, rend);
27: PetscCall(ISCreateStride(comm, (rend - rstart) / blocksize, rstart + component, blocksize, &isrow));
28: PetscCall(MatCreateSubMatrix(pmat, isrow, isrow, reuse, submat));
29: PetscCall(ISDestroy(&isrow));
30: PetscFunctionReturn(PETSC_SUCCESS);
31: }
33: static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize)
34: {
35: PetscInt subrstart, subrend, subrowsize, subcolsize, subcstart, subcend, rowsize, colsize;
36: PetscInt subrow, row, nz, *d_nnz, *o_nnz, i, j, dnz, onz, max_nz, *indices;
37: const PetscInt *idx;
38: const PetscScalar *values;
39: MPI_Comm comm;
41: PetscFunctionBegin;
42: PetscCall(PetscObjectGetComm((PetscObject)subinterp, &comm));
43: PetscCall(MatGetOwnershipRange(subinterp, &subrstart, &subrend));
44: subrowsize = subrend - subrstart;
45: rowsize = subrowsize * blocksize;
46: PetscCall(PetscCalloc2(rowsize, &d_nnz, rowsize, &o_nnz));
47: PetscCall(MatGetOwnershipRangeColumn(subinterp, &subcstart, &subcend));
48: subcolsize = subcend - subcstart;
49: colsize = subcolsize * blocksize;
50: max_nz = 0;
51: for (subrow = subrstart; subrow < subrend; subrow++) {
52: PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, NULL));
53: if (max_nz < nz) max_nz = nz;
54: dnz = 0;
55: onz = 0;
56: for (i = 0; i < nz; i++) {
57: if (idx[i] >= subcstart && idx[i] < subcend) dnz++;
58: else onz++;
59: }
60: for (i = 0; i < blocksize; i++) {
61: d_nnz[(subrow - subrstart) * blocksize + i] = dnz;
62: o_nnz[(subrow - subrstart) * blocksize + i] = onz;
63: }
64: PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, NULL));
65: }
66: PetscCall(MatCreateAIJ(comm, rowsize, colsize, PETSC_DETERMINE, PETSC_DETERMINE, 0, d_nnz, 0, o_nnz, interp));
67: PetscCall(MatSetOption(*interp, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
68: PetscCall(MatSetOption(*interp, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE));
69: PetscCall(MatSetOption(*interp, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
70: PetscCall(MatSetFromOptions(*interp));
72: PetscCall(MatSetUp(*interp));
73: PetscCall(PetscFree2(d_nnz, o_nnz));
74: PetscCall(PetscMalloc1(max_nz, &indices));
75: for (subrow = subrstart; subrow < subrend; subrow++) {
76: PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, &values));
77: for (i = 0; i < blocksize; i++) {
78: row = subrow * blocksize + i;
79: for (j = 0; j < nz; j++) indices[j] = idx[j] * blocksize + i;
80: PetscCall(MatSetValues(*interp, 1, &row, nz, indices, values, INSERT_VALUES));
81: }
82: PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, &values));
83: }
84: PetscCall(PetscFree(indices));
85: PetscCall(MatAssemblyBegin(*interp, MAT_FINAL_ASSEMBLY));
86: PetscCall(MatAssemblyEnd(*interp, MAT_FINAL_ASSEMBLY));
87: PetscFunctionReturn(PETSC_SUCCESS);
88: }
/* Set up the PCHMG preconditioner: build (or rebuild) a hierarchy of interpolations
   with a temporary inner AMG PC (GAMG or HYPRE), optionally coarsening only one
   component (subspace coarsening), then hand the hierarchy to PCMG. */
static PetscErrorCode PCSetUp_HMG(PC pc)
{
  Mat              PA, submat;
  PC_MG           *mg  = (PC_MG *)pc->data;
  PC_HMG          *hmg = (PC_HMG *)mg->innerctx;
  MPI_Comm         comm;
  PetscInt         num_levels;
  Mat             *operators, *interpolations;
  PetscInt         blocksize;
  const char      *prefix;
  PCMGGalerkinType galerkin;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (pc->setupcalled) {
    if (hmg->reuseinterp) {
      /* If we did not use Galerkin in the last call or we have a different sparsity pattern now,
       * we have to build from scratch
       * */
      PetscCall(PCMGGetGalerkin(pc, &galerkin));
      if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
      /* Coarse operators are recomputed from the kept interpolations via Galerkin products */
      PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
      PetscCall(PCSetUp_MG(pc));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else {
      /* Not reusing: throw away the old hierarchy and rebuild everything below */
      PetscCall(PCReset_MG(pc));
      pc->setupcalled = PETSC_FALSE;
    }
  }

  /* Create an inner PC (GAMG or HYPRE) */
  if (!hmg->innerpc) {
    PetscCall(PCCreate(comm, &hmg->innerpc));
    /* If users do not set an inner pc type, we need to set a default value */
    if (!hmg->innerpctype) {
      /* If hypre is available, use hypre, otherwise, use gamg */
#if PetscDefined(HAVE_HYPRE)
      PetscCall(PetscStrallocpy(PCHYPRE, &hmg->innerpctype));
#else
      PetscCall(PetscStrallocpy(PCGAMG, &hmg->innerpctype));
#endif
    }
    PetscCall(PCSetType(hmg->innerpc, hmg->innerpctype));
  }
  PetscCall(PCGetOperators(pc, NULL, &PA));
  /* Users need to correctly set a block size of matrix in order to use subspace coarsening */
  PetscCall(MatGetBlockSize(PA, &blocksize));
  if (blocksize <= 1) hmg->subcoarsening = PETSC_FALSE;
  /* Extract a submatrix for constructing subinterpolations */
  if (hmg->subcoarsening) {
    PetscCall(PCHMGExtractSubMatrix_Private(PA, &submat, MAT_INITIAL_MATRIX, hmg->component, blocksize));
    PA = submat;
  }
  PetscCall(PCSetOperators(hmg->innerpc, PA, PA));
  /* The inner PC holds its own reference; drop ours so the submatrix is freed with it */
  if (hmg->subcoarsening) PetscCall(MatDestroy(&PA));
  /* Setup inner PC correctly. During this step, matrix will be coarsened */
  PetscCall(PCSetUseAmat(hmg->innerpc, PETSC_FALSE));
  /* Inner PC options live under the "<prefix>hmg_inner_" options prefix */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc, prefix));
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc, "hmg_inner_"));
  PetscCall(PCSetFromOptions(hmg->innerpc));
  PetscCall(PCSetUp(hmg->innerpc));

  /* Obtain interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
  PetscCall(PCGetInterpolations(hmg->innerpc, &num_levels, &interpolations));
  /* We can reuse the coarse operators when we do the full space coarsening */
  if (!hmg->subcoarsening) PetscCall(PCGetCoarseOperators(hmg->innerpc, &num_levels, &operators));

  /* The inner PC has served its purpose; its hierarchy now belongs to us */
  PetscCall(PCDestroy(&hmg->innerpc));
  hmg->innerpc = NULL;
  PetscCall(PCMGSetLevels_MG(pc, num_levels, NULL));
  /* Set coarse matrices and interpolations to PCMG */
  for (PetscInt level = num_levels - 1; level > 0; level--) {
    Mat P = NULL, pmat = NULL;
    Vec b, x, r;
    if (hmg->subcoarsening) {
      if (hmg->usematmaij) {
        /* Wrap the scalar interpolation in a MAIJ matrix (implicit per-component action) */
        PetscCall(MatCreateMAIJ(interpolations[level - 1], blocksize, &P));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      } else {
        /* Grow interpolation. In the future, we should use MAIJ */
        PetscCall(PCHMGExpandInterpolation_Private(interpolations[level - 1], &P, blocksize));
        PetscCall(MatDestroy(&interpolations[level - 1]));
      }
    } else {
      P = interpolations[level - 1];
    }
    /* b is sized like the coarse (column) space, r like the fine (row) space of P */
    PetscCall(MatCreateVecs(P, &b, &r));
    PetscCall(PCMGSetInterpolation(pc, level, P));
    PetscCall(PCMGSetRestriction(pc, level, P));
    PetscCall(MatDestroy(&P));
    /* We reuse the matrices when we do not do subspace coarsening */
    if ((level - 1) >= 0 && !hmg->subcoarsening) { /* NOTE(review): (level-1) >= 0 is always true here since level > 0 */
      pmat = operators[level - 1];
      PetscCall(PCMGSetOperators(pc, level - 1, pmat, pmat));
      PetscCall(MatDestroy(&pmat));
    }
    PetscCall(PCMGSetRhs(pc, level - 1, b));

    PetscCall(PCMGSetR(pc, level, r));
    PetscCall(VecDestroy(&r));

    PetscCall(VecDuplicate(b, &x));
    PetscCall(PCMGSetX(pc, level - 1, x));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(PetscFree(interpolations));
  if (!hmg->subcoarsening) PetscCall(PetscFree(operators));
  /* Turn Galerkin off when we already have coarse operators */
  PetscCall(PCMGSetGalerkin(pc, hmg->subcoarsening ? PC_MG_GALERKIN_PMAT : PC_MG_GALERKIN_NONE));
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUseAmat(pc, PETSC_FALSE));
  PetscObjectOptionsBegin((PetscObject)pc);
  PetscCall(PCSetFromOptions_MG(pc, PetscOptionsObject)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
  PetscOptionsEnd();
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
210: static PetscErrorCode PCDestroy_HMG(PC pc)
211: {
212: PC_MG *mg = (PC_MG *)pc->data;
213: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
215: PetscFunctionBegin;
216: PetscCall(PCDestroy(&hmg->innerpc));
217: PetscCall(PetscFree(hmg->innerpctype));
218: PetscCall(PetscFree(hmg));
219: PetscCall(PCDestroy_MG(pc));
221: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", NULL));
222: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", NULL));
223: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", NULL));
224: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", NULL));
225: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", NULL));
226: PetscFunctionReturn(PETSC_SUCCESS);
227: }
229: static PetscErrorCode PCView_HMG(PC pc, PetscViewer viewer)
230: {
231: PC_MG *mg = (PC_MG *)pc->data;
232: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
233: PetscBool isascii;
235: PetscFunctionBegin;
236: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
237: if (isascii) {
238: PetscCall(PetscViewerASCIIPrintf(viewer, " Reuse interpolation: %s\n", hmg->reuseinterp ? "true" : "false"));
239: PetscCall(PetscViewerASCIIPrintf(viewer, " Use subspace coarsening: %s\n", hmg->subcoarsening ? "true" : "false"));
240: PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening component: %" PetscInt_FMT " \n", hmg->component));
241: PetscCall(PetscViewerASCIIPrintf(viewer, " Use MatMAIJ: %s \n", hmg->usematmaij ? "true" : "false"));
242: PetscCall(PetscViewerASCIIPrintf(viewer, " Inner PC type: %s \n", hmg->innerpctype));
243: }
244: PetscCall(PCView_MG(pc, viewer));
245: PetscFunctionReturn(PETSC_SUCCESS);
246: }
248: static PetscErrorCode PCSetFromOptions_HMG(PC pc, PetscOptionItems PetscOptionsObject)
249: {
250: PC_MG *mg = (PC_MG *)pc->data;
251: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
253: PetscFunctionBegin;
254: PetscOptionsHeadBegin(PetscOptionsObject, "HMG");
255: PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation", "Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)", "PCHMGSetReuseInterpolation", hmg->reuseinterp, &hmg->reuseinterp, NULL));
256: PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening", "Use the subspace coarsening to compute the interpolations", "PCHMGSetUseSubspaceCoarsening", hmg->subcoarsening, &hmg->subcoarsening, NULL));
257: PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij", "Use MatMAIJ store interpolation for saving memory", "PCHMGSetInnerPCType", hmg->usematmaij, &hmg->usematmaij, NULL));
258: PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component", "Which component is chosen for the subspace-based coarsening algorithm", "PCHMGSetCoarseningComponent", hmg->component, &hmg->component, NULL));
259: PetscOptionsHeadEnd();
260: PetscFunctionReturn(PETSC_SUCCESS);
261: }
263: static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse)
264: {
265: PC_MG *mg = (PC_MG *)pc->data;
266: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
268: PetscFunctionBegin;
269: hmg->reuseinterp = reuse;
270: PetscFunctionReturn(PETSC_SUCCESS);
271: }
273: /*@
274: PCHMGSetReuseInterpolation - Reuse the interpolation matrices in `PCHMG` after changing the matrices numerical values
276: Logically Collective
278: Input Parameters:
279: + pc - the `PCHMG` context
280: - reuse - `PETSC_TRUE` indicates that `PCHMG` will reuse the interpolations
282: Options Database Key:
283: . -pc_hmg_reuse_interpolation (true|false) - Whether or not to reuse the interpolations. If true, it potentially saves setup time.
285: Level: beginner
287: Note:
288: This decreases the set up time of the `PC` significantly but may slow the convergence of the iterative method, `KSP`, that is using the `PCHMG`
290: .seealso: [](ch_ksp), `PCHMG`, `PCGAMG`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
291: @*/
292: PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
293: {
294: PetscFunctionBegin;
296: PetscUseMethod(pc, "PCHMGSetReuseInterpolation_C", (PC, PetscBool), (pc, reuse));
297: PetscFunctionReturn(PETSC_SUCCESS);
298: }
300: static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
301: {
302: PC_MG *mg = (PC_MG *)pc->data;
303: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
305: PetscFunctionBegin;
306: hmg->subcoarsening = subspace;
307: PetscFunctionReturn(PETSC_SUCCESS);
308: }
310: /*@
311: PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in `PCHMG`
313: Logically Collective
315: Input Parameters:
316: + pc - the `PCHMG` context
317: - subspace - `PETSC_TRUE` indicates that `PCHMG` will use the subspace coarsening
319: Options Database Key:
320: . -pc_hmg_use_subspace_coarsening (true|false) - Whether or not to use subspace coarsening (that is, coarsen a submatrix).
322: Level: beginner
324: .seealso: [](ch_ksp), `PCHMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetCoarseningComponent()`, `PCHMGSetInnerPCType()`
325: @*/
326: PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
327: {
328: PetscFunctionBegin;
330: PetscUseMethod(pc, "PCHMGSetUseSubspaceCoarsening_C", (PC, PetscBool), (pc, subspace));
331: PetscFunctionReturn(PETSC_SUCCESS);
332: }
334: static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
335: {
336: PC_MG *mg = (PC_MG *)pc->data;
337: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
339: PetscFunctionBegin;
340: PetscCall(PetscStrallocpy(type, &hmg->innerpctype));
341: PetscFunctionReturn(PETSC_SUCCESS);
342: }
344: /*@
345: PCHMGSetInnerPCType - Set an inner `PC` type to be used in the `PCHMG` preconditioner. That is the method used to compute
346: the hierarchy of restriction operators.
348: Logically Collective
350: Input Parameters:
351: + pc - the `PCHMG` context
352: - type - `PCHYPRE` or `PCGAMG` coarsening algorithm
354: Options Database Key:
355: . -hmg_inner_pc_type (hypre|gamg) - What method is used to coarsen matrix
357: Level: beginner
359: .seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetCoarseningComponent()`
360: @*/
361: PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
362: {
363: PetscFunctionBegin;
365: PetscUseMethod(pc, "PCHMGSetInnerPCType_C", (PC, PCType), (pc, type));
366: PetscFunctionReturn(PETSC_SUCCESS);
367: }
369: static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
370: {
371: PC_MG *mg = (PC_MG *)pc->data;
372: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
374: PetscFunctionBegin;
375: hmg->component = component;
376: PetscFunctionReturn(PETSC_SUCCESS);
377: }
379: /*@
380: PCHMGSetCoarseningComponent - Set which component of the PDE is used for the subspace-based coarsening algorithm in the preconditioner `PCHMG`
382: Logically Collective
384: Input Parameters:
385: + pc - the `PCHMG` context
386: - component - which component `PC` will coarsen
388: Options Database Key:
389: . -pc_hmg_coarsening_component i - Which component is chosen for the subspace-based coarsening algorithm
391: Level: beginner
393: Note:
394: By default it uses the first component
396: .seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`
397: @*/
398: PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
399: {
400: PetscFunctionBegin;
402: PetscUseMethod(pc, "PCHMGSetCoarseningComponent_C", (PC, PetscInt), (pc, component));
403: PetscFunctionReturn(PETSC_SUCCESS);
404: }
406: static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
407: {
408: PC_MG *mg = (PC_MG *)pc->data;
409: PC_HMG *hmg = (PC_HMG *)mg->innerctx;
411: PetscFunctionBegin;
412: hmg->usematmaij = usematmaij;
413: PetscFunctionReturn(PETSC_SUCCESS);
414: }
416: /*@
417: PCHMGUseMatMAIJ - Set a flag that indicates if or not to use `MATMAIJ` for the interpolation matrices to save memory
419: Logically Collective
421: Input Parameters:
422: + pc - the `PCHMG` context
423: - usematmaij - `PETSC_TRUE` (default) to use `MATMAIJ` for interpolations.
425: Options Database Key:
426: . -pc_hmg_use_matmaij (true|false) - use `MATMAIJ` for interpolations
428: Level: beginner
430: .seealso: [](ch_ksp), `PCHMG`, `PCType`, `PCGAMG`
431: @*/
432: PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
433: {
434: PetscFunctionBegin;
436: PetscUseMethod(pc, "PCHMGUseMatMAIJ_C", (PC, PetscBool), (pc, usematmaij));
437: PetscFunctionReturn(PETSC_SUCCESS);
438: }
440: /*MC
441: PCHMG - Preconditioner for multiple component PDE problems that constructs a hierarchy of restriction operators to coarse grid problems using the submatrix of
442: a single component with either `PCHYPRE` or `PCGAMG`. The same restriction operators are then used for each of the components of the PDE within the `PCMG`
443: multigrid preconditioner. This results in a preconditioner that is much more efficient to build and apply than using `PCGAMG` on the entire system {cite}`kong2020highly`.
445: Options Database Keys:
446: + -pc_hmg_reuse_interpolation (true|false) - Whether or not to reuse the interpolations for new matrix values or rebuild the interpolation. This can save compute time.
447: . -pc_hmg_use_subspace_coarsening (true|false) - Whether or not to use subspace coarsening (that is, coarsen a submatrix, or coarsen on the full matrix).
448: . -hmg_inner_pc_type (hypre|gamg) - What method to use to generate the hierarchy of restriction operators
449: - -pc_hmg_use_matmaij (true|false) - Whether or not to use `MATMAIJ` for multicomponent problems for saving memory
451: Level: intermediate
453: Note:
454: `MatSetBlockSize()` must be called on the linear system matrix to set the number of components of the PDE.
456: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMG`, `PCHYPRE`, `PCHMG`, `PCGetCoarseOperators()`, `PCGetInterpolations()`,
457: `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`, `PCHMGSetInnerPCType()`, `PCGAMG`
458: M*/
459: PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
460: {
461: PC_HMG *hmg;
462: PC_MG *mg;
464: PetscFunctionBegin;
465: /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
466: PetscTryTypeMethod(pc, destroy);
467: pc->data = NULL;
468: PetscCall(PetscFree(((PetscObject)pc)->type_name));
470: PetscCall(PCSetType(pc, PCMG));
471: PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
472: PetscCall(PetscNew(&hmg));
474: mg = (PC_MG *)pc->data;
475: mg->innerctx = hmg;
476: hmg->reuseinterp = PETSC_FALSE;
477: hmg->subcoarsening = PETSC_FALSE;
478: hmg->usematmaij = PETSC_TRUE;
479: hmg->component = 0;
480: hmg->innerpc = NULL;
482: pc->ops->setfromoptions = PCSetFromOptions_HMG;
483: pc->ops->view = PCView_HMG;
484: pc->ops->destroy = PCDestroy_HMG;
485: pc->ops->setup = PCSetUp_HMG;
487: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", PCHMGSetReuseInterpolation_HMG));
488: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", PCHMGSetUseSubspaceCoarsening_HMG));
489: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", PCHMGSetInnerPCType_HMG));
490: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", PCHMGSetCoarseningComponent_HMG));
491: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", PCHMGUseMatMAIJ_HMG));
492: PetscFunctionReturn(PETSC_SUCCESS);
493: }