Actual source code: hypre.c
1: /*
2: Provides an interface to the LLNL package hypre
3: */
5: #include <petscpkg_version.h>
6: #include <petsc/private/pcimpl.h>
7: /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */
8: #include <petsc/private/matimpl.h>
9: #include <petsc/private/vecimpl.h>
10: #include <../src/vec/vec/impls/hypre/vhyp.h>
11: #include <../src/mat/impls/hypre/mhypre.h>
12: #include <../src/dm/impls/da/hypre/mhyp.h>
13: #include <_hypre_parcsr_ls.h>
14: #include <petscmathypre.h>
16: #if defined(PETSC_HAVE_HYPRE_DEVICE)
17: #include <petsc/private/deviceimpl.h>
18: #endif
20: static PetscBool cite = PETSC_FALSE;
21: static const char hypreCitation[] = "@manual{hypre-web-page,\n title = {{\\sl hypre}: High Performance Preconditioners},\n organization = {Lawrence Livermore National Laboratory},\n note = "
22: "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n";
24: /*
25: Private context (data structure) for the preconditioner.
26: */
27: typedef struct {
28: HYPRE_Solver hsolver;
29: Mat hpmat; /* MatHYPRE */
31: HYPRE_Int (*destroy)(HYPRE_Solver);
32: HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
33: HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
35: MPI_Comm comm_hypre;
36: char *hypre_type;
38: /* options for Pilut and BoomerAMG */
39: PetscInt maxiter;
40: PetscReal tol;
42: /* options for Pilut */
43: PetscInt factorrowsize;
45: /* options for ParaSails */
46: PetscInt nlevels;
47: PetscReal threshold;
48: PetscReal filter;
49: PetscReal loadbal;
50: PetscInt logging;
51: PetscInt ruse;
52: PetscInt symt;
54: /* options for BoomerAMG */
55: PetscBool printstatistics;
57: /* options for BoomerAMG */
58: PetscInt cycletype;
59: PetscInt maxlevels;
60: PetscReal strongthreshold;
61: PetscReal maxrowsum;
62: PetscInt gridsweeps[3];
63: PetscInt coarsentype;
64: PetscInt measuretype;
65: PetscInt smoothtype;
66: PetscInt smoothsweeps;
67: PetscInt smoothnumlevels;
68: PetscInt eu_level; /* Number of levels for ILU(k) in Euclid */
69: PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */
70: PetscInt eu_bj; /* Defines use of Block Jacobi ILU in Euclid */
71: PetscInt relaxtype[3];
72: PetscReal relaxweight;
73: PetscReal outerrelaxweight;
74: PetscInt relaxorder;
75: PetscReal truncfactor;
76: PetscBool applyrichardson;
77: PetscInt pmax;
78: PetscInt interptype;
79: PetscInt maxc;
80: PetscInt minc;
81: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
82: char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG
83: #endif
84: /* GPU */
85: PetscBool keeptranspose;
86: PetscInt rap2;
87: PetscInt mod_rap2;
89: /* AIR */
90: PetscInt Rtype;
91: PetscReal Rstrongthreshold;
92: PetscReal Rfilterthreshold;
93: PetscInt Adroptype;
94: PetscReal Adroptol;
96: PetscInt agg_nl;
97: PetscInt agg_interptype;
98: PetscInt agg_num_paths;
99: PetscBool nodal_relax;
100: PetscInt nodal_relax_levels;
102: PetscInt nodal_coarsening;
103: PetscInt nodal_coarsening_diag;
104: PetscInt vec_interp_variant;
105: PetscInt vec_interp_qmax;
106: PetscBool vec_interp_smooth;
107: PetscInt interp_refine;
109: /* NearNullSpace support */
110: VecHYPRE_IJVector *hmnull;
111: HYPRE_ParVector *phmnull;
112: PetscInt n_hmnull;
113: Vec hmnull_constant;
115: /* options for AS (Auxiliary Space preconditioners) */
116: PetscInt as_print;
117: PetscInt as_max_iter;
118: PetscReal as_tol;
119: PetscInt as_relax_type;
120: PetscInt as_relax_times;
121: PetscReal as_relax_weight;
122: PetscReal as_omega;
123: PetscInt as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */
124: PetscReal as_amg_alpha_theta; /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */
125: PetscInt as_amg_beta_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */
126: PetscReal as_amg_beta_theta; /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS) */
127: PetscInt ams_cycle_type;
128: PetscInt ads_cycle_type;
130: /* additional data */
131: Mat G; /* MatHYPRE */
132: Mat C; /* MatHYPRE */
133: Mat alpha_Poisson; /* MatHYPRE */
134: Mat beta_Poisson; /* MatHYPRE */
136: /* extra information for AMS */
137: PetscInt dim; /* geometrical dimension */
138: VecHYPRE_IJVector coords[3];
139: VecHYPRE_IJVector constants[3];
140: VecHYPRE_IJVector interior;
141: Mat RT_PiFull, RT_Pi[3];
142: Mat ND_PiFull, ND_Pi[3];
143: PetscBool ams_beta_is_zero;
144: PetscBool ams_beta_is_zero_part;
145: PetscInt ams_proj_freq;
146: } PC_HYPRE;
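/*
   A minimal usage sketch, assuming a PC obtained from KSPGetPC() or PCCreate(): the context above is
   populated by selecting the hypre preconditioner and one of its solvers, either in code

     PetscCall(PCSetType(pc, PCHYPRE));
     PetscCall(PCHYPRESetType(pc, "boomeramg"));   (other choices include "pilut", "parasails", "euclid", "ams", "ads", "ilu")
     PetscCall(PCSetFromOptions(pc));

   or on the command line with -pc_type hypre -pc_hypre_type <type>, after which the
   PCSetFromOptions_HYPRE_* handlers below fill in the corresponding fields.
*/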
148: /*
149: Matrices in AIJ format are created in place using the (i,j,data) arrays from BoomerAMG. Since the data format of hypre_ParCSRMatrix
150: differs from that used in PETSc, the original hypre_ParCSRMatrix can no longer be used after this routine has been called.
151: It is used by PCHMG; other users should avoid calling this function.
152: */
153: static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[])
154: {
155: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
156: PetscBool same;
157: PetscInt num_levels, l;
158: Mat *mattmp;
159: hypre_ParCSRMatrix **A_array;
161: PetscFunctionBegin;
162: PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
163: PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG");
164: num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
165: PetscCall(PetscMalloc1(num_levels, &mattmp));
166: A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)jac->hsolver);
167: for (l = 1; l < num_levels; l++) {
168: PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &mattmp[num_levels - 1 - l]));
169: /* We want to own the data, so hypre must not touch this matrix any more */
170: A_array[l] = NULL;
171: }
172: *nlevels = num_levels;
173: *operators = mattmp;
174: PetscFunctionReturn(PETSC_SUCCESS);
175: }
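/*
   A minimal sketch of how this is consumed, assuming the public PCGetCoarseOperators() wrapper
   (composed as "PCGetCoarseOperators_C") dispatches here; the routine fills nlevels-1 coarse-level
   matrices and transfers their ownership to the caller:

     PetscInt nlevels;
     Mat     *ops;
     PetscCall(PCGetCoarseOperators(pc, &nlevels, &ops));
     for (PetscInt l = 0; l < nlevels - 1; l++) PetscCall(MatDestroy(&ops[l]));
     PetscCall(PetscFree(ops));
*/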
177: /*
178: Matrices in AIJ format are created in place using the (i,j,data) arrays from BoomerAMG. Since the data format of hypre_ParCSRMatrix
179: differs from that used in PETSc, the original hypre_ParCSRMatrix can no longer be used after this routine has been called.
180: It is used by PCHMG; other users should avoid calling this function.
181: */
182: static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[])
183: {
184: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
185: PetscBool same;
186: PetscInt num_levels, l;
187: Mat *mattmp;
188: hypre_ParCSRMatrix **P_array;
190: PetscFunctionBegin;
191: PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
192: PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG");
193: num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
194: PetscCall(PetscMalloc1(num_levels, &mattmp));
195: P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)jac->hsolver);
196: for (l = 1; l < num_levels; l++) {
197: PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &mattmp[l - 1]));
199: /* We want to own the data, so hypre must not touch this matrix any more */
199: P_array[num_levels - 1 - l] = NULL;
200: }
201: *nlevels = num_levels;
202: *interpolations = mattmp;
203: PetscFunctionReturn(PETSC_SUCCESS);
204: }
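/*
   Analogous sketch for the interpolations, assuming the public PCGetInterpolations() wrapper
   (composed as "PCGetInterpolations_C") dispatches here; ownership of the nlevels-1 returned
   matrices again passes to the caller:

     PetscInt nlevels;
     Mat     *P;
     PetscCall(PCGetInterpolations(pc, &nlevels, &P));
     for (PetscInt l = 0; l < nlevels - 1; l++) PetscCall(MatDestroy(&P[l]));
     PetscCall(PetscFree(P));
*/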
206: /*
207: Boolean (PetscBT) marker arrays are created in place using the CF marker data from BoomerAMG.
208: */
209: static PetscErrorCode PCHYPREGetCFMarkers_BoomerAMG(PC pc, PetscInt *n_per_level[], PetscBT *CFMarkers[])
210: {
211: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
212: PetscBool same;
213: PetscInt num_levels, fine_nodes = 0, coarse_nodes;
214: PetscInt *n_per_temp;
215: PetscBT *markertmp;
216: hypre_IntArray **CF_marker_array;
218: PetscFunctionBegin;
219: PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
220: PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG");
221: num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
222: PetscCall(PetscMalloc1(num_levels, &n_per_temp));
223: PetscCall(PetscMalloc1(num_levels - 1, &markertmp));
224: CF_marker_array = hypre_ParAMGDataCFMarkerArray((hypre_ParAMGData *)jac->hsolver);
225: for (PetscInt l = 0, CFMaxIndex = num_levels - 2; CFMaxIndex >= 0; l++, CFMaxIndex--) {
226: fine_nodes = hypre_IntArraySize(CF_marker_array[CFMaxIndex]);
227: coarse_nodes = 0;
228: PetscCall(PetscBTCreate(fine_nodes, &markertmp[l]));
229: for (PetscInt k = 0; k < fine_nodes; k++) {
230: if (hypre_IntArrayDataI(CF_marker_array[CFMaxIndex], k) > 0) {
231: PetscCall(PetscBTSet(markertmp[l], k));
232: coarse_nodes++;
233: }
234: }
235: n_per_temp[l] = coarse_nodes;
236: }
237: n_per_temp[num_levels - 1] = fine_nodes;
238: *n_per_level = n_per_temp;
239: *CFMarkers = markertmp;
240: PetscFunctionReturn(PETSC_SUCCESS);
241: }
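/*
   A minimal sketch of consuming the markers, assuming the public PCHYPREGetCFMarkers() wrapper
   (composed as "PCHYPREGetCFMarkers_C") dispatches here; n_per_level[] gives the number of
   C-points selected on each level and a set bit in a PetscBT marks a point chosen as a C-point:

     PetscInt *n_per_level;
     PetscBT  *markers;
     PetscCall(PCHYPREGetCFMarkers(pc, &n_per_level, &markers));
     PetscBool first_point_is_coarse = PetscBTLookup(markers[0], 0) ? PETSC_TRUE : PETSC_FALSE;

   The caller becomes responsible for destroying the returned bit arrays (PetscBTDestroy) and
   freeing both arrays with PetscFree().
*/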
243: /* Resets (frees) Hypre's representation of the near null space */
244: static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc)
245: {
246: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
247: PetscInt i;
249: PetscFunctionBegin;
250: for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i]));
251: PetscCall(PetscFree(jac->hmnull));
252: PetscCall(PetscFree(jac->phmnull));
253: PetscCall(VecDestroy(&jac->hmnull_constant));
254: jac->n_hmnull = 0;
255: PetscFunctionReturn(PETSC_SUCCESS);
256: }
258: static PetscErrorCode PCSetUp_HYPRE(PC pc)
259: {
260: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
261: Mat_HYPRE *hjac;
262: HYPRE_ParCSRMatrix hmat;
263: HYPRE_ParVector bv, xv;
264: PetscBool ishypre;
266: PetscFunctionBegin;
267: /* default type is boomerAMG */
268: if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg"));
270: /* get hypre matrix */
271: if (pc->flag == DIFFERENT_NONZERO_PATTERN) PetscCall(MatDestroy(&jac->hpmat));
272: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre));
273: if (!ishypre) {
274: /* Temporary fix since we do not support MAT_REUSE_MATRIX with HYPRE device */
275: #if defined(PETSC_HAVE_HYPRE_DEVICE)
276: PetscBool iscuda, iship, iskokkos;
278: PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
279: PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
280: PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iskokkos, MATSEQAIJKOKKOS, MATMPIAIJKOKKOS, ""));
281: if (iscuda || iship || iskokkos) PetscCall(MatDestroy(&jac->hpmat));
282: #endif
283: PetscCall(MatConvert(pc->pmat, MATHYPRE, jac->hpmat ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &jac->hpmat));
284: } else {
285: PetscCall(PetscObjectReference((PetscObject)pc->pmat));
286: PetscCall(MatDestroy(&jac->hpmat));
287: jac->hpmat = pc->pmat;
288: }
290: /* allow debug */
291: PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view"));
292: hjac = (Mat_HYPRE *)jac->hpmat->data;
294: /* special case for BoomerAMG */
295: if (jac->setup == HYPRE_BoomerAMGSetup) {
296: MatNullSpace mnull;
297: PetscBool has_const;
298: PetscInt bs, nvec, i;
299: const Vec *vecs;
301: PetscCall(MatGetBlockSize(pc->pmat, &bs));
302: if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
303: PetscCall(MatGetNearNullSpace(pc->mat, &mnull));
304: if (mnull) {
305: PetscCall(PCHYPREResetNearNullSpace_Private(pc));
306: PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
307: PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull));
308: PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull));
309: for (i = 0; i < nvec; i++) {
310: PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i]));
311: PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i]));
312: PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]);
313: }
314: if (has_const) {
315: PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL));
316: PetscCall(VecSet(jac->hmnull_constant, 1));
317: PetscCall(VecNormalize(jac->hmnull_constant, NULL));
318: PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec]));
319: PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec]));
320: PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]);
321: nvec++;
322: }
323: PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull);
324: jac->n_hmnull = nvec;
325: }
326: }
328: /* special case for AMS */
329: if (jac->setup == HYPRE_AMSSetup) {
330: Mat_HYPRE *hm;
331: HYPRE_ParCSRMatrix parcsr;
332: if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
333: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()");
334: }
335: if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim);
336: if (jac->constants[0]) {
337: HYPRE_ParVector ozz, zoz, zzo = NULL;
338: PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz));
339: PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz));
340: if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo));
341: PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo);
342: }
343: if (jac->coords[0]) {
344: HYPRE_ParVector coords[3];
345: coords[0] = NULL;
346: coords[1] = NULL;
347: coords[2] = NULL;
348: if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
349: if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
350: if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
351: PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
352: }
353: PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
354: hm = (Mat_HYPRE *)jac->G->data;
355: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
356: PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr);
357: if (jac->alpha_Poisson) {
358: hm = (Mat_HYPRE *)jac->alpha_Poisson->data;
359: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
360: PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr);
361: }
362: if (jac->ams_beta_is_zero) {
363: PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL);
364: } else if (jac->beta_Poisson) {
365: hm = (Mat_HYPRE *)jac->beta_Poisson->data;
366: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
367: PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr);
368: } else if (jac->ams_beta_is_zero_part) {
369: if (jac->interior) {
370: HYPRE_ParVector interior = NULL;
371: PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior));
372: PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior);
373: } else {
374: jac->ams_beta_is_zero_part = PETSC_FALSE;
375: }
376: }
377: if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) {
378: PetscInt i;
379: HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
380: if (jac->ND_PiFull) {
381: hm = (Mat_HYPRE *)jac->ND_PiFull->data;
382: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
383: } else {
384: nd_parcsrfull = NULL;
385: }
386: for (i = 0; i < 3; ++i) {
387: if (jac->ND_Pi[i]) {
388: hm = (Mat_HYPRE *)jac->ND_Pi[i]->data;
389: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
390: } else {
391: nd_parcsr[i] = NULL;
392: }
393: }
394: PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
395: }
396: }
397: /* special case for ADS */
398: if (jac->setup == HYPRE_ADSSetup) {
399: Mat_HYPRE *hm;
400: HYPRE_ParCSRMatrix parcsr;
401: if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) {
402: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations");
403: } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead");
404: PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
405: PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteCurl");
406: if (jac->coords[0]) {
407: HYPRE_ParVector coords[3];
408: coords[0] = NULL;
409: coords[1] = NULL;
410: coords[2] = NULL;
411: if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
412: if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
413: if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
414: PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
415: }
416: hm = (Mat_HYPRE *)jac->G->data;
417: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
418: PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr);
419: hm = (Mat_HYPRE *)jac->C->data;
420: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
421: PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr);
422: if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
423: PetscInt i;
424: HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3];
425: HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
426: if (jac->RT_PiFull) {
427: hm = (Mat_HYPRE *)jac->RT_PiFull->data;
428: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull));
429: } else {
430: rt_parcsrfull = NULL;
431: }
432: for (i = 0; i < 3; ++i) {
433: if (jac->RT_Pi[i]) {
434: hm = (Mat_HYPRE *)jac->RT_Pi[i]->data;
435: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i]));
436: } else {
437: rt_parcsr[i] = NULL;
438: }
439: }
440: if (jac->ND_PiFull) {
441: hm = (Mat_HYPRE *)jac->ND_PiFull->data;
442: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
443: } else {
444: nd_parcsrfull = NULL;
445: }
446: for (i = 0; i < 3; ++i) {
447: if (jac->ND_Pi[i]) {
448: hm = (Mat_HYPRE *)jac->ND_Pi[i]->data;
449: PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
450: } else {
451: nd_parcsr[i] = NULL;
452: }
453: }
454: PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
455: }
456: }
457: PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
458: PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv);
459: PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv);
460: PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
461: PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv);
462: PetscCall(PetscFPTrapPop());
463: PetscFunctionReturn(PETSC_SUCCESS);
464: }
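/*
   For the AMS/ADS branches above, the auxiliary operators must be attached before PCSetUp() is
   reached; a minimal sketch, assuming a discrete gradient matrix G, a discrete curl matrix C,
   and nodal coordinates coords of length dim*nloc are available:

     PetscCall(PCHYPRESetDiscreteGradient(pc, G));        needed by both AMS and ADS
     PetscCall(PCHYPRESetDiscreteCurl(pc, C));            additionally needed by ADS
     PetscCall(PCSetCoordinates(pc, dim, nloc, coords));  or PCHYPRESetEdgeConstantVectors()/PCHYPRESetInterpolations()
*/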
466: static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x)
467: {
468: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
469: Mat_HYPRE *hjac = (Mat_HYPRE *)jac->hpmat->data;
470: HYPRE_ParCSRMatrix hmat;
471: HYPRE_ParVector jbv, jxv;
473: PetscFunctionBegin;
474: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
475: if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0));
476: PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b));
477: if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x));
478: else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x));
479: PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
480: PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
481: PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
482: PetscStackCallExternalVoid(
483: "Hypre solve", do {
484: HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv);
485: if (hierr) {
486: PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
487: HYPRE_ClearAllErrors();
488: }
489: } while (0));
491: if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv);
492: PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
493: PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
494: PetscFunctionReturn(PETSC_SUCCESS);
495: }
497: static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X)
498: {
499: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
500: Mat_HYPRE *hjac = (Mat_HYPRE *)jac->hpmat->data;
501: hypre_ParCSRMatrix *par_matrix;
502: HYPRE_ParVector hb, hx;
503: const PetscScalar *b;
504: PetscScalar *x;
505: PetscInt m, N, lda;
506: hypre_Vector *x_local;
507: PetscMemType type;
509: PetscFunctionBegin;
510: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
511: PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix);
512: PetscCall(MatGetLocalSize(B, &m, NULL));
513: PetscCall(MatGetSize(B, NULL, &N));
514: PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb);
515: PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx);
516: PetscCall(MatZeroEntries(X));
517: PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type));
518: PetscCall(MatDenseGetLDA(B, &lda));
519: PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use an LDA different from the number of local rows: %" PetscInt_FMT " != %" PetscInt_FMT, lda, m);
520: PetscCall(MatDenseGetLDA(X, &lda));
521: PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use an LDA different from the number of local rows: %" PetscInt_FMT " != %" PetscInt_FMT, lda, m);
522: x_local = hypre_ParVectorLocalVector(hb);
523: PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
524: hypre_VectorData(x_local) = (HYPRE_Complex *)b;
525: PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL));
526: x_local = hypre_ParVectorLocalVector(hx);
527: PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
528: hypre_VectorData(x_local) = (HYPRE_Complex *)x;
529: PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
530: PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
531: PetscStackCallExternalVoid(
532: "Hypre solve", do {
533: HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx);
534: if (hierr) {
535: PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
536: HYPRE_ClearAllErrors();
537: }
538: } while (0));
539: PetscCallExternal(HYPRE_ParVectorDestroy, hb);
540: PetscCallExternal(HYPRE_ParVectorDestroy, hx);
541: PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b));
542: PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x));
543: PetscFunctionReturn(PETSC_SUCCESS);
544: }
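/*
   The multiple right-hand-side path above is reached through PCMatApply(); a minimal sketch,
   assuming dense matrices B and X that hold one right-hand side per column:

     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPMatSolve(ksp, B, X));    with -pc_type hypre -pc_hypre_type boomeramg this routine applies the preconditioner
*/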
546: static PetscErrorCode PCReset_HYPRE(PC pc)
547: {
548: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
550: PetscFunctionBegin;
551: PetscCall(MatDestroy(&jac->hpmat));
552: PetscCall(MatDestroy(&jac->G));
553: PetscCall(MatDestroy(&jac->C));
554: PetscCall(MatDestroy(&jac->alpha_Poisson));
555: PetscCall(MatDestroy(&jac->beta_Poisson));
556: PetscCall(MatDestroy(&jac->RT_PiFull));
557: PetscCall(MatDestroy(&jac->RT_Pi[0]));
558: PetscCall(MatDestroy(&jac->RT_Pi[1]));
559: PetscCall(MatDestroy(&jac->RT_Pi[2]));
560: PetscCall(MatDestroy(&jac->ND_PiFull));
561: PetscCall(MatDestroy(&jac->ND_Pi[0]));
562: PetscCall(MatDestroy(&jac->ND_Pi[1]));
563: PetscCall(MatDestroy(&jac->ND_Pi[2]));
564: PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
565: PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
566: PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
567: PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
568: PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
569: PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
570: PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
571: PetscCall(PCHYPREResetNearNullSpace_Private(pc));
572: jac->ams_beta_is_zero = PETSC_FALSE;
573: jac->ams_beta_is_zero_part = PETSC_FALSE;
574: jac->dim = 0;
575: PetscFunctionReturn(PETSC_SUCCESS);
576: }
578: static PetscErrorCode PCDestroy_HYPRE(PC pc)
579: {
580: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
582: PetscFunctionBegin;
583: PetscCall(PCReset_HYPRE(pc));
584: if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver);
585: PetscCall(PetscFree(jac->hypre_type));
586: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
587: PetscCall(PetscFree(jac->spgemm_type));
588: #endif
589: if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
590: PetscCall(PetscFree(pc->data));
592: PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0));
593: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL));
594: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL));
595: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL));
596: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL));
597: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL));
598: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL));
599: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL));
600: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL));
601: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL));
602: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
603: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
604: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetCFMarkers_C", NULL));
605: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL));
606: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL));
607: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
608: PetscFunctionReturn(PETSC_SUCCESS);
609: }
611: static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject)
612: {
613: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
614: PetscBool flag;
616: PetscFunctionBegin;
617: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options");
618: PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag));
619: if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter);
620: PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag));
621: if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol);
622: PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag));
623: if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize);
624: PetscOptionsHeadEnd();
625: PetscFunctionReturn(PETSC_SUCCESS);
626: }
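/*
   The Pilut options above map directly to the command line, e.g. (values illustrative):

     -pc_type hypre -pc_hypre_type pilut -pc_hypre_pilut_maxiter 20 -pc_hypre_pilut_tol 1e-3 -pc_hypre_pilut_factorrowsize 30
*/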
628: static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer)
629: {
630: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
631: PetscBool iascii;
633: PetscFunctionBegin;
634: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
635: if (iascii) {
636: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Pilut preconditioning\n"));
637: if (jac->maxiter != PETSC_DEFAULT) {
638: PetscCall(PetscViewerASCIIPrintf(viewer, " maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter));
639: } else {
640: PetscCall(PetscViewerASCIIPrintf(viewer, " default maximum number of iterations \n"));
641: }
642: if (jac->tol != PETSC_DEFAULT) {
643: PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->tol));
644: } else {
645: PetscCall(PetscViewerASCIIPrintf(viewer, " default drop tolerance \n"));
646: }
647: if (jac->factorrowsize != PETSC_DEFAULT) {
648: PetscCall(PetscViewerASCIIPrintf(viewer, " factor row size %" PetscInt_FMT "\n", jac->factorrowsize));
649: } else {
650: PetscCall(PetscViewerASCIIPrintf(viewer, " default factor row size \n"));
651: }
652: }
653: PetscFunctionReturn(PETSC_SUCCESS);
654: }
656: static const char *HYPREILUType[] = {
657: "Block-Jacobi-ILUk", "Block-Jacobi-ILUT", "", "", "", "", "", "", "", "", /* 0-9 */
658: "GMRES-ILUk", "GMRES-ILUT", "", "", "", "", "", "", "", "", /* 10-19 */
659: "NSH-ILUk", "NSH-ILUT", "", "", "", "", "", "", "", "", /* 20-29 */
660: "RAS-ILUk", "RAS-ILUT", "", "", "", "", "", "", "", "", /* 30-39 */
661: "ddPQ-GMRES-ILUk", "ddPQ-GMRES-ILUT", "", "", "", "", "", "", "", "", /* 40-49 */
662: "GMRES-ILU0" /* 50 */
663: };
665: static const char *HYPREILUIterSetup[] = {"default", "async-in-place", "async-explicit", "sync-explicit", "semisync-explicit"};
667: static PetscErrorCode PCSetFromOptions_HYPRE_ILU(PC pc, PetscOptionItems *PetscOptionsObject)
668: {
669: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
670: PetscBool flg;
671: PetscInt indx;
672: PetscReal tmpdbl;
673: PetscBool tmp_truth;
675: PetscFunctionBegin;
676: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ILU Options");
678: /* ILU: ILU Type */
679: PetscCall(PetscOptionsEList("-pc_hypre_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg));
680: if (flg) { PetscCallExternal(HYPRE_ILUSetType, jac->hsolver, indx); }
682: /* ILU: ILU iterative setup type*/
683: PetscCall(PetscOptionsEList("-pc_hypre_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg));
684: if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupType, jac->hsolver, indx); }
686: /* ILU: ILU iterative setup option*/
687: PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg));
688: if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupOption, jac->hsolver, indx); }
690: /* ILU: ILU iterative setup maxiter */
691: PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg));
692: if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupMaxIter, jac->hsolver, indx); }
694: /* ILU: ILU iterative setup tolerance */
695: PetscCall(PetscOptionsReal("-pc_hypre_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg));
696: if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupTolerance, jac->hsolver, tmpdbl); }
698: /* ILU: ILU Print Level */
699: PetscCall(PetscOptionsInt("-pc_hypre_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg));
700: if (flg) { PetscCallExternal(HYPRE_ILUSetPrintLevel, jac->hsolver, indx); }
702: /* ILU: Logging */
703: PetscCall(PetscOptionsInt("-pc_hypre_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg));
704: if (flg) { PetscCallExternal(HYPRE_ILUSetLogging, jac->hsolver, indx); }
706: /* ILU: ILU Level */
707: PetscCall(PetscOptionsInt("-pc_hypre_ilu_level", "Set ILU level", "None", 0, &indx, &flg));
708: if (flg) { PetscCallExternal(HYPRE_ILUSetLevelOfFill, jac->hsolver, indx); }
710: /* ILU: ILU Max NNZ per row */
711: PetscCall(PetscOptionsInt("-pc_hypre_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg));
712: if (flg) { PetscCallExternal(HYPRE_ILUSetMaxNnzPerRow, jac->hsolver, indx); }
714: /* ILU: tolerance */
715: PetscCall(PetscOptionsReal("-pc_hypre_ilu_tol", "Tolerance for ILU", "None", 0, &tmpdbl, &flg));
716: if (flg) { PetscCallExternal(HYPRE_ILUSetTol, jac->hsolver, tmpdbl); }
718: /* ILU: maximum iteration count */
719: PetscCall(PetscOptionsInt("-pc_hypre_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg));
720: if (flg) { PetscCallExternal(HYPRE_ILUSetMaxIter, jac->hsolver, indx); }
722: /* ILU: drop threshold */
723: PetscCall(PetscOptionsReal("-pc_hypre_ilu_drop_threshold", "Drop threshold for ILU", "None", 0, &tmpdbl, &flg));
724: if (flg) { PetscCallExternal(HYPRE_ILUSetDropThreshold, jac->hsolver, tmpdbl); }
726: /* ILU: Triangular Solve */
727: PetscCall(PetscOptionsBool("-pc_hypre_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg));
728: if (flg) { PetscCallExternal(HYPRE_ILUSetTriSolve, jac->hsolver, tmp_truth); }
730: /* ILU: Lower Jacobi iteration */
731: PetscCall(PetscOptionsInt("-pc_hypre_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg));
732: if (flg) { PetscCallExternal(HYPRE_ILUSetLowerJacobiIters, jac->hsolver, indx); }
734: /* ILU: Upper Jacobi iteration */
735: PetscCall(PetscOptionsInt("-pc_hypre_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg));
736: if (flg) { PetscCallExternal(HYPRE_ILUSetUpperJacobiIters, jac->hsolver, indx); }
738: /* ILU: local reordering */
739: PetscCall(PetscOptionsBool("-pc_hypre_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg));
740: if (flg) { PetscCallExternal(HYPRE_ILUSetLocalReordering, jac->hsolver, tmp_truth); }
742: PetscOptionsHeadEnd();
743: PetscFunctionReturn(PETSC_SUCCESS);
744: }
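/*
   Example command-line selection of the hypre ILU options handled above (values illustrative):

     -pc_type hypre -pc_hypre_type ilu -pc_hypre_ilu_type GMRES-ILUk -pc_hypre_ilu_level 1 -pc_hypre_ilu_tri_solve 1
*/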
746: static PetscErrorCode PCView_HYPRE_ILU(PC pc, PetscViewer viewer)
747: {
748: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
749: hypre_ParILUData *ilu_data = (hypre_ParILUData *)jac->hsolver;
750: PetscBool iascii;
751: PetscInt indx;
752: PetscReal tmpdbl;
753: PetscReal *tmpdbl3;
755: PetscFunctionBegin;
756: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
757: if (iascii) {
758: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ILU preconditioning\n"));
759: PetscStackCallExternalVoid("hypre_ParILUDataIluType", indx = hypre_ParILUDataIluType(ilu_data));
760: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU type %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx));
761: PetscStackCallExternalVoid("hypre_ParILUDataLfil", indx = hypre_ParILUDataLfil(ilu_data));
762: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU level %" PetscInt_FMT "\n", indx));
763: PetscStackCallExternalVoid("hypre_ParILUDataMaxIter", indx = hypre_ParILUDataMaxIter(ilu_data));
764: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max iterations %" PetscInt_FMT "\n", indx));
765: PetscStackCallExternalVoid("hypre_ParILUDataMaxRowNnz", indx = hypre_ParILUDataMaxRowNnz(ilu_data));
766: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max NNZ per row %" PetscInt_FMT "\n", indx));
767: PetscStackCallExternalVoid("hypre_ParILUDataTriSolve", indx = hypre_ParILUDataTriSolve(ilu_data));
768: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU triangular solve %" PetscInt_FMT "\n", indx));
769: PetscStackCallExternalVoid("hypre_ParILUDataTol", tmpdbl = hypre_ParILUDataTol(ilu_data));
770: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU tolerance %e\n", (double)tmpdbl));
771: PetscStackCallExternalVoid("hypre_ParILUDataDroptol", tmpdbl3 = hypre_ParILUDataDroptol(ilu_data));
772: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU drop tolerance %e / %e / %e\n", (double)tmpdbl3[0], (double)tmpdbl3[1], (double)tmpdbl3[2]));
773: PetscStackCallExternalVoid("hypre_ParILUDataReorderingType", indx = hypre_ParILUDataReorderingType(ilu_data));
774: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU local reordering %" PetscInt_FMT "\n", indx));
775: PetscStackCallExternalVoid("hypre_ParILUDataLowerJacobiIters", indx = hypre_ParILUDataLowerJacobiIters(ilu_data));
776: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU lower Jacobi iterations %" PetscInt_FMT "\n", indx));
777: PetscStackCallExternalVoid("hypre_ParILUDataUpperJacobiIters", indx = hypre_ParILUDataUpperJacobiIters(ilu_data));
778: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU upper Jacobi iterations %" PetscInt_FMT "\n", indx));
779: PetscStackCallExternalVoid("hypre_ParILUDataPrintLevel", indx = hypre_ParILUDataPrintLevel(ilu_data));
780: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU print level %" PetscInt_FMT "\n", indx));
781: PetscStackCallExternalVoid("hypre_ParILUDataLogging", indx = hypre_ParILUDataLogging(ilu_data));
782: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU logging level %" PetscInt_FMT "\n", indx));
783: PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupType", indx = hypre_ParILUDataIterativeSetupType(ilu_data));
784: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup type %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx));
785: PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupOption", indx = hypre_ParILUDataIterativeSetupOption(ilu_data));
786: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup option %" PetscInt_FMT "\n", indx));
787: PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupMaxIter", indx = hypre_ParILUDataIterativeSetupMaxIter(ilu_data));
788: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup max iterations %" PetscInt_FMT "\n", indx));
789: PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupTolerance", tmpdbl = hypre_ParILUDataIterativeSetupTolerance(ilu_data));
790: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup tolerance %e\n", (double)tmpdbl));
791: }
792: PetscFunctionReturn(PETSC_SUCCESS);
793: }
795: static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject)
796: {
797: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
798: PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE;
800: PetscFunctionBegin;
801: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options");
802: PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag));
803: if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level);
805: PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag));
806: if (flag) {
807: PetscMPIInt size;
809: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
810: PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance");
811: PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance);
812: }
814: PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag));
815: if (flag) {
816: jac->eu_bj = eu_bj ? 1 : 0;
817: PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj);
818: }
819: PetscOptionsHeadEnd();
820: PetscFunctionReturn(PETSC_SUCCESS);
821: }
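/*
   Example command-line selection of the Euclid options handled above (values illustrative):

     -pc_type hypre -pc_hypre_type euclid -pc_hypre_euclid_level 1 -pc_hypre_euclid_bj
*/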
823: static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer)
824: {
825: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
826: PetscBool iascii;
828: PetscFunctionBegin;
829: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
830: if (iascii) {
831: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Euclid preconditioning\n"));
832: if (jac->eu_level != PETSC_DEFAULT) {
833: PetscCall(PetscViewerASCIIPrintf(viewer, " factorization levels %" PetscInt_FMT "\n", jac->eu_level));
834: } else {
835: PetscCall(PetscViewerASCIIPrintf(viewer, " default factorization levels \n"));
836: }
837: PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->eu_droptolerance));
838: PetscCall(PetscViewerASCIIPrintf(viewer, " use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
839: }
840: PetscFunctionReturn(PETSC_SUCCESS);
841: }
843: static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x)
844: {
845: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
846: Mat_HYPRE *hjac = (Mat_HYPRE *)jac->hpmat->data;
847: HYPRE_ParCSRMatrix hmat;
848: HYPRE_ParVector jbv, jxv;
850: PetscFunctionBegin;
851: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
852: PetscCall(VecSet(x, 0.0));
853: PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b));
854: PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x));
856: PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
857: PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
858: PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
860: PetscStackCallExternalVoid(
861: "Hypre Transpose solve", do {
862: HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv);
863: if (hierr) {
864: /* error code of 1 in BoomerAMG merely means convergence not achieved */
865: PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
866: HYPRE_ClearAllErrors();
867: }
868: } while (0));
870: PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
871: PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
872: PetscFunctionReturn(PETSC_SUCCESS);
873: }
875: static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[])
876: {
877: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
878: PetscBool flag;
880: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
881: PetscFunctionBegin;
882: if (jac->spgemm_type) {
883: PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag));
884: PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot change the HYPRE SpGEMM type once it has been set");
885: PetscFunctionReturn(PETSC_SUCCESS);
886: } else {
887: PetscCall(PetscStrallocpy(name, &jac->spgemm_type));
888: }
889: PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag));
890: if (flag) {
891: PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1);
892: PetscFunctionReturn(PETSC_SUCCESS);
893: }
894: PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag));
895: if (flag) {
896: PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0);
897: PetscFunctionReturn(PETSC_SUCCESS);
898: }
899: jac->spgemm_type = NULL;
900: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", name);
901: #endif
902: }
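/*
   A minimal sketch, assuming the public PCMGGalerkinSetMatProductAlgorithm() wrapper (composed as
   "PCMGGalerkinSetMatProductAlgorithm_C") dispatches to the routine above:

     PetscCall(PCMGGalerkinSetMatProductAlgorithm(pc, "hypre"));   the alternative is "cusparse"
*/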
904: static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[])
905: {
906: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
908: PetscFunctionBegin;
910: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
911: *spgemm = jac->spgemm_type;
912: #endif
913: PetscFunctionReturn(PETSC_SUCCESS);
914: }
916: static const char *HYPREBoomerAMGCycleType[] = {"", "V", "W"};
917: static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"};
918: static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"};
919: /* The HYPREBoomerAMGRelaxType list below corresponds to HYPRE_BoomerAMGSetRelaxType, which has many missing numbers in the enum */
920: static const char *HYPREBoomerAMGSmoothType[] = {"ILU", "Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"};
921: static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"};
922: static const char *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"};
923: static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject)
924: {
925: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
926: PetscInt bs, n, indx, level;
927: PetscBool flg, tmp_truth;
928: PetscReal tmpdbl, twodbl[2];
929: const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
930: const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"};
932: PetscFunctionBegin;
933: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options");
934: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg));
935: if (flg) {
936: jac->cycletype = indx + 1;
937: PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
938: }
939: PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg, 2));
940: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
941: PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg, 1));
942: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
943: PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg, 0.0));
944: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
945: bs = 1;
946: if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs));
947: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg));
948: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
950: PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg, 0.0));
951: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
953: PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg, 0));
954: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
956: PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels));
957: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
959: PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg, 1));
960: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
962: PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg, 0.0));
963: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
964: PetscCall(PetscOptionsRangeReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg, 0.0, 1.0));
965: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
967: /* Grid sweeps */
968: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg));
969: if (flg) {
970: PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx);
971: /* modify the jac structure so we can view the updated options with PC_View */
972: jac->gridsweeps[0] = indx;
973: jac->gridsweeps[1] = indx;
974: /*defaults coarse to 1 */
975: jac->gridsweeps[2] = 1;
976: }
977: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg));
978: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening);
979: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg));
980: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag);
981: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg));
982: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant);
983: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg));
984: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax);
985: PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg));
986: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth);
987: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg));
988: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine);
989: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg));
990: if (flg) {
991: PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1);
992: jac->gridsweeps[0] = indx;
993: }
994: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg));
995: if (flg) {
996: PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2);
997: jac->gridsweeps[1] = indx;
998: }
999: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg));
1000: if (flg) {
1001: PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3);
1002: jac->gridsweeps[2] = indx;
1003: }
1005: /* Smooth type */
1006: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg));
1007: if (flg) {
1008: jac->smoothtype = indx;
1009: PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 5);
1010: jac->smoothnumlevels = 25;
1011: PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25);
1012: }
1014: /* Number of smoothing levels */
1015: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg));
1016: if (flg && (jac->smoothtype != -1)) {
1017: jac->smoothnumlevels = indx;
1018: PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx);
1019: }
1021: /* Smooth num sweeps */
1022: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_sweeps", "Set number of smoother sweeps", "None", 1, &indx, &flg));
1023: if (flg && indx > 0) {
1024: jac->smoothsweeps = indx;
1025: PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumSweeps, jac->hsolver, indx);
1026: }
1028: /* ILU: ILU Type */
1029: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg));
1030: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUType, jac->hsolver, indx); }
1032: /* ILU: ILU iterative setup type*/
1033: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg));
1034: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupType, jac->hsolver, indx); }
1036: /* ILU: ILU iterative setup option*/
1037: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg));
1038: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupOption, jac->hsolver, indx); }
1040: /* ILU: ILU iterative setup maxiter */
1041: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg));
1042: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupMaxIter, jac->hsolver, indx); }
1044: /* ILU: ILU iterative setup tolerance */
1045: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg));
1046: if (flg) { PetscCallExternal(hypre_BoomerAMGSetILUIterSetupTolerance, jac->hsolver, tmpdbl); }
1048: /* ILU: ILU Print Level */
1049: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg));
1050: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, indx); }
1052: /* ILU: Logging */
1053: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg));
1054: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetLogging, jac->hsolver, indx); }
1056: /* ILU: ILU Level */
1057: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_level", "Set ILU level", "None", 0, &indx, &flg));
1058: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULevel, jac->hsolver, indx); }
1060: /* ILU: ILU Max NNZ per row */
1061: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg));
1062: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUMaxRowNnz, jac->hsolver, indx); }
1064: /* ILU: maximum iteration count */
1065: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg));
1066: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUMaxIter, jac->hsolver, indx); }
1068: /* ILU: drop threshold */
1069: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_drop_tol", "Drop tolerance for ILU", "None", 0, &tmpdbl, &flg));
1070: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUDroptol, jac->hsolver, tmpdbl); }
1072: /* ILU: Triangular Solve */
1073: PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg));
1074: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUTriSolve, jac->hsolver, tmp_truth); }
1076: /* ILU: Lower Jacobi iteration */
1077: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg));
1078: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULowerJacobiIters, jac->hsolver, indx); }
1080: /* ILU: Upper Jacobi iteration */
1081: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg));
1082: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUUpperJacobiIters, jac->hsolver, indx); }
1084: /* ILU: local reordering */
1085: PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg));
1086: if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULocalReordering, jac->hsolver, tmp_truth); }
1088: /* Number of levels for ILU(k) for Euclid */
1089: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg));
1090: if (flg && (jac->smoothtype == 4)) {
1091: jac->eu_level = indx;
1092: PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx);
1093: }
1095: /* Drop tolerance for ILU(k) in Euclid */
1096: PetscReal droptolerance;
1097: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg));
1098: if (flg && (jac->smoothtype == 4)) {
1099: jac->eu_droptolerance = droptolerance;
1100: PetscCallExternal(HYPRE_BoomerAMGSetEuSparseA, jac->hsolver, droptolerance);
1101: }
1103: /* Use Block Jacobi ILUT for Euclid */
1104: PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg));
1105: if (flg && (jac->smoothtype == 4)) {
1106: jac->eu_bj = tmp_truth;
1107: PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj);
1108: }
1110: /* Relax type */
1111: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
1112: if (flg) {
1113: jac->relaxtype[0] = jac->relaxtype[1] = indx;
1114: PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx);
1115: /* by default, coarse type set to 9 */
1116: jac->relaxtype[2] = 9;
1117: PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3);
1118: }
1119: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
1120: if (flg) {
1121: jac->relaxtype[0] = indx;
1122: PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1);
1123: }
1124: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
1125: if (flg) {
1126: jac->relaxtype[1] = indx;
1127: PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2);
1128: }
1129: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg));
1130: if (flg) {
1131: jac->relaxtype[2] = indx;
1132: PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3);
1133: }
1135: /* Relaxation Weight */
1136: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg));
1137: if (flg) {
1138: PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl);
1139: jac->relaxweight = tmpdbl;
1140: }
1142: n = 2;
1143: twodbl[0] = twodbl[1] = 1.0;
1144: PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
1145: if (flg) {
1146: PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
1147: indx = (int)PetscAbsReal(twodbl[1]);
1148: PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx);
1149: }
1151: /* Outer relaxation Weight */
1152: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg));
1153: if (flg) {
1154: PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl);
1155: jac->outerrelaxweight = tmpdbl;
1156: }
1158: n = 2;
1159: twodbl[0] = twodbl[1] = 1.0;
1160: PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
1161: if (flg) {
1162: PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
1163: indx = (int)PetscAbsReal(twodbl[1]);
1164: PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx);
1165: }
1167: /* the Relax Order */
1168: PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg));
1170: if (flg && tmp_truth) {
1171: jac->relaxorder = 0;
1172: PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
1173: }
1174: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg));
1175: if (flg) {
1176: jac->measuretype = indx;
1177: PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
1178: }
1179: /* update list length 3/07 */
1180: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg));
1181: if (flg) {
1182: jac->coarsentype = indx;
1183: PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
1184: }
1186: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg));
1187: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
1188: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg));
1189: if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
1190: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
1191: // global parameter but is closely associated with BoomerAMG
1192: PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg));
1193: #if defined(PETSC_HAVE_HYPRE_DEVICE)
1194: if (!flg) indx = 0;
1195: PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx]));
1196: #else
1197: PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, "hypre"));
1198: #endif
1199: #endif
1200: /* AIR */
1201: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
1202: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL));
1203: PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
1204: if (jac->Rtype) {
1205: HYPRE_Int **grid_relax_points = hypre_TAlloc(HYPRE_Int *, 4, HYPRE_MEMORY_HOST);
1206: char *prerelax[256];
1207: char *postrelax[256];
1208: char stringF[2] = "F", stringC[2] = "C", stringA[2] = "A";
1209: PetscInt ns_down = 256, ns_up = 256;
1210: PetscBool matchF, matchC, matchA;
1212: jac->interptype = 100; /* 100 ("1pt") cannot be selected through the interpolation string list; set it as the default as MFEM does, users can still override it with -pc_hypre_boomeramg_interp_type below */
1214: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL));
1215: PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
1217: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL));
1218: PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
1220: PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL));
1221: PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
1223: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL));
1224: PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
1225: PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_prerelax", "Defines prerelax scheme", "None", prerelax, &ns_down, NULL));
1226: PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_postrelax", "Defines postrelax scheme", "None", postrelax, &ns_up, NULL));
1227: PetscCheck(ns_down == jac->gridsweeps[0], PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_prerelax must match the number passed to -pc_hypre_boomeramg_grid_sweeps_down");
1228: PetscCheck(ns_up == jac->gridsweeps[1], PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_postrelax must match the number passed to -pc_hypre_boomeramg_grid_sweeps_up");
1230: grid_relax_points[0] = NULL;
1231: grid_relax_points[1] = hypre_TAlloc(HYPRE_Int, ns_down, HYPRE_MEMORY_HOST);
1232: grid_relax_points[2] = hypre_TAlloc(HYPRE_Int, ns_up, HYPRE_MEMORY_HOST);
1233: grid_relax_points[3] = hypre_TAlloc(HYPRE_Int, jac->gridsweeps[2], HYPRE_MEMORY_HOST);
1234: grid_relax_points[3][0] = 0;
1236: // set down relax scheme
1237: for (PetscInt i = 0; i < ns_down; i++) {
1238: PetscCall(PetscStrcasecmp(prerelax[i], stringF, &matchF));
1239: PetscCall(PetscStrcasecmp(prerelax[i], stringC, &matchC));
1240: PetscCall(PetscStrcasecmp(prerelax[i], stringA, &matchA));
1241: PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_prerelax are C, F, and A");
1242: if (matchF) grid_relax_points[1][i] = -1;
1243: else if (matchC) grid_relax_points[1][i] = 1;
1244: else if (matchA) grid_relax_points[1][i] = 0;
1245: }
1247: // set up relax scheme
1248: for (PetscInt i = 0; i < ns_up; i++) {
1249: PetscCall(PetscStrcasecmp(postrelax[i], stringF, &matchF));
1250: PetscCall(PetscStrcasecmp(postrelax[i], stringC, &matchC));
1251: PetscCall(PetscStrcasecmp(postrelax[i], stringA, &matchA));
1252: PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_postrelax are C, F, and A");
1253: if (matchF) grid_relax_points[2][i] = -1;
1254: else if (matchC) grid_relax_points[2][i] = 1;
1255: else if (matchA) grid_relax_points[2][i] = 0;
1256: }
1258: // set coarse relax scheme
1259: for (PetscInt i = 0; i < jac->gridsweeps[2]; i++) grid_relax_points[3][i] = 0;
1261: // Pass relax schemes to hypre
1262: PetscCallExternal(HYPRE_BoomerAMGSetGridRelaxPoints, jac->hsolver, grid_relax_points);
1264: // cleanup memory
1265: for (PetscInt i = 0; i < ns_down; i++) PetscCall(PetscFree(prerelax[i]));
1266: for (PetscInt i = 0; i < ns_up; i++) PetscCall(PetscFree(postrelax[i]));
1267: }
1268: #endif
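/*
   Illustrative (not part of the original source): with AIR enabled, the pre/post relaxation schemes are
   comma-separated lists of C, F, or A whose lengths must match the corresponding grid-sweep counts,
   e.g. (placeholder values):

     -pc_hypre_boomeramg_restriction_type 1
       -pc_hypre_boomeramg_grid_sweeps_down 2 -pc_hypre_boomeramg_prerelax A,A
       -pc_hypre_boomeramg_grid_sweeps_up 3 -pc_hypre_boomeramg_postrelax F,F,F
*/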
1270: #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9)
1271: PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl);
1272: #endif
1274: /* new 3/07 */
1275: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg));
1276: if (flg || jac->Rtype) {
1277: if (flg) jac->interptype = indx;
1278: PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
1279: }
1281: PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg));
1282: if (flg) {
1283: level = 3;
1284: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL));
1286: jac->printstatistics = PETSC_TRUE;
1287: PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level);
1288: }
1290: PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg));
1291: if (flg) {
1292: level = 3;
1293: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL));
1295: jac->printstatistics = PETSC_TRUE;
1296: PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level);
1297: }
1299: PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg));
1300: if (flg && tmp_truth) {
1301: PetscInt tmp_int;
1302: PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg));
1303: if (flg) jac->nodal_relax_levels = tmp_int;
1304: PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6);
1305: PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1);
1306: PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0);
1307: PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels);
1308: }
1310: PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL));
1311: PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
1313: /* options for ParaSails solvers */
1314: PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg));
1315: if (flg) {
1316: jac->symt = indx;
1317: PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt);
1318: }
1320: PetscOptionsHeadEnd();
1321: PetscFunctionReturn(PETSC_SUCCESS);
1322: }
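/*
   Illustrative (not part of the original source): a typical command line exercising the BoomerAMG
   options registered above; the values are placeholders drawn from the enumerated lists used by the
   options (run with -help for the complete sets):

     -pc_type hypre -pc_hypre_type boomeramg
       -pc_hypre_boomeramg_coarsen_type HMIS -pc_hypre_boomeramg_interp_type ext+i
       -pc_hypre_boomeramg_relax_type_all symmetric-SOR/Jacobi -pc_hypre_boomeramg_max_coarse_size 64
*/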
1324: static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
1325: {
1326: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1327: HYPRE_Int oits;
1329: PetscFunctionBegin;
1330: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
1331: PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter);
1332: PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol);
1333: jac->applyrichardson = PETSC_TRUE;
1334: PetscCall(PCApply_HYPRE(pc, b, y));
1335: jac->applyrichardson = PETSC_FALSE;
1336: PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits);
1337: *outits = oits;
1338: if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
1339: else *reason = PCRICHARDSON_CONVERGED_RTOL;
1340: PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1341: PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1342: PetscFunctionReturn(PETSC_SUCCESS);
1343: }
1345: static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer)
1346: {
1347: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1348: hypre_ParAMGData *amg_data = (hypre_ParAMGData *)jac->hsolver;
1349: PetscBool iascii;
1350: PetscInt indx;
1351: PetscReal val;
1353: PetscFunctionBegin;
1354: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1355: if (iascii) {
1356: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE BoomerAMG preconditioning\n"));
1357: PetscCall(PetscViewerASCIIPrintf(viewer, " Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype]));
1358: PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels));
1359: PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter));
1360: PetscCall(PetscViewerASCIIPrintf(viewer, " Convergence tolerance PER hypre call %g\n", (double)jac->tol));
1361: PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for strong coupling %g\n", (double)jac->strongthreshold));
1362: PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation truncation factor %g\n", (double)jac->truncfactor));
1363: PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax));
1364: if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine));
1365: PetscCall(PetscViewerASCIIPrintf(viewer, " Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl));
1366: PetscCall(PetscViewerASCIIPrintf(viewer, " Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths));
1368: PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum row sums %g\n", (double)jac->maxrowsum));
1370: PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps down %" PetscInt_FMT "\n", jac->gridsweeps[0]));
1371: PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps up %" PetscInt_FMT "\n", jac->gridsweeps[1]));
1372: PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps on coarse %" PetscInt_FMT "\n", jac->gridsweeps[2]));
1374: PetscCall(PetscViewerASCIIPrintf(viewer, " Relax down %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]]));
1375: PetscCall(PetscViewerASCIIPrintf(viewer, " Relax up %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]]));
1376: PetscCall(PetscViewerASCIIPrintf(viewer, " Relax on coarse %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]]));
1378: PetscCall(PetscViewerASCIIPrintf(viewer, " Relax weight (all) %g\n", (double)jac->relaxweight));
1379: PetscCall(PetscViewerASCIIPrintf(viewer, " Outer relax weight (all) %g\n", (double)jac->outerrelaxweight));
1381: PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum size of coarsest grid %" PetscInt_FMT "\n", jac->maxc));
1382: PetscCall(PetscViewerASCIIPrintf(viewer, " Minimum size of coarsest grid %" PetscInt_FMT "\n", jac->minc));
1384: if (jac->relaxorder) {
1385: PetscCall(PetscViewerASCIIPrintf(viewer, " Using CF-relaxation\n"));
1386: } else {
1387: PetscCall(PetscViewerASCIIPrintf(viewer, " Not using CF-relaxation\n"));
1388: }
1389: if (jac->smoothtype != -1) {
1390: PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth type %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype]));
1391: PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth num levels %" PetscInt_FMT "\n", jac->smoothnumlevels));
1392: PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth num sweeps %" PetscInt_FMT "\n", jac->smoothsweeps));
1393: if (jac->smoothtype == 0) {
1394: PetscStackCallExternalVoid("hypre_ParAMGDataILUType", indx = hypre_ParAMGDataILUType(amg_data));
1395: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU type %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx));
1396: PetscStackCallExternalVoid("hypre_ParAMGDataILULevel", indx = hypre_ParAMGDataILULevel(amg_data));
1397: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU level %" PetscInt_FMT "\n", indx));
1398: PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxIter", indx = hypre_ParAMGDataILUMaxIter(amg_data));
1399: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max iterations %" PetscInt_FMT "\n", indx));
1400: PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxRowNnz", indx = hypre_ParAMGDataILUMaxRowNnz(amg_data));
1401: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU max NNZ per row %" PetscInt_FMT "\n", indx));
1402: PetscStackCallExternalVoid("hypre_ParAMGDataILUTriSolve", indx = hypre_ParAMGDataILUTriSolve(amg_data));
1403: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU triangular solve %" PetscInt_FMT "\n", indx));
1404: PetscStackCallExternalVoid("hypre_ParAMGDataTol", val = hypre_ParAMGDataTol(amg_data));
1405: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU tolerance %e\n", val));
1406: PetscStackCallExternalVoid("hypre_ParAMGDataILUDroptol", val = hypre_ParAMGDataILUDroptol(amg_data));
1407: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU drop tolerance %e\n", val));
1408: PetscStackCallExternalVoid("hypre_ParAMGDataILULocalReordering", indx = hypre_ParAMGDataILULocalReordering(amg_data));
1409: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU local reordering %" PetscInt_FMT "\n", indx));
1410: PetscStackCallExternalVoid("hypre_ParAMGDataILULowerJacobiIters", indx = hypre_ParAMGDataILULowerJacobiIters(amg_data));
1411: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU lower Jacobi iterations %" PetscInt_FMT "\n", indx));
1412: PetscStackCallExternalVoid("hypre_ParAMGDataILUUpperJacobiIters", indx = hypre_ParAMGDataILUUpperJacobiIters(amg_data));
1413: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU upper Jacobi iterations %" PetscInt_FMT "\n", indx));
1414: PetscStackCallExternalVoid("hypre_ParAMGDataPrintLevel", indx = hypre_ParAMGDataPrintLevel(amg_data));
1415: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU print level %" PetscInt_FMT "\n", indx));
1416: PetscStackCallExternalVoid("hypre_ParAMGDataLogging", indx = hypre_ParAMGDataLogging(amg_data));
1417: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU logging level %" PetscInt_FMT "\n", indx));
1418: PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupType", indx = hypre_ParAMGDataILUIterSetupType(amg_data));
1419: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup type %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx));
1420: PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupOption", indx = hypre_ParAMGDataILUIterSetupOption(amg_data));
1421: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup option %" PetscInt_FMT "\n", indx));
1422: PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupMaxIter", indx = hypre_ParAMGDataILUIterSetupMaxIter(amg_data));
1423: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup max iterations %" PetscInt_FMT "\n", indx));
1424: PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupTolerance", val = hypre_ParAMGDataILUIterSetupTolerance(amg_data));
1425: PetscCall(PetscViewerASCIIPrintf(viewer, " ILU iterative setup tolerance %e\n", val));
1426: }
1427: } else {
1428: PetscCall(PetscViewerASCIIPrintf(viewer, " Not using more complex smoothers.\n"));
1429: }
1430: if (jac->smoothtype == 3) {
1431: PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level));
1432: PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance));
1433: PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
1434: }
1435: PetscCall(PetscViewerASCIIPrintf(viewer, " Measure type %s\n", HYPREBoomerAMGMeasureType[jac->measuretype]));
1436: PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsen type %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype]));
1437: PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation type %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt"));
1438: if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal coarsening with HYPRE_BoomerAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening));
1439: if (jac->vec_interp_variant) {
1440: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant));
1441: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax));
1442: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth));
1443: }
1444: if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels));
1445: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
1446: PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", jac->spgemm_type));
1447: #else
1448: PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", "hypre"));
1449: #endif
1450: /* AIR */
1451: if (jac->Rtype) {
1452: PetscCall(PetscViewerASCIIPrintf(viewer, " Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype));
1453: PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for R %g\n", (double)jac->Rstrongthreshold));
1454: PetscCall(PetscViewerASCIIPrintf(viewer, " Filter for R %g\n", (double)jac->Rfilterthreshold));
1455: PetscCall(PetscViewerASCIIPrintf(viewer, " A drop tolerance %g\n", (double)jac->Adroptol));
1456: PetscCall(PetscViewerASCIIPrintf(viewer, " A drop type %" PetscInt_FMT "\n", jac->Adroptype));
1457: }
1458: }
1459: PetscFunctionReturn(PETSC_SUCCESS);
1460: }
1462: static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject)
1463: {
1464: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1465: PetscInt indx;
1466: PetscBool flag;
1467: const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
1469: PetscFunctionBegin;
1470: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options");
1471: PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0));
1472: PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag));
1473: if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
1475: PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag));
1476: if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
1478: PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag));
1479: if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
1481: PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag));
1482: if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
1484: PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag));
1485: if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
1487: PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag));
1488: if (flag) {
1489: jac->symt = indx;
1490: PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1491: }
1493: PetscOptionsHeadEnd();
1494: PetscFunctionReturn(PETSC_SUCCESS);
1495: }
1497: static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer)
1498: {
1499: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1500: PetscBool iascii;
1501: const char *symt = 0;
1503: PetscFunctionBegin;
1504: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1505: if (iascii) {
1506: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ParaSails preconditioning\n"));
1507: PetscCall(PetscViewerASCIIPrintf(viewer, " nlevels %" PetscInt_FMT "\n", jac->nlevels));
1508: PetscCall(PetscViewerASCIIPrintf(viewer, " threshold %g\n", (double)jac->threshold));
1509: PetscCall(PetscViewerASCIIPrintf(viewer, " filter %g\n", (double)jac->filter));
1510: PetscCall(PetscViewerASCIIPrintf(viewer, " load balance %g\n", (double)jac->loadbal));
1511: PetscCall(PetscViewerASCIIPrintf(viewer, " reuse nonzero structure %s\n", PetscBools[jac->ruse]));
1512: PetscCall(PetscViewerASCIIPrintf(viewer, " print info to screen %s\n", PetscBools[jac->logging]));
1513: if (!jac->symt) symt = "nonsymmetric matrix and preconditioner";
1514: else if (jac->symt == 1) symt = "SPD matrix and preconditioner";
1515: else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner";
1516: else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt);
1517: PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", symt));
1518: }
1519: PetscFunctionReturn(PETSC_SUCCESS);
1520: }
1522: static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject)
1523: {
1524: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1525: PetscInt n;
1526: PetscBool flag, flag2, flag3, flag4;
1528: PetscFunctionBegin;
1529: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options");
1530: PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag));
1531: if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
1532: PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1533: if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
1534: PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag));
1535: if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
1536: PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1537: if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
1538: PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1539: PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1540: PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1541: PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1542: if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1543: PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1544: n = 5;
1545: PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2));
1546: if (flag || flag2) {
1547: PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
1548: jac->as_amg_alpha_opts[1], /* AMG agg_levels */
1549: jac->as_amg_alpha_opts[2], /* AMG relax_type */
1550: jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */
1551: jac->as_amg_alpha_opts[4]); /* AMG Pmax */
1552: }
1553: PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1554: n = 5;
1555: PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2));
1556: if (flag || flag2) {
1557: PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1558: jac->as_amg_beta_opts[1], /* AMG agg_levels */
1559: jac->as_amg_beta_opts[2], /* AMG relax_type */
1560: jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */
1561: jac->as_amg_beta_opts[4]); /* AMG Pmax */
1562: }
1563: PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag));
1564: if (flag) { /* override HYPRE's default only if the option is used */
1565: PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq);
1566: }
1567: PetscOptionsHeadEnd();
1568: PetscFunctionReturn(PETSC_SUCCESS);
1569: }
1571: static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer)
1572: {
1573: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1574: PetscBool iascii;
1576: PetscFunctionBegin;
1577: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1578: if (iascii) {
1579: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE AMS preconditioning\n"));
1580: PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1581: PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1582: PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol));
1583: PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1584: PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1585: PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight));
1586: PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega));
1587: if (jac->alpha_Poisson) {
1588: PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (passed in by user)\n"));
1589: } else {
1590: PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (computed) \n"));
1591: }
1592: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1593: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1594: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1595: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1596: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1597: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1598: if (!jac->ams_beta_is_zero) {
1599: if (jac->beta_Poisson) {
1600: PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (passed in by user)\n"));
1601: } else {
1602: PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (computed) \n"));
1603: }
1604: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1605: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1606: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1607: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1608: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1609: PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta));
1610: if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, " compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq));
1611: } else {
1612: PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver not used (zero-conductivity everywhere) \n"));
1613: }
1614: }
1615: PetscFunctionReturn(PETSC_SUCCESS);
1616: }
1618: static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject)
1619: {
1620: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1621: PetscInt n;
1622: PetscBool flag, flag2, flag3, flag4;
1624: PetscFunctionBegin;
1625: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options");
1626: PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag));
1627: if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
1628: PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1629: if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
1630: PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag));
1631: if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type);
1632: PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1633: if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
1634: PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1635: PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1636: PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1637: PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1638: if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1639: PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1640: n = 5;
1641: PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2));
1642: PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3));
1643: if (flag || flag2 || flag3) {
1644: PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */
1645: jac->as_amg_alpha_opts[0], /* AMG coarsen type */
1646: jac->as_amg_alpha_opts[1], /* AMG agg_levels */
1647: jac->as_amg_alpha_opts[2], /* AMG relax_type */
1648: jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */
1649: jac->as_amg_alpha_opts[4]); /* AMG Pmax */
1650: }
1651: PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1652: n = 5;
1653: PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2));
1654: if (flag || flag2) {
1655: PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1656: jac->as_amg_beta_opts[1], /* AMG agg_levels */
1657: jac->as_amg_beta_opts[2], /* AMG relax_type */
1658: jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */
1659: jac->as_amg_beta_opts[4]); /* AMG Pmax */
1660: }
1661: PetscOptionsHeadEnd();
1662: PetscFunctionReturn(PETSC_SUCCESS);
1663: }
1665: static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer)
1666: {
1667: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1668: PetscBool iascii;
1670: PetscFunctionBegin;
1671: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1672: if (iascii) {
1673: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ADS preconditioning\n"));
1674: PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1675: PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type));
1676: PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol));
1677: PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1678: PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1679: PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight));
1680: PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega));
1681: PetscCall(PetscViewerASCIIPrintf(viewer, " AMS solver using boomerAMG\n"));
1682: PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1683: PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1684: PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1685: PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1686: PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1687: PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1688: PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1689: PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver using boomerAMG\n"));
1690: PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1691: PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1692: PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1693: PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1694: PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1695: PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_beta_theta));
1696: }
1697: PetscFunctionReturn(PETSC_SUCCESS);
1698: }
1700: static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G)
1701: {
1702: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1703: PetscBool ishypre;
1705: PetscFunctionBegin;
1706: PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre));
1707: if (ishypre) {
1708: PetscCall(PetscObjectReference((PetscObject)G));
1709: PetscCall(MatDestroy(&jac->G));
1710: jac->G = G;
1711: } else {
1712: PetscCall(MatDestroy(&jac->G));
1713: PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G));
1714: }
1715: PetscFunctionReturn(PETSC_SUCCESS);
1716: }
1718: /*@
1719: PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads
1721: Collective
1723: Input Parameters:
1724: + pc - the preconditioning context
1725: - G - the discrete gradient
1727: Level: intermediate
1729: Notes:
1730: G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh
1732: Each row of G has 2 nonzeros, with column indexes being the global indexes of the edge's endpoints: matrix entries are +1 and -1 depending on the edge orientation
1734: Developer Notes:
1735: This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1737: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteCurl()`
1738: @*/
1739: PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G)
1740: {
1741: PetscFunctionBegin;
1744: PetscCheckSameComm(pc, 1, G, 2);
1745: PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G));
1746: PetscFunctionReturn(PETSC_SUCCESS);
1747: }
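/*
   Illustrative usage sketch (not part of the original source): comm, the local sizes, and the assembly
   loop are supplied by the caller. Each edge row of G gets a -1 and a +1 in the columns of its two
   endpoint vertices, then the matrix is handed to the AMS/ADS preconditioner:

     Mat G;
     PetscCall(MatCreateAIJ(comm, nedges_local, nvertices_local, PETSC_DETERMINE, PETSC_DETERMINE, 2, NULL, 2, NULL, &G));
     // for each local edge e with endpoints (v0, v1): MatSetValue(G, e, v0, -1.0, INSERT_VALUES); MatSetValue(G, e, v1, 1.0, INSERT_VALUES);
     PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
     PetscCall(PCSetType(pc, PCHYPRE));
     PetscCall(PCHYPRESetType(pc, "ams"));
     PetscCall(PCHYPRESetDiscreteGradient(pc, G));
     PetscCall(MatDestroy(&G));
*/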
1749: static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C)
1750: {
1751: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1752: PetscBool ishypre;
1754: PetscFunctionBegin;
1755: PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre));
1756: if (ishypre) {
1757: PetscCall(PetscObjectReference((PetscObject)C));
1758: PetscCall(MatDestroy(&jac->C));
1759: jac->C = C;
1760: } else {
1761: PetscCall(MatDestroy(&jac->C));
1762: PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C));
1763: }
1764: PetscFunctionReturn(PETSC_SUCCESS);
1765: }
1767: /*@
1768: PCHYPRESetDiscreteCurl - Set discrete curl matrix for `PCHYPRE` type of ads
1770: Collective
1772: Input Parameters:
1773: + pc - the preconditioning context
1774: - C - the discrete curl
1776: Level: intermediate
1778: Notes:
1779: C should have as many rows as the number of faces and as many columns as the number of edges in the mesh
1781: Each row of C has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on the edge orientation with respect to the face orientation
1783: Developer Notes:
1784: This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1786: If this is only for `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()`
1788: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`
1789: @*/
1790: PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C)
1791: {
1792: PetscFunctionBegin;
1795: PetscCheckSameComm(pc, 1, C, 2);
1796: PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C));
1797: PetscFunctionReturn(PETSC_SUCCESS);
1798: }
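/*
   Illustrative (not part of the original source): for the ads preconditioner the discrete curl is
   registered alongside the discrete gradient, with C a caller-assembled face-by-edge incidence matrix:

     PetscCall(PCHYPRESetType(pc, "ads"));
     PetscCall(PCHYPRESetDiscreteGradient(pc, G));
     PetscCall(PCHYPRESetDiscreteCurl(pc, C));
*/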
1800: static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1801: {
1802: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1803: PetscBool ishypre;
1804: PetscInt i;
1806: PetscFunctionBegin;
1807: PetscCall(MatDestroy(&jac->RT_PiFull));
1808: PetscCall(MatDestroy(&jac->ND_PiFull));
1809: for (i = 0; i < 3; ++i) {
1810: PetscCall(MatDestroy(&jac->RT_Pi[i]));
1811: PetscCall(MatDestroy(&jac->ND_Pi[i]));
1812: }
1814: jac->dim = dim;
1815: if (RT_PiFull) {
1816: PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre));
1817: if (ishypre) {
1818: PetscCall(PetscObjectReference((PetscObject)RT_PiFull));
1819: jac->RT_PiFull = RT_PiFull;
1820: } else {
1821: PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull));
1822: }
1823: }
1824: if (RT_Pi) {
1825: for (i = 0; i < dim; ++i) {
1826: if (RT_Pi[i]) {
1827: PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre));
1828: if (ishypre) {
1829: PetscCall(PetscObjectReference((PetscObject)RT_Pi[i]));
1830: jac->RT_Pi[i] = RT_Pi[i];
1831: } else {
1832: PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i]));
1833: }
1834: }
1835: }
1836: }
1837: if (ND_PiFull) {
1838: PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre));
1839: if (ishypre) {
1840: PetscCall(PetscObjectReference((PetscObject)ND_PiFull));
1841: jac->ND_PiFull = ND_PiFull;
1842: } else {
1843: PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull));
1844: }
1845: }
1846: if (ND_Pi) {
1847: for (i = 0; i < dim; ++i) {
1848: if (ND_Pi[i]) {
1849: PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre));
1850: if (ishypre) {
1851: PetscCall(PetscObjectReference((PetscObject)ND_Pi[i]));
1852: jac->ND_Pi[i] = ND_Pi[i];
1853: } else {
1854: PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i]));
1855: }
1856: }
1857: }
1858: }
1859: PetscFunctionReturn(PETSC_SUCCESS);
1860: }
1862: /*@
1863: PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads
1865: Collective
1867: Input Parameters:
1868: + pc - the preconditioning context
1869: . dim - the dimension of the problem, only used in AMS
1870: . RT_PiFull - Raviart-Thomas interpolation matrix
1871: . RT_Pi - x/y/z component of Raviart-Thomas interpolation matrix
1872: . ND_PiFull - Nedelec interpolation matrix
1873: - ND_Pi - x/y/z component of Nedelec interpolation matrix
1875: Level: intermediate
1877: Notes:
1878: For AMS, only the Nedelec interpolation matrices are needed; the Raviart-Thomas interpolation matrices can be set to NULL.
1880: For ADS, both types of interpolation matrices are needed.
1882: Developer Notes:
1883: This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1885: .seealso: [](ch_ksp), `PCHYPRE`
1886: @*/
1887: PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1888: {
1889: PetscInt i;
1891: PetscFunctionBegin;
1893: if (RT_PiFull) {
1895: PetscCheckSameComm(pc, 1, RT_PiFull, 3);
1896: }
1897: if (RT_Pi) {
1898: PetscAssertPointer(RT_Pi, 4);
1899: for (i = 0; i < dim; ++i) {
1900: if (RT_Pi[i]) {
1902: PetscCheckSameComm(pc, 1, RT_Pi[i], 4);
1903: }
1904: }
1905: }
1906: if (ND_PiFull) {
1908: PetscCheckSameComm(pc, 1, ND_PiFull, 5);
1909: }
1910: if (ND_Pi) {
1911: PetscAssertPointer(ND_Pi, 6);
1912: for (i = 0; i < dim; ++i) {
1913: if (ND_Pi[i]) {
1915: PetscCheckSameComm(pc, 1, ND_Pi[i], 6);
1916: }
1917: }
1918: }
1919: PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi));
1920: PetscFunctionReturn(PETSC_SUCCESS);
1921: }
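/*
   Illustrative sketch (not part of the original source): for ams only the Nedelec interpolations are
   needed, so the Raviart-Thomas arguments may be NULL; ND_Pi_xyz stands for a caller-assembled array
   of the three component interpolation matrices:

     Mat ND_Pi_xyz[3]; // x/y/z Nedelec interpolation components assembled by the caller
     PetscCall(PCHYPRESetType(pc, "ams"));
     PetscCall(PCHYPRESetInterpolations(pc, 3, NULL, NULL, NULL, ND_Pi_xyz));
*/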
1923: static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha)
1924: {
1925: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1926: PetscBool ishypre;
1928: PetscFunctionBegin;
1929: PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1930: if (ishypre) {
1931: if (isalpha) {
1932: PetscCall(PetscObjectReference((PetscObject)A));
1933: PetscCall(MatDestroy(&jac->alpha_Poisson));
1934: jac->alpha_Poisson = A;
1935: } else {
1936: if (A) {
1937: PetscCall(PetscObjectReference((PetscObject)A));
1938: } else {
1939: jac->ams_beta_is_zero = PETSC_TRUE;
1940: }
1941: PetscCall(MatDestroy(&jac->beta_Poisson));
1942: jac->beta_Poisson = A;
1943: }
1944: } else {
1945: if (isalpha) {
1946: PetscCall(MatDestroy(&jac->alpha_Poisson));
1947: PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson));
1948: } else {
1949: if (A) {
1950: PetscCall(MatDestroy(&jac->beta_Poisson));
1951: PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson));
1952: } else {
1953: PetscCall(MatDestroy(&jac->beta_Poisson));
1954: jac->ams_beta_is_zero = PETSC_TRUE;
1955: }
1956: }
1957: }
1958: PetscFunctionReturn(PETSC_SUCCESS);
1959: }
1961: /*@
1962: PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams
1964: Collective
1966: Input Parameters:
1967: + pc - the preconditioning context
1968: - A - the matrix
1970: Level: intermediate
1972: Note:
1973: A should be obtained by discretizing the vector valued Poisson problem with linear finite elements
1975: Developer Notes:
1976: This automatically converts the matrix to `MATHYPRE` if it is not already of that type
1978: If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()`
1980: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()`
1981: @*/
1982: PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A)
1983: {
1984: PetscFunctionBegin;
1987: PetscCheckSameComm(pc, 1, A, 2);
1988: PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE));
1989: PetscFunctionReturn(PETSC_SUCCESS);
1990: }
1992: /*@
1993: PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams
1995: Collective
1997: Input Parameters:
1998: + pc - the preconditioning context
1999: - A - the matrix, or NULL to turn it off
2001: Level: intermediate
2003: Note:
2004: A should be obtained by discretizing the Poisson problem with linear finite elements.
2006: Developer Notes:
2007: This automatically converts the matrix to `MATHYPRE` if it is not already of that type
2009: If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetBetaPoissonMatrix()`
2011: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2012: @*/
2013: PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A)
2014: {
2015: PetscFunctionBegin;
2017: if (A) {
2019: PetscCheckSameComm(pc, 1, A, 2);
2020: }
2021: PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE));
2022: PetscFunctionReturn(PETSC_SUCCESS);
2023: }
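/*
   Illustrative (not part of the original source): Apoisson_vec and Apoisson_scal stand for
   caller-assembled linear finite element Poisson matrices; NULL for the beta matrix declares zero
   conductivity everywhere:

     PetscCall(PCHYPRESetAlphaPoissonMatrix(pc, Apoisson_vec));
     PetscCall(PCHYPRESetBetaPoissonMatrix(pc, Apoisson_scal)); // or NULL to disable the scalar solve
*/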
2025: static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo)
2026: {
2027: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2029: PetscFunctionBegin;
2030: /* throw away any vector if already set */
2031: PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
2032: PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
2033: PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
2034: PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0]));
2035: PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0]));
2036: PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1]));
2037: PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1]));
2038: jac->dim = 2;
2039: if (zzo) {
2040: PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2]));
2041: PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2]));
2042: jac->dim++;
2043: }
2044: PetscFunctionReturn(PETSC_SUCCESS);
2045: }
2047: /*@
2048: PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams
2050: Collective
2052: Input Parameters:
2053: + pc - the preconditioning context
2054: . ozz - vector representing (1,0,0) (or (1,0) in 2D)
2055: . zoz - vector representing (0,1,0) (or (0,1) in 2D)
2056: - zzo - vector representing (0,0,1) (use NULL in 2D)
2058: Level: intermediate
2060: Developer Notes:
2061: If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()`
2063: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2064: @*/
2065: PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo)
2066: {
2067: PetscFunctionBegin;
2072: PetscCheckSameComm(pc, 1, ozz, 2);
2073: PetscCheckSameComm(pc, 1, zoz, 3);
2074: if (zzo) PetscCheckSameComm(pc, 1, zzo, 4);
2075: PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo));
2076: PetscFunctionReturn(PETSC_SUCCESS);
2077: }
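/*
   Illustrative (not part of the original source): ozz, zoz (and zzo in 3D) are caller-built vectors on
   the edge space holding the edge-basis coefficients of the constant fields (1,0,0), (0,1,0), (0,0,1);
   in 2D the third argument is simply NULL:

     PetscCall(PCHYPRESetEdgeConstantVectors(pc, ozz, zoz, NULL)); // 2D problem
*/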
2079: static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior)
2080: {
2081: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2083: PetscFunctionBegin;
2084: PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
2085: PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior));
2086: PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior));
2087: jac->ams_beta_is_zero_part = PETSC_TRUE;
2088: PetscFunctionReturn(PETSC_SUCCESS);
2089: }
2091: /*@
2092: PCHYPREAMSSetInteriorNodes - Set the list of nodes interior to a zero-conductivity region for `PCHYPRE` of type ams
2094: Collective
2096: Input Parameters:
2097: + pc - the preconditioning context
2098: - interior - vector; a node is interior if its entry in the vector is 1.0
2100: Level: intermediate
2102: Note:
2103: This calls `HYPRE_AMSSetInteriorNodes()`
2105: Developer Notes:
2106: Unlike most of the setters above, this function already follows the `PCHYPREAMS` naming convention
2108: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2109: @*/
2110: PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior)
2111: {
2112: PetscFunctionBegin;
2115: PetscCheckSameComm(pc, 1, interior, 2);
2116: PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior));
2117: PetscFunctionReturn(PETSC_SUCCESS);
2118: }
2120: static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
2121: {
2122: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2123: Vec tv;
2124: PetscInt i;
2126: PetscFunctionBegin;
2127: /* throw away any coordinate vector if already set */
2128: PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
2129: PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
2130: PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
2131: jac->dim = dim;
2133: /* compute IJ vector for coordinates */
2134: PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv));
2135: PetscCall(VecSetType(tv, VECSTANDARD));
2136: PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE));
2137: for (i = 0; i < dim; i++) {
2138: PetscScalar *array;
2139: PetscInt j;
2141: PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i]));
2142: PetscCall(VecGetArrayWrite(tv, &array));
2143: for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i];
2144: PetscCall(VecRestoreArrayWrite(tv, &array));
2145: PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i]));
2146: }
2147: PetscCall(VecDestroy(&tv));
2148: PetscFunctionReturn(PETSC_SUCCESS);
2149: }
2151: static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[])
2152: {
2153: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2155: PetscFunctionBegin;
2156: *name = jac->hypre_type;
2157: PetscFunctionReturn(PETSC_SUCCESS);
2158: }
2160: static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[])
2161: {
2162: PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2163: PetscBool flag;
2165: PetscFunctionBegin;
2166: if (jac->hypre_type) {
2167: PetscCall(PetscStrcmp(jac->hypre_type, name, &flag));
2168: PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set");
2169: PetscFunctionReturn(PETSC_SUCCESS);
2170: } else {
2171: PetscCall(PetscStrallocpy(name, &jac->hypre_type));
2172: }
2174: jac->maxiter = PETSC_DEFAULT;
2175: jac->tol = PETSC_DEFAULT;
2176: jac->printstatistics = PetscLogPrintInfo;
2178: PetscCall(PetscStrcmp("ilu", jac->hypre_type, &flag));
2179: if (flag) {
2180: PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2181: PetscCallExternal(HYPRE_ILUCreate, &jac->hsolver);
2182: pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ILU;
2183: pc->ops->view = PCView_HYPRE_ILU;
2184: jac->destroy = HYPRE_ILUDestroy;
2185: jac->setup = HYPRE_ILUSetup;
2186: jac->solve = HYPRE_ILUSolve;
2187: jac->factorrowsize = PETSC_DEFAULT;
2188: PetscFunctionReturn(PETSC_SUCCESS);
2189: }
2191: PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag));
2192: if (flag) {
2193: PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2194: PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver);
2195: pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut;
2196: pc->ops->view = PCView_HYPRE_Pilut;
2197: jac->destroy = HYPRE_ParCSRPilutDestroy;
2198: jac->setup = HYPRE_ParCSRPilutSetup;
2199: jac->solve = HYPRE_ParCSRPilutSolve;
2200: jac->factorrowsize = PETSC_DEFAULT;
2201: PetscFunctionReturn(PETSC_SUCCESS);
2202: }
2203: PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag));
2204: if (flag) {
2205: #if defined(PETSC_USE_64BIT_INDICES)
2206: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64-bit indices");
2207: #endif
2208: PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2209: PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver);
2210: pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid;
2211: pc->ops->view = PCView_HYPRE_Euclid;
2212: jac->destroy = HYPRE_EuclidDestroy;
2213: jac->setup = HYPRE_EuclidSetup;
2214: jac->solve = HYPRE_EuclidSolve;
2215: jac->factorrowsize = PETSC_DEFAULT;
2216: jac->eu_level = PETSC_DEFAULT; /* default */
2217: PetscFunctionReturn(PETSC_SUCCESS);
2218: }
2219: PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag));
2220: if (flag) {
2221: PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2222: PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver);
2223: pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails;
2224: pc->ops->view = PCView_HYPRE_ParaSails;
2225: jac->destroy = HYPRE_ParaSailsDestroy;
2226: jac->setup = HYPRE_ParaSailsSetup;
2227: jac->solve = HYPRE_ParaSailsSolve;
2228: /* initialize */
2229: jac->nlevels = 1;
2230: jac->threshold = .1;
2231: jac->filter = .1;
2232: jac->loadbal = 0;
2233: if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE;
2234: else jac->logging = (int)PETSC_FALSE;
2236: jac->ruse = (int)PETSC_FALSE;
2237: jac->symt = 0;
2238: PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
2239: PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
2240: PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
2241: PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
2242: PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
2243: PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
2244: PetscFunctionReturn(PETSC_SUCCESS);
2245: }
2246: PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag));
2247: if (flag) {
2248: PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver);
2249: pc->ops->setfromoptions = PCSetFromOptions_HYPRE_BoomerAMG;
2250: pc->ops->view = PCView_HYPRE_BoomerAMG;
2251: pc->ops->applytranspose = PCApplyTranspose_HYPRE_BoomerAMG;
2252: pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG;
2253: pc->ops->matapply = PCMatApply_HYPRE_BoomerAMG;
2254: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG));
2255: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG));
2256: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetCFMarkers_C", PCHYPREGetCFMarkers_BoomerAMG));
2257: jac->destroy = HYPRE_BoomerAMGDestroy;
2258: jac->setup = HYPRE_BoomerAMGSetup;
2259: jac->solve = HYPRE_BoomerAMGSolve;
2260: jac->applyrichardson = PETSC_FALSE;
2261: /* these defaults match the hypre defaults */
2262: jac->cycletype = 1;
2263: jac->maxlevels = 25;
2264: jac->maxiter = 1;
2265: jac->tol = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */
2266: jac->truncfactor = 0.0;
2267: jac->strongthreshold = .25;
2268: jac->maxrowsum = .9;
2269: jac->coarsentype = 6;
2270: jac->measuretype = 0;
2271: jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1;
2272: jac->smoothtype = -1; /* Not set by default */
2273: jac->smoothnumlevels = 25;
2274: jac->eu_level = 0;
2275: jac->eu_droptolerance = 0;
2276: jac->eu_bj = 0;
2277: jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */
2278: jac->relaxtype[2] = 9; /*G.E. */
2279: jac->relaxweight = 1.0;
2280: jac->outerrelaxweight = 1.0;
2281: jac->relaxorder = 1;
2282: jac->interptype = 0;
2283: jac->Rtype = 0;
2284: jac->Rstrongthreshold = 0.25;
2285: jac->Rfilterthreshold = 0.0;
2286: jac->Adroptype = -1;
2287: jac->Adroptol = 0.0;
2288: jac->agg_nl = 0;
2289: jac->agg_interptype = 4;
2290: jac->pmax = 0;
2291: jac->truncfactor = 0.0;
2292: jac->agg_num_paths = 1;
2293: jac->maxc = 9;
2294: jac->minc = 1;
2295: jac->nodal_coarsening = 0;
2296: jac->nodal_coarsening_diag = 0;
2297: jac->vec_interp_variant = 0;
2298: jac->vec_interp_qmax = 0;
2299: jac->vec_interp_smooth = PETSC_FALSE;
2300: jac->interp_refine = 0;
2301: jac->nodal_relax = PETSC_FALSE;
2302: jac->nodal_relax_levels = 1;
2303: jac->rap2 = 0;
2305: /* GPU defaults
2306: from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options
2307: and /src/parcsr_ls/par_amg.c */
2308: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2309: jac->keeptranspose = PETSC_TRUE;
2310: jac->mod_rap2 = 1;
2311: jac->coarsentype = 8;
2312: jac->relaxorder = 0;
2313: jac->interptype = 6;
2314: jac->relaxtype[0] = 18;
2315: jac->relaxtype[1] = 18;
2316: jac->agg_interptype = 7;
2317: #else
2318: jac->keeptranspose = PETSC_FALSE;
2319: jac->mod_rap2 = 0;
2320: #endif
2321: PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
2322: PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
2323: PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
2324: PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
2325: PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
2326: PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
2327: PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
2328: PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
2329: PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
2330: PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
2331: PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
2332: PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
2333: PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype);
2334: PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
2335: PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
2336: PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]); /* defaults coarse to 9 */
2337: PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */
2338: PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
2339: PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
2340: /* GPU */
2341: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2342: PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
2343: PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2);
2344: PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2);
2345: #endif
2347: /* AIR */
2348: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2349: PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
2350: PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
2351: PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
2352: PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
2353: PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
2354: #endif
2355: PetscFunctionReturn(PETSC_SUCCESS);
2356: }
2357: PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag));
2358: if (flag) {
2359: PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver);
2360: pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS;
2361: pc->ops->view = PCView_HYPRE_AMS;
2362: jac->destroy = HYPRE_AMSDestroy;
2363: jac->setup = HYPRE_AMSSetup;
2364: jac->solve = HYPRE_AMSSolve;
2365: jac->coords[0] = NULL;
2366: jac->coords[1] = NULL;
2367: jac->coords[2] = NULL;
2368: jac->interior = NULL;
2369:     /* solver parameters: these are borrowed from the MFEM package and are not the default values from HYPRE AMS */
2370: jac->as_print = 0;
2371: jac->as_max_iter = 1; /* used as a preconditioner */
2372: jac->as_tol = 0.; /* used as a preconditioner */
2373: jac->ams_cycle_type = 13;
2374: /* Smoothing options */
2375: jac->as_relax_type = 2;
2376: jac->as_relax_times = 1;
2377: jac->as_relax_weight = 1.0;
2378: jac->as_omega = 1.0;
2379: /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2380: jac->as_amg_alpha_opts[0] = 10;
2381: jac->as_amg_alpha_opts[1] = 1;
2382: jac->as_amg_alpha_opts[2] = 6;
2383: jac->as_amg_alpha_opts[3] = 6;
2384: jac->as_amg_alpha_opts[4] = 4;
2385: jac->as_amg_alpha_theta = 0.25;
2386: /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2387: jac->as_amg_beta_opts[0] = 10;
2388: jac->as_amg_beta_opts[1] = 1;
2389: jac->as_amg_beta_opts[2] = 6;
2390: jac->as_amg_beta_opts[3] = 6;
2391: jac->as_amg_beta_opts[4] = 4;
2392: jac->as_amg_beta_theta = 0.25;
2393: PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
2394: PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
2395: PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2396: PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
2397: PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2398: PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
2399: jac->as_amg_alpha_opts[1], /* AMG agg_levels */
2400: jac->as_amg_alpha_opts[2], /* AMG relax_type */
2401: jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */
2402: jac->as_amg_alpha_opts[4]); /* AMG Pmax */
2403: PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
2404: jac->as_amg_beta_opts[1], /* AMG agg_levels */
2405: jac->as_amg_beta_opts[2], /* AMG relax_type */
2406: jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */
2407: jac->as_amg_beta_opts[4]); /* AMG Pmax */
2408: /* Zero conductivity */
2409: jac->ams_beta_is_zero = PETSC_FALSE;
2410: jac->ams_beta_is_zero_part = PETSC_FALSE;
2411: PetscFunctionReturn(PETSC_SUCCESS);
2412: }
2413: PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag));
2414: if (flag) {
2415: PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver);
2416: pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS;
2417: pc->ops->view = PCView_HYPRE_ADS;
2418: jac->destroy = HYPRE_ADSDestroy;
2419: jac->setup = HYPRE_ADSSetup;
2420: jac->solve = HYPRE_ADSSolve;
2421: jac->coords[0] = NULL;
2422: jac->coords[1] = NULL;
2423: jac->coords[2] = NULL;
2424:     /* solver parameters: these are borrowed from the MFEM package and are not the default values from HYPRE ADS */
2425: jac->as_print = 0;
2426: jac->as_max_iter = 1; /* used as a preconditioner */
2427: jac->as_tol = 0.; /* used as a preconditioner */
2428: jac->ads_cycle_type = 13;
2429: /* Smoothing options */
2430: jac->as_relax_type = 2;
2431: jac->as_relax_times = 1;
2432: jac->as_relax_weight = 1.0;
2433: jac->as_omega = 1.0;
2434: /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */
2435: jac->ams_cycle_type = 14;
2436: jac->as_amg_alpha_opts[0] = 10;
2437: jac->as_amg_alpha_opts[1] = 1;
2438: jac->as_amg_alpha_opts[2] = 6;
2439: jac->as_amg_alpha_opts[3] = 6;
2440: jac->as_amg_alpha_opts[4] = 4;
2441: jac->as_amg_alpha_theta = 0.25;
2442: /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2443: jac->as_amg_beta_opts[0] = 10;
2444: jac->as_amg_beta_opts[1] = 1;
2445: jac->as_amg_beta_opts[2] = 6;
2446: jac->as_amg_beta_opts[3] = 6;
2447: jac->as_amg_beta_opts[4] = 4;
2448: jac->as_amg_beta_theta = 0.25;
2449: PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
2450: PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
2451: PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2452: PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
2453: PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2454: PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMG coarsen type */
2455: jac->as_amg_alpha_opts[0], /* AMG coarsen type */
2456: jac->as_amg_alpha_opts[1], /* AMG agg_levels */
2457: jac->as_amg_alpha_opts[2], /* AMG relax_type */
2458: jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */
2459: jac->as_amg_alpha_opts[4]); /* AMG Pmax */
2460: PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
2461: jac->as_amg_beta_opts[1], /* AMG agg_levels */
2462: jac->as_amg_beta_opts[2], /* AMG relax_type */
2463: jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */
2464: jac->as_amg_beta_opts[4]); /* AMG Pmax */
2465: PetscFunctionReturn(PETSC_SUCCESS);
2466: }
2467: PetscCall(PetscFree(jac->hypre_type));
2469: jac->hypre_type = NULL;
2470:   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are ilu, euclid, pilut, parasails, boomeramg, ams, ads", name);
2471: }
2473: /*
2474: It only gets here if the HYPRE type has not been set before the call to
2475:     ...SetFromOptions(), which is actually the most common case
2476: */
2477: static PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject)
2478: {
2479: PetscInt indx;
2480: const char *type[] = {"ilu", "euclid", "pilut", "parasails", "boomeramg", "ams", "ads"};
2481: PetscBool flg;
2483: PetscFunctionBegin;
2484: PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options");
2485: PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg));
2486: if (flg) {
2487: PetscCall(PCHYPRESetType_HYPRE(pc, type[indx]));
2488: } else {
2489: PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg"));
2490: }
2491: PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject);
2492: PetscOptionsHeadEnd();
2493: PetscFunctionReturn(PETSC_SUCCESS);
2494: }
2496: /*@
2497: PCHYPRESetType - Sets which hypre preconditioner you wish to use
2499: Input Parameters:
2500: + pc - the preconditioner context
2501: - name - one of ilu, euclid, pilut, parasails, boomeramg, ams, ads
2503: Options Database Key:
2504: . -pc_hypre_type <type> - one of ilu, euclid, pilut, parasails, boomeramg, ams, ads
2506: Level: intermediate
2508: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE`
2509: @*/
2510: PetscErrorCode PCHYPRESetType(PC pc, const char name[])
2511: {
2512: PetscFunctionBegin;
2514: PetscAssertPointer(name, 2);
2515: PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name));
2516: PetscFunctionReturn(PETSC_SUCCESS);
2517: }
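/*
   Example usage (a sketch of the two equivalent ways to select the hypre preconditioner; the PC
   object is assumed to have been created already):

     PetscCall(PCSetType(pc, PCHYPRE));
     PetscCall(PCHYPRESetType(pc, "boomeramg"));

   or, from the command line,

     -pc_type hypre -pc_hypre_type boomeramg
*/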
2519: /*@C
2520: PCHYPREGetCFMarkers - Gets CF marker arrays for all levels (except the finest level)
2522: Logically Collective
2524: Input Parameter:
2525: . pc - the preconditioner context
2527: Output Parameters:
2528: + n_per_level - array giving the number of nodes on each level (its length equals the number of levels)
2529: - CFMarkers - the Coarse/Fine Boolean marker arrays, one per level except the finest (length is the number of levels minus one)
2531: Note:
2532:   The caller is responsible for the memory management of the `n_per_level` and `CFMarkers` pointers; that is, they should be freed with `PetscFree()` when no longer needed.
2534: Level: advanced
2536: .seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
2537: @*/
2538: PetscErrorCode PCHYPREGetCFMarkers(PC pc, PetscInt *n_per_level[], PetscBT *CFMarkers[])
2539: {
2540: PetscFunctionBegin;
2542: PetscAssertPointer(n_per_level, 2);
2543: PetscAssertPointer(CFMarkers, 3);
2544: PetscUseMethod(pc, "PCHYPREGetCFMarkers_C", (PC, PetscInt *[], PetscBT *[]), (pc, n_per_level, CFMarkers));
2545: PetscFunctionReturn(PETSC_SUCCESS);
2546: }
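/*
   Example usage (a minimal sketch, assuming a BoomerAMG hierarchy has already been built, for
   example by a preceding KSPSetUp(); only the two top-level pointers are freed here, following the
   Note in the manual page above):

     PetscInt *n_per_level;
     PetscBT  *CFMarkers;

     PetscCall(PCHYPREGetCFMarkers(pc, &n_per_level, &CFMarkers));
     // ... inspect the coarse/fine splitting on each level ...
     PetscCall(PetscFree(n_per_level));
     PetscCall(PetscFree(CFMarkers));
*/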
2548: /*@
2549: PCHYPREGetType - Gets which hypre preconditioner you are using
2551: Input Parameter:
2552: . pc - the preconditioner context
2554: Output Parameter:
2555: . name - one of ilu, euclid, pilut, parasails, boomeramg, ams, ads
2557: Level: intermediate
2559: .seealso: [](ch_ksp), `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE`
2560: @*/
2561: PetscErrorCode PCHYPREGetType(PC pc, const char *name[])
2562: {
2563: PetscFunctionBegin;
2565: PetscAssertPointer(name, 2);
2566: PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name));
2567: PetscFunctionReturn(PETSC_SUCCESS);
2568: }
2570: /*@
2571: PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs
2573: Logically Collective
2575: Input Parameters:
2576: + pc - the hypre context
2577: - name - one of 'cusparse', 'hypre'
2579: Options Database Key:
2580: . -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre
2582: Level: intermediate
2584: Developer Notes:
2585:   The name starts with `PCMG`; should it not instead start with `PCHYPREBoomerAMG`?
2587: .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()`
2588: @*/
2589: PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[])
2590: {
2591: PetscFunctionBegin;
2593: PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name));
2594: PetscFunctionReturn(PETSC_SUCCESS);
2595: }
2597: /*@
2598: PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs
2600: Not Collective
2602: Input Parameter:
2603: . pc - the hypre context
2605: Output Parameter:
2606: . name - one of 'cusparse', 'hypre'
2608: Level: intermediate
2610: .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()`
2611: @*/
2612: PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[])
2613: {
2614: PetscFunctionBegin;
2616: PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name));
2617: PetscFunctionReturn(PETSC_SUCCESS);
2618: }
2620: /*MC
2621:    PCHYPRE - Allows you to use the matrix-entry based (algebraic) preconditioners in the LLNL package hypre as a PETSc `PC`
2623: Options Database Keys:
2624: +   -pc_hypre_type - One of `ilu`, `euclid`, `pilut`, `parasails`, `boomeramg`, `ams`, or `ads`
2625: .   -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BoomerAMGSetNodal()`)
2626: . -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`)
2627: - Many others, run with `-pc_type hypre` `-pc_hypre_type XXX` `-help` to see options for the XXX preconditioner
2629: Level: intermediate
2631: Notes:
2632: Apart from `-pc_hypre_type` (for which there is `PCHYPRESetType()`),
2633: the many hypre options can ONLY be set via the options database (e.g. the command line
2634:    or with `PetscOptionsSetValue()`); there are no functions to set them
2636: The options `-pc_hypre_boomeramg_max_iter` and `-pc_hypre_boomeramg_tol` refer to the number of iterations
2637:    (V-cycles) and tolerance that BoomerAMG uses EACH time it is called. So, for example, if
2638:    `-pc_hypre_boomeramg_max_iter` is set to 2 then 2 V-cycles are used to define the preconditioner
2639: (`-pc_hypre_boomeramg_tol` should be set to 0.0 - the default - to strictly use a fixed number of
2640: iterations per hypre call). `-ksp_max_it` and `-ksp_rtol` STILL determine the total number of iterations
2641: and tolerance for the Krylov solver. For example, if `-pc_hypre_boomeramg_max_iter` is 2 and `-ksp_max_it` is 10
2642: then AT MOST twenty V-cycles of boomeramg will be used.
2644: Note that the option `-pc_hypre_boomeramg_relax_type_all` defaults to symmetric relaxation
2645: (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry.
2646:    If your Krylov method does not require a symmetric preconditioner, you may instead want to use `-pc_hypre_boomeramg_relax_type_all SOR/Jacobi`.
2648: `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use
2649: the following two options: `-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>`
2651: See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers
2653:    For `PCHYPRE` type of `ams` or `ads`, auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`,
2654: `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2655: `PCHYPREAMSSetInteriorNodes()`
2657:    Sometimes people want to try algebraic multigrid as a "standalone" solver, that is, without accelerating it with a Krylov method. Though we generally do not recommend this
2658:    since it is usually slower, to do so one should use a `KSPType` of `KSPRICHARDSON`
2659: (or equivalently `-ksp_type richardson`) to achieve this. Using `KSPPREONLY` will not work since it only applies a single cycle of multigrid.
2661: PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems
2663: GPU Notes:
2664:    To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs, run `./configure --download-hypre --with-cuda`.
2665:    Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers, and PETSc will automatically utilize hypre's GPU solvers.
2667:    To configure hypre BoomerAMG so that it can utilize AMD GPUs, run `./configure --download-hypre --with-hip`.
2668:    Then pass `VECHIP` vectors to the solvers, and PETSc will automatically utilize hypre's GPU solvers.
2670: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`,
2671: `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2672:           `PCHYPREAMSSetInteriorNodes()`
2673: M*/
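/*
   Example usage (a sketch for a vector-valued problem where a near null space is attached to the
   matrix; the MatNullSpace ns is assumed to have been created by the user, for example with
   MatNullSpaceCreateRigidBody()):

     PetscCall(MatSetNearNullSpace(A, ns));
     PetscCall(PCSetType(pc, PCHYPRE));
     PetscCall(PCHYPRESetType(pc, "boomeramg"));

   run with

     -pc_hypre_boomeramg_nodal_coarsen 1 -pc_hypre_boomeramg_vec_interp_variant 1

   so that hypre actually uses the near null space, as explained in the Notes above.
*/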
2675: PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc)
2676: {
2677: PC_HYPRE *jac;
2679: PetscFunctionBegin;
2680: PetscCall(PetscNew(&jac));
2682: pc->data = jac;
2683: pc->ops->reset = PCReset_HYPRE;
2684: pc->ops->destroy = PCDestroy_HYPRE;
2685: pc->ops->setfromoptions = PCSetFromOptions_HYPRE;
2686: pc->ops->setup = PCSetUp_HYPRE;
2687: pc->ops->apply = PCApply_HYPRE;
2688: jac->comm_hypre = MPI_COMM_NULL;
2689: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE));
2690: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE));
2691: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE));
2692: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE));
2693: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE));
2694: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE));
2695: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE));
2696: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE));
2697: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE));
2698: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG));
2699: PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG));
2700: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2701: #if defined(HYPRE_USING_HIP)
2702: PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
2703: #endif
2704: #if defined(HYPRE_USING_CUDA)
2705: PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
2706: #endif
2707: #endif
2708: PetscHYPREInitialize();
2709: PetscFunctionReturn(PETSC_SUCCESS);
2710: }
2712: typedef struct {
2713: MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
2714: HYPRE_StructSolver hsolver;
2716: /* keep copy of PFMG options used so may view them */
2717: PetscInt its;
2718: PetscReal tol;
2719: PetscInt relax_type;
2720: PetscInt rap_type;
2721: PetscInt num_pre_relax, num_post_relax;
2722: PetscInt max_levels;
2723: PetscInt skip_relax;
2724: PetscBool print_statistics;
2725: } PC_PFMG;
2727: static PetscErrorCode PCDestroy_PFMG(PC pc)
2728: {
2729: PC_PFMG *ex = (PC_PFMG *)pc->data;
2731: PetscFunctionBegin;
2732: if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2733: PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2734: PetscCall(PetscFree(pc->data));
2735: PetscFunctionReturn(PETSC_SUCCESS);
2736: }
2738: static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"};
2739: static const char *PFMGRAPType[] = {"Galerkin", "non-Galerkin"};
2741: static PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer)
2742: {
2743: PetscBool iascii;
2744: PC_PFMG *ex = (PC_PFMG *)pc->data;
2746: PetscFunctionBegin;
2747: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2748: if (iascii) {
2749: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE PFMG preconditioning\n"));
2750: PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its));
2751: PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol));
2752: PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type]));
2753: PetscCall(PetscViewerASCIIPrintf(viewer, " RAP type %s\n", PFMGRAPType[ex->rap_type]));
2754: PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2755: PetscCall(PetscViewerASCIIPrintf(viewer, " max levels %" PetscInt_FMT "\n", ex->max_levels));
2756: PetscCall(PetscViewerASCIIPrintf(viewer, " skip relax %" PetscInt_FMT "\n", ex->skip_relax));
2757: }
2758: PetscFunctionReturn(PETSC_SUCCESS);
2759: }
2761: static PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject)
2762: {
2763: PC_PFMG *ex = (PC_PFMG *)pc->data;
2765: PetscFunctionBegin;
2766: PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options");
2767: PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL));
2768: PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL));
2769: PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2770: PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2771: PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2772: PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2773: PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
2775: PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL));
2776: PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);
2778: PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL));
2779: PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2780: PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
2781: PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2782: PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL));
2783: PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
2784: PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL));
2785: PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax);
2786: PetscOptionsHeadEnd();
2787: PetscFunctionReturn(PETSC_SUCCESS);
2788: }
2790: static PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y)
2791: {
2792: PC_PFMG *ex = (PC_PFMG *)pc->data;
2793: PetscScalar *yy;
2794: const PetscScalar *xx;
2795: PetscInt ilower[3], iupper[3];
2796: HYPRE_Int hlower[3], hupper[3];
2797: Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
2799: PetscFunctionBegin;
2800: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2801: PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
2802:   /* when HYPRE_MIXEDINT is defined HYPRE_Int is only 32 bits wide, so convert the (possibly 64-bit) PetscInt box bounds explicitly */
2803: iupper[0] += ilower[0] - 1;
2804: iupper[1] += ilower[1] - 1;
2805: iupper[2] += ilower[2] - 1;
2806: hlower[0] = (HYPRE_Int)ilower[0];
2807: hlower[1] = (HYPRE_Int)ilower[1];
2808: hlower[2] = (HYPRE_Int)ilower[2];
2809: hupper[0] = (HYPRE_Int)iupper[0];
2810: hupper[1] = (HYPRE_Int)iupper[1];
2811: hupper[2] = (HYPRE_Int)iupper[2];
2813: /* copy x values over to hypre */
2814: PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2815: PetscCall(VecGetArrayRead(x, &xx));
2816: PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2817: PetscCall(VecRestoreArrayRead(x, &xx));
2818: PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2819: PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2821: /* copy solution values back to PETSc */
2822: PetscCall(VecGetArray(y, &yy));
2823: PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2824: PetscCall(VecRestoreArray(y, &yy));
2825: PetscFunctionReturn(PETSC_SUCCESS);
2826: }
2828: static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2829: {
2830: PC_PFMG *jac = (PC_PFMG *)pc->data;
2831: HYPRE_Int oits;
2833: PetscFunctionBegin;
2834: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2835: PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its);
2836: PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol);
2838: PetscCall(PCApply_PFMG(pc, b, y));
2839: PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits);
2840: *outits = oits;
2841: if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2842: else *reason = PCRICHARDSON_CONVERGED_RTOL;
2843: PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol);
2844: PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its);
2845: PetscFunctionReturn(PETSC_SUCCESS);
2846: }
2848: static PetscErrorCode PCSetUp_PFMG(PC pc)
2849: {
2850: PC_PFMG *ex = (PC_PFMG *)pc->data;
2851: Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
2852: PetscBool flg;
2854: PetscFunctionBegin;
2855: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
2856: PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");
2858: /* create the hypre solver object and set its information */
2859: if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2860: PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
2862: // Print Hypre statistics about the solve process
2863: if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3);
2865: // The hypre options must be repeated here because the StructPFMG was destroyed and recreated
2866: PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2867: PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2868: PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
2869: PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);
2870: PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2871: PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2872: PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
2874: PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2875: PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver);
2876: PetscFunctionReturn(PETSC_SUCCESS);
2877: }
2879: /*MC
2880: PCPFMG - the hypre PFMG multigrid solver
2882: Options Database Keys:
2883: + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner
2884: . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve
2885: . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve
2886: . -pc_pfmg_tol <tol> - tolerance of PFMG
2887: . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel
2888: . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin
2889: -   -pc_pfmg_skip_relax <0,1> - skip relaxation on certain grids for isotropic problems; this can greatly improve efficiency by eliminating unnecessary relaxations
2890:                            when the underlying problem is isotropic
2892: Level: advanced
2894: Notes:
2895:    This is for CELL-centered discretizations
2897: See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG`
2899: See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver
2901: This must be used with the `MATHYPRESTRUCT` matrix type.
2903:    This provides only some of the functionality of PFMG; it supports only one block per process, defined by a PETSc `DMDA`.
2905: .seealso: [](ch_ksp), `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`
2906: M*/
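/*
   Example usage (a sketch; a DMDA describing the structured grid is assumed to exist, and the
   operator must be assembled as a MATHYPRESTRUCT matrix; ComputeMatrix and user are user-provided
   and only illustrative):

     PetscCall(DMSetMatType(da, MATHYPRESTRUCT));
     PetscCall(KSPSetDM(ksp, da));
     PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, &user));

   run with

     -pc_type pfmg -pc_pfmg_num_pre_relax 2 -pc_pfmg_num_post_relax 2
*/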
2908: PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc)
2909: {
2910: PC_PFMG *ex;
2912: PetscFunctionBegin;
2913: PetscCall(PetscNew(&ex));
2914: pc->data = ex;
2916: ex->its = 1;
2917: ex->tol = 1.e-8;
2918: ex->relax_type = 1;
2919: ex->rap_type = 0;
2920: ex->num_pre_relax = 1;
2921: ex->num_post_relax = 1;
2922: ex->max_levels = 0;
2923: ex->skip_relax = 0;
2924: ex->print_statistics = PETSC_FALSE;
2926: pc->ops->setfromoptions = PCSetFromOptions_PFMG;
2927: pc->ops->view = PCView_PFMG;
2928: pc->ops->destroy = PCDestroy_PFMG;
2929: pc->ops->apply = PCApply_PFMG;
2930: pc->ops->applyrichardson = PCApplyRichardson_PFMG;
2931: pc->ops->setup = PCSetUp_PFMG;
2933: PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2934: PetscHYPREInitialize();
2935: PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
2936: PetscFunctionReturn(PETSC_SUCCESS);
2937: }
2939: /* we know we are working with a HYPRE_SStructMatrix */
2940: typedef struct {
2941: MPI_Comm hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */
2942: HYPRE_SStructSolver ss_solver;
2944: /* keep copy of SYSPFMG options used so may view them */
2945: PetscInt its;
2946: PetscReal tol;
2947: PetscInt relax_type;
2948: PetscInt num_pre_relax, num_post_relax;
2949: } PC_SysPFMG;
2951: static PetscErrorCode PCDestroy_SysPFMG(PC pc)
2952: {
2953: PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
2955: PetscFunctionBegin;
2956: if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2957: PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2958: PetscCall(PetscFree(pc->data));
2959: PetscFunctionReturn(PETSC_SUCCESS);
2960: }
2962: static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"};
2964: static PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer)
2965: {
2966: PetscBool iascii;
2967: PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
2969: PetscFunctionBegin;
2970: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2971: if (iascii) {
2972: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SysPFMG preconditioning\n"));
2973: PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its));
2974: PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol));
2975:     PetscCall(PetscViewerASCIIPrintf(viewer, "    relax type %s\n", SysPFMGRelaxType[ex->relax_type]));
2976: PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2977: }
2978: PetscFunctionReturn(PETSC_SUCCESS);
2979: }
2981: static PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject)
2982: {
2983: PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
2984: PetscBool flg = PETSC_FALSE;
2986: PetscFunctionBegin;
2987: PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options");
2988: PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL));
2989: if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3);
2990: PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL));
2991: PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its);
2992: PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2993: PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax);
2994: PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2995: PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax);
2997: PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL));
2998: PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol);
2999: PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
3000: PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type);
3001: PetscOptionsHeadEnd();
3002: PetscFunctionReturn(PETSC_SUCCESS);
3003: }
3005: static PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y)
3006: {
3007: PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
3008: PetscScalar *yy;
3009: const PetscScalar *xx;
3010: PetscInt ilower[3], iupper[3];
3011: HYPRE_Int hlower[3], hupper[3];
3012: Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)pc->pmat->data;
3013: PetscInt ordering = mx->dofs_order;
3014: PetscInt nvars = mx->nvars;
3015: PetscInt part = 0;
3016: PetscInt size;
3017: PetscInt i;
3019: PetscFunctionBegin;
3020: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3021: PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
3022:   /* when HYPRE_MIXEDINT is defined HYPRE_Int is only 32 bits wide, so convert the (possibly 64-bit) PetscInt box bounds explicitly */
3023: iupper[0] += ilower[0] - 1;
3024: iupper[1] += ilower[1] - 1;
3025: iupper[2] += ilower[2] - 1;
3026: hlower[0] = (HYPRE_Int)ilower[0];
3027: hlower[1] = (HYPRE_Int)ilower[1];
3028: hlower[2] = (HYPRE_Int)ilower[2];
3029: hupper[0] = (HYPRE_Int)iupper[0];
3030: hupper[1] = (HYPRE_Int)iupper[1];
3031: hupper[2] = (HYPRE_Int)iupper[2];
3033: size = 1;
3034: for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1);
3036: /* copy x values over to hypre for variable ordering */
3037: if (ordering) {
3038: PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
3039: PetscCall(VecGetArrayRead(x, &xx));
3040: for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i)));
3041: PetscCall(VecRestoreArrayRead(x, &xx));
3042: PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
3043: PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x);
3044: PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
3046: /* copy solution values back to PETSc */
3047: PetscCall(VecGetArray(y, &yy));
3048: for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i)));
3049: PetscCall(VecRestoreArray(y, &yy));
3050: } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
3051: PetscScalar *z;
3052: PetscInt j, k;
3054: PetscCall(PetscMalloc1(nvars * size, &z));
3055: PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
3056: PetscCall(VecGetArrayRead(x, &xx));
3058: /* transform nodal to hypre's variable ordering for sys_pfmg */
3059: for (i = 0; i < size; i++) {
3060: k = i * nvars;
3061: for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j];
3062: }
3063: for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
3064: PetscCall(VecRestoreArrayRead(x, &xx));
3065: PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
3066: PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
3068: /* copy solution values back to PETSc */
3069: PetscCall(VecGetArray(y, &yy));
3070: for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
3071: /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
3072: for (i = 0; i < size; i++) {
3073: k = i * nvars;
3074: for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i];
3075: }
3076: PetscCall(VecRestoreArray(y, &yy));
3077: PetscCall(PetscFree(z));
3078: }
3079: PetscFunctionReturn(PETSC_SUCCESS);
3080: }
3082: static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
3083: {
3084: PC_SysPFMG *jac = (PC_SysPFMG *)pc->data;
3085: HYPRE_Int oits;
3087: PetscFunctionBegin;
3088: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3089: PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its);
3090: PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol);
3091: PetscCall(PCApply_SysPFMG(pc, b, y));
3092: PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits);
3093: *outits = oits;
3094: if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
3095: else *reason = PCRICHARDSON_CONVERGED_RTOL;
3096: PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol);
3097: PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its);
3098: PetscFunctionReturn(PETSC_SUCCESS);
3099: }
3101: static PetscErrorCode PCSetUp_SysPFMG(PC pc)
3102: {
3103: PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;
3104: Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)pc->pmat->data;
3105: PetscBool flg;
3107: PetscFunctionBegin;
3108: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg));
3109: PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner");
3111: /* create the hypre sstruct solver object and set its information */
3112: if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
3113: PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
3114: PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver);
3115: PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
3116: PetscFunctionReturn(PETSC_SUCCESS);
3117: }
3119: /*MC
3120: PCSYSPFMG - the hypre SysPFMG multigrid solver
3122: Level: advanced
3124: Options Database Keys:
3125: + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner
3126: . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid
3127: . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid
3128: . -pc_syspfmg_tol <tol> - tolerance of SysPFMG
3129: - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles
3131: Notes:
3132: See `PCPFMG` for hypre's PFMG that works for a scalar PDE and `PCSMG`
3134: See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver
3136:    This is for CELL-centered discretizations
3138: This must be used with the `MATHYPRESSTRUCT` matrix type.
3140:    This does not give access to all the functionality of hypre's SysPFMG; it supports only one part and one block per process, defined by a PETSc `DMDA`.
3142: .seealso: [](ch_ksp), `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG`
3143: M*/
3145: PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc)
3146: {
3147: PC_SysPFMG *ex;
3149: PetscFunctionBegin;
3150: PetscCall(PetscNew(&ex));
3151: pc->data = ex;
3153: ex->its = 1;
3154: ex->tol = 1.e-8;
3155: ex->relax_type = 1;
3156: ex->num_pre_relax = 1;
3157: ex->num_post_relax = 1;
3159: pc->ops->setfromoptions = PCSetFromOptions_SysPFMG;
3160: pc->ops->view = PCView_SysPFMG;
3161: pc->ops->destroy = PCDestroy_SysPFMG;
3162: pc->ops->apply = PCApply_SysPFMG;
3163: pc->ops->applyrichardson = PCApplyRichardson_SysPFMG;
3164: pc->ops->setup = PCSetUp_SysPFMG;
3166: PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3167: PetscHYPREInitialize();
3168: PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
3169: PetscFunctionReturn(PETSC_SUCCESS);
3170: }
3172: /* PC SMG */
3173: typedef struct {
3174: MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
3175: HYPRE_StructSolver hsolver;
3176: PetscInt its; /* keep copy of SMG options used so may view them */
3177: PetscReal tol;
3178: PetscBool print_statistics;
3179: PetscInt num_pre_relax, num_post_relax;
3180: } PC_SMG;
3182: static PetscErrorCode PCDestroy_SMG(PC pc)
3183: {
3184: PC_SMG *ex = (PC_SMG *)pc->data;
3186: PetscFunctionBegin;
3187: if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
3188: PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3189: PetscCall(PetscFree(pc->data));
3190: PetscFunctionReturn(PETSC_SUCCESS);
3191: }
3193: static PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer)
3194: {
3195: PetscBool iascii;
3196: PC_SMG *ex = (PC_SMG *)pc->data;
3198: PetscFunctionBegin;
3199: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
3200: if (iascii) {
3201: PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SMG preconditioning\n"));
3202: PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its));
3203: PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol));
3204: PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
3205: }
3206: PetscFunctionReturn(PETSC_SUCCESS);
3207: }
3209: static PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject)
3210: {
3211: PC_SMG *ex = (PC_SMG *)pc->data;
3213: PetscFunctionBegin;
3214: PetscOptionsHeadBegin(PetscOptionsObject, "SMG options");
3216: PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL));
3217: PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
3218: PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
3219: PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL));
3221: PetscOptionsHeadEnd();
3222: PetscFunctionReturn(PETSC_SUCCESS);
3223: }
3225: static PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y)
3226: {
3227: PC_SMG *ex = (PC_SMG *)pc->data;
3228: PetscScalar *yy;
3229: const PetscScalar *xx;
3230: PetscInt ilower[3], iupper[3];
3231: HYPRE_Int hlower[3], hupper[3];
3232: Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
3234: PetscFunctionBegin;
3235: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3236: PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
3237:   /* when HYPRE_MIXEDINT is defined HYPRE_Int is only 32 bits wide, so convert the (possibly 64-bit) PetscInt box bounds explicitly */
3238: iupper[0] += ilower[0] - 1;
3239: iupper[1] += ilower[1] - 1;
3240: iupper[2] += ilower[2] - 1;
3241: hlower[0] = (HYPRE_Int)ilower[0];
3242: hlower[1] = (HYPRE_Int)ilower[1];
3243: hlower[2] = (HYPRE_Int)ilower[2];
3244: hupper[0] = (HYPRE_Int)iupper[0];
3245: hupper[1] = (HYPRE_Int)iupper[1];
3246: hupper[2] = (HYPRE_Int)iupper[2];
3248: /* copy x values over to hypre */
3249: PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
3250: PetscCall(VecGetArrayRead(x, &xx));
3251: PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
3252: PetscCall(VecRestoreArrayRead(x, &xx));
3253: PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
3254: PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);
3256: /* copy solution values back to PETSc */
3257: PetscCall(VecGetArray(y, &yy));
3258: PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
3259: PetscCall(VecRestoreArray(y, &yy));
3260: PetscFunctionReturn(PETSC_SUCCESS);
3261: }
3263: static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
3264: {
3265: PC_SMG *jac = (PC_SMG *)pc->data;
3266: HYPRE_Int oits;
3268: PetscFunctionBegin;
3269: PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3270: PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its);
3271: PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol);
3273: PetscCall(PCApply_SMG(pc, b, y));
3274: PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits);
3275: *outits = oits;
3276: if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
3277: else *reason = PCRICHARDSON_CONVERGED_RTOL;
3278: PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol);
3279: PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its);
3280: PetscFunctionReturn(PETSC_SUCCESS);
3281: }
3283: static PetscErrorCode PCSetUp_SMG(PC pc)
3284: {
3285: PetscInt i, dim;
3286: PC_SMG *ex = (PC_SMG *)pc->data;
3287: Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
3288: PetscBool flg;
3289: DMBoundaryType p[3];
3290: PetscInt M[3];
3292: PetscFunctionBegin;
3293: PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
3294: PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");
3296: PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0));
3297: // Check if power of 2 in periodic directions
3298: for (i = 0; i < dim; i++) {
3299: if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) {
3300: SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]);
3301: }
3302: }
3304: /* create the hypre solver object and set its information */
3305: if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
3306: PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3307: // The hypre options must be set here and not in SetFromOptions because it is created here!
3308: PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its);
3309: PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
3310: PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
3311: PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol);
3313: PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
3314: PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver);
3315: PetscFunctionReturn(PETSC_SUCCESS);
3316: }
3318: /*MC
3319: PCSMG - the hypre (structured grid) SMG multigrid solver
3321: Level: advanced
3323: Options Database Keys:
3324: + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner
3325: . -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid
3326: . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid
3327: - -pc_smg_tol <tol> - tolerance of SMG
3329: Notes:
3330:    This is for CELL-centered discretizations
3332: This must be used with the `MATHYPRESTRUCT` `MatType`.
3334:    This does not provide all the functionality of hypre's SMG solver; it supports only one block per process, defined by a PETSc `DMDA`.
3336:    See `PCSYSPFMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners
3338: .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG`
3339: M*/
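/*
   Example usage (a sketch, analogous to the PCPFMG example above; note the restriction, enforced in
   PCSetUp_SMG(), that the number of grid points in any periodic direction must be a power of 2):

     PetscCall(DMSetMatType(da, MATHYPRESTRUCT));
     PetscCall(KSPSetDM(ksp, da));

   run with

     -pc_type smg -pc_smg_num_pre_relax 2
*/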
3341: PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc)
3342: {
3343: PC_SMG *ex;
3345: PetscFunctionBegin;
3346: PetscCall(PetscNew(&ex));
3347: pc->data = ex;
3349: ex->its = 1;
3350: ex->tol = 1.e-8;
3351: ex->num_pre_relax = 1;
3352: ex->num_post_relax = 1;
3354: pc->ops->setfromoptions = PCSetFromOptions_SMG;
3355: pc->ops->view = PCView_SMG;
3356: pc->ops->destroy = PCDestroy_SMG;
3357: pc->ops->apply = PCApply_SMG;
3358: pc->ops->applyrichardson = PCApplyRichardson_SMG;
3359: pc->ops->setup = PCSetUp_SMG;
3361: PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3362: PetscHYPREInitialize();
3363: PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3364: PetscFunctionReturn(PETSC_SUCCESS);
3365: }