Actual source code: hypre.c

  1: /*
  2:    Provides an interface to the LLNL package hypre
  3: */

  5: #include <petscpkg_version.h>
  6: #include <petsc/private/pcimpl.h>
  7: /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */
  8: #include <petsc/private/matimpl.h>
  9: #include <petsc/private/vecimpl.h>
 10: #include <../src/vec/vec/impls/hypre/vhyp.h>
 11: #include <../src/mat/impls/hypre/mhypre.h>
 12: #include <../src/dm/impls/da/hypre/mhyp.h>
 13: #include <_hypre_parcsr_ls.h>
 14: #include <petscmathypre.h>

 16: #if defined(PETSC_HAVE_HYPRE_DEVICE)
 17: #include <petsc/private/deviceimpl.h>
 18: #endif

 20: static PetscBool  cite            = PETSC_FALSE;
 21: static const char hypreCitation[] = "@manual{hypre-web-page,\n  title  = {{\\sl hypre}: High Performance Preconditioners},\n  organization = {Lawrence Livermore National Laboratory},\n  note  = "
 22:                                     "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n";

 24: /*
  25:    Private context (data structure) for the preconditioner.
 26: */
 27: typedef struct {
 28:   HYPRE_Solver hsolver;
 29:   Mat          hpmat; /* MatHYPRE */

 31:   HYPRE_Int (*destroy)(HYPRE_Solver);
 32:   HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
 33:   HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);

 35:   MPI_Comm comm_hypre;
 36:   char    *hypre_type;

 38:   /* options for Pilut and BoomerAMG */
 39:   PetscInt  maxiter;
 40:   PetscReal tol;

 42:   /* options for Pilut */
 43:   PetscInt factorrowsize;

 45:   /* options for ParaSails */
 46:   PetscInt  nlevels;
 47:   PetscReal threshold;
 48:   PetscReal filter;
 49:   PetscReal loadbal;
 50:   PetscInt  logging;
 51:   PetscInt  ruse;
 52:   PetscInt  symt;

 54:   /* options for BoomerAMG */
 55:   PetscBool printstatistics;

 57:   /* options for BoomerAMG */
 58:   PetscInt  cycletype;
 59:   PetscInt  maxlevels;
 60:   PetscReal strongthreshold;
 61:   PetscReal maxrowsum;
 62:   PetscInt  gridsweeps[3];
 63:   PetscObjectParameterDeclare(PetscInt, coarsentype);
 64:   PetscInt  measuretype;
 65:   PetscInt  smoothtype;
 66:   PetscInt  smoothsweeps;
 67:   PetscInt  smoothnumlevels;
 68:   PetscInt  eu_level;         /* Number of levels for ILU(k) in Euclid */
 69:   PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */
 70:   PetscInt  eu_bj;            /* Defines use of Block Jacobi ILU in Euclid */
 71:   PetscObjectParameterDeclare(PetscInt, relaxtype[3]);
 72:   PetscReal relaxweight;
 73:   PetscReal outerrelaxweight;
 74:   PetscObjectParameterDeclare(PetscInt, relaxorder);
 75:   PetscReal truncfactor;
 76:   PetscBool applyrichardson;
 77:   PetscInt  pmax;
 78:   PetscObjectParameterDeclare(PetscInt, interptype);
 79:   PetscInt maxc;
 80:   PetscInt minc;
 81: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
 82:   PetscObjectParameterDeclarePtr(const char, spgemm_type); // this is a global hypre parameter but is closely associated with BoomerAMG
 83: #endif
 84:   /* GPU */
 85:   PetscObjectParameterDeclare(PetscBool3, keeptranspose);
 86:   PetscInt rap2;
 87:   PetscObjectParameterDeclare(PetscInt, mod_rap2);

 89:   /* AIR */
 90:   PetscInt  Rtype;
 91:   PetscReal Rstrongthreshold;
 92:   PetscReal Rfilterthreshold;
 93:   PetscInt  Adroptype;
 94:   PetscReal Adroptol;

 96:   PetscInt agg_nl;
 97:   PetscObjectParameterDeclare(PetscInt, agg_interptype);
 98:   PetscInt  agg_num_paths;
 99:   PetscBool nodal_relax;
100:   PetscInt  nodal_relax_levels;

102:   PetscInt  nodal_coarsening;
103:   PetscInt  nodal_coarsening_diag;
104:   PetscInt  vec_interp_variant;
105:   PetscInt  vec_interp_qmax;
106:   PetscBool vec_interp_smooth;
107:   PetscInt  interp_refine;

109:   /* NearNullSpace support */
110:   VecHYPRE_IJVector *hmnull;
111:   HYPRE_ParVector   *phmnull;
112:   PetscInt           n_hmnull;
113:   Vec                hmnull_constant;

115:   /* options for AS (Auxiliary Space preconditioners) */
116:   PetscInt  as_print;
117:   PetscInt  as_max_iter;
118:   PetscReal as_tol;
119:   PetscInt  as_relax_type;
120:   PetscInt  as_relax_times;
121:   PetscReal as_relax_weight;
122:   PetscReal as_omega;
123:   PetscInt  as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */
124:   PetscReal as_amg_alpha_theta;   /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */
125:   PetscInt  as_amg_beta_opts[5];  /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */
126:   PetscReal as_amg_beta_theta;    /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS)  */
127:   PetscInt  ams_cycle_type;
128:   PetscInt  ads_cycle_type;

130:   /* additional data */
131:   Mat G;             /* MatHYPRE */
132:   Mat C;             /* MatHYPRE */
133:   Mat alpha_Poisson; /* MatHYPRE */
134:   Mat beta_Poisson;  /* MatHYPRE */

136:   /* extra information for AMS */
137:   PetscInt          dim; /* geometrical dimension */
138:   VecHYPRE_IJVector coords[3];
139:   VecHYPRE_IJVector constants[3];
140:   VecHYPRE_IJVector interior;
141:   Mat               RT_PiFull, RT_Pi[3];
142:   Mat               ND_PiFull, ND_Pi[3];
143:   PetscBool         ams_beta_is_zero;
144:   PetscBool         ams_beta_is_zero_part;
145:   PetscInt          ams_proj_freq;
146: } PC_HYPRE;

148: /*
 149:   Matrices in AIJ format are created IN PLACE, reusing the (I,J,data) arrays from BoomerAMG. Since the data format of hypre_ParCSRMatrix
 150:   differs from that used in PETSc, the original hypre_ParCSRMatrix can no longer be used after this routine is called.
 151:   It is used by PCHMG; other users should avoid calling this function.
152: */
153: static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[])
154: {
155:   PC_HYPRE            *jac = (PC_HYPRE *)pc->data;
156:   PetscBool            same;
157:   PetscInt             num_levels, l;
158:   Mat                 *mattmp;
159:   hypre_ParCSRMatrix **A_array;

161:   PetscFunctionBegin;
162:   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
163:   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG");
164:   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
165:   PetscCall(PetscMalloc1(num_levels, &mattmp));
166:   A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)jac->hsolver);
167:   for (l = 1; l < num_levels; l++) {
168:     PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &mattmp[num_levels - 1 - l]));
 169:     /* We take ownership of the data; hypre must not touch this matrix anymore */
170:     A_array[l] = NULL;
171:   }
172:   *nlevels   = num_levels;
173:   *operators = mattmp;
174:   PetscFunctionReturn(PETSC_SUCCESS);
175: }

177: /*
 178:   Matrices in AIJ format are created IN PLACE, reusing the (I,J,data) arrays from BoomerAMG. Since the data format of hypre_ParCSRMatrix
 179:   differs from that used in PETSc, the original hypre_ParCSRMatrix can no longer be used after this routine is called.
 180:   It is used by PCHMG; other users should avoid calling this function.
181: */
182: static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[])
183: {
184:   PC_HYPRE            *jac = (PC_HYPRE *)pc->data;
185:   PetscBool            same;
186:   PetscInt             num_levels, l;
187:   Mat                 *mattmp;
188:   hypre_ParCSRMatrix **P_array;

190:   PetscFunctionBegin;
191:   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
192:   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG");
193:   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
194:   PetscCall(PetscMalloc1(num_levels, &mattmp));
195:   P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)jac->hsolver);
196:   for (l = 1; l < num_levels; l++) {
197:     PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &mattmp[l - 1]));
 198:     /* We take ownership of the data; hypre must not touch this matrix anymore */
199:     P_array[num_levels - 1 - l] = NULL;
200:   }
201:   *nlevels        = num_levels;
202:   *interpolations = mattmp;
203:   PetscFunctionReturn(PETSC_SUCCESS);
204: }
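
/*
  Editorial usage sketch (not part of hypre.c): a caller such as PCHMG retrieves the BoomerAMG
  hierarchy through the public wrappers PCGetCoarseOperators() and PCGetInterpolations(), which
  are assumed here to dispatch to the two routines above. The caller takes ownership of the
  nlevels-1 matrices in each array and of the arrays themselves; the helper name is ours.
*/
static PetscErrorCode ExampleExtractBoomerAMGHierarchy(PC pc)
{
  PetscInt nlevels, l;
  Mat     *ops, *interps;

  PetscFunctionBegin;
  PetscCall(PCGetCoarseOperators(pc, &nlevels, &ops));    /* ops[0] is the coarsest operator */
  PetscCall(PCGetInterpolations(pc, &nlevels, &interps)); /* interpolations between consecutive levels, ordered coarse-to-fine like ops[] */
  /* ... use the hierarchy ... then release it; only nlevels-1 entries of each array are populated */
  for (l = 0; l < nlevels - 1; l++) PetscCall(MatDestroy(&ops[l]));
  for (l = 0; l < nlevels - 1; l++) PetscCall(MatDestroy(&interps[l]));
  PetscCall(PetscFree(ops));
  PetscCall(PetscFree(interps));
  PetscFunctionReturn(PETSC_SUCCESS);
}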

206: /*
 207:   PetscBT bit arrays marking the coarse points are created from BoomerAMG's CF-marker data; the caller owns the result.
208: */
209: static PetscErrorCode PCHYPREGetCFMarkers_BoomerAMG(PC pc, PetscInt *n_per_level[], PetscBT *CFMarkers[])
210: {
211:   PC_HYPRE        *jac = (PC_HYPRE *)pc->data;
212:   PetscBool        same;
213:   PetscInt         num_levels, fine_nodes = 0, coarse_nodes;
214:   PetscInt        *n_per_temp;
215:   PetscBT         *markertmp;
216:   hypre_IntArray **CF_marker_array;

218:   PetscFunctionBegin;
219:   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
220:   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG");
221:   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
222:   PetscCall(PetscMalloc1(num_levels, &n_per_temp));
223:   PetscCall(PetscMalloc1(num_levels - 1, &markertmp));
224:   CF_marker_array = hypre_ParAMGDataCFMarkerArray((hypre_ParAMGData *)jac->hsolver);
225:   for (PetscInt l = 0, CFMaxIndex = num_levels - 2; CFMaxIndex >= 0; l++, CFMaxIndex--) {
226:     fine_nodes   = hypre_IntArraySize(CF_marker_array[CFMaxIndex]);
227:     coarse_nodes = 0;
228:     PetscCall(PetscBTCreate(fine_nodes, &markertmp[l]));
229:     for (PetscInt k = 0; k < fine_nodes; k++) {
230:       if (hypre_IntArrayDataI(CF_marker_array[CFMaxIndex], k) > 0) {
231:         PetscCall(PetscBTSet(markertmp[l], k));
232:         coarse_nodes++;
233:       }
234:     }
235:     n_per_temp[l] = coarse_nodes;
236:   }
237:   n_per_temp[num_levels - 1] = fine_nodes;
238:   *n_per_level               = n_per_temp;
239:   *CFMarkers                 = markertmp;
240:   PetscFunctionReturn(PETSC_SUCCESS);
241: }
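
/*
  Editorial usage sketch (not part of hypre.c): retrieving the coarse/fine splitting through the
  public wrapper PCHYPREGetCFMarkers(), assumed here to dispatch to the routine above. The caller
  owns the outputs: nlevels-1 PetscBT bit arrays (a set bit marks a coarse point) and an array of
  nlevels counts whose last entry is the size of the finest level. The level count itself must be
  known from elsewhere, e.g. PCGetCoarseOperators(); the helper name is ours.
*/
static PetscErrorCode ExampleInspectCFMarkers(PC pc, PetscInt nlevels)
{
  PetscInt *n_per_level;
  PetscBT  *markers;

  PetscFunctionBegin;
  PetscCall(PCHYPREGetCFMarkers(pc, &n_per_level, &markers));
  for (PetscInt l = 0; l < nlevels - 1; l++) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)pc), "transition %" PetscInt_FMT ": %" PetscInt_FMT " coarse points\n", l, n_per_level[l]));
  for (PetscInt l = 0; l < nlevels - 1; l++) PetscCall(PetscBTDestroy(&markers[l]));
  PetscCall(PetscFree(n_per_level));
  PetscCall(PetscFree(markers));
  PetscFunctionReturn(PETSC_SUCCESS);
}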

243: /* Resets (frees) Hypre's representation of the near null space */
244: static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc)
245: {
246:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
247:   PetscInt  i;

249:   PetscFunctionBegin;
250:   for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i]));
251:   PetscCall(PetscFree(jac->hmnull));
252:   PetscCall(PetscFree(jac->phmnull));
253:   PetscCall(VecDestroy(&jac->hmnull_constant));
254:   jac->n_hmnull = 0;
255:   PetscFunctionReturn(PETSC_SUCCESS);
256: }

258: static const char    *HYPRESpgemmTypes[] = {"cusparse", "hypre"};
259: static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[])
260: {
261:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

 263:   PetscFunctionBegin;
 264: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
 265:   jac->spgemm_type = name;
 266: #endif
 267:   PetscFunctionReturn(PETSC_SUCCESS);
268: }
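
/*
  Editorial usage sketch: selecting the sparse matrix-matrix product backend BoomerAMG uses for
  its Galerkin products via the public PCMGGalerkinSetMatProductAlgorithm(), assumed here to
  dispatch to the routine above; valid names are the HYPRESpgemmTypes entries ("cusparse",
  "hypre"). The helper name and the particular value chosen are only illustrative.
*/
static PetscErrorCode ExampleSelectSpGEMM(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCSetType(pc, PCHYPRE));
  PetscCall(PCHYPRESetType(pc, "boomeramg"));
  PetscCall(PCMGGalerkinSetMatProductAlgorithm(pc, "hypre")); /* only takes effect with hypre >= 2.23 on device builds */
  PetscFunctionReturn(PETSC_SUCCESS);
}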

270: static PetscErrorCode PCSetUp_HYPRE(PC pc)
271: {
272:   PC_HYPRE          *jac = (PC_HYPRE *)pc->data;
273:   Mat_HYPRE         *hjac;
274:   HYPRE_ParCSRMatrix hmat;
275:   HYPRE_ParVector    bv, xv;
276:   PetscBool          ishypre;

278:   PetscFunctionBegin;
279:   /* default type is boomerAMG */
280:   if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg"));

282:   /* get hypre matrix */
283:   if (pc->flag == DIFFERENT_NONZERO_PATTERN) PetscCall(MatDestroy(&jac->hpmat));
284:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre));
285:   if (!ishypre) {
286: #if defined(PETSC_HAVE_HYPRE_DEVICE) && PETSC_PKG_HYPRE_VERSION_LE(2, 30, 0)
287:     /* Temporary fix since we do not support MAT_REUSE_MATRIX with HYPRE device */
288:     PetscBool iscuda, iship, iskokkos;

290:     PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
291:     PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
292:     PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iskokkos, MATSEQAIJKOKKOS, MATMPIAIJKOKKOS, ""));
293:     if (iscuda || iship || iskokkos) PetscCall(MatDestroy(&jac->hpmat));
294: #endif
295:     PetscCall(MatConvert(pc->pmat, MATHYPRE, jac->hpmat ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &jac->hpmat));
296:   } else {
297:     PetscCall(PetscObjectReference((PetscObject)pc->pmat));
298:     PetscCall(MatDestroy(&jac->hpmat));
299:     jac->hpmat = pc->pmat;
300:   }

302:   /* allow debug */
303:   PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view"));
304:   hjac = (Mat_HYPRE *)jac->hpmat->data;

306:   /* special case for BoomerAMG */
307:   if (jac->setup == HYPRE_BoomerAMGSetup) {
308:     MatNullSpace mnull;
309:     PetscBool    has_const;
310:     PetscInt     bs, nvec, i;
311:     PetscMemType memtype;
312:     const Vec   *vecs;

314:     PetscCall(MatGetCurrentMemType(jac->hpmat, &memtype));
315:     if (PetscMemTypeDevice(memtype)) {
316:       /* GPU defaults
317:          From https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options
318:          and /src/parcsr_ls/par_amg.c
319:          First handle options which users have interfaces for changing */
320:       PetscObjectParameterSetDefault(jac, coarsentype, 8);
321:       PetscObjectParameterSetDefault(jac, relaxorder, 0);
322:       PetscObjectParameterSetDefault(jac, interptype, 6);
323:       PetscObjectParameterSetDefault(jac, relaxtype[0], 18);
324:       PetscObjectParameterSetDefault(jac, relaxtype[1], 18);
325: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
326:       PetscObjectParameterSetDefault(jac, spgemm_type, HYPRESpgemmTypes[0]);
327: #endif
328: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
329:       PetscObjectParameterSetDefault(jac, keeptranspose, PETSC_BOOL3_TRUE);
330:       PetscObjectParameterSetDefault(jac, mod_rap2, 1);
331: #endif
332:       PetscObjectParameterSetDefault(jac, agg_interptype, 7);
333:     } else {
334:       PetscObjectParameterSetDefault(jac, coarsentype, 6);
335:       PetscObjectParameterSetDefault(jac, relaxorder, 1);
336:       PetscObjectParameterSetDefault(jac, interptype, 0);
337:       PetscObjectParameterSetDefault(jac, relaxtype[0], 6);
338:       PetscObjectParameterSetDefault(jac, relaxtype[1], 6); /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */
339: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
340:       PetscObjectParameterSetDefault(jac, spgemm_type, "hypre");
341: #endif
342: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
343:       PetscObjectParameterSetDefault(jac, keeptranspose, PETSC_BOOL3_FALSE);
344:       PetscObjectParameterSetDefault(jac, mod_rap2, 0);
345: #endif
346:       PetscObjectParameterSetDefault(jac, agg_interptype, 4);
347:     }
348:     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
349:     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
350:     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
351:     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
352:     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
353:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
354:     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
355:     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
356:     PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
357:     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
358:     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
359:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, jac->gridsweeps[0], 1);
360:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, jac->gridsweeps[1], 2);
361:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, jac->gridsweeps[2], 3);
362:     PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
363:     PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
364:     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
365:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
366:     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
367:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]);
368:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, jac->relaxtype[0], 1);
369:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, jac->relaxtype[1], 2);
370:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, jac->relaxtype[2], 3);
371:     /* GPU */
372: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
373:     {
374:       PetscBool flg_cusparse, flg_hypre;

376:       PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flg_cusparse));
377:       PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flg_hypre));
378:       if (flg_cusparse) PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1);
379:       else if (flg_hypre) PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0);
380:       else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", jac->spgemm_type);
381:     }
382: #endif
383: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
384:     PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose == PETSC_BOOL3_TRUE ? 1 : 0);
385:     PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2);
386:     PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2);
387: #endif
388:     PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype);

390:     /* AIR */
391: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
392:     PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
393:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
394:     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
395:     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
396:     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
397: #endif

399:     PetscCall(MatGetBlockSize(pc->pmat, &bs));
400:     if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
401:     PetscCall(MatGetNearNullSpace(pc->mat, &mnull));
402:     if (mnull) {
403:       PetscCall(PCHYPREResetNearNullSpace_Private(pc));
404:       PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
405:       PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull));
406:       PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull));
407:       for (i = 0; i < nvec; i++) {
408:         PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i]));
409:         PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i]));
410:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]);
411:       }
412:       if (has_const) {
413:         PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL));
414:         PetscCall(VecSet(jac->hmnull_constant, 1));
415:         PetscCall(VecNormalize(jac->hmnull_constant, NULL));
416:         PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec]));
417:         PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec]));
418:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]);
419:         nvec++;
420:       }
421:       PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull);
422:       jac->n_hmnull = nvec;
423:     }
424:   }

426:   /* special case for AMS */
427:   if (jac->setup == HYPRE_AMSSetup) {
428:     Mat_HYPRE         *hm;
429:     HYPRE_ParCSRMatrix parcsr;
430:     PetscCheck(jac->coords[0] || jac->constants[0] || jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]), PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()");
431:     if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim);
432:     if (jac->constants[0]) {
433:       HYPRE_ParVector ozz, zoz, zzo = NULL;
434:       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz));
435:       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz));
436:       if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo));
437:       PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo);
438:     }
439:     if (jac->coords[0]) {
440:       HYPRE_ParVector coords[3];
441:       coords[0] = NULL;
442:       coords[1] = NULL;
443:       coords[2] = NULL;
444:       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
445:       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
446:       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
447:       PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
448:     }
449:     PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
450:     hm = (Mat_HYPRE *)jac->G->data;
451:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
452:     PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr);
453:     if (jac->alpha_Poisson) {
454:       hm = (Mat_HYPRE *)jac->alpha_Poisson->data;
455:       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
456:       PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr);
457:     }
458:     if (jac->ams_beta_is_zero) {
459:       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL);
460:     } else if (jac->beta_Poisson) {
461:       hm = (Mat_HYPRE *)jac->beta_Poisson->data;
462:       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
463:       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr);
464:     } else if (jac->ams_beta_is_zero_part) {
465:       if (jac->interior) {
466:         HYPRE_ParVector interior = NULL;
467:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior));
468:         PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior);
469:       } else {
470:         jac->ams_beta_is_zero_part = PETSC_FALSE;
471:       }
472:     }
473:     if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) {
474:       PetscInt           i;
475:       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
476:       if (jac->ND_PiFull) {
477:         hm = (Mat_HYPRE *)jac->ND_PiFull->data;
478:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
479:       } else {
480:         nd_parcsrfull = NULL;
481:       }
482:       for (i = 0; i < 3; ++i) {
483:         if (jac->ND_Pi[i]) {
484:           hm = (Mat_HYPRE *)jac->ND_Pi[i]->data;
485:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
486:         } else {
487:           nd_parcsr[i] = NULL;
488:         }
489:       }
490:       PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
491:     }
492:   }
493:   /* special case for ADS */
494:   if (jac->setup == HYPRE_ADSSetup) {
495:     Mat_HYPRE         *hm;
496:     HYPRE_ParCSRMatrix parcsr;
497:     if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) {
498:       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations");
499:     } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead");
500:     PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
 501:     PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteCurl");
502:     if (jac->coords[0]) {
503:       HYPRE_ParVector coords[3];
504:       coords[0] = NULL;
505:       coords[1] = NULL;
506:       coords[2] = NULL;
507:       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
508:       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
509:       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
510:       PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
511:     }
512:     hm = (Mat_HYPRE *)jac->G->data;
513:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
514:     PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr);
515:     hm = (Mat_HYPRE *)jac->C->data;
516:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
517:     PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr);
518:     if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
519:       PetscInt           i;
520:       HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3];
521:       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
522:       if (jac->RT_PiFull) {
523:         hm = (Mat_HYPRE *)jac->RT_PiFull->data;
524:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull));
525:       } else {
526:         rt_parcsrfull = NULL;
527:       }
528:       for (i = 0; i < 3; ++i) {
529:         if (jac->RT_Pi[i]) {
530:           hm = (Mat_HYPRE *)jac->RT_Pi[i]->data;
531:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i]));
532:         } else {
533:           rt_parcsr[i] = NULL;
534:         }
535:       }
536:       if (jac->ND_PiFull) {
537:         hm = (Mat_HYPRE *)jac->ND_PiFull->data;
538:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
539:       } else {
540:         nd_parcsrfull = NULL;
541:       }
542:       for (i = 0; i < 3; ++i) {
543:         if (jac->ND_Pi[i]) {
544:           hm = (Mat_HYPRE *)jac->ND_Pi[i]->data;
545:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
546:         } else {
547:           nd_parcsr[i] = NULL;
548:         }
549:       }
550:       PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
551:     }
552:   }
553:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
554:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv);
555:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv);
556:   PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
557:   PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv);
558:   PetscCall(PetscFPTrapPop());
559:   PetscFunctionReturn(PETSC_SUCCESS);
560: }
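
/*
  Editorial usage sketch (not part of hypre.c): attaching a near-null space to the operator so
  the BoomerAMG branch of PCSetUp_HYPRE() above forwards it to HYPRE_BoomerAMGSetInterpVectors().
  Building the space from rigid-body modes via MatNullSpaceCreateRigidBody() is just one common
  choice; the helper name and parameters are ours.
*/
static PetscErrorCode ExampleAttachNearNullSpace(Mat A, Vec coordinates)
{
  MatNullSpace nsp;

  PetscFunctionBegin;
  PetscCall(MatNullSpaceCreateRigidBody(coordinates, &nsp)); /* rigid-body modes from nodal coordinates */
  PetscCall(MatSetNearNullSpace(A, nsp));                    /* picked up by MatGetNearNullSpace() in PCSetUp_HYPRE() */
  PetscCall(MatNullSpaceDestroy(&nsp));
  PetscFunctionReturn(PETSC_SUCCESS);
}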

562: static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x)
563: {
564:   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
565:   Mat_HYPRE         *hjac = (Mat_HYPRE *)jac->hpmat->data;
566:   HYPRE_ParCSRMatrix hmat;
567:   HYPRE_ParVector    jbv, jxv;

569:   PetscFunctionBegin;
570:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
571:   if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0));
572:   PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b));
573:   if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x));
574:   else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x));
575:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
576:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
577:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
578:   PetscStackCallExternalVoid(
579:     "Hypre solve", do {
580:       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv);
581:       if (hierr) {
582:         PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
583:         HYPRE_ClearAllErrors();
584:       }
585:     } while (0));

587:   if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv);
588:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
589:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
590:   PetscFunctionReturn(PETSC_SUCCESS);
591: }

593: static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X)
594: {
595:   PC_HYPRE           *jac  = (PC_HYPRE *)pc->data;
596:   Mat_HYPRE          *hjac = (Mat_HYPRE *)jac->hpmat->data;
597:   hypre_ParCSRMatrix *par_matrix;
598:   HYPRE_ParVector     hb, hx;
599:   const PetscScalar  *b;
600:   PetscScalar        *x;
601:   PetscInt            m, N, lda;
602:   hypre_Vector       *x_local;
603:   PetscMemType        type;

605:   PetscFunctionBegin;
606:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
607:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix);
608:   PetscCall(MatGetLocalSize(B, &m, NULL));
609:   PetscCall(MatGetSize(B, NULL, &N));
610:   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb);
611:   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx);
612:   PetscCall(MatZeroEntries(X));
613:   PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type));
614:   PetscCall(MatDenseGetLDA(B, &lda));
 615:   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use an LDA different from the number of local rows: %" PetscInt_FMT " != %" PetscInt_FMT, lda, m);
616:   PetscCall(MatDenseGetLDA(X, &lda));
 617:   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use an LDA different from the number of local rows: %" PetscInt_FMT " != %" PetscInt_FMT, lda, m);
618:   x_local = hypre_ParVectorLocalVector(hb);
619:   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
620:   hypre_VectorData(x_local) = (HYPRE_Complex *)b;
621:   PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL));
622:   x_local = hypre_ParVectorLocalVector(hx);
623:   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
624:   hypre_VectorData(x_local) = (HYPRE_Complex *)x;
625:   PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
626:   PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
627:   PetscStackCallExternalVoid(
628:     "Hypre solve", do {
629:       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx);
630:       if (hierr) {
631:         PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
632:         HYPRE_ClearAllErrors();
633:       }
634:     } while (0));
635:   PetscCallExternal(HYPRE_ParVectorDestroy, hb);
636:   PetscCallExternal(HYPRE_ParVectorDestroy, hx);
637:   PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b));
638:   PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x));
639:   PetscFunctionReturn(PETSC_SUCCESS);
640: }
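
/*
  Editorial usage sketch: PCMatApply_HYPRE_BoomerAMG() above is reached through the generic
  PCMatApply(), which applies the preconditioner to several right-hand sides at once, stored
  column-wise in dense matrices; the helper name is ours.
*/
static PetscErrorCode ExampleBlockApply(PC pc, Mat B, Mat X)
{
  PetscFunctionBegin;
  PetscCall(PCMatApply(pc, B, X)); /* B and X are MATDENSE with one column per right-hand side */
  PetscFunctionReturn(PETSC_SUCCESS);
}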

642: static PetscErrorCode PCReset_HYPRE(PC pc)
643: {
644:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

646:   PetscFunctionBegin;
647:   PetscCall(MatDestroy(&jac->hpmat));
648:   PetscCall(MatDestroy(&jac->G));
649:   PetscCall(MatDestroy(&jac->C));
650:   PetscCall(MatDestroy(&jac->alpha_Poisson));
651:   PetscCall(MatDestroy(&jac->beta_Poisson));
652:   PetscCall(MatDestroy(&jac->RT_PiFull));
653:   PetscCall(MatDestroy(&jac->RT_Pi[0]));
654:   PetscCall(MatDestroy(&jac->RT_Pi[1]));
655:   PetscCall(MatDestroy(&jac->RT_Pi[2]));
656:   PetscCall(MatDestroy(&jac->ND_PiFull));
657:   PetscCall(MatDestroy(&jac->ND_Pi[0]));
658:   PetscCall(MatDestroy(&jac->ND_Pi[1]));
659:   PetscCall(MatDestroy(&jac->ND_Pi[2]));
660:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
661:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
662:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
663:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
664:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
665:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
666:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
667:   PetscCall(PCHYPREResetNearNullSpace_Private(pc));
668:   jac->ams_beta_is_zero      = PETSC_FALSE;
669:   jac->ams_beta_is_zero_part = PETSC_FALSE;
670:   jac->dim                   = 0;
671:   PetscFunctionReturn(PETSC_SUCCESS);
672: }

674: static PetscErrorCode PCDestroy_HYPRE(PC pc)
675: {
676:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

678:   PetscFunctionBegin;
679:   PetscCall(PCReset_HYPRE(pc));
680:   if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver);
681:   PetscCall(PetscFree(jac->hypre_type));
682:   if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
683:   PetscCall(PetscFree(pc->data));

685:   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0));
686:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL));
687:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL));
688:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL));
689:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL));
690:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL));
691:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL));
692:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL));
693:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL));
694:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL));
695:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
696:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
697:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetCFMarkers_C", NULL));
698:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL));
699:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL));
700:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
701:   PetscFunctionReturn(PETSC_SUCCESS);
702: }

704: static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems PetscOptionsObject)
705: {
706:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
707:   PetscBool flag;

709:   PetscFunctionBegin;
710:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options");
711:   PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag));
712:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter);
713:   PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag));
714:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol);
715:   PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag));
716:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize);
717:   PetscOptionsHeadEnd();
718:   PetscFunctionReturn(PETSC_SUCCESS);
719: }

721: static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer)
722: {
723:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
724:   PetscBool isascii;

726:   PetscFunctionBegin;
727:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
728:   if (isascii) {
729:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Pilut preconditioning\n"));
730:     if (jac->maxiter != PETSC_DEFAULT) {
731:       PetscCall(PetscViewerASCIIPrintf(viewer, "    maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter));
732:     } else {
733:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default maximum number of iterations \n"));
734:     }
735:     if (jac->tol != PETSC_DEFAULT) {
736:       PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->tol));
737:     } else {
738:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default drop tolerance \n"));
739:     }
740:     if (jac->factorrowsize != PETSC_DEFAULT) {
741:       PetscCall(PetscViewerASCIIPrintf(viewer, "    factor row size %" PetscInt_FMT "\n", jac->factorrowsize));
742:     } else {
743:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factor row size \n"));
744:     }
745:   }
746:   PetscFunctionReturn(PETSC_SUCCESS);
747: }

749: static const char *HYPREILUType[] = {
750:   "Block-Jacobi-ILUk", "Block-Jacobi-ILUT", "", "", "", "", "", "", "", "", /* 0-9 */
751:   "GMRES-ILUk",        "GMRES-ILUT",        "", "", "", "", "", "", "", "", /* 10-19 */
752:   "NSH-ILUk",          "NSH-ILUT",          "", "", "", "", "", "", "", "", /* 20-29 */
753:   "RAS-ILUk",          "RAS-ILUT",          "", "", "", "", "", "", "", "", /* 30-39 */
754:   "ddPQ-GMRES-ILUk",   "ddPQ-GMRES-ILUT",   "", "", "", "", "", "", "", "", /* 40-49 */
755:   "GMRES-ILU0"                                                              /* 50 */
756: };

758: static const char *HYPREILUIterSetup[] = {"default", "async-in-place", "async-explicit", "sync-explicit", "semisync-explicit"};

760: static PetscErrorCode PCSetFromOptions_HYPRE_ILU(PC pc, PetscOptionItems PetscOptionsObject)
761: {
762:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
763:   PetscBool flg;
764:   PetscInt  indx;
765:   PetscReal tmpdbl;
766:   PetscBool tmp_truth;

768:   PetscFunctionBegin;
769:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ILU Options");

771:   /* ILU: ILU Type */
772:   PetscCall(PetscOptionsEList("-pc_hypre_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg));
773:   if (flg) PetscCallExternal(HYPRE_ILUSetType, jac->hsolver, indx);

775:   /* ILU: ILU iterative setup type*/
776:   PetscCall(PetscOptionsEList("-pc_hypre_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg));
777:   if (flg) PetscCallExternal(HYPRE_ILUSetIterativeSetupType, jac->hsolver, indx);

779:   /* ILU: ILU iterative setup option*/
780:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg));
781:   if (flg) PetscCallExternal(HYPRE_ILUSetIterativeSetupOption, jac->hsolver, indx);

783:   /* ILU: ILU iterative setup maxiter */
784:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg));
785:   if (flg) PetscCallExternal(HYPRE_ILUSetIterativeSetupMaxIter, jac->hsolver, indx);

787:   /* ILU: ILU iterative setup tolerance */
788:   PetscCall(PetscOptionsReal("-pc_hypre_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg));
789:   if (flg) PetscCallExternal(HYPRE_ILUSetIterativeSetupTolerance, jac->hsolver, tmpdbl);

791:   /* ILU: ILU Print Level */
792:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg));
793:   if (flg) PetscCallExternal(HYPRE_ILUSetPrintLevel, jac->hsolver, indx);

795:   /* ILU: Logging */
796:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg));
797:   if (flg) PetscCallExternal(HYPRE_ILUSetLogging, jac->hsolver, indx);

799:   /* ILU: ILU Level */
800:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_level", "Set ILU level", "None", 0, &indx, &flg));
801:   if (flg) PetscCallExternal(HYPRE_ILUSetLevelOfFill, jac->hsolver, indx);

803:   /* ILU: ILU Max NNZ per row */
804:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg));
805:   if (flg) PetscCallExternal(HYPRE_ILUSetMaxNnzPerRow, jac->hsolver, indx);

807:   /* ILU: tolerance */
808:   PetscCall(PetscOptionsReal("-pc_hypre_ilu_tol", "Tolerance for ILU", "None", 0, &tmpdbl, &flg));
809:   if (flg) PetscCallExternal(HYPRE_ILUSetTol, jac->hsolver, tmpdbl);

811:   /* ILU: maximum iteration count */
812:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg));
813:   if (flg) PetscCallExternal(HYPRE_ILUSetMaxIter, jac->hsolver, indx);

815:   /* ILU: drop threshold */
816:   PetscCall(PetscOptionsReal("-pc_hypre_ilu_drop_threshold", "Drop threshold for ILU", "None", 0, &tmpdbl, &flg));
817:   if (flg) PetscCallExternal(HYPRE_ILUSetDropThreshold, jac->hsolver, tmpdbl);

819:   /* ILU: Triangular Solve */
820:   PetscCall(PetscOptionsBool("-pc_hypre_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg));
821:   if (flg) PetscCallExternal(HYPRE_ILUSetTriSolve, jac->hsolver, tmp_truth);

823:   /* ILU: Lower Jacobi iteration */
824:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg));
825:   if (flg) PetscCallExternal(HYPRE_ILUSetLowerJacobiIters, jac->hsolver, indx);

827:   /* ILU: Upper Jacobi iteration */
828:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg));
829:   if (flg) PetscCallExternal(HYPRE_ILUSetUpperJacobiIters, jac->hsolver, indx);

831:   /* ILU: local reordering */
832:   PetscCall(PetscOptionsBool("-pc_hypre_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg));
833:   if (flg) PetscCallExternal(HYPRE_ILUSetLocalReordering, jac->hsolver, tmp_truth);

835:   PetscOptionsHeadEnd();
836:   PetscFunctionReturn(PETSC_SUCCESS);
837: }
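
/*
  Editorial usage sketch: selecting hypre's ILU and driving it through the options registered in
  PCSetFromOptions_HYPRE_ILU() above. The option names are exactly those registered above; the
  values and the helper name are only illustrative.
*/
static PetscErrorCode ExampleConfigureHypreILU(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCSetType(pc, PCHYPRE));
  PetscCall(PCHYPRESetType(pc, "ilu"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_hypre_ilu_type", "Block-Jacobi-ILUT"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_hypre_ilu_level", "1"));
  PetscCall(PCSetFromOptions(pc)); /* triggers the option processing above */
  PetscFunctionReturn(PETSC_SUCCESS);
}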

839: static PetscErrorCode PCView_HYPRE_ILU(PC pc, PetscViewer viewer)
840: {
841:   PC_HYPRE         *jac      = (PC_HYPRE *)pc->data;
842:   hypre_ParILUData *ilu_data = (hypre_ParILUData *)jac->hsolver;
843:   PetscBool         isascii;
844:   PetscInt          indx;
845:   PetscReal         tmpdbl;
846:   PetscReal        *tmpdbl3;

848:   PetscFunctionBegin;
849:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
850:   if (isascii) {
851:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ILU preconditioning\n"));
852:     PetscStackCallExternalVoid("hypre_ParILUDataIluType", indx = hypre_ParILUDataIluType(ilu_data));
853:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU type              %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx));
854:     PetscStackCallExternalVoid("hypre_ParILUDataLfil", indx = hypre_ParILUDataLfil(ilu_data));
855:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU level             %" PetscInt_FMT "\n", indx));
856:     PetscStackCallExternalVoid("hypre_ParILUDataMaxIter", indx = hypre_ParILUDataMaxIter(ilu_data));
857:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max iterations    %" PetscInt_FMT "\n", indx));
858:     PetscStackCallExternalVoid("hypre_ParILUDataMaxRowNnz", indx = hypre_ParILUDataMaxRowNnz(ilu_data));
859:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max NNZ per row   %" PetscInt_FMT "\n", indx));
860:     PetscStackCallExternalVoid("hypre_ParILUDataTriSolve", indx = hypre_ParILUDataTriSolve(ilu_data));
861:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU triangular solve  %" PetscInt_FMT "\n", indx));
862:     PetscStackCallExternalVoid("hypre_ParILUDataTol", tmpdbl = hypre_ParILUDataTol(ilu_data));
 863:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU tolerance         %e\n", (double)tmpdbl));
864:     PetscStackCallExternalVoid("hypre_ParILUDataDroptol", tmpdbl3 = hypre_ParILUDataDroptol(ilu_data));
 865:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU drop tolerance    %e / %e / %e\n", (double)tmpdbl3[0], (double)tmpdbl3[1], (double)tmpdbl3[2]));
866:     PetscStackCallExternalVoid("hypre_ParILUDataReorderingType", indx = hypre_ParILUDataReorderingType(ilu_data));
867:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU local reordering  %" PetscInt_FMT "\n", indx));
868:     PetscStackCallExternalVoid("hypre_ParILUDataLowerJacobiIters", indx = hypre_ParILUDataLowerJacobiIters(ilu_data));
869:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU lower Jacobi iterations  %" PetscInt_FMT "\n", indx));
870:     PetscStackCallExternalVoid("hypre_ParILUDataUpperJacobiIters", indx = hypre_ParILUDataUpperJacobiIters(ilu_data));
871:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU upper Jacobi iterations  %" PetscInt_FMT "\n", indx));
872:     PetscStackCallExternalVoid("hypre_ParILUDataPrintLevel", indx = hypre_ParILUDataPrintLevel(ilu_data));
873:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU print level      %" PetscInt_FMT "\n", indx));
874:     PetscStackCallExternalVoid("hypre_ParILUDataLogging", indx = hypre_ParILUDataLogging(ilu_data));
875:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU logging level    %" PetscInt_FMT "\n", indx));
876:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupType", indx = hypre_ParILUDataIterativeSetupType(ilu_data));
877:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup type           %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx));
878:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupOption", indx = hypre_ParILUDataIterativeSetupOption(ilu_data));
879:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup option         %" PetscInt_FMT "\n", indx));
880:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupMaxIter", indx = hypre_ParILUDataIterativeSetupMaxIter(ilu_data));
881:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup max iterations %" PetscInt_FMT "\n", indx));
882:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupTolerance", tmpdbl = hypre_ParILUDataIterativeSetupTolerance(ilu_data));
 883:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup tolerance      %e\n", (double)tmpdbl));
884:   }
885:   PetscFunctionReturn(PETSC_SUCCESS);
886: }

888: static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems PetscOptionsObject)
889: {
890:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
891:   PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE;

893:   PetscFunctionBegin;
894:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options");
895:   PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag));
896:   if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level);

898:   PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag));
899:   if (flag) {
900:     PetscMPIInt size;

902:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
903:     PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance");
904:     PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance);
905:   }

907:   PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag));
908:   if (flag) {
909:     jac->eu_bj = eu_bj ? 1 : 0;
910:     PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj);
911:   }
912:   PetscOptionsHeadEnd();
913:   PetscFunctionReturn(PETSC_SUCCESS);
914: }

916: static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer)
917: {
918:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
919:   PetscBool isascii;

921:   PetscFunctionBegin;
922:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
923:   if (isascii) {
924:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Euclid preconditioning\n"));
925:     if (jac->eu_level != PETSC_DEFAULT) {
926:       PetscCall(PetscViewerASCIIPrintf(viewer, "    factorization levels %" PetscInt_FMT "\n", jac->eu_level));
927:     } else {
928:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factorization levels \n"));
929:     }
930:     PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->eu_droptolerance));
931:     PetscCall(PetscViewerASCIIPrintf(viewer, "    use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
932:   }
933:   PetscFunctionReturn(PETSC_SUCCESS);
934: }

936: static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x)
937: {
938:   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
939:   Mat_HYPRE         *hjac = (Mat_HYPRE *)jac->hpmat->data;
940:   HYPRE_ParCSRMatrix hmat;
941:   HYPRE_ParVector    jbv, jxv;

943:   PetscFunctionBegin;
944:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
945:   PetscCall(VecSet(x, 0.0));
946:   PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b));
947:   PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x));

949:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
950:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
951:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);

953:   PetscStackCallExternalVoid(
954:     "Hypre Transpose solve", do {
955:       HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv);
956:       if (hierr) {
957:         /* error code of 1 in BoomerAMG merely means convergence not achieved */
958:         PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
959:         HYPRE_ClearAllErrors();
960:       }
961:     } while (0));

963:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
964:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
965:   PetscFunctionReturn(PETSC_SUCCESS);
966: }

968: static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[])
969: {
970:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

972:   PetscFunctionBegin;
974: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
975:   *spgemm = jac->spgemm_type;
976: #endif
977:   PetscFunctionReturn(PETSC_SUCCESS);
978: }

980: static const char *HYPREBoomerAMGCycleType[]   = {"", "V", "W"};
981: static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"};
982: static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"};
 983: /* HYPREBoomerAMGRelaxType below corresponds to HYPRE_BoomerAMGSetRelaxType, which has many missing numbers in its enum */
984: static const char *HYPREBoomerAMGSmoothType[] = {"ILU", "Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"};
985: static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"};
986: static const char *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"};
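
/*
  Editorial usage sketch: the routine below registers BoomerAMG's options; they can also be set
  programmatically through the options database before PCSetFromOptions() runs. The two option
  names used here appear in the routine below; the values and the helper name are illustrative.
*/
static PetscErrorCode ExampleConfigureBoomerAMG(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCSetType(pc, PCHYPRE));
  PetscCall(PCHYPRESetType(pc, "boomeramg"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_hypre_boomeramg_strong_threshold", "0.5"));
  PetscCall(PetscOptionsSetValue(NULL, "-pc_hypre_boomeramg_agg_nl", "1"));
  PetscCall(PCSetFromOptions(pc)); /* triggers PCSetFromOptions_HYPRE_BoomerAMG() below */
  PetscFunctionReturn(PETSC_SUCCESS);
}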

988: static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems PetscOptionsObject)
989: {
990:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
991:   PetscInt    bs, n, indx, level;
992:   PetscBool   flg, tmp_truth;
993:   PetscReal   tmpdbl, twodbl[2];
994:   const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};

996:   PetscFunctionBegin;
997:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options");
998:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg));
999:   if (flg) {
1000:     jac->cycletype = indx + 1;
1001:     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
1002:   }
1003:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg, 2));
1004:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
1005:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg, 1));
1006:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1007:   PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg, 0.0));
1008:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1009:   bs = 1;
1010:   if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs));
1011:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg));
1012:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);

1014:   PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg, 0.0));
1015:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);

1017:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg, 0));
1018:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);

1020:   PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels));
1021:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);

1023:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg, 1));
1024:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);

1026:   PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg, 0.0));
1027:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
1028:   PetscCall(PetscOptionsRangeReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg, 0.0, 1.0));
1029:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);

1031:   /* Grid sweeps */
1032:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg));
1033:   if (flg) {
1034:     /* modify the jac structure so we can view the updated options with PC_View */
1035:     jac->gridsweeps[0] = indx;
1036:     jac->gridsweeps[1] = indx;
1037:     /* default coarse to 1 */
1038:     jac->gridsweeps[2] = 1;
1039:   }
1040:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg));
1041:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening);
1042:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg));
1043:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag);
1044:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg));
1045:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant);
1046:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg));
1047:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax);
1048:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg));
1049:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth);
1050:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg));
1051:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine);
1052:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg));
1053:   if (flg) {
1054:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1);
1055:     jac->gridsweeps[0] = indx;
1056:   }
1057:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg));
1058:   if (flg) {
1059:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2);
1060:     jac->gridsweeps[1] = indx;
1061:   }
1062:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg));
1063:   if (flg) {
1064:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3);
1065:     jac->gridsweeps[2] = indx;
1066:   }

1068:   /* Smooth type */
1069:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg));
1070:   if (flg) {
1071:     jac->smoothtype = indx;
1072:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 5);
1073:     jac->smoothnumlevels = 25;
1074:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25);
1075:   }

1077:   /* Number of smoothing levels */
1078:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg));
1079:   if (flg && (jac->smoothtype != -1)) {
1080:     jac->smoothnumlevels = indx;
1081:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx);
1082:   }

1084:   /* Smooth num sweeps */
1085:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_sweeps", "Set number of smoother sweeps", "None", 1, &indx, &flg));
1086:   if (flg && indx > 0) {
1087:     jac->smoothsweeps = indx;
1088:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumSweeps, jac->hsolver, indx);
1089:   }

1091:   /* ILU: ILU Type */
1092:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg));
1093:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUType, jac->hsolver, indx);

1095:   /* ILU: ILU iterative setup type */
1096:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg));
1097:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupType, jac->hsolver, indx);

1099:   /* ILU: ILU iterative setup option */
1100:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg));
1101:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupOption, jac->hsolver, indx);

1103:   /* ILU: ILU iterative setup maxiter */
1104:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg));
1105:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupMaxIter, jac->hsolver, indx);

1107:   /* ILU: ILU iterative setup tolerance */
1108:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg));
1109:   if (flg) PetscCallExternal(hypre_BoomerAMGSetILUIterSetupTolerance, jac->hsolver, tmpdbl);

1111:   /* ILU: ILU Print Level */
1112:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg));
1113:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, indx);

1115:   /* ILU: Logging */
1116:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg));
1117:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetLogging, jac->hsolver, indx);

1119:   /* ILU: ILU Level */
1120:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_level", "Set ILU level", "None", 0, &indx, &flg));
1121:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILULevel, jac->hsolver, indx);

1123:   /* ILU: ILU Max NNZ per row */
1124:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg));
1125:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUMaxRowNnz, jac->hsolver, indx);

1127:   /* ILU: maximum iteration count */
1128:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg));
1129:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUMaxIter, jac->hsolver, indx);

1131:   /* ILU: drop threshold */
1132:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_drop_tol", "Drop tolerance for ILU", "None", 0, &tmpdbl, &flg));
1133:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUDroptol, jac->hsolver, tmpdbl);

1135:   /* ILU: Triangular Solve */
1136:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg));
1137:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUTriSolve, jac->hsolver, tmp_truth);

1139:   /* ILU: Lower Jacobi iteration */
1140:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg));
1141:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILULowerJacobiIters, jac->hsolver, indx);

1143:   /* ILU: Upper Jacobi iteration */
1144:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg));
1145:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILUUpperJacobiIters, jac->hsolver, indx);

1147:   /* ILU: local reordering */
1148:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg));
1149:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetILULocalReordering, jac->hsolver, tmp_truth);

1151:   /* Number of levels for ILU(k) for Euclid */
1152:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg));
1153:   if (flg && (jac->smoothtype == 4)) {
1154:     jac->eu_level = indx;
1155:     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx);
1156:   }

1158:   /* Drop tolerance for ILU(k) in Euclid */
1159:   PetscReal droptolerance;
1160:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg));
1161:   if (flg && (jac->smoothtype == 4)) {
1162:     jac->eu_droptolerance = droptolerance;
1163:     PetscCallExternal(HYPRE_BoomerAMGSetEuSparseA, jac->hsolver, droptolerance); /* Euclid drop tolerance, not the ILU(k) level */
1164:   }

1166:   /* Use Block Jacobi ILU for Euclid */
1167:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg));
1168:   if (flg && (jac->smoothtype == 4)) {
1169:     jac->eu_bj = tmp_truth;
1170:     PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj);
1171:   }

1173:   /* Relax type */
1174:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType),
1175:                               jac->relaxtype[0] < 0 ? "not yet set" : HYPREBoomerAMGRelaxType[jac->relaxtype[0]], &indx, &flg));
1176:   if (flg) jac->relaxtype[0] = jac->relaxtype[1] = indx;
1177:   PetscCall(
1178:     PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), jac->relaxtype[0] < 0 ? "not yet set" : HYPREBoomerAMGRelaxType[jac->relaxtype[0]], &indx, &flg));
1179:   if (flg) jac->relaxtype[0] = indx;
1180:   PetscCall(
1181:     PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), jac->relaxtype[1] < 0 ? "not yet set" : HYPREBoomerAMGRelaxType[jac->relaxtype[1]], &indx, &flg));
1182:   if (flg) jac->relaxtype[1] = indx;
1183:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[jac->relaxtype[2]], &indx, &flg));
1184:   if (flg) jac->relaxtype[2] = indx;

1186:   /* Relaxation Weight */
1187:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg));
1188:   if (flg) {
1189:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl);
1190:     jac->relaxweight = tmpdbl;
1191:   }

1193:   n         = 2;
1194:   twodbl[0] = twodbl[1] = 1.0;
1195:   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
1196:   if (flg) {
1197:     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
1198:     indx = (int)PetscAbsReal(twodbl[1]);
1199:     PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx);
1200:   }

1202:   /* Outer relaxation Weight */
1203:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg));
1204:   if (flg) {
1205:     PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl);
1206:     jac->outerrelaxweight = tmpdbl;
1207:   }

1209:   n         = 2;
1210:   twodbl[0] = twodbl[1] = 1.0;
1211:   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
1212:   if (flg) {
1213:     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
1214:     indx = (int)PetscAbsReal(twodbl[1]);
1215:     PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx);
1216:   }

1218:   /* Relax order */
1219:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg));
1220:   if (flg && tmp_truth) jac->relaxorder = 0;
1221:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg));
1222:   if (flg) {
1223:     jac->measuretype = indx;
1224:     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
1225:   }
1226:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), jac->coarsentype < 0 ? "unknown" : HYPREBoomerAMGCoarsenType[jac->coarsentype], &indx, &flg));
1227:   if (flg) jac->coarsentype = indx;

1229:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg));
1230:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
1231:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg));
1232:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
1233: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
1234:   // a global hypre parameter, but closely associated with BoomerAMG
1235:   PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only hypre is supported for now)", "PCMGGalerkinSetMatProductAlgorithm", HYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(HYPRESpgemmTypes), jac->spgemm_type, &indx, &flg));
1236:   if (flg) PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, HYPRESpgemmTypes[indx]));
1237: #endif
1238:   /* AIR */
1239: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
1240:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL));
1241:   PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
1242:   if (jac->Rtype) {
1243:     HYPRE_Int **grid_relax_points = hypre_TAlloc(HYPRE_Int *, 4, HYPRE_MEMORY_HOST);
1244:     char       *prerelax[256];
1245:     char       *postrelax[256];
1246:     char        stringF[2] = "F", stringC[2] = "C", stringA[2] = "A";
1247:     PetscInt    ns_down = 256, ns_up = 256;
1248:     PetscBool   matchF, matchC, matchA;

1250:     jac->interptype = 100; /* 1pt interpolation; it cannot be selected by name through the string list below, so set it as the default (as MFEM does) and let users override it */

1252:     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL));
1253:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);

1255:     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL));
1256:     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);

1258:     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL));
1259:     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);

1261:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drop the off-diagonal entries that are smaller than the drop tolerance times the chosen row norm (1: 1-norm, 2: 2-norm, -1: infinity norm)", "None", jac->Adroptype, &jac->Adroptype, NULL));
1262:     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
1263:     PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_prerelax", "Defines prerelax scheme", "None", prerelax, &ns_down, NULL));
1264:     PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_postrelax", "Defines postrelax scheme", "None", postrelax, &ns_up, NULL));
1265:     PetscCheck(ns_down == jac->gridsweeps[0], PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_prerelax must match the number passed to -pc_hypre_boomeramg_grid_sweeps_down");
1266:     PetscCheck(ns_up == jac->gridsweeps[1], PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_postrelax must match the number passed to -pc_hypre_boomeramg_grid_sweeps_up");

1268:     grid_relax_points[0]    = NULL;
1269:     grid_relax_points[1]    = hypre_TAlloc(HYPRE_Int, ns_down, HYPRE_MEMORY_HOST);
1270:     grid_relax_points[2]    = hypre_TAlloc(HYPRE_Int, ns_up, HYPRE_MEMORY_HOST);
1271:     grid_relax_points[3]    = hypre_TAlloc(HYPRE_Int, jac->gridsweeps[2], HYPRE_MEMORY_HOST);
1272:     grid_relax_points[3][0] = 0;

1274:     // set down relax scheme
1275:     for (PetscInt i = 0; i < ns_down; i++) {
1276:       PetscCall(PetscStrcasecmp(prerelax[i], stringF, &matchF));
1277:       PetscCall(PetscStrcasecmp(prerelax[i], stringC, &matchC));
1278:       PetscCall(PetscStrcasecmp(prerelax[i], stringA, &matchA));
1279:       PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_prerelax are C, F, and A");
1280:       if (matchF) grid_relax_points[1][i] = -1;
1281:       else if (matchC) grid_relax_points[1][i] = 1;
1282:       else if (matchA) grid_relax_points[1][i] = 0;
1283:     }

1285:     // set up relax scheme
1286:     for (PetscInt i = 0; i < ns_up; i++) {
1287:       PetscCall(PetscStrcasecmp(postrelax[i], stringF, &matchF));
1288:       PetscCall(PetscStrcasecmp(postrelax[i], stringC, &matchC));
1289:       PetscCall(PetscStrcasecmp(postrelax[i], stringA, &matchA));
1290:       PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_postrelax are C, F, and A");
1291:       if (matchF) grid_relax_points[2][i] = -1;
1292:       else if (matchC) grid_relax_points[2][i] = 1;
1293:       else if (matchA) grid_relax_points[2][i] = 0;
1294:     }

1296:     // set coarse relax scheme
1297:     for (PetscInt i = 0; i < jac->gridsweeps[2]; i++) grid_relax_points[3][i] = 0;

1299:     // Pass relax schemes to hypre
1300:     PetscCallExternal(HYPRE_BoomerAMGSetGridRelaxPoints, jac->hsolver, grid_relax_points);

1302:     // cleanup memory
1303:     for (PetscInt i = 0; i < ns_down; i++) PetscCall(PetscFree(prerelax[i]));
1304:     for (PetscInt i = 0; i < ns_up; i++) PetscCall(PetscFree(postrelax[i]));
1305:   }
1306: #endif
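  /*
     For reference, a hypothetical set of command line options that exercises the AIR path above; the number of
     comma-separated entries given to -pc_hypre_boomeramg_prerelax / -pc_hypre_boomeramg_postrelax must match the
     corresponding grid sweep counts parsed earlier in this routine:

       -pc_hypre_boomeramg_restriction_type 2
       -pc_hypre_boomeramg_grid_sweeps_down 2 -pc_hypre_boomeramg_prerelax F,C
       -pc_hypre_boomeramg_grid_sweeps_up 1 -pc_hypre_boomeramg_postrelax A
  */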

1308: #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9)
1309:   PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ") cannot both be nonzero", jac->Rtype, jac->agg_nl);
1310: #endif

1312:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), (jac->interptype < 0 || jac->interptype >= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType)) ? "unknown" : HYPREBoomerAMGInterpType[jac->interptype], &indx, &flg));
1313:   if (flg) jac->interptype = indx;

1315:   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg));
1316:   if (flg) {
1317:     level = 3;
1318:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL));

1320:     jac->printstatistics = PETSC_TRUE;
1321:     PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level);
1322:   }

1324:   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg));
1325:   if (flg) {
1326:     level = 3;
1327:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL));

1329:     jac->printstatistics = PETSC_TRUE;
1330:     PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level);
1331:   }

1333:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg));
1334:   if (flg && tmp_truth) {
1335:     PetscInt tmp_int;
1336:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg));
1337:     if (flg) jac->nodal_relax_levels = tmp_int;
1338:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6);
1339:     PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1);
1340:     PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0);
1341:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels);
1342:   }

1344:   PetscCall(PetscOptionsBool3("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL));

1346:   /* options for ParaSails solvers */
1347:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg));
1348:   if (flg) {
1349:     jac->symt = indx;
1350:     PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt);
1351:   }

1353:   PetscOptionsHeadEnd();
1354:   PetscFunctionReturn(PETSC_SUCCESS);
1355: }

1357: static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
1358: {
1359:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1360:   HYPRE_Int oits;

1362:   PetscFunctionBegin;
1363:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
1364:   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter);
1365:   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol);
1366:   jac->applyrichardson = PETSC_TRUE;
1367:   PetscCall(PCApply_HYPRE(pc, b, y));
1368:   jac->applyrichardson = PETSC_FALSE;
1369:   PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits);
1370:   *outits = oits;
1371:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
1372:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
1373:   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1374:   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1375:   PetscFunctionReturn(PETSC_SUCCESS);
1376: }

1378: static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer)
1379: {
1380:   PC_HYPRE         *jac      = (PC_HYPRE *)pc->data;
1381:   hypre_ParAMGData *amg_data = (hypre_ParAMGData *)jac->hsolver;
1382:   PetscBool         isascii;
1383:   PetscInt          indx;
1384:   PetscReal         val;

1386:   PetscFunctionBegin;
1387:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1388:   if (isascii) {
1389:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE BoomerAMG preconditioning\n"));
1390:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype]));
1391:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels));
1392:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter));
1393:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Convergence tolerance PER hypre call %g\n", (double)jac->tol));
1394:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Threshold for strong coupling %g\n", (double)jac->strongthreshold));
1395:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation truncation factor %g\n", (double)jac->truncfactor));
1396:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax));
1397:     if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine));
1398:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl));
1399:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths));

1401:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum row sums %g\n", (double)jac->maxrowsum));

1403:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps down         %" PetscInt_FMT "\n", jac->gridsweeps[0]));
1404:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps up           %" PetscInt_FMT "\n", jac->gridsweeps[1]));
1405:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps on coarse    %" PetscInt_FMT "\n", jac->gridsweeps[2]));

1407:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax down          %s\n", jac->relaxtype[0] < 0 ? "not yet set" : HYPREBoomerAMGRelaxType[jac->relaxtype[0]]));
1408:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax up            %s\n", jac->relaxtype[1] < 0 ? "not yet set" : HYPREBoomerAMGRelaxType[jac->relaxtype[1]]));
1409:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax on coarse     %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]]));

1411:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax weight  (all)      %g\n", (double)jac->relaxweight));
1412:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Outer relax weight (all) %g\n", (double)jac->outerrelaxweight));

1414:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum size of coarsest grid %" PetscInt_FMT "\n", jac->maxc));
1415:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Minimum size of coarsest grid %" PetscInt_FMT "\n", jac->minc));

1417:     if (jac->relaxorder == PETSC_DECIDE) {
1418:       PetscCall(PetscViewerASCIIPrintf(viewer, "    CF-relaxation option not yet determined\n"));
1419:     } else if (jac->relaxorder) {
1420:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using CF-relaxation\n"));
1421:     } else {
1422:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using CF-relaxation\n"));
1423:     }
1424:     if (jac->smoothtype != -1) {
1425:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth type          %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype]));
1426:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth num levels    %" PetscInt_FMT "\n", jac->smoothnumlevels));
1427:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth num sweeps    %" PetscInt_FMT "\n", jac->smoothsweeps));
1428:       if (jac->smoothtype == 0) {
1429:         PetscStackCallExternalVoid("hypre_ParAMGDataILUType", indx = hypre_ParAMGDataILUType(amg_data));
1430:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU type              %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx));
1431:         PetscStackCallExternalVoid("hypre_ParAMGDataILULevel", indx = hypre_ParAMGDataILULevel(amg_data));
1432:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU level             %" PetscInt_FMT "\n", indx));
1433:         PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxIter", indx = hypre_ParAMGDataILUMaxIter(amg_data));
1434:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max iterations    %" PetscInt_FMT "\n", indx));
1435:         PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxRowNnz", indx = hypre_ParAMGDataILUMaxRowNnz(amg_data));
1436:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max NNZ per row   %" PetscInt_FMT "\n", indx));
1437:         PetscStackCallExternalVoid("hypre_ParAMGDataILUTriSolve", indx = hypre_ParAMGDataILUTriSolve(amg_data));
1438:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU triangular solve  %" PetscInt_FMT "\n", indx));
1439:         PetscStackCallExternalVoid("hypre_ParAMGDataTol", val = hypre_ParAMGDataTol(amg_data));
1440:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU tolerance         %e\n", (double)val));
1441:         PetscStackCallExternalVoid("hypre_ParAMGDataILUDroptol", val = hypre_ParAMGDataILUDroptol(amg_data));
1442:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU drop tolerance    %e\n", (double)val));
1443:         PetscStackCallExternalVoid("hypre_ParAMGDataILULocalReordering", indx = hypre_ParAMGDataILULocalReordering(amg_data));
1444:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU local reordering  %" PetscInt_FMT "\n", indx));
1445:         PetscStackCallExternalVoid("hypre_ParAMGDataILULowerJacobiIters", indx = hypre_ParAMGDataILULowerJacobiIters(amg_data));
1446:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU lower Jacobi iterations  %" PetscInt_FMT "\n", indx));
1447:         PetscStackCallExternalVoid("hypre_ParAMGDataILUUpperJacobiIters", indx = hypre_ParAMGDataILUUpperJacobiIters(amg_data));
1448:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU upper Jacobi iterations  %" PetscInt_FMT "\n", indx));
1449:         PetscStackCallExternalVoid("hypre_ParAMGDataPrintLevel", indx = hypre_ParAMGDataPrintLevel(amg_data));
1450:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU print level      %" PetscInt_FMT "\n", indx));
1451:         PetscStackCallExternalVoid("hypre_ParAMGDataLogging", indx = hypre_ParAMGDataLogging(amg_data));
1452:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU logging level    %" PetscInt_FMT "\n", indx));
1453:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupType", indx = hypre_ParAMGDataILUIterSetupType(amg_data));
1454:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup type           %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx));
1455:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupOption", indx = hypre_ParAMGDataILUIterSetupOption(amg_data));
1456:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup option         %" PetscInt_FMT "\n", indx));
1457:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupMaxIter", indx = hypre_ParAMGDataILUIterSetupMaxIter(amg_data));
1458:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup max iterations %" PetscInt_FMT "\n", indx));
1459:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupTolerance", val = hypre_ParAMGDataILUIterSetupTolerance(amg_data));
1460:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup tolerance      %e\n", (double)val));
1461:       }
1462:     } else {
1463:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using more complex smoothers.\n"));
1464:     }
1465:     if (jac->smoothtype == 4) { /* Euclid, consistent with the smoothtype checks in PCSetFromOptions_HYPRE_BoomerAMG() */
1466:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level));
1467:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance));
1468:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
1469:     }
1470:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Measure type        %s\n", HYPREBoomerAMGMeasureType[jac->measuretype]));
1471:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Coarsen type        %s\n", jac->coarsentype < 0 ? "not yet set" : HYPREBoomerAMGCoarsenType[jac->coarsentype]));
1472:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation type  %s\n", jac->interptype != 100 ? (jac->interptype < 0 ? "not yet set" : HYPREBoomerAMGInterpType[jac->interptype]) : "1pt"));
1473:     if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal coarsening with HYPRE_BoomerAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening));
1474:     if (jac->vec_interp_variant) {
1475:       PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant));
1476:       PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax));
1477:       PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth));
1478:     }
1479:     if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels));
1480: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
1481:     PetscCall(PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", jac->spgemm_type));
1482: #else
1483:     PetscCall(PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", "hypre"));
1484: #endif
1485:     /* AIR */
1486:     if (jac->Rtype) {
1487:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype));
1488:       PetscCall(PetscViewerASCIIPrintf(viewer, "      Threshold for R %g\n", (double)jac->Rstrongthreshold));
1489:       PetscCall(PetscViewerASCIIPrintf(viewer, "      Filter for R %g\n", (double)jac->Rfilterthreshold));
1490:       PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop tolerance %g\n", (double)jac->Adroptol));
1491:       PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop type %" PetscInt_FMT "\n", jac->Adroptype));
1492:     }
1493:   }
1494:   PetscFunctionReturn(PETSC_SUCCESS);
1495: }

1497: static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems PetscOptionsObject)
1498: {
1499:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1500:   PetscInt    indx;
1501:   PetscBool   flag;
1502:   const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};

1504:   PetscFunctionBegin;
1505:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options");
1506:   PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of levels", "None", jac->nlevels, &jac->nlevels, NULL));
1507:   PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag));
1508:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);

1510:   PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag));
1511:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);

1513:   PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag));
1514:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);

1516:   PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag));
1517:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);

1519:   PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag));
1520:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);

1522:   PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag));
1523:   if (flag) {
1524:     jac->symt = indx;
1525:     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1526:   }

1528:   PetscOptionsHeadEnd();
1529:   PetscFunctionReturn(PETSC_SUCCESS);
1530: }

1532: static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer)
1533: {
1534:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1535:   PetscBool   isascii;
1536:   const char *symt = 0;

1538:   PetscFunctionBegin;
1539:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1540:   if (isascii) {
1541:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ParaSails preconditioning\n"));
1542:     PetscCall(PetscViewerASCIIPrintf(viewer, "    nlevels %" PetscInt_FMT "\n", jac->nlevels));
1543:     PetscCall(PetscViewerASCIIPrintf(viewer, "    threshold %g\n", (double)jac->threshold));
1544:     PetscCall(PetscViewerASCIIPrintf(viewer, "    filter %g\n", (double)jac->filter));
1545:     PetscCall(PetscViewerASCIIPrintf(viewer, "    load balance %g\n", (double)jac->loadbal));
1546:     PetscCall(PetscViewerASCIIPrintf(viewer, "    reuse nonzero structure %s\n", PetscBools[jac->ruse]));
1547:     PetscCall(PetscViewerASCIIPrintf(viewer, "    print info to screen %s\n", PetscBools[jac->logging]));
1548:     if (!jac->symt) symt = "nonsymmetric matrix and preconditioner";
1549:     else if (jac->symt == 1) symt = "SPD matrix and preconditioner";
1550:     else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner";
1551:     else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt);
1552:     PetscCall(PetscViewerASCIIPrintf(viewer, "    %s\n", symt));
1553:   }
1554:   PetscFunctionReturn(PETSC_SUCCESS);
1555: }

1557: static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems PetscOptionsObject)
1558: {
1559:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1560:   PetscInt  n;
1561:   PetscBool flag, flag2, flag3, flag4;

1563:   PetscFunctionBegin;
1564:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options");
1565:   PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag));
1566:   if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
1567:   PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1568:   if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
1569:   PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag));
1570:   if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
1571:   PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1572:   if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
1573:   PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1574:   PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1575:   PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1576:   PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1577:   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1578:   PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1579:   n = 5;
1580:   PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2));
1581:   if (flag || flag2) {
1582:     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
1583:                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
1584:                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
1585:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
1586:                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
1587:   }
1588:   PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1589:   n = 5;
1590:   PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2));
1591:   if (flag || flag2) {
1592:     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1593:                       jac->as_amg_beta_opts[1],                                           /* AMG agg_levels */
1594:                       jac->as_amg_beta_opts[2],                                           /* AMG relax_type */
1595:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                   /* AMG interp_type */
1596:                       jac->as_amg_beta_opts[4]);                                          /* AMG Pmax */
1597:   }
1598:   PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag));
1599:   if (flag) { /* override HYPRE's default only if the option is used */
1600:     PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq);
1601:   }
1602:   PetscOptionsHeadEnd();
1603:   PetscFunctionReturn(PETSC_SUCCESS);
1604: }

1606: static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer)
1607: {
1608:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1609:   PetscBool isascii;

1611:   PetscFunctionBegin;
1612:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1613:   if (isascii) {
1614:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE AMS preconditioning\n"));
1615:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1616:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1617:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
1618:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1619:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1620:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
1621:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
1622:     if (jac->alpha_Poisson) {
1623:       PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (passed in by user)\n"));
1624:     } else {
1625:       PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (computed) \n"));
1626:     }
1627:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1628:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1629:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1630:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1631:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1632:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1633:     if (!jac->ams_beta_is_zero) {
1634:       if (jac->beta_Poisson) {
1635:         PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (passed in by user)\n"));
1636:       } else {
1637:         PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (computed) \n"));
1638:       }
1639:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1640:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1641:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1642:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1643:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1644:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta));
1645:       if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, "        compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq));
1646:     } else {
1647:       PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver not used (zero-conductivity everywhere) \n"));
1648:     }
1649:   }
1650:   PetscFunctionReturn(PETSC_SUCCESS);
1651: }

1653: static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems PetscOptionsObject)
1654: {
1655:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1656:   PetscInt  n;
1657:   PetscBool flag, flag2, flag3, flag4;

1659:   PetscFunctionBegin;
1660:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options");
1661:   PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag));
1662:   if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
1663:   PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1664:   if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
1665:   PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag));
1666:   if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type);
1667:   PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1668:   if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
1669:   PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1670:   PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1671:   PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1672:   PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1673:   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1674:   PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1675:   n = 5;
1676:   PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2));
1677:   PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3));
1678:   if (flag || flag2 || flag3) {
1679:     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */
1680:                       jac->as_amg_alpha_opts[0],                                 /* AMG coarsen type */
1681:                       jac->as_amg_alpha_opts[1],                                 /* AMG agg_levels */
1682:                       jac->as_amg_alpha_opts[2],                                 /* AMG relax_type */
1683:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],        /* AMG interp_type */
1684:                       jac->as_amg_alpha_opts[4]);                                /* AMG Pmax */
1685:   }
1686:   PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1687:   n = 5;
1688:   PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2));
1689:   if (flag || flag2) {
1690:     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1691:                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
1692:                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
1693:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
1694:                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
1695:   }
1696:   PetscOptionsHeadEnd();
1697:   PetscFunctionReturn(PETSC_SUCCESS);
1698: }

1700: static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer)
1701: {
1702:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1703:   PetscBool isascii;

1705:   PetscFunctionBegin;
1706:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1707:   if (isascii) {
1708:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ADS preconditioning\n"));
1709:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1710:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type));
1711:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
1712:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1713:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1714:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
1715:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
1716:     PetscCall(PetscViewerASCIIPrintf(viewer, "    AMS solver using boomerAMG\n"));
1717:     PetscCall(PetscViewerASCIIPrintf(viewer, "        subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1718:     PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1719:     PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1720:     PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1721:     PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1722:     PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1723:     PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1724:     PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver using boomerAMG\n"));
1725:     PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1726:     PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1727:     PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1728:     PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1729:     PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1730:     PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_beta_theta));
1731:   }
1732:   PetscFunctionReturn(PETSC_SUCCESS);
1733: }

1735: static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G)
1736: {
1737:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1738:   PetscBool ishypre;

1740:   PetscFunctionBegin;
1741:   PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre));
1742:   if (ishypre) {
1743:     PetscCall(PetscObjectReference((PetscObject)G));
1744:     PetscCall(MatDestroy(&jac->G));
1745:     jac->G = G;
1746:   } else {
1747:     PetscCall(MatDestroy(&jac->G));
1748:     PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G));
1749:   }
1750:   PetscFunctionReturn(PETSC_SUCCESS);
1751: }

1753: /*@
1754:   PCHYPRESetDiscreteGradient - Set the discrete gradient matrix for a `PCHYPRE` of type ams or ads

1756:   Collective

1758:   Input Parameters:
1759: + pc - the preconditioning context
1760: - G  - the discrete gradient

1762:   Level: intermediate

1764:   Notes:
1765:   `G` should have as many rows as the number of edges and as many columns as the number of vertices in the mesh

1767:   Each row of `G` has 2 nonzeros, with column indexes being the global indexes of the edge's endpoints: matrix entries are +1 and -1 depending on the edge orientation
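
  A minimal usage sketch (hypothetical variable names `comm`, `nedges`, `nvertices`, `estart`, `eend`, `v0`, `v1`, and `pc`; not taken from a PETSc example):
.vb
  Mat G;

  PetscCall(MatCreateAIJ(comm, PETSC_DECIDE, PETSC_DECIDE, nedges, nvertices, 2, NULL, 2, NULL, &G));
  for (PetscInt e = estart; e < eend; e++) {
    // v0 and v1 are the global vertex numbers of the endpoints of edge e, oriented from v0 to v1
    PetscCall(MatSetValue(G, e, v0, -1.0, INSERT_VALUES));
    PetscCall(MatSetValue(G, e, v1, 1.0, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
  PetscCall(PCHYPRESetDiscreteGradient(pc, G));
  PetscCall(MatDestroy(&G));
.ve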

1769:   Developer Note:
1770:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1772: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteCurl()`
1773: @*/
1774: PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G)
1775: {
1776:   PetscFunctionBegin;
1779:   PetscCheckSameComm(pc, 1, G, 2);
1780:   PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G));
1781:   PetscFunctionReturn(PETSC_SUCCESS);
1782: }

1784: static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C)
1785: {
1786:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1787:   PetscBool ishypre;

1789:   PetscFunctionBegin;
1790:   PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre));
1791:   if (ishypre) {
1792:     PetscCall(PetscObjectReference((PetscObject)C));
1793:     PetscCall(MatDestroy(&jac->C));
1794:     jac->C = C;
1795:   } else {
1796:     PetscCall(MatDestroy(&jac->C));
1797:     PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C));
1798:   }
1799:   PetscFunctionReturn(PETSC_SUCCESS);
1800: }

1802: /*@
1803:   PCHYPRESetDiscreteCurl - Set the discrete curl matrix for `PCHYPRE` type of ADS

1805:   Collective

1807:   Input Parameters:
1808: + pc - the preconditioning context
1809: - C  - the discrete curl

1811:   Level: intermediate

1813:   Notes:
1814:   `C` should have as many rows as the number of faces and as many columns as the number of edges in the mesh

1816:   Each row of `C` has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge.
1817:   Matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation

1819:   Developer Notes:
1820:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1822:   If this is only for the `PCHYPRE` type of ADS, it should be called `PCHYPREADSSetDiscreteCurl()`

1824: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`
1825: @*/
1826: PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C)
1827: {
1828:   PetscFunctionBegin;
1831:   PetscCheckSameComm(pc, 1, C, 2);
1832:   PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C));
1833:   PetscFunctionReturn(PETSC_SUCCESS);
1834: }
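/*
   For ADS the discrete curl is supplied the same way as the discrete gradient; a short sketch,
   where C is assumed to be an application-assembled faces-by-edges matrix with +1/-1 entries:

     PetscCall(PCHYPRESetDiscreteGradient(pc, G));
     PetscCall(PCHYPRESetDiscreteCurl(pc, C));
*/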

1836: static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1837: {
1838:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1839:   PetscBool ishypre;
1840:   PetscInt  i;

1842:   PetscFunctionBegin;
1843:   PetscCall(MatDestroy(&jac->RT_PiFull));
1844:   PetscCall(MatDestroy(&jac->ND_PiFull));
1845:   for (i = 0; i < 3; ++i) {
1846:     PetscCall(MatDestroy(&jac->RT_Pi[i]));
1847:     PetscCall(MatDestroy(&jac->ND_Pi[i]));
1848:   }

1850:   jac->dim = dim;
1851:   if (RT_PiFull) {
1852:     PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre));
1853:     if (ishypre) {
1854:       PetscCall(PetscObjectReference((PetscObject)RT_PiFull));
1855:       jac->RT_PiFull = RT_PiFull;
1856:     } else {
1857:       PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull));
1858:     }
1859:   }
1860:   if (RT_Pi) {
1861:     for (i = 0; i < dim; ++i) {
1862:       if (RT_Pi[i]) {
1863:         PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre));
1864:         if (ishypre) {
1865:           PetscCall(PetscObjectReference((PetscObject)RT_Pi[i]));
1866:           jac->RT_Pi[i] = RT_Pi[i];
1867:         } else {
1868:           PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i]));
1869:         }
1870:       }
1871:     }
1872:   }
1873:   if (ND_PiFull) {
1874:     PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre));
1875:     if (ishypre) {
1876:       PetscCall(PetscObjectReference((PetscObject)ND_PiFull));
1877:       jac->ND_PiFull = ND_PiFull;
1878:     } else {
1879:       PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull));
1880:     }
1881:   }
1882:   if (ND_Pi) {
1883:     for (i = 0; i < dim; ++i) {
1884:       if (ND_Pi[i]) {
1885:         PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre));
1886:         if (ishypre) {
1887:           PetscCall(PetscObjectReference((PetscObject)ND_Pi[i]));
1888:           jac->ND_Pi[i] = ND_Pi[i];
1889:         } else {
1890:           PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i]));
1891:         }
1892:       }
1893:     }
1894:   }
1895:   PetscFunctionReturn(PETSC_SUCCESS);
1896: }

1898: /*@
1899:   PCHYPRESetInterpolations - Set the interpolation matrices for `PCHYPRE` type of AMS or ADS

1901:   Collective

1903:   Input Parameters:
1904: + pc        - the preconditioning context
1905: . dim       - the dimension of the problem, only used in AMS
1906: . RT_PiFull - Raviart-Thomas interpolation matrix
1907: . RT_Pi     - x/y/z component of Raviart-Thomas interpolation matrix
1908: . ND_PiFull - Nedelec interpolation matrix
1909: - ND_Pi     - x/y/z component of Nedelec interpolation matrix

1911:   Level: intermediate

1913:   Notes:
1914:   For AMS, only the Nedelec interpolation matrices are needed; the Raviart-Thomas interpolation matrices can be set to `NULL`.

1916:   For ADS, both types of interpolation matrices are needed.

1918:   Developer Note:
1919:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1921: .seealso: [](ch_ksp), `PCHYPRE`
1922: @*/
1923: PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1924: {
1925:   PetscInt i;

1927:   PetscFunctionBegin;
1929:   if (RT_PiFull) {
1931:     PetscCheckSameComm(pc, 1, RT_PiFull, 3);
1932:   }
1933:   if (RT_Pi) {
1934:     PetscAssertPointer(RT_Pi, 4);
1935:     for (i = 0; i < dim; ++i) {
1936:       if (RT_Pi[i]) {
1938:         PetscCheckSameComm(pc, 1, RT_Pi[i], 4);
1939:       }
1940:     }
1941:   }
1942:   if (ND_PiFull) {
1944:     PetscCheckSameComm(pc, 1, ND_PiFull, 5);
1945:   }
1946:   if (ND_Pi) {
1947:     PetscAssertPointer(ND_Pi, 6);
1948:     for (i = 0; i < dim; ++i) {
1949:       if (ND_Pi[i]) {
1951:         PetscCheckSameComm(pc, 1, ND_Pi[i], 6);
1952:       }
1953:     }
1954:   }
1955:   PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi));
1956:   PetscFunctionReturn(PETSC_SUCCESS);
1957: }
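/*
   A usage sketch for the routine above. For AMS only the Nedelec interpolation is required, so the
   Raviart-Thomas arguments may be NULL; ND_PiFull and RT_PiFull are assumed to be application-assembled
   interpolation matrices.

     PetscCall(PCHYPRESetInterpolations(pc, 3, NULL, NULL, ND_PiFull, NULL)); // AMS in 3D
     // For ADS both families are needed, for example
     // PetscCall(PCHYPRESetInterpolations(pc, 3, RT_PiFull, NULL, ND_PiFull, NULL));
*/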

1959: static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha)
1960: {
1961:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1962:   PetscBool ishypre;

1964:   PetscFunctionBegin;
1965:   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1966:   if (ishypre) {
1967:     if (isalpha) {
1968:       PetscCall(PetscObjectReference((PetscObject)A));
1969:       PetscCall(MatDestroy(&jac->alpha_Poisson));
1970:       jac->alpha_Poisson = A;
1971:     } else {
1972:       if (A) {
1973:         PetscCall(PetscObjectReference((PetscObject)A));
1974:       } else {
1975:         jac->ams_beta_is_zero = PETSC_TRUE;
1976:       }
1977:       PetscCall(MatDestroy(&jac->beta_Poisson));
1978:       jac->beta_Poisson = A;
1979:     }
1980:   } else {
1981:     if (isalpha) {
1982:       PetscCall(MatDestroy(&jac->alpha_Poisson));
1983:       PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson));
1984:     } else {
1985:       if (A) {
1986:         PetscCall(MatDestroy(&jac->beta_Poisson));
1987:         PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson));
1988:       } else {
1989:         PetscCall(MatDestroy(&jac->beta_Poisson));
1990:         jac->ams_beta_is_zero = PETSC_TRUE;
1991:       }
1992:     }
1993:   }
1994:   PetscFunctionReturn(PETSC_SUCCESS);
1995: }

1997: /*@
1998:   PCHYPRESetAlphaPoissonMatrix - Set the vector Poisson matrix for `PCHYPRE` of type AMS

2000:   Collective

2002:   Input Parameters:
2003: + pc - the preconditioning context
2004: - A  - the matrix

2006:   Level: intermediate

2008:   Note:
2009:   `A` should be obtained by discretizing the vector valued Poisson problem with linear finite elements

2011:   Developer Notes:
2012:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

2014:   If this is only for the `PCHYPRE` type of AMS, it should be called `PCHYPREAMSSetAlphaPoissonMatrix()`

2016: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()`
2017: @*/
2018: PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A)
2019: {
2020:   PetscFunctionBegin;
2023:   PetscCheckSameComm(pc, 1, A, 2);
2024:   PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE));
2025:   PetscFunctionReturn(PETSC_SUCCESS);
2026: }

2028: /*@
2029:   PCHYPRESetBetaPoissonMatrix - Set the Poisson matrix for `PCHYPRE` of type AMS

2031:   Collective

2033:   Input Parameters:
2034: + pc - the preconditioning context
2035: - A  - the matrix, or `NULL` to turn it off

2037:   Level: intermediate

2039:   Note:
2040:   `A` should be obtained by discretizing the Poisson problem with linear finite elements.

2042:   Developer Notes:
2043:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

2045:   If this is only for the `PCHYPRE` type of AMS, it should be called `PCHYPREAMSSetBetaPoissonMatrix()`

2047: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2048: @*/
2049: PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A)
2050: {
2051:   PetscFunctionBegin;
2053:   if (A) {
2055:     PetscCheckSameComm(pc, 1, A, 2);
2056:   }
2057:   PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE));
2058:   PetscFunctionReturn(PETSC_SUCCESS);
2059: }
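/*
   A minimal AMS sketch tying the two Poisson matrices together; Apoisson_alpha and Apoisson_beta are
   assumed to be application-assembled linear finite element Poisson matrices.

     PetscCall(PCHYPRESetAlphaPoissonMatrix(pc, Apoisson_alpha));
     PetscCall(PCHYPRESetBetaPoissonMatrix(pc, Apoisson_beta)); // pass NULL when the beta term vanishes
*/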

2061: static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo)
2062: {
2063:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

2065:   PetscFunctionBegin;
2066:   /* throw away any vector if already set */
2067:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
2068:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
2069:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
2070:   PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0]));
2071:   PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0]));
2072:   PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1]));
2073:   PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1]));
2074:   jac->dim = 2;
2075:   if (zzo) {
2076:     PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2]));
2077:     PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2]));
2078:     jac->dim++;
2079:   }
2080:   PetscFunctionReturn(PETSC_SUCCESS);
2081: }

2083: /*@
2084:   PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type AMS

2086:   Collective

2088:   Input Parameters:
2089: + pc  - the preconditioning context
2090: . ozz - vector representing (1,0,0) (or (1,0) in 2D)
2091: . zoz - vector representing (0,1,0) (or (0,1) in 2D)
2092: - zzo - vector representing (0,0,1) (use `NULL` in 2D)

2094:   Level: intermediate

2096:   Developer Note:
2097:   If this is only for the `PCHYPRE` type of AMS, it should be called `PCHYPREAMSSetEdgeConstantVectors()`

2099: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2100: @*/
2101: PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo)
2102: {
2103:   PetscFunctionBegin;
2108:   PetscCheckSameComm(pc, 1, ozz, 2);
2109:   PetscCheckSameComm(pc, 1, zoz, 3);
2110:   if (zzo) PetscCheckSameComm(pc, 1, zzo, 4);
2111:   PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo));
2112:   PetscFunctionReturn(PETSC_SUCCESS);
2113: }
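/*
   A usage sketch for the routine above; ozz, zoz and zzo are assumed to be application-built vectors
   with one entry per edge giving the edge-basis coefficients of the constant fields (for lowest-order
   edge elements these are typically the coordinate differences of the edge endpoints in the
   corresponding direction).

     PetscCall(PCHYPRESetEdgeConstantVectors(pc, ozz, zoz, zzo)); // zzo == NULL in 2D
     PetscCall(VecDestroy(&ozz));
     PetscCall(VecDestroy(&zoz));
     PetscCall(VecDestroy(&zzo));
*/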

2115: static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior)
2116: {
2117:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

2119:   PetscFunctionBegin;
2120:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
2121:   PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior));
2122:   PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior));
2123:   jac->ams_beta_is_zero_part = PETSC_TRUE;
2124:   PetscFunctionReturn(PETSC_SUCCESS);
2125: }

2127: /*@
2128:   PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type AMS

2130:   Collective

2132:   Input Parameters:
2133: + pc       - the preconditioning context
2134: - interior - vector; a node is interior if its entry in the vector is 1.0.

2136:   Level: intermediate

2138:   Note:
2139:   This calls `HYPRE_AMSSetInteriorNodes()`

2141: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2142: @*/
2143: PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior)
2144: {
2145:   PetscFunctionBegin;
2148:   PetscCheckSameComm(pc, 1, interior, 2);
2149:   PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior));
2150:   PetscFunctionReturn(PETSC_SUCCESS);
2151: }
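/*
   A usage sketch for the routine above; interior is assumed to be an application-built vector with
   one entry per node, set to 1.0 for nodes inside the zero-conductivity region (the remaining entries
   are assumed to be 0.0).

     PetscCall(PCHYPREAMSSetInteriorNodes(pc, interior));
     PetscCall(VecDestroy(&interior));
*/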

2153: static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
2154: {
2155:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2156:   Vec       tv;
2157:   PetscInt  i;

2159:   PetscFunctionBegin;
2160:   /* throw away any coordinate vector if already set */
2161:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
2162:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
2163:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
2164:   jac->dim = dim;

2166:   /* compute IJ vector for coordinates */
2167:   PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv));
2168:   PetscCall(VecSetType(tv, VECSTANDARD));
2169:   PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE));
2170:   for (i = 0; i < dim; i++) {
2171:     PetscScalar *array;
2172:     PetscInt     j;

2174:     PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i]));
2175:     PetscCall(VecGetArrayWrite(tv, &array));
2176:     for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i];
2177:     PetscCall(VecRestoreArrayWrite(tv, &array));
2178:     PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i]));
2179:   }
2180:   PetscCall(VecDestroy(&tv));
2181:   PetscFunctionReturn(PETSC_SUCCESS);
2182: }

2184: static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[])
2185: {
2186:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

2188:   PetscFunctionBegin;
2189:   *name = jac->hypre_type;
2190:   PetscFunctionReturn(PETSC_SUCCESS);
2191: }

2193: static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[])
2194: {
2195:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2196:   PetscBool flag;

2198:   PetscFunctionBegin;
2199:   if (jac->hypre_type) {
2200:     PetscCall(PetscStrcmp(jac->hypre_type, name, &flag));
2201:     if (flag) PetscFunctionReturn(PETSC_SUCCESS);
2202:   }

2204:   PetscCall(PCReset_HYPRE(pc));
2205:   PetscCall(PetscFree(jac->hypre_type));
2206:   PetscCall(PetscStrallocpy(name, &jac->hypre_type));

2208:   jac->maxiter         = PETSC_DEFAULT;
2209:   jac->tol             = PETSC_DEFAULT;
2210:   jac->printstatistics = PetscLogPrintInfo;

2212:   PetscCall(PetscStrcmp("ilu", jac->hypre_type, &flag));
2213:   if (flag) {
2214:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2215:     PetscCallExternal(HYPRE_ILUCreate, &jac->hsolver);
2216:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ILU;
2217:     pc->ops->view           = PCView_HYPRE_ILU;
2218:     jac->destroy            = HYPRE_ILUDestroy;
2219:     jac->setup              = HYPRE_ILUSetup;
2220:     jac->solve              = HYPRE_ILUSolve;
2221:     jac->factorrowsize      = PETSC_DEFAULT;
2222:     PetscFunctionReturn(PETSC_SUCCESS);
2223:   }

2225:   PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag));
2226:   if (flag) {
2227:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2228:     PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver);
2229:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut;
2230:     pc->ops->view           = PCView_HYPRE_Pilut;
2231:     jac->destroy            = HYPRE_ParCSRPilutDestroy;
2232:     jac->setup              = HYPRE_ParCSRPilutSetup;
2233:     jac->solve              = HYPRE_ParCSRPilutSolve;
2234:     jac->factorrowsize      = PETSC_DEFAULT;
2235:     PetscFunctionReturn(PETSC_SUCCESS);
2236:   }
2237:   PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag));
2238:   if (flag) {
2239: #if defined(PETSC_USE_64BIT_INDICES)
2240:     SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64-bit indices");
2241: #endif
2242:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2243:     PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver);
2244:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid;
2245:     pc->ops->view           = PCView_HYPRE_Euclid;
2246:     jac->destroy            = HYPRE_EuclidDestroy;
2247:     jac->setup              = HYPRE_EuclidSetup;
2248:     jac->solve              = HYPRE_EuclidSolve;
2249:     jac->factorrowsize      = PETSC_DEFAULT;
2250:     jac->eu_level           = PETSC_DEFAULT; /* default */
2251:     PetscFunctionReturn(PETSC_SUCCESS);
2252:   }
2253:   PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag));
2254:   if (flag) {
2255:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2256:     PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver);
2257:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails;
2258:     pc->ops->view           = PCView_HYPRE_ParaSails;
2259:     jac->destroy            = HYPRE_ParaSailsDestroy;
2260:     jac->setup              = HYPRE_ParaSailsSetup;
2261:     jac->solve              = HYPRE_ParaSailsSolve;
2262:     /* initialize */
2263:     jac->nlevels   = 1;
2264:     jac->threshold = .1;
2265:     jac->filter    = .1;
2266:     jac->loadbal   = 0;
2267:     if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE;
2268:     else jac->logging = (int)PETSC_FALSE;

2270:     jac->ruse = (int)PETSC_FALSE;
2271:     jac->symt = 0;
2272:     PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
2273:     PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
2274:     PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
2275:     PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
2276:     PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
2277:     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
2278:     PetscFunctionReturn(PETSC_SUCCESS);
2279:   }
2280:   PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag));
2281:   if (flag) {
2282:     PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver);
2283:     pc->ops->setfromoptions  = PCSetFromOptions_HYPRE_BoomerAMG;
2284:     pc->ops->view            = PCView_HYPRE_BoomerAMG;
2285:     pc->ops->applytranspose  = PCApplyTranspose_HYPRE_BoomerAMG;
2286:     pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG;
2287:     pc->ops->matapply        = PCMatApply_HYPRE_BoomerAMG;
2288:     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG));
2289:     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG));
2290:     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetCFMarkers_C", PCHYPREGetCFMarkers_BoomerAMG));
2291:     jac->destroy         = HYPRE_BoomerAMGDestroy;
2292:     jac->setup           = HYPRE_BoomerAMGSetup;
2293:     jac->solve           = HYPRE_BoomerAMGSolve;
2294:     jac->applyrichardson = PETSC_FALSE;
2295:     /* these defaults match the hypre defaults */
2296:     jac->cycletype       = 1;
2297:     jac->maxlevels       = 25;
2298:     jac->maxiter         = 1;
2299:     jac->tol             = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */
2300:     jac->truncfactor     = 0.0;
2301:     jac->strongthreshold = .25;
2302:     jac->maxrowsum       = .9;
2303:     jac->measuretype     = 0;
2304:     jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1;
2305:     jac->smoothtype                                              = -1; /* Not set by default */
2306:     jac->smoothnumlevels                                         = 25;
2307:     jac->eu_level                                                = 0;
2308:     jac->eu_droptolerance                                        = 0;
2309:     jac->eu_bj                                                   = 0;
2310:     jac->relaxweight                                             = 1.0;
2311:     jac->outerrelaxweight                                        = 1.0;
2312:     jac->Rtype                                                   = 0;
2313:     jac->Rstrongthreshold                                        = 0.25;
2314:     jac->Rfilterthreshold                                        = 0.0;
2315:     jac->Adroptype                                               = -1;
2316:     jac->Adroptol                                                = 0.0;
2317:     jac->agg_nl                                                  = 0;
2318:     jac->pmax                                                    = 0;
2319:     jac->truncfactor                                             = 0.0;
2320:     jac->agg_num_paths                                           = 1;
2321:     jac->maxc                                                    = 9;
2322:     jac->minc                                                    = 1;
2323:     jac->nodal_coarsening                                        = 0;
2324:     jac->nodal_coarsening_diag                                   = 0;
2325:     jac->vec_interp_variant                                      = 0;
2326:     jac->vec_interp_qmax                                         = 0;
2327:     jac->vec_interp_smooth                                       = PETSC_FALSE;
2328:     jac->interp_refine                                           = 0;
2329:     jac->nodal_relax                                             = PETSC_FALSE;
2330:     jac->nodal_relax_levels                                      = 1;
2331:     jac->rap2                                                    = 0;
2332:     PetscObjectParameterSetDefault(jac, relaxtype[2], 9); /* Gaussian elimination on the coarsest level */

2334:     /*
2335:       Initialize the following parameters with invalid value so we can recognize user input that sets the parameter.
2336:       If there is no user input they are overwritten in PCSetUp_HYPRE() depending on if the matrix is on the CPU or the GPU
2337:     */
2338:     PetscObjectParameterSetDefault(jac, relaxorder, PETSC_DECIDE);
2339:     PetscObjectParameterSetDefault(jac, coarsentype, PETSC_DECIDE);
2340:     PetscObjectParameterSetDefault(jac, interptype, PETSC_DECIDE);
2341:     PetscObjectParameterSetDefault(jac, relaxtype[0], PETSC_DECIDE);
2342:     PetscObjectParameterSetDefault(jac, relaxtype[1], PETSC_DECIDE);
2343: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
2344:     PetscObjectParameterSetDefault(jac, spgemm_type, "not yet set");
2345: #endif
2346: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2347:     PetscObjectParameterSetDefault(jac, keeptranspose, PETSC_BOOL3_UNKNOWN);
2348:     PetscObjectParameterSetDefault(jac, mod_rap2, PETSC_DECIDE);
2349: #endif
2350:     PetscObjectParameterSetDefault(jac, agg_interptype, PETSC_DECIDE);
2351:     PetscFunctionReturn(PETSC_SUCCESS);
2352:   }
2353:   PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag));
2354:   if (flag) {
2355:     PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver);
2356:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS;
2357:     pc->ops->view           = PCView_HYPRE_AMS;
2358:     jac->destroy            = HYPRE_AMSDestroy;
2359:     jac->setup              = HYPRE_AMSSetup;
2360:     jac->solve              = HYPRE_AMSSolve;
2361:     jac->coords[0]          = NULL;
2362:     jac->coords[1]          = NULL;
2363:     jac->coords[2]          = NULL;
2364:     jac->interior           = NULL;
2365:     /* solver parameters: these are borrowed from the mfem package, and they are not the default values from HYPRE AMS */
2366:     jac->as_print       = 0;
2367:     jac->as_max_iter    = 1;  /* used as a preconditioner */
2368:     jac->as_tol         = 0.; /* used as a preconditioner */
2369:     jac->ams_cycle_type = 13;
2370:     /* Smoothing options */
2371:     jac->as_relax_type   = 2;
2372:     jac->as_relax_times  = 1;
2373:     jac->as_relax_weight = 1.0;
2374:     jac->as_omega        = 1.0;
2375:     /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2376:     jac->as_amg_alpha_opts[0] = 10;
2377:     jac->as_amg_alpha_opts[1] = 1;
2378:     jac->as_amg_alpha_opts[2] = 6;
2379:     jac->as_amg_alpha_opts[3] = 6;
2380:     jac->as_amg_alpha_opts[4] = 4;
2381:     jac->as_amg_alpha_theta   = 0.25;
2382:     /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2383:     jac->as_amg_beta_opts[0] = 10;
2384:     jac->as_amg_beta_opts[1] = 1;
2385:     jac->as_amg_beta_opts[2] = 6;
2386:     jac->as_amg_beta_opts[3] = 6;
2387:     jac->as_amg_beta_opts[4] = 4;
2388:     jac->as_amg_beta_theta   = 0.25;
2389:     PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
2390:     PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
2391:     PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2392:     PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
2393:     PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2394:     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
2395:                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
2396:                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
2397:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
2398:                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
2399:     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0],   /* AMG coarsen type */
2400:                       jac->as_amg_beta_opts[1],                                             /* AMG agg_levels */
2401:                       jac->as_amg_beta_opts[2],                                             /* AMG relax_type */
2402:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                     /* AMG interp_type */
2403:                       jac->as_amg_beta_opts[4]);                                            /* AMG Pmax */
2404:     /* Zero conductivity */
2405:     jac->ams_beta_is_zero      = PETSC_FALSE;
2406:     jac->ams_beta_is_zero_part = PETSC_FALSE;
2407:     PetscFunctionReturn(PETSC_SUCCESS);
2408:   }
2409:   PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag));
2410:   if (flag) {
2411:     PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver);
2412:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS;
2413:     pc->ops->view           = PCView_HYPRE_ADS;
2414:     jac->destroy            = HYPRE_ADSDestroy;
2415:     jac->setup              = HYPRE_ADSSetup;
2416:     jac->solve              = HYPRE_ADSSolve;
2417:     jac->coords[0]          = NULL;
2418:     jac->coords[1]          = NULL;
2419:     jac->coords[2]          = NULL;
2420:     /* solver parameters: these are borrowed from the mfem package, and they are not the default values from HYPRE ADS */
2421:     jac->as_print       = 0;
2422:     jac->as_max_iter    = 1;  /* used as a preconditioner */
2423:     jac->as_tol         = 0.; /* used as a preconditioner */
2424:     jac->ads_cycle_type = 13;
2425:     /* Smoothing options */
2426:     jac->as_relax_type   = 2;
2427:     jac->as_relax_times  = 1;
2428:     jac->as_relax_weight = 1.0;
2429:     jac->as_omega        = 1.0;
2430:     /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */
2431:     jac->ams_cycle_type       = 14;
2432:     jac->as_amg_alpha_opts[0] = 10;
2433:     jac->as_amg_alpha_opts[1] = 1;
2434:     jac->as_amg_alpha_opts[2] = 6;
2435:     jac->as_amg_alpha_opts[3] = 6;
2436:     jac->as_amg_alpha_opts[4] = 4;
2437:     jac->as_amg_alpha_theta   = 0.25;
2438:     /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2439:     jac->as_amg_beta_opts[0] = 10;
2440:     jac->as_amg_beta_opts[1] = 1;
2441:     jac->as_amg_beta_opts[2] = 6;
2442:     jac->as_amg_beta_opts[3] = 6;
2443:     jac->as_amg_beta_opts[4] = 4;
2444:     jac->as_amg_beta_theta   = 0.25;
2445:     PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
2446:     PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
2447:     PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2448:     PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
2449:     PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2450:     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type,      /* AMS cycle type */
2451:                       jac->as_amg_alpha_opts[0],                                      /* AMG coarsen type */
2452:                       jac->as_amg_alpha_opts[1],                                      /* AMG agg_levels */
2453:                       jac->as_amg_alpha_opts[2],                                      /* AMG relax_type */
2454:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],             /* AMG interp_type */
2455:                       jac->as_amg_alpha_opts[4]);                                     /* AMG Pmax */
2456:     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
2457:                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
2458:                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
2459:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
2460:                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
2461:     PetscFunctionReturn(PETSC_SUCCESS);
2462:   }
2463:   PetscCall(PetscFree(jac->hypre_type));

2465:   jac->hypre_type = NULL;
2466:   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, ilu, pilut, parasails, boomeramg, ams, ads", name);
2467: }

2469: /*
2470:     The type is set to the default here only if it has not been set before the call to
2471:    ...SetFromOptions(), which is most of the time
2472: */
2473: static PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems PetscOptionsObject)
2474: {
2475:   PetscInt    indx;
2476:   const char *type[] = {"ilu", "euclid", "pilut", "parasails", "boomeramg", "ams", "ads"};
2477:   PetscBool   flg;
2478:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;

2480:   PetscFunctionBegin;
2481:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options");
2482:   PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg));
2483:   if (flg) PetscCall(PCHYPRESetType_HYPRE(pc, type[indx]));
2484:   /*
2485:     Set the type if it was never set.
2486:   */
2487:   if (!jac->hypre_type) PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg"));
2488:   PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject);
2489:   PetscOptionsHeadEnd();
2490:   PetscFunctionReturn(PETSC_SUCCESS);
2491: }

2493: /*@
2494:   PCHYPRESetType - Sets which hypre preconditioner you wish to use

2496:   Input Parameters:
2497: + pc   - the preconditioner context
2498: - name - either euclid, ilu, pilut, parasails, boomeramg, ams, or ads

2500:   Options Database Key:
2501: . -pc_hypre_type - One of euclid, ilu, pilut, parasails, boomeramg, ams, or ads

2503:   Level: intermediate

2505: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE`
2506: @*/
2507: PetscErrorCode PCHYPRESetType(PC pc, const char name[])
2508: {
2509:   PetscFunctionBegin;
2511:   PetscAssertPointer(name, 2);
2512:   PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name));
2513:   PetscFunctionReturn(PETSC_SUCCESS);
2514: }
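/*
   A typical calling sequence (sketch) for selecting the hypre preconditioner from code rather than
   from the options database:

     KSP ksp;
     PC  pc;
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCHYPRE));
     PetscCall(PCHYPRESetType(pc, "boomeramg")); // or "ams", "ads", "ilu", ...
*/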

2516: /*@C
2517:   PCHYPREGetCFMarkers - Gets CF marker arrays for all levels (except the finest level)

2519:   Logically Collective

2521:   Input Parameter:
2522: . pc - the preconditioner context

2524:   Output Parameters:
2525: + n_per_level - the number of nodes per level (array of length `num_levels`)
2526: - CFMarkers   - the Coarse/Fine Boolean arrays (array of length `num_levels` - 1)

2528:   Level: advanced

2530:   Note:
2531:   The caller is responsible for the memory management of the `n_per_level` and `CFMarkers` pointers; that is, they should be freed with `PetscFree()` when no longer needed.

2533: .seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
2534: @*/
2535: PetscErrorCode PCHYPREGetCFMarkers(PC pc, PetscInt *n_per_level[], PetscBT *CFMarkers[])
2536: {
2537:   PetscFunctionBegin;
2539:   PetscAssertPointer(n_per_level, 2);
2540:   PetscAssertPointer(CFMarkers, 3);
2541:   PetscUseMethod(pc, "PCHYPREGetCFMarkers_C", (PC, PetscInt *[], PetscBT *[]), (pc, n_per_level, CFMarkers));
2542:   PetscFunctionReturn(PETSC_SUCCESS);
2543: }
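/*
   A usage sketch for the routine above, assumed to be called after the preconditioner has been set
   up with a BoomerAMG hierarchy; only the arrays documented above are freed here.

     PetscInt *n_per_level;
     PetscBT  *CFMarkers;
     PetscCall(PCHYPREGetCFMarkers(pc, &n_per_level, &CFMarkers));
     // ... inspect the coarse/fine splitting on each level ...
     PetscCall(PetscFree(n_per_level));
     PetscCall(PetscFree(CFMarkers));
*/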

2545: /*@
2546:   PCHYPREGetType - Gets which hypre preconditioner you are using

2548:   Input Parameter:
2549: . pc - the preconditioner context

2551:   Output Parameter:
2552: . name - either euclid, ilu, pilut, parasails, boomeramg, ams, or ads

2554:   Level: intermediate

2556: .seealso: [](ch_ksp), `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE`
2557: @*/
2558: PetscErrorCode PCHYPREGetType(PC pc, const char *name[])
2559: {
2560:   PetscFunctionBegin;
2562:   PetscAssertPointer(name, 2);
2563:   PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name));
2564:   PetscFunctionReturn(PETSC_SUCCESS);
2565: }

2567: /*@
2568:   PCMGGalerkinSetMatProductAlgorithm - Set type of sparse matrix-matrix product for hypre's BoomerAMG to use on GPUs

2570:   Logically Collective

2572:   Input Parameters:
2573: + pc   - the hypre context
2574: - name - one of 'cusparse', 'hypre'

2576:   Options Database Key:
2577: . -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of sparse matrix-matrix product to use in hypre

2579:   Level: intermediate

2581:   Developer Note:
2582:   The name starts with `PCMG`; should it not start with `PCHYPREBoomerAMG`?

2584: .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()`
2585: @*/
2586: PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[])
2587: {
2588:   PetscFunctionBegin;
2590:   PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name));
2591:   PetscFunctionReturn(PETSC_SUCCESS);
2592: }
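/*
   Equivalent ways to choose the product algorithm (sketch): from code,

     PetscCall(PCMGGalerkinSetMatProductAlgorithm(pc, "hypre"));

   or from the options database with -pc_mg_galerkin_mat_product_algorithm hypre
*/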

2594: /*@
2595:   PCMGGalerkinGetMatProductAlgorithm - Get type of sparse matrix-matrix product for hypre's BoomerAMG to use on GPUs

2597:   Not Collective

2599:   Input Parameter:
2600: . pc - the multigrid context

2602:   Output Parameter:
2603: . name - one of 'cusparse', 'hypre'

2605:   Level: intermediate

2607: .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()`
2608: @*/
2609: PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[])
2610: {
2611:   PetscFunctionBegin;
2613:   PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name));
2614:   PetscFunctionReturn(PETSC_SUCCESS);
2615: }

2617: /*MC
2618:   PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC`

2620:   Options Database Keys:
2621: +   -pc_hypre_type                           - One of `euclid`, `ilu`, `pilut`, `parasails`, `boomeramg`, `ams`, or `ads`
2622: . -pc_hypre_boomeramg_nodal_coarsen <n>      - where `n` is from 1 to 6 (see `HYPRE_BoomerAMGSetNodal()`)
2623: . -pc_hypre_boomeramg_vec_interp_variant <v> - where `v` is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`)
2624: - Many others - run with `-pc_type hypre` `-pc_hypre_type XXX` `-help` to see options for the XXX preconditioner

2626:   Level: intermediate

2628:   Notes:
2629:   Apart from `-pc_hypre_type` (for which there is `PCHYPRESetType()`),
2630:   the many hypre options can ONLY be set via the options database (e.g. the command line
2631:   or with `PetscOptionsSetValue()`); there are no functions to set them

2633:   The options `-pc_hypre_boomeramg_max_iter` and `-pc_hypre_boomeramg_tol` refer to the number of iterations
2634:   (V-cycles) and tolerance that BoomerAMG uses EACH time it is called. So for example, if
2635:   `-pc_hypre_boomeramg_max_iter` is set to 2 then 2 V-cycles are used to define the preconditioner
2636:   (`-pc_hypre_boomeramg_tol` should be set to 0.0 - the default - to strictly use a fixed number of
2637:   iterations per hypre call). `-ksp_max_it` and `-ksp_rtol` STILL determine the total number of iterations
2638:   and tolerance for the Krylov solver. For example, if `-pc_hypre_boomeramg_max_iter` is 2 and `-ksp_max_it` is 10
2639:   then AT MOST twenty V-cycles of BoomerAMG will be used.

2641:   Note that the option `-pc_hypre_boomeramg_relax_type_all` defaults to symmetric relaxation
2642:   (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry.
2643:   Otherwise, you may want to use `-pc_hypre_boomeramg_relax_type_all SOR/Jacobi`.

2645:   If you provide a near null space to your matrix with `MatSetNearNullSpace()` it is ignored by hypre's BoomerAMG UNLESS you also use
2646:   the following two options: `-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>`

2648:   See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers

2650:   For `PCHYPRE` type of `ams` or `ads` auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`,
2651:   `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2652:   `PCHYPREAMSSetInteriorNodes()`

2654:   Sometimes people want to try algebraic multigrid as a "standalone" solver, that is, not accelerating it with a Krylov method. Though we generally do not recommend this
2655:   since it is usually slower, one should use a `KSPType` of `KSPRICHARDSON`
2656:   (or equivalently `-ksp_type richardson`) to achieve this. Using `KSPPREONLY` will not work since it only applies a single cycle of multigrid.

2658:   PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems

2660:   GPU Notes:
2661:   To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda
2662:   Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers.

2664:   To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip
2665:   Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers.

2667: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`,
2668:           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2669:           `PCHYPREAMSSetInteriorNodes()`
2670: M*/
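/*
   An illustrative command line combining the options discussed above (the particular values for the
   nodal coarsening and interpolation variant are examples only):

     -ksp_type cg -pc_type hypre -pc_hypre_type boomeramg \
       -pc_hypre_boomeramg_max_iter 2 -pc_hypre_boomeramg_tol 0.0 \
       -pc_hypre_boomeramg_nodal_coarsen 1 -pc_hypre_boomeramg_vec_interp_variant 1
*/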

2672: PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc)
2673: {
2674:   PC_HYPRE *jac;

2676:   PetscFunctionBegin;
2677:   PetscCall(PetscNew(&jac));

2679:   pc->data                = jac;
2680:   pc->ops->reset          = PCReset_HYPRE;
2681:   pc->ops->destroy        = PCDestroy_HYPRE;
2682:   pc->ops->setfromoptions = PCSetFromOptions_HYPRE;
2683:   pc->ops->setup          = PCSetUp_HYPRE;
2684:   pc->ops->apply          = PCApply_HYPRE;
2685:   jac->hypre_type         = NULL;
2686:   jac->comm_hypre         = MPI_COMM_NULL;
2687:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE));
2688:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE));
2689:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE));
2690:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE));
2691:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE));
2692:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE));
2693:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE));
2694:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE));
2695:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE));
2696:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG));
2697:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG));
2698: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2699:   #if defined(HYPRE_USING_HIP)
2700:   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
2701:   #endif
2702:   #if defined(HYPRE_USING_CUDA)
2703:   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
2704:   #endif
2705: #endif
2706:   PetscHYPREInitialize();
2707:   PetscFunctionReturn(PETSC_SUCCESS);
2708: }

2710: typedef struct {
2711:   MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
2712:   HYPRE_StructSolver hsolver;

2714:   /* keep copy of PFMG options used so may view them */
2715:   PetscInt  its;
2716:   PetscReal tol;
2717:   PetscInt  relax_type;
2718:   PetscInt  rap_type;
2719:   PetscInt  num_pre_relax, num_post_relax;
2720:   PetscInt  max_levels;
2721:   PetscInt  skip_relax;
2722:   PetscBool print_statistics;
2723: } PC_PFMG;

2725: static PetscErrorCode PCDestroy_PFMG(PC pc)
2726: {
2727:   PC_PFMG *ex = (PC_PFMG *)pc->data;

2729:   PetscFunctionBegin;
2730:   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2731:   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2732:   PetscCall(PetscFree(pc->data));
2733:   PetscFunctionReturn(PETSC_SUCCESS);
2734: }

2736: static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"};
2737: static const char *PFMGRAPType[]   = {"Galerkin", "non-Galerkin"};

2739: static PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer)
2740: {
2741:   PetscBool isascii;
2742:   PC_PFMG  *ex = (PC_PFMG *)pc->data;

2744:   PetscFunctionBegin;
2745:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2746:   if (isascii) {
2747:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE PFMG preconditioning\n"));
2748:     PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
2749:     PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", (double)ex->tol));
2750:     PetscCall(PetscViewerASCIIPrintf(viewer, "    relax type %s\n", PFMGRelaxType[ex->relax_type]));
2751:     PetscCall(PetscViewerASCIIPrintf(viewer, "    RAP type %s\n", PFMGRAPType[ex->rap_type]));
2752:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2753:     PetscCall(PetscViewerASCIIPrintf(viewer, "    max levels %" PetscInt_FMT "\n", ex->max_levels));
2754:     PetscCall(PetscViewerASCIIPrintf(viewer, "    skip relax %" PetscInt_FMT "\n", ex->skip_relax));
2755:   }
2756:   PetscFunctionReturn(PETSC_SUCCESS);
2757: }

2759: static PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems PetscOptionsObject)
2760: {
2761:   PC_PFMG *ex = (PC_PFMG *)pc->data;

2763:   PetscFunctionBegin;
2764:   PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options");
2765:   PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL));
2766:   PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL));
2767:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2768:   PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2769:   PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2770:   PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2771:   PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);

2773:   PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL));
2774:   PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);

2776:   PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL));
2777:   PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2778:   PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
2779:   PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2780:   PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL));
2781:   PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
2782:   PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL));
2783:   PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax);
2784:   PetscOptionsHeadEnd();
2785:   PetscFunctionReturn(PETSC_SUCCESS);
2786: }

2788: static PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y)
2789: {
2790:   PC_PFMG           *ex = (PC_PFMG *)pc->data;
2791:   PetscScalar       *yy;
2792:   const PetscScalar *xx;
2793:   PetscInt           ilower[3], iupper[3];
2794:   HYPRE_Int          hlower[3], hupper[3];
2795:   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)pc->pmat->data;

2797:   PetscFunctionBegin;
2798:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2799:   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
2800:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit, so copy the PetscInt corner indices into HYPRE_Int arrays */
2801:   iupper[0] += ilower[0] - 1;
2802:   iupper[1] += ilower[1] - 1;
2803:   iupper[2] += ilower[2] - 1;
2804:   hlower[0] = (HYPRE_Int)ilower[0];
2805:   hlower[1] = (HYPRE_Int)ilower[1];
2806:   hlower[2] = (HYPRE_Int)ilower[2];
2807:   hupper[0] = (HYPRE_Int)iupper[0];
2808:   hupper[1] = (HYPRE_Int)iupper[1];
2809:   hupper[2] = (HYPRE_Int)iupper[2];

2811:   /* copy x values over to hypre */
2812:   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2813:   PetscCall(VecGetArrayRead(x, &xx));
2814:   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2815:   PetscCall(VecRestoreArrayRead(x, &xx));
2816:   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2817:   PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

2819:   /* copy solution values back to PETSc */
2820:   PetscCall(VecGetArray(y, &yy));
2821:   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2822:   PetscCall(VecRestoreArray(y, &yy));
2823:   PetscFunctionReturn(PETSC_SUCCESS);
2824: }

2826: static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2827: {
2828:   PC_PFMG  *jac = (PC_PFMG *)pc->data;
2829:   HYPRE_Int oits;

2831:   PetscFunctionBegin;
2832:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2833:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its);
2834:   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol);

2836:   PetscCall(PCApply_PFMG(pc, b, y));
2837:   PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits);
2838:   *outits = oits;
2839:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2840:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2841:   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol);
2842:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its);
2843:   PetscFunctionReturn(PETSC_SUCCESS);
2844: }

2846: static PetscErrorCode PCSetUp_PFMG(PC pc)
2847: {
2848:   PC_PFMG         *ex = (PC_PFMG *)pc->data;
2849:   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
2850:   PetscBool        flg;

2852:   PetscFunctionBegin;
2853:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
2854:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");

2856:   /* create the hypre solver object and set its information */
2857:   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2858:   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);

2860:   // Print Hypre statistics about the solve process
2861:   if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3);

2863:   // The hypre options must be repeated here because the StructPFMG was destroyed and recreated
2864:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2865:   PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2866:   PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
2867:   PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);
2868:   PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2869:   PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2870:   PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);

2872:   PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2873:   PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver);
2874:   PetscFunctionReturn(PETSC_SUCCESS);
2875: }

2877: /*MC
2878:   PCPFMG - the hypre PFMG multigrid solver

2880:   Options Database Keys:
2881: + -pc_pfmg_its <its>              - number of iterations of PFMG to use as preconditioner
2882: . -pc_pfmg_num_pre_relax <steps>  - number of smoothing steps before coarse grid solve
2883: . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve
2884: . -pc_pfmg_tol <tol>              - tolerance of PFMG
2885: . -pc_pfmg_relax_type             - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel
2886: . -pc_pfmg_rap_type               - type of coarse matrix generation, one of Galerkin,non-Galerkin
2887: - -pc_pfmg_skip_relax             - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations
2888:                                     when the underlying problem is isotropic, one of 0,1

2890:   Level: advanced

2892:   Notes:
2893:   This is for CELL-centered discretizations

2895:   See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG`

2897:   See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver

2899:   This must be used with the `MATHYPRESTRUCT` matrix type.

2901:   This provides only some of the functionality of PFMG; it supports only one block per process, defined by a PETSc `DMDA`.

2903: .seealso: [](ch_ksp), `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`
2904: M*/
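/*
   An illustrative command line for a DMDA-based code (the matrix type name hyprestruct is assumed to
   select MATHYPRESTRUCT, which this preconditioner requires):

     -dm_mat_type hyprestruct -pc_type pfmg -pc_pfmg_num_pre_relax 2 -pc_pfmg_num_post_relax 2 -pc_pfmg_tol 1e-8
*/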

2906: PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc)
2907: {
2908:   PC_PFMG *ex;

2910:   PetscFunctionBegin;
2911:   PetscCall(PetscNew(&ex));
2912:   pc->data = ex;

2914:   ex->its              = 1;
2915:   ex->tol              = 1.e-8;
2916:   ex->relax_type       = 1;
2917:   ex->rap_type         = 0;
2918:   ex->num_pre_relax    = 1;
2919:   ex->num_post_relax   = 1;
2920:   ex->max_levels       = 0;
2921:   ex->skip_relax       = 0;
2922:   ex->print_statistics = PETSC_FALSE;

2924:   pc->ops->setfromoptions  = PCSetFromOptions_PFMG;
2925:   pc->ops->view            = PCView_PFMG;
2926:   pc->ops->destroy         = PCDestroy_PFMG;
2927:   pc->ops->apply           = PCApply_PFMG;
2928:   pc->ops->applyrichardson = PCApplyRichardson_PFMG;
2929:   pc->ops->setup           = PCSetUp_PFMG;

2931:   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2932:   PetscHYPREInitialize();
2933:   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
2934:   PetscFunctionReturn(PETSC_SUCCESS);
2935: }

2937: /* we know we are working with a HYPRE_SStructMatrix */
2938: typedef struct {
2939:   MPI_Comm            hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */
2940:   HYPRE_SStructSolver ss_solver;

2942:   /* keep copy of SYSPFMG options used so may view them */
2943:   PetscInt  its;
2944:   PetscReal tol;
2945:   PetscInt  relax_type;
2946:   PetscInt  num_pre_relax, num_post_relax;
2947: } PC_SysPFMG;

2949: static PetscErrorCode PCDestroy_SysPFMG(PC pc)
2950: {
2951:   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;

2953:   PetscFunctionBegin;
2954:   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2955:   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2956:   PetscCall(PetscFree(pc->data));
2957:   PetscFunctionReturn(PETSC_SUCCESS);
2958: }

2960: static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"};

2962: static PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer)
2963: {
2964:   PetscBool   isascii;
2965:   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;

2967:   PetscFunctionBegin;
2968:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2969:   if (isascii) {
2970:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SysPFMG preconditioning\n"));
2971:     PetscCall(PetscViewerASCIIPrintf(viewer, "  max iterations %" PetscInt_FMT "\n", ex->its));
2972:     PetscCall(PetscViewerASCIIPrintf(viewer, "  tolerance %g\n", (double)ex->tol));
2973:     PetscCall(PetscViewerASCIIPrintf(viewer, "  relax type %s\n", SysPFMGRelaxType[ex->relax_type]));
2974:     PetscCall(PetscViewerASCIIPrintf(viewer, "  number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2975:   }
2976:   PetscFunctionReturn(PETSC_SUCCESS);
2977: }

2979: static PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems PetscOptionsObject)
2980: {
2981:   PC_SysPFMG *ex  = (PC_SysPFMG *)pc->data;
2982:   PetscBool   flg = PETSC_FALSE;

2984:   PetscFunctionBegin;
2985:   PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options");
2986:   PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL));
2987:   if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3);
2988:   PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL));
2989:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its);
2990:   PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2991:   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax);
2992:   PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2993:   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax);

2995:   PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL));
2996:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol);
2997:   PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
2998:   PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type);
2999:   PetscOptionsHeadEnd();
3000:   PetscFunctionReturn(PETSC_SUCCESS);
3001: }

3003: static PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y)
3004: {
3005:   PC_SysPFMG        *ex = (PC_SysPFMG *)pc->data;
3006:   PetscScalar       *yy;
3007:   const PetscScalar *xx;
3008:   PetscInt           ilower[3], iupper[3];
3009:   HYPRE_Int          hlower[3], hupper[3];
3010:   Mat_HYPRESStruct  *mx       = (Mat_HYPRESStruct *)pc->pmat->data;
3011:   PetscInt           ordering = mx->dofs_order;
3012:   PetscInt           nvars    = mx->nvars;
3013:   PetscInt           part     = 0;
3014:   PetscInt           size;
3015:   PetscInt           i;

3017:   PetscFunctionBegin;
3018:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3019:   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
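  /* DMDAGetCorners() returns the local widths in iupper[]; convert them to inclusive upper corner indices */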
3020:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is only 32 bits wide while PetscInt may be 64 bits, hence the explicit casts below */
3021:   iupper[0] += ilower[0] - 1;
3022:   iupper[1] += ilower[1] - 1;
3023:   iupper[2] += ilower[2] - 1;
3024:   hlower[0] = (HYPRE_Int)ilower[0];
3025:   hlower[1] = (HYPRE_Int)ilower[1];
3026:   hlower[2] = (HYPRE_Int)ilower[2];
3027:   hupper[0] = (HYPRE_Int)iupper[0];
3028:   hupper[1] = (HYPRE_Int)iupper[1];
3029:   hupper[2] = (HYPRE_Int)iupper[2];

3031:   size = 1;
3032:   for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1);

3034:   /* copy x values over to hypre for variable ordering */
3035:   if (ordering) {
3036:     PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
3037:     PetscCall(VecGetArrayRead(x, &xx));
3038:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i)));
3039:     PetscCall(VecRestoreArrayRead(x, &xx));
3040:     PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
3041:     PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x);
3042:     PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

3044:     /* copy solution values back to PETSc */
3045:     PetscCall(VecGetArray(y, &yy));
3046:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i)));
3047:     PetscCall(VecRestoreArray(y, &yy));
3048:   } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
3049:     PetscScalar *z;
3050:     PetscInt     j, k;

3052:     PetscCall(PetscMalloc1(nvars * size, &z));
3053:     PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
3054:     PetscCall(VecGetArrayRead(x, &xx));

3056:     /* transform nodal to hypre's variable ordering for sys_pfmg */
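    /* PETSc stores the dofs interleaved per grid point (node-major); sys_pfmg expects each variable's
       values in a contiguous box (variable-major), so transpose into the work array z[] */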
3057:     for (i = 0; i < size; i++) {
3058:       k = i * nvars;
3059:       for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j];
3060:     }
3061:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
3062:     PetscCall(VecRestoreArrayRead(x, &xx));
3063:     PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
3064:     PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

3066:     /* copy solution values back to PETSc */
3067:     PetscCall(VecGetArray(y, &yy));
3068:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
3069:     /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
3070:     for (i = 0; i < size; i++) {
3071:       k = i * nvars;
3072:       for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i];
3073:     }
3074:     PetscCall(VecRestoreArray(y, &yy));
3075:     PetscCall(PetscFree(z));
3076:   }
3077:   PetscFunctionReturn(PETSC_SUCCESS);
3078: }

3080: static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
3081: {
3082:   PC_SysPFMG *jac = (PC_SysPFMG *)pc->data;
3083:   HYPRE_Int   oits;

3085:   PetscFunctionBegin;
3086:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
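  /* temporarily impose the Richardson iteration count and tolerance on the hypre solver; the user-set values are restored below */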
3087:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its);
3088:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol);
3089:   PetscCall(PCApply_SysPFMG(pc, b, y));
3090:   PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits);
3091:   *outits = oits;
3092:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
3093:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
3094:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol);
3095:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its);
3096:   PetscFunctionReturn(PETSC_SUCCESS);
3097: }

3099: static PetscErrorCode PCSetUp_SysPFMG(PC pc)
3100: {
3101:   PC_SysPFMG       *ex = (PC_SysPFMG *)pc->data;
3102:   Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)pc->pmat->data;
3103:   PetscBool         flg;

3105:   PetscFunctionBegin;
3106:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg));
3107:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner");

3109:   /* create the hypre sstruct solver object and set its information */
3110:   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
3111:   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
3112:   PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver);
3113:   PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
3114:   PetscFunctionReturn(PETSC_SUCCESS);
3115: }

3117: /*MC
3118:    PCSYSPFMG - the hypre SysPFMG multigrid solver

3120:    Level: advanced

3122:    Options Database Keys:
3123: + -pc_syspfmg_its <its>                                           - number of iterations of SysPFMG to use as preconditioner
3124: . -pc_syspfmg_num_pre_relax <steps>                               - number of smoothing steps before coarse grid
3125: . -pc_syspfmg_num_post_relax <steps>                              - number of smoothing steps after coarse grid
3126: . -pc_syspfmg_tol <tol>                                           - tolerance of SysPFMG
3127: - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles

3129:    Notes:
3130:    See `PCPFMG` for hypre's PFMG, which works for a scalar PDE, and `PCSMG` for hypre's structured-grid SMG solver

3132:    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver

3134:    This is for CELL-centered discretizations

3136:    This must be used with the `MATHYPRESSTRUCT` matrix type.

3138:    This does not give access to all the functionality of hypre's SysPFMG; it supports only one part, with one block per process defined by a PETSc `DMDA`.
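
   Example usage:
   A minimal sketch of selecting this preconditioner from code; `A` is assumed to be an existing `MATHYPRESSTRUCT` matrix built from a `DMDA`, and `b`, `x` compatible vectors.
.vb
   KSP ksp;
   PC  pc;

   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
   PetscCall(KSPSetOperators(ksp, A, A));
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCSYSPFMG));
   PetscCall(KSPSetFromOptions(ksp)); /* picks up the -pc_syspfmg_* options listed above */
   PetscCall(KSPSolve(ksp, b, x));
   PetscCall(KSPDestroy(&ksp));
.ve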

3140: .seealso: [](ch_ksp), `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG`
3141: M*/

3143: PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc)
3144: {
3145:   PC_SysPFMG *ex;

3147:   PetscFunctionBegin;
3148:   PetscCall(PetscNew(&ex));
3149:   pc->data = ex;

3151:   ex->its            = 1;
3152:   ex->tol            = 1.e-8;
3153:   ex->relax_type     = 1;
3154:   ex->num_pre_relax  = 1;
3155:   ex->num_post_relax = 1;

3157:   pc->ops->setfromoptions  = PCSetFromOptions_SysPFMG;
3158:   pc->ops->view            = PCView_SysPFMG;
3159:   pc->ops->destroy         = PCDestroy_SysPFMG;
3160:   pc->ops->apply           = PCApply_SysPFMG;
3161:   pc->ops->applyrichardson = PCApplyRichardson_SysPFMG;
3162:   pc->ops->setup           = PCSetUp_SysPFMG;

3164:   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3165:   PetscHYPREInitialize();
3166:   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
3167:   PetscFunctionReturn(PETSC_SUCCESS);
3168: }

3170: /* PC SMG */
3171: typedef struct {
3172:   MPI_Comm           hcomm; /* does not share a communicator with the HYPRE_StructMatrix because the solver must be created before the matrix is available */
3173:   HYPRE_StructSolver hsolver;
3174:   PetscInt           its; /* keep a copy of the SMG options used so they may be viewed */
3175:   PetscReal          tol;
3176:   PetscBool          print_statistics;
3177:   PetscInt           num_pre_relax, num_post_relax;
3178: } PC_SMG;

3180: static PetscErrorCode PCDestroy_SMG(PC pc)
3181: {
3182:   PC_SMG *ex = (PC_SMG *)pc->data;

3184:   PetscFunctionBegin;
3185:   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
3186:   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3187:   PetscCall(PetscFree(pc->data));
3188:   PetscFunctionReturn(PETSC_SUCCESS);
3189: }

3191: static PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer)
3192: {
3193:   PetscBool isascii;
3194:   PC_SMG   *ex = (PC_SMG *)pc->data;

3196:   PetscFunctionBegin;
3197:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
3198:   if (isascii) {
3199:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SMG preconditioning\n"));
3200:     PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
3201:     PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol));
3202:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
3203:   }
3204:   PetscFunctionReturn(PETSC_SUCCESS);
3205: }

3207: static PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems PetscOptionsObject)
3208: {
3209:   PC_SMG *ex = (PC_SMG *)pc->data;

3211:   PetscFunctionBegin;
3212:   PetscOptionsHeadBegin(PetscOptionsObject, "SMG options");

3214:   PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL));
3215:   PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
3216:   PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
3217:   PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL));

3219:   PetscOptionsHeadEnd();
3220:   PetscFunctionReturn(PETSC_SUCCESS);
3221: }

3223: static PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y)
3224: {
3225:   PC_SMG            *ex = (PC_SMG *)pc->data;
3226:   PetscScalar       *yy;
3227:   const PetscScalar *xx;
3228:   PetscInt           ilower[3], iupper[3];
3229:   HYPRE_Int          hlower[3], hupper[3];
3230:   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)pc->pmat->data;

3232:   PetscFunctionBegin;
3233:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3234:   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
3235:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is only 32 bits wide while PetscInt may be 64 bits, hence the explicit casts below */
3236:   iupper[0] += ilower[0] - 1;
3237:   iupper[1] += ilower[1] - 1;
3238:   iupper[2] += ilower[2] - 1;
3239:   hlower[0] = (HYPRE_Int)ilower[0];
3240:   hlower[1] = (HYPRE_Int)ilower[1];
3241:   hlower[2] = (HYPRE_Int)ilower[2];
3242:   hupper[0] = (HYPRE_Int)iupper[0];
3243:   hupper[1] = (HYPRE_Int)iupper[1];
3244:   hupper[2] = (HYPRE_Int)iupper[2];

3246:   /* copy x values over to hypre */
3247:   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
3248:   PetscCall(VecGetArrayRead(x, &xx));
3249:   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
3250:   PetscCall(VecRestoreArrayRead(x, &xx));
3251:   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
3252:   PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

3254:   /* copy solution values back to PETSc */
3255:   PetscCall(VecGetArray(y, &yy));
3256:   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
3257:   PetscCall(VecRestoreArray(y, &yy));
3258:   PetscFunctionReturn(PETSC_SUCCESS);
3259: }

3261: static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
3262: {
3263:   PC_SMG   *jac = (PC_SMG *)pc->data;
3264:   HYPRE_Int oits;

3266:   PetscFunctionBegin;
3267:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3268:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its);
3269:   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol);

3271:   PetscCall(PCApply_SMG(pc, b, y));
3272:   PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits);
3273:   *outits = oits;
3274:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
3275:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
3276:   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol);
3277:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its);
3278:   PetscFunctionReturn(PETSC_SUCCESS);
3279: }

3281: static PetscErrorCode PCSetUp_SMG(PC pc)
3282: {
3283:   PetscInt         i, dim;
3284:   PC_SMG          *ex = (PC_SMG *)pc->data;
3285:   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
3286:   PetscBool        flg;
3287:   DMBoundaryType   p[3];
3288:   PetscInt         M[3];

3290:   PetscFunctionBegin;
3291:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
3292:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");

3294:   PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0));
3295:   // Check if power of 2 in periodic directions
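  // (M[i] & (M[i] - 1)) == 0 is the usual bit trick for testing whether M[i] is a power of two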
3296:   for (i = 0; i < dim; i++) {
3297:     PetscCheck((M[i] & (M[i] - 1)) == 0 || p[i] != DM_BOUNDARY_PERIODIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]);
3298:   }

3300:   /* create the hypre solver object and set its information */
3301:   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
3302:   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3303:   // The hypre options must be set here, not in SetFromOptions, because the solver is (re)created here
3304:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its);
3305:   PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
3306:   PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
3307:   PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol);

3309:   PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
3310:   PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver);
3311:   PetscFunctionReturn(PETSC_SUCCESS);
3312: }

3314: /*MC
3315:   PCSMG - the hypre (structured grid) SMG multigrid solver

3317:   Level: advanced

3319:   Options Database Keys:
3320: + -pc_smg_its <its>              - number of iterations of SMG to use as preconditioner
3321: . -pc_smg_num_pre_relax <steps>  - number of smoothing steps before coarse grid
3322: . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid
3323: - -pc_smg_tol <tol>              - tolerance of SMG

3325:   Notes:
3326:   This is for CELL-centered discretizations

3328:   This must be used with the `MATHYPRESTRUCT` `MatType`.

3330:   This does not provide all the functionality of hypre's SMG solver; it supports only one block per process defined by a PETSc `DMDA`.

3332:   See `PCSYSPFMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners.
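
  Example usage:
  A minimal sketch; `ksp` is assumed to be an existing `KSP` whose operator is a `MATHYPRESTRUCT` matrix.
.vb
  PC pc;

  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCSMG));
  PetscCall(KSPSetFromOptions(ksp)); /* the same choice and options can be given on the command line, e.g. -pc_type smg -pc_smg_num_pre_relax 2 */
.ve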

3334: .seealso:  `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG`
3335: M*/

3337: PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc)
3338: {
3339:   PC_SMG *ex;

3341:   PetscFunctionBegin;
3342:   PetscCall(PetscNew(&ex));
3343:   pc->data = ex;

3345:   ex->its            = 1;
3346:   ex->tol            = 1.e-8;
3347:   ex->num_pre_relax  = 1;
3348:   ex->num_post_relax = 1;

3350:   pc->ops->setfromoptions  = PCSetFromOptions_SMG;
3351:   pc->ops->view            = PCView_SMG;
3352:   pc->ops->destroy         = PCDestroy_SMG;
3353:   pc->ops->apply           = PCApply_SMG;
3354:   pc->ops->applyrichardson = PCApplyRichardson_SMG;
3355:   pc->ops->setup           = PCSetUp_SMG;

3357:   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3358:   PetscHYPREInitialize();
3359:   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3360:   PetscFunctionReturn(PETSC_SUCCESS);
3361: }