Actual source code: hypre.c

  1: /*
  2:    Provides an interface to the LLNL package hypre
  3: */

  5: #include <petscpkg_version.h>
  6: #include <petsc/private/pcimpl.h>
  7: /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */
  8: #include <petsc/private/matimpl.h>
  9: #include <petsc/private/vecimpl.h>
 10: #include <../src/vec/vec/impls/hypre/vhyp.h>
 11: #include <../src/mat/impls/hypre/mhypre.h>
 12: #include <../src/dm/impls/da/hypre/mhyp.h>
 13: #include <_hypre_parcsr_ls.h>
 14: #include <petscmathypre.h>

 16: #if defined(PETSC_HAVE_HYPRE_DEVICE)
 17: #include <petsc/private/deviceimpl.h>
 18: #endif

 20: static PetscBool  cite            = PETSC_FALSE;
 21: static const char hypreCitation[] = "@manual{hypre-web-page,\n  title  = {{\\sl hypre}: High Performance Preconditioners},\n  organization = {Lawrence Livermore National Laboratory},\n  note  = "
 22:                                     "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n";

 24: /*
 25:    Private context (data structure) for the  preconditioner.
 26: */
 27: typedef struct {
 28:   HYPRE_Solver hsolver;
 29:   Mat          hpmat; /* MatHYPRE */

 31:   HYPRE_Int (*destroy)(HYPRE_Solver);
 32:   HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
 33:   HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);

 35:   MPI_Comm comm_hypre;
 36:   char    *hypre_type;

 38:   /* options for Pilut and BoomerAMG*/
 39:   PetscInt  maxiter;
 40:   PetscReal tol;

 42:   /* options for Pilut */
 43:   PetscInt factorrowsize;

 45:   /* options for ParaSails */
 46:   PetscInt  nlevels;
 47:   PetscReal threshold;
 48:   PetscReal filter;
 49:   PetscReal loadbal;
 50:   PetscInt  logging;
 51:   PetscInt  ruse;
 52:   PetscInt  symt;

 54:   /* options for BoomerAMG */
 55:   PetscBool printstatistics;

 57:   /* options for BoomerAMG */
 58:   PetscInt  cycletype;
 59:   PetscInt  maxlevels;
 60:   PetscReal strongthreshold;
 61:   PetscReal maxrowsum;
 62:   PetscInt  gridsweeps[3];
 63:   PetscInt  coarsentype;
 64:   PetscInt  measuretype;
 65:   PetscInt  smoothtype;
 66:   PetscInt  smoothsweeps;
 67:   PetscInt  smoothnumlevels;
 68:   PetscInt  eu_level;         /* Number of levels for ILU(k) in Euclid */
 69:   PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */
 70:   PetscInt  eu_bj;            /* Defines use of Block Jacobi ILU in Euclid */
 71:   PetscInt  relaxtype[3];
 72:   PetscReal relaxweight;
 73:   PetscReal outerrelaxweight;
 74:   PetscInt  relaxorder;
 75:   PetscReal truncfactor;
 76:   PetscBool applyrichardson;
 77:   PetscInt  pmax;
 78:   PetscInt  interptype;
 79:   PetscInt  maxc;
 80:   PetscInt  minc;
 81: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
 82:   char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG
 83: #endif
 84:   /* GPU */
 85:   PetscBool keeptranspose;
 86:   PetscInt  rap2;
 87:   PetscInt  mod_rap2;

 89:   /* AIR */
 90:   PetscInt  Rtype;
 91:   PetscReal Rstrongthreshold;
 92:   PetscReal Rfilterthreshold;
 93:   PetscInt  Adroptype;
 94:   PetscReal Adroptol;

 96:   PetscInt  agg_nl;
 97:   PetscInt  agg_interptype;
 98:   PetscInt  agg_num_paths;
 99:   PetscBool nodal_relax;
100:   PetscInt  nodal_relax_levels;

102:   PetscInt  nodal_coarsening;
103:   PetscInt  nodal_coarsening_diag;
104:   PetscInt  vec_interp_variant;
105:   PetscInt  vec_interp_qmax;
106:   PetscBool vec_interp_smooth;
107:   PetscInt  interp_refine;

109:   /* NearNullSpace support */
110:   VecHYPRE_IJVector *hmnull;
111:   HYPRE_ParVector   *phmnull;
112:   PetscInt           n_hmnull;
113:   Vec                hmnull_constant;

115:   /* options for AS (Auxiliary Space preconditioners) */
116:   PetscInt  as_print;
117:   PetscInt  as_max_iter;
118:   PetscReal as_tol;
119:   PetscInt  as_relax_type;
120:   PetscInt  as_relax_times;
121:   PetscReal as_relax_weight;
122:   PetscReal as_omega;
123:   PetscInt  as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */
124:   PetscReal as_amg_alpha_theta;   /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */
125:   PetscInt  as_amg_beta_opts[5];  /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */
126:   PetscReal as_amg_beta_theta;    /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS)  */
127:   PetscInt  ams_cycle_type;
128:   PetscInt  ads_cycle_type;

130:   /* additional data */
131:   Mat G;             /* MatHYPRE */
132:   Mat C;             /* MatHYPRE */
133:   Mat alpha_Poisson; /* MatHYPRE */
134:   Mat beta_Poisson;  /* MatHYPRE */

136:   /* extra information for AMS */
137:   PetscInt          dim; /* geometrical dimension */
138:   VecHYPRE_IJVector coords[3];
139:   VecHYPRE_IJVector constants[3];
140:   VecHYPRE_IJVector interior;
141:   Mat               RT_PiFull, RT_Pi[3];
142:   Mat               ND_PiFull, ND_Pi[3];
143:   PetscBool         ams_beta_is_zero;
144:   PetscBool         ams_beta_is_zero_part;
145:   PetscInt          ams_proj_freq;
146: } PC_HYPRE;
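
/*
   Usage sketch (illustrative only; a KSP `ksp` and an assembled Mat `A` are assumed to exist):
   the fields above are normally filled from the options database after an application selects
   the hypre preconditioner and one of its sub-solvers, e.g.

     PC pc;
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCHYPRE));
     PetscCall(PCHYPRESetType(pc, "boomeramg")); // or "pilut", "parasails", "euclid", "ilu", "ams", "ads"
     PetscCall(KSPSetFromOptions(ksp));          // picks up the -pc_hypre_* options handled below
*/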

148: /*
149:   Matrices in AIJ format are created IN PLACE using the (I,J,data) arrays from BoomerAMG. Since the data format of hypre_ParCSRMatrix
150:   differs from the one used in PETSc, the original hypre_ParCSRMatrix can no longer be used after calling this routine.
151:   It is used by PCHMG; other users should avoid calling this function directly.
152: */
153: static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[])
154: {
155:   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
156:   PetscBool            same = PETSC_FALSE;
157:   PetscInt             num_levels, l;
158:   Mat                 *mattmp;
159:   hypre_ParCSRMatrix **A_array;

161:   PetscFunctionBegin;
162:   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
163:   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG ");
164:   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
165:   PetscCall(PetscMalloc1(num_levels, &mattmp));
166:   A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)jac->hsolver);
167:   for (l = 1; l < num_levels; l++) {
168:     PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &mattmp[num_levels - 1 - l]));
169:     /* We want to own the data; HYPRE can no longer use this matrix */
170:     A_array[l] = NULL;
171:   }
172:   *nlevels   = num_levels;
173:   *operators = mattmp;
174:   PetscFunctionReturn(PETSC_SUCCESS);
175: }

177: /*
178:   Matrices in AIJ format are created IN PLACE using the (I,J,data) arrays from BoomerAMG. Since the data format of hypre_ParCSRMatrix
179:   differs from the one used in PETSc, the original hypre_ParCSRMatrix can no longer be used after calling this routine.
180:   It is used by PCHMG; other users should avoid calling this function directly.
181: */
182: static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[])
183: {
184:   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
185:   PetscBool            same = PETSC_FALSE;
186:   PetscInt             num_levels, l;
187:   Mat                 *mattmp;
188:   hypre_ParCSRMatrix **P_array;

190:   PetscFunctionBegin;
191:   PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
192:   PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG ");
193:   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)jac->hsolver);
194:   PetscCall(PetscMalloc1(num_levels, &mattmp));
195:   P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)jac->hsolver);
196:   for (l = 1; l < num_levels; l++) {
197:     PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &mattmp[l - 1]));
198:     /* We want to own the data; HYPRE can no longer use this matrix */
199:     P_array[num_levels - 1 - l] = NULL;
200:   }
201:   *nlevels        = num_levels;
202:   *interpolations = mattmp;
203:   PetscFunctionReturn(PETSC_SUCCESS);
204: }
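
/*
   Usage sketch (illustrative only): the two routines above back the composed
   "PCGetCoarseOperators_C"/"PCGetInterpolations_C" methods used by PCHMG. A caller that
   reaches them through the PCGetCoarseOperators()/PCGetInterpolations() interface owns the
   returned matrices and arrays, so a typical cleanup looks like (assuming a set-up PC `pc`):

     PetscInt nlevels, i;
     Mat     *P, *Ac;
     PetscCall(PCGetInterpolations(pc, &nlevels, &P));
     PetscCall(PCGetCoarseOperators(pc, &nlevels, &Ac));
     // ... use the nlevels-1 matrices stored in P[] and Ac[] ...
     for (i = 0; i < nlevels - 1; i++) PetscCall(MatDestroy(&P[i]));
     for (i = 0; i < nlevels - 1; i++) PetscCall(MatDestroy(&Ac[i]));
     PetscCall(PetscFree(P));
     PetscCall(PetscFree(Ac));

   As noted above, the hypre hierarchy can no longer be used afterwards.
*/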

206: /* Resets (frees) Hypre's representation of the near null space */
207: static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc)
208: {
209:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
210:   PetscInt  i;

212:   PetscFunctionBegin;
213:   for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i]));
214:   PetscCall(PetscFree(jac->hmnull));
215:   PetscCall(PetscFree(jac->phmnull));
216:   PetscCall(VecDestroy(&jac->hmnull_constant));
217:   jac->n_hmnull = 0;
218:   PetscFunctionReturn(PETSC_SUCCESS);
219: }

221: static PetscErrorCode PCSetUp_HYPRE(PC pc)
222: {
223:   PC_HYPRE          *jac = (PC_HYPRE *)pc->data;
224:   Mat_HYPRE         *hjac;
225:   HYPRE_ParCSRMatrix hmat;
226:   HYPRE_ParVector    bv, xv;
227:   PetscBool          ishypre;

229:   PetscFunctionBegin;
230:   /* default type is boomerAMG */
231:   if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg"));

233:   /* get hypre matrix */
234:   if (pc->flag == DIFFERENT_NONZERO_PATTERN) PetscCall(MatDestroy(&jac->hpmat));
235:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre));
236:   if (!ishypre) {
237:     /* Temporary fix since we do not support MAT_REUSE_MATRIX with HYPRE device */
238: #if defined(PETSC_HAVE_HYPRE_DEVICE)
239:     PetscBool iscuda, iship, iskokkos;

241:     PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
242:     PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
243:     PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &iskokkos, MATSEQAIJKOKKOS, MATMPIAIJKOKKOS, ""));
244:     if (iscuda || iship || iskokkos) PetscCall(MatDestroy(&jac->hpmat));
245: #endif
246:     PetscCall(MatConvert(pc->pmat, MATHYPRE, jac->hpmat ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &jac->hpmat));
247:   } else {
248:     PetscCall(PetscObjectReference((PetscObject)pc->pmat));
249:     PetscCall(MatDestroy(&jac->hpmat));
250:     jac->hpmat = pc->pmat;
251:   }

253:   /* allow debug */
254:   PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view"));
255:   hjac = (Mat_HYPRE *)jac->hpmat->data;

257:   /* special case for BoomerAMG */
258:   if (jac->setup == HYPRE_BoomerAMGSetup) {
259:     MatNullSpace mnull;
260:     PetscBool    has_const;
261:     PetscInt     bs, nvec, i;
262:     const Vec   *vecs;

264:     PetscCall(MatGetBlockSize(pc->pmat, &bs));
265:     if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
266:     PetscCall(MatGetNearNullSpace(pc->mat, &mnull));
267:     if (mnull) {
268:       PetscCall(PCHYPREResetNearNullSpace_Private(pc));
269:       PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
270:       PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull));
271:       PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull));
272:       for (i = 0; i < nvec; i++) {
273:         PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i]));
274:         PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i]));
275:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]);
276:       }
277:       if (has_const) {
278:         PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL));
279:         PetscCall(VecSet(jac->hmnull_constant, 1));
280:         PetscCall(VecNormalize(jac->hmnull_constant, NULL));
281:         PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec]));
282:         PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec]));
283:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]);
284:         nvec++;
285:       }
286:       PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull);
287:       jac->n_hmnull = nvec;
288:     }
289:   }

291:   /* special case for AMS */
292:   if (jac->setup == HYPRE_AMSSetup) {
293:     Mat_HYPRE         *hm;
294:     HYPRE_ParCSRMatrix parcsr;
295:     if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
296:       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()");
297:     }
298:     if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim);
299:     if (jac->constants[0]) {
300:       HYPRE_ParVector ozz, zoz, zzo = NULL;
301:       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz));
302:       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz));
303:       if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo));
304:       PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo);
305:     }
306:     if (jac->coords[0]) {
307:       HYPRE_ParVector coords[3];
308:       coords[0] = NULL;
309:       coords[1] = NULL;
310:       coords[2] = NULL;
311:       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
312:       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
313:       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
314:       PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
315:     }
316:     PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
317:     hm = (Mat_HYPRE *)jac->G->data;
318:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
319:     PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr);
320:     if (jac->alpha_Poisson) {
321:       hm = (Mat_HYPRE *)jac->alpha_Poisson->data;
322:       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
323:       PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr);
324:     }
325:     if (jac->ams_beta_is_zero) {
326:       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL);
327:     } else if (jac->beta_Poisson) {
328:       hm = (Mat_HYPRE *)jac->beta_Poisson->data;
329:       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
330:       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr);
331:     } else if (jac->ams_beta_is_zero_part) {
332:       if (jac->interior) {
333:         HYPRE_ParVector interior = NULL;
334:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior));
335:         PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior);
336:       } else {
337:         jac->ams_beta_is_zero_part = PETSC_FALSE;
338:       }
339:     }
340:     if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) {
341:       PetscInt           i;
342:       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
343:       if (jac->ND_PiFull) {
344:         hm = (Mat_HYPRE *)jac->ND_PiFull->data;
345:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
346:       } else {
347:         nd_parcsrfull = NULL;
348:       }
349:       for (i = 0; i < 3; ++i) {
350:         if (jac->ND_Pi[i]) {
351:           hm = (Mat_HYPRE *)jac->ND_Pi[i]->data;
352:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
353:         } else {
354:           nd_parcsr[i] = NULL;
355:         }
356:       }
357:       PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
358:     }
359:   }
360:   /* special case for ADS */
361:   if (jac->setup == HYPRE_ADSSetup) {
362:     Mat_HYPRE         *hm;
363:     HYPRE_ParCSRMatrix parcsr;
364:     if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) {
365:       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations");
366:     } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead");
367:     PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient");
368:     PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteCurl");
369:     if (jac->coords[0]) {
370:       HYPRE_ParVector coords[3];
371:       coords[0] = NULL;
372:       coords[1] = NULL;
373:       coords[2] = NULL;
374:       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
375:       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
376:       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
377:       PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
378:     }
379:     hm = (Mat_HYPRE *)jac->G->data;
380:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
381:     PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr);
382:     hm = (Mat_HYPRE *)jac->C->data;
383:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
384:     PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr);
385:     if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
386:       PetscInt           i;
387:       HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3];
388:       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
389:       if (jac->RT_PiFull) {
390:         hm = (Mat_HYPRE *)jac->RT_PiFull->data;
391:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull));
392:       } else {
393:         rt_parcsrfull = NULL;
394:       }
395:       for (i = 0; i < 3; ++i) {
396:         if (jac->RT_Pi[i]) {
397:           hm = (Mat_HYPRE *)jac->RT_Pi[i]->data;
398:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i]));
399:         } else {
400:           rt_parcsr[i] = NULL;
401:         }
402:       }
403:       if (jac->ND_PiFull) {
404:         hm = (Mat_HYPRE *)jac->ND_PiFull->data;
405:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
406:       } else {
407:         nd_parcsrfull = NULL;
408:       }
409:       for (i = 0; i < 3; ++i) {
410:         if (jac->ND_Pi[i]) {
411:           hm = (Mat_HYPRE *)jac->ND_Pi[i]->data;
412:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
413:         } else {
414:           nd_parcsr[i] = NULL;
415:         }
416:       }
417:       PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
418:     }
419:   }
420:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
421:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv);
422:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv);
423:   PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
424:   PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv);
425:   PetscCall(PetscFPTrapPop());
426:   PetscFunctionReturn(PETSC_SUCCESS);
427: }
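
/*
   Usage sketch (illustrative only): the AMS branch above requires the discrete gradient plus
   either nodal coordinates, edge constant vectors, or interpolation matrices. Assuming a PC
   `pc` of type PCHYPRE, a gradient Mat `G`, and interleaved vertex coordinates `coords` of
   local length 3*nloc, a minimal setup is

     PetscCall(PCHYPRESetType(pc, "ams"));
     PetscCall(PCHYPRESetDiscreteGradient(pc, G));
     PetscCall(PCSetCoordinates(pc, 3, nloc, coords));

   For BoomerAMG, a near null space attached to the operator with MatSetNearNullSpace() is
   forwarded above through HYPRE_BoomerAMGSetInterpVectors().
*/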

429: static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x)
430: {
431:   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
432:   Mat_HYPRE         *hjac = (Mat_HYPRE *)jac->hpmat->data;
433:   HYPRE_ParCSRMatrix hmat;
434:   HYPRE_ParVector    jbv, jxv;

436:   PetscFunctionBegin;
437:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
438:   if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0));
439:   PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b));
440:   if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x));
441:   else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x));
442:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
443:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
444:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
445:   PetscStackCallExternalVoid(
446:     "Hypre solve", do {
447:       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv);
448:       if (hierr) {
449:         PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
450:         HYPRE_ClearAllErrors();
451:       }
452:     } while (0));

454:   if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv);
455:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
456:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
457:   PetscFunctionReturn(PETSC_SUCCESS);
458: }

460: static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X)
461: {
462:   PC_HYPRE           *jac  = (PC_HYPRE *)pc->data;
463:   Mat_HYPRE          *hjac = (Mat_HYPRE *)jac->hpmat->data;
464:   hypre_ParCSRMatrix *par_matrix;
465:   HYPRE_ParVector     hb, hx;
466:   const PetscScalar  *b;
467:   PetscScalar        *x;
468:   PetscInt            m, N, lda;
469:   hypre_Vector       *x_local;
470:   PetscMemType        type;

472:   PetscFunctionBegin;
473:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
474:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix);
475:   PetscCall(MatGetLocalSize(B, &m, NULL));
476:   PetscCall(MatGetSize(B, NULL, &N));
477:   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb);
478:   PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx);
479:   PetscCall(MatZeroEntries(X));
480:   PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type));
481:   PetscCall(MatDenseGetLDA(B, &lda));
482:   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use an LDA different from the number of local rows: %" PetscInt_FMT " != %" PetscInt_FMT, lda, m);
483:   PetscCall(MatDenseGetLDA(X, &lda));
484:   PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use an LDA different from the number of local rows: %" PetscInt_FMT " != %" PetscInt_FMT, lda, m);
485:   x_local = hypre_ParVectorLocalVector(hb);
486:   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
487:   hypre_VectorData(x_local) = (HYPRE_Complex *)b;
488:   PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL));
489:   x_local = hypre_ParVectorLocalVector(hx);
490:   PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0);
491:   hypre_VectorData(x_local) = (HYPRE_Complex *)x;
492:   PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
493:   PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
494:   PetscStackCallExternalVoid(
495:     "Hypre solve", do {
496:       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx);
497:       if (hierr) {
498:         PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
499:         HYPRE_ClearAllErrors();
500:       }
501:     } while (0));
502:   PetscCallExternal(HYPRE_ParVectorDestroy, hb);
503:   PetscCallExternal(HYPRE_ParVectorDestroy, hx);
504:   PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b));
505:   PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x));
506:   PetscFunctionReturn(PETSC_SUCCESS);
507: }
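
/*
   Usage sketch (illustrative only): this MatApply path is exercised when several right-hand
   sides are solved at once through KSPMatSolve() with dense B and X. With `m`/`M` the
   local/global row sizes of the operator and `nrhs` the number of right-hand sides (all
   assumed), e.g.

     Mat B, X; // MATDENSE, one column per right-hand side
     PetscCall(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, M, nrhs, NULL, &B));
     PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
     // ... fill B ...
     PetscCall(KSPMatSolve(ksp, B, X));

   Note the checks above: both B and X must use a leading dimension equal to the number of
   local rows.
*/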

509: static PetscErrorCode PCReset_HYPRE(PC pc)
510: {
511:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

513:   PetscFunctionBegin;
514:   PetscCall(MatDestroy(&jac->hpmat));
515:   PetscCall(MatDestroy(&jac->G));
516:   PetscCall(MatDestroy(&jac->C));
517:   PetscCall(MatDestroy(&jac->alpha_Poisson));
518:   PetscCall(MatDestroy(&jac->beta_Poisson));
519:   PetscCall(MatDestroy(&jac->RT_PiFull));
520:   PetscCall(MatDestroy(&jac->RT_Pi[0]));
521:   PetscCall(MatDestroy(&jac->RT_Pi[1]));
522:   PetscCall(MatDestroy(&jac->RT_Pi[2]));
523:   PetscCall(MatDestroy(&jac->ND_PiFull));
524:   PetscCall(MatDestroy(&jac->ND_Pi[0]));
525:   PetscCall(MatDestroy(&jac->ND_Pi[1]));
526:   PetscCall(MatDestroy(&jac->ND_Pi[2]));
527:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
528:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
529:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
530:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
531:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
532:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
533:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
534:   PetscCall(PCHYPREResetNearNullSpace_Private(pc));
535:   jac->ams_beta_is_zero      = PETSC_FALSE;
536:   jac->ams_beta_is_zero_part = PETSC_FALSE;
537:   jac->dim                   = 0;
538:   PetscFunctionReturn(PETSC_SUCCESS);
539: }

541: static PetscErrorCode PCDestroy_HYPRE(PC pc)
542: {
543:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

545:   PetscFunctionBegin;
546:   PetscCall(PCReset_HYPRE(pc));
547:   if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver);
548:   PetscCall(PetscFree(jac->hypre_type));
549: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
550:   PetscCall(PetscFree(jac->spgemm_type));
551: #endif
552:   if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
553:   PetscCall(PetscFree(pc->data));

555:   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0));
556:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL));
557:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL));
558:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL));
559:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL));
560:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL));
561:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL));
562:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL));
563:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL));
564:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL));
565:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
566:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
567:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL));
568:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL));
569:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
570:   PetscFunctionReturn(PETSC_SUCCESS);
571: }

573: static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject)
574: {
575:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
576:   PetscBool flag;

578:   PetscFunctionBegin;
579:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options");
580:   PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag));
581:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter);
582:   PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag));
583:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol);
584:   PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag));
585:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize);
586:   PetscOptionsHeadEnd();
587:   PetscFunctionReturn(PETSC_SUCCESS);
588: }

590: static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer)
591: {
592:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
593:   PetscBool iascii;

595:   PetscFunctionBegin;
596:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
597:   if (iascii) {
598:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Pilut preconditioning\n"));
599:     if (jac->maxiter != PETSC_DEFAULT) {
600:       PetscCall(PetscViewerASCIIPrintf(viewer, "    maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter));
601:     } else {
602:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default maximum number of iterations \n"));
603:     }
604:     if (jac->tol != PETSC_DEFAULT) {
605:       PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->tol));
606:     } else {
607:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default drop tolerance \n"));
608:     }
609:     if (jac->factorrowsize != PETSC_DEFAULT) {
610:       PetscCall(PetscViewerASCIIPrintf(viewer, "    factor row size %" PetscInt_FMT "\n", jac->factorrowsize));
611:     } else {
612:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factor row size \n"));
613:     }
614:   }
615:   PetscFunctionReturn(PETSC_SUCCESS);
616: }
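
/*
   Usage sketch (illustrative only; numeric values are arbitrary): the Pilut options handled
   above map directly to command-line usage such as

     -pc_type hypre -pc_hypre_type pilut \
       -pc_hypre_pilut_maxiter 20 -pc_hypre_pilut_tol 1e-3 -pc_hypre_pilut_factorrowsize 30
*/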

618: static const char *HYPREILUType[] = {
619:   "Block-Jacobi-ILUk", "Block-Jacobi-ILUT", "", "", "", "", "", "", "", "", /* 0-9 */
620:   "GMRES-ILUk",        "GMRES-ILUT",        "", "", "", "", "", "", "", "", /* 10-19 */
621:   "NSH-ILUk",          "NSH-ILUT",          "", "", "", "", "", "", "", "", /* 20-29 */
622:   "RAS-ILUk",          "RAS-ILUT",          "", "", "", "", "", "", "", "", /* 30-39 */
623:   "ddPQ-GMRES-ILUk",   "ddPQ-GMRES-ILUT",   "", "", "", "", "", "", "", "", /* 40-49 */
624:   "GMRES-ILU0"                                                              /* 50 */
625: };

627: static const char *HYPREILUIterSetup[] = {"default", "async-in-place", "async-explicit", "sync-explicit", "semisync-explicit"};

629: static PetscErrorCode PCSetFromOptions_HYPRE_ILU(PC pc, PetscOptionItems *PetscOptionsObject)
630: {
631:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
632:   PetscBool flg;
633:   PetscInt  indx;
634:   PetscReal tmpdbl;
635:   PetscBool tmp_truth;

637:   PetscFunctionBegin;
638:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ILU Options");

640:   /* ILU: ILU Type */
641:   PetscCall(PetscOptionsEList("-pc_hypre_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg));
642:   if (flg) { PetscCallExternal(HYPRE_ILUSetType, jac->hsolver, indx); }

644:   /* ILU: ILU iterative setup type*/
645:   PetscCall(PetscOptionsEList("-pc_hypre_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg));
646:   if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupType, jac->hsolver, indx); }

648:   /* ILU: ILU iterative setup option*/
649:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg));
650:   if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupOption, jac->hsolver, indx); }

652:   /* ILU: ILU iterative setup maxiter */
653:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg));
654:   if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupMaxIter, jac->hsolver, indx); }

656:   /* ILU: ILU iterative setup tolerance */
657:   PetscCall(PetscOptionsReal("-pc_hypre_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg));
658:   if (flg) { PetscCallExternal(HYPRE_ILUSetIterativeSetupTolerance, jac->hsolver, tmpdbl); }

660:   /* ILU: ILU Print Level */
661:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg));
662:   if (flg) { PetscCallExternal(HYPRE_ILUSetPrintLevel, jac->hsolver, indx); }

664:   /* ILU: Logging */
665:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg));
666:   if (flg) { PetscCallExternal(HYPRE_ILUSetLogging, jac->hsolver, indx); }

668:   /* ILU: ILU Level */
669:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_level", "Set ILU level", "None", 0, &indx, &flg));
670:   if (flg) { PetscCallExternal(HYPRE_ILUSetLevelOfFill, jac->hsolver, indx); }

672:   /* ILU: ILU Max NNZ per row */
673:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg));
674:   if (flg) { PetscCallExternal(HYPRE_ILUSetMaxNnzPerRow, jac->hsolver, indx); }

676:   /* ILU: tolerance */
677:   PetscCall(PetscOptionsReal("-pc_hypre_ilu_tol", "Tolerance for ILU", "None", 0, &tmpdbl, &flg));
678:   if (flg) { PetscCallExternal(HYPRE_ILUSetTol, jac->hsolver, tmpdbl); }

680:   /* ILU: maximum iteration count */
681:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg));
682:   if (flg) { PetscCallExternal(HYPRE_ILUSetMaxIter, jac->hsolver, indx); }

684:   /* ILU: drop threshold */
685:   PetscCall(PetscOptionsReal("-pc_hypre_ilu_drop_threshold", "Drop threshold for ILU", "None", 0, &tmpdbl, &flg));
686:   if (flg) { PetscCallExternal(HYPRE_ILUSetDropThreshold, jac->hsolver, tmpdbl); }

688:   /* ILU: Triangular Solve */
689:   PetscCall(PetscOptionsBool("-pc_hypre_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg));
690:   if (flg) { PetscCallExternal(HYPRE_ILUSetTriSolve, jac->hsolver, tmp_truth); }

692:   /* ILU: Lower Jacobi iteration */
693:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg));
694:   if (flg) { PetscCallExternal(HYPRE_ILUSetLowerJacobiIters, jac->hsolver, indx); }

696:   /* ILU: Upper Jacobi iteration */
697:   PetscCall(PetscOptionsInt("-pc_hypre_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg));
698:   if (flg) { PetscCallExternal(HYPRE_ILUSetUpperJacobiIters, jac->hsolver, indx); }

700:   /* ILU: local reordering */
701:   PetscCall(PetscOptionsBool("-pc_hypre_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg));
702:   if (flg) { PetscCallExternal(HYPRE_ILUSetLocalReordering, jac->hsolver, tmp_truth); }

704:   PetscOptionsHeadEnd();
705:   PetscFunctionReturn(PETSC_SUCCESS);
706: }
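
/*
   Usage sketch (illustrative only; values are arbitrary): a typical hypre ILU configuration
   through the options handled above is

     -pc_type hypre -pc_hypre_type ilu \
       -pc_hypre_ilu_type Block-Jacobi-ILUk -pc_hypre_ilu_level 1 -pc_hypre_ilu_tri_solve 1
*/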

708: static PetscErrorCode PCView_HYPRE_ILU(PC pc, PetscViewer viewer)
709: {
710:   PC_HYPRE         *jac      = (PC_HYPRE *)pc->data;
711:   hypre_ParILUData *ilu_data = (hypre_ParILUData *)jac->hsolver;
712:   PetscBool         iascii;
713:   PetscInt          indx;
714:   PetscReal         tmpdbl;
715:   PetscReal        *tmpdbl3;

717:   PetscFunctionBegin;
718:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
719:   if (iascii) {
720:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ILU preconditioning\n"));
721:     PetscStackCallExternalVoid("hypre_ParILUDataIluType", indx = hypre_ParILUDataIluType(ilu_data));
722:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU type              %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx));
723:     PetscStackCallExternalVoid("hypre_ParILUDataLfil", indx = hypre_ParILUDataLfil(ilu_data));
724:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU level             %" PetscInt_FMT "\n", indx));
725:     PetscStackCallExternalVoid("hypre_ParILUDataMaxIter", indx = hypre_ParILUDataMaxIter(ilu_data));
726:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max iterations    %" PetscInt_FMT "\n", indx));
727:     PetscStackCallExternalVoid("hypre_ParILUDataMaxRowNnz", indx = hypre_ParILUDataMaxRowNnz(ilu_data));
728:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max NNZ per row   %" PetscInt_FMT "\n", indx));
729:     PetscStackCallExternalVoid("hypre_ParILUDataTriSolve", indx = hypre_ParILUDataTriSolve(ilu_data));
730:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU triangular solve  %" PetscInt_FMT "\n", indx));
731:     PetscStackCallExternalVoid("hypre_ParILUDataTol", tmpdbl = hypre_ParILUDataTol(ilu_data));
732:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU tolerance         %e\n", tmpdbl));
733:     PetscStackCallExternalVoid("hypre_ParILUDataDroptol", tmpdbl3 = hypre_ParILUDataDroptol(ilu_data));
734:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU drop tolerance    %e / %e / %e\n", tmpdbl3[0], tmpdbl3[1], tmpdbl3[2]));
735:     PetscStackCallExternalVoid("hypre_ParILUDataReorderingType", indx = hypre_ParILUDataReorderingType(ilu_data));
736:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU local reordering  %" PetscInt_FMT "\n", indx));
737:     PetscStackCallExternalVoid("hypre_ParILUDataLowerJacobiIters", indx = hypre_ParILUDataLowerJacobiIters(ilu_data));
738:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU lower Jacobi iterations  %" PetscInt_FMT "\n", indx));
739:     PetscStackCallExternalVoid("hypre_ParILUDataUpperJacobiIters", indx = hypre_ParILUDataUpperJacobiIters(ilu_data));
740:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU upper Jacobi iterations  %" PetscInt_FMT "\n", indx));
741:     PetscStackCallExternalVoid("hypre_ParILUDataPrintLevel", indx = hypre_ParILUDataPrintLevel(ilu_data));
742:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU print level      %" PetscInt_FMT "\n", indx));
743:     PetscStackCallExternalVoid("hypre_ParILUDataLogging", indx = hypre_ParILUDataLogging(ilu_data));
744:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU logging level    %" PetscInt_FMT "\n", indx));
745:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupType", indx = hypre_ParILUDataIterativeSetupType(ilu_data));
746:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup type           %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx));
747:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupOption", indx = hypre_ParILUDataIterativeSetupOption(ilu_data));
748:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup option         %" PetscInt_FMT "\n", indx));
749:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupMaxIter", indx = hypre_ParILUDataIterativeSetupMaxIter(ilu_data));
750:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup max iterations %" PetscInt_FMT "\n", indx));
751:     PetscStackCallExternalVoid("hypre_ParILUDataIterativeSetupTolerance", tmpdbl = hypre_ParILUDataIterativeSetupTolerance(ilu_data));
752:     PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup tolerance      %e\n", tmpdbl));
753:   }
754:   PetscFunctionReturn(PETSC_SUCCESS);
755: }

757: static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject)
758: {
759:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
760:   PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE;

762:   PetscFunctionBegin;
763:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options");
764:   PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag));
765:   if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level);

767:   PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag));
768:   if (flag) {
769:     PetscMPIInt size;

771:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
772:     PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance");
773:     PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance);
774:   }

776:   PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag));
777:   if (flag) {
778:     jac->eu_bj = eu_bj ? 1 : 0;
779:     PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj);
780:   }
781:   PetscOptionsHeadEnd();
782:   PetscFunctionReturn(PETSC_SUCCESS);
783: }
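
/*
   Usage sketch (illustrative only): Euclid as a stand-alone hypre preconditioner is selected
   and tuned with the options handled above, e.g.

     -pc_type hypre -pc_hypre_type euclid -pc_hypre_euclid_level 2 -pc_hypre_euclid_bj

   -pc_hypre_euclid_droptolerance is only accepted on a single process, as checked above.
*/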

785: static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer)
786: {
787:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
788:   PetscBool iascii;

790:   PetscFunctionBegin;
791:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
792:   if (iascii) {
793:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Euclid preconditioning\n"));
794:     if (jac->eu_level != PETSC_DEFAULT) {
795:       PetscCall(PetscViewerASCIIPrintf(viewer, "    factorization levels %" PetscInt_FMT "\n", jac->eu_level));
796:     } else {
797:       PetscCall(PetscViewerASCIIPrintf(viewer, "    default factorization levels \n"));
798:     }
799:     PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->eu_droptolerance));
800:     PetscCall(PetscViewerASCIIPrintf(viewer, "    use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
801:   }
802:   PetscFunctionReturn(PETSC_SUCCESS);
803: }

805: static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x)
806: {
807:   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
808:   Mat_HYPRE         *hjac = (Mat_HYPRE *)jac->hpmat->data;
809:   HYPRE_ParCSRMatrix hmat;
810:   HYPRE_ParVector    jbv, jxv;

812:   PetscFunctionBegin;
813:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
814:   PetscCall(VecSet(x, 0.0));
815:   PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b));
816:   PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x));

818:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
819:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
820:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);

822:   PetscStackCallExternalVoid(
823:     "Hypre Transpose solve", do {
824:       HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv);
825:       if (hierr) {
826:         /* error code of 1 in BoomerAMG merely means convergence not achieved */
827:         PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
828:         HYPRE_ClearAllErrors();
829:       }
830:     } while (0));

832:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
833:   PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
834:   PetscFunctionReturn(PETSC_SUCCESS);
835: }
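
/*
   Usage sketch (illustrative only): the transpose apply above is reached through the usual
   transpose solve path, e.g. with a KSP `ksp` configured with PCHYPRE/BoomerAMG and Vecs
   `b`, `x` (all assumed to exist):

     PetscCall(KSPSolveTranspose(ksp, b, x));
*/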

837: static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[])
838: {
839:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
840:   PetscBool flag;

842: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
843:   PetscFunctionBegin;
844:   if (jac->spgemm_type) {
845:     PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag));
846:     PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot change the HYPRE SpGEMM type once it has been set");
847:     PetscFunctionReturn(PETSC_SUCCESS);
848:   } else {
849:     PetscCall(PetscStrallocpy(name, &jac->spgemm_type));
850:   }
851:   PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag));
852:   if (flag) {
853:     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1);
854:     PetscFunctionReturn(PETSC_SUCCESS);
855:   }
856:   PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag));
857:   if (flag) {
858:     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0);
859:     PetscFunctionReturn(PETSC_SUCCESS);
860:   }
861:   PetscCall(PetscFree(jac->spgemm_type)); /* free the copy made above; the requested type is invalid */
862:   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", name);
863: #endif
864: }

866: static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[])
867: {
868:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

870:   PetscFunctionBegin;
872: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
873:   *spgemm = jac->spgemm_type;
874: #endif
875:   PetscFunctionReturn(PETSC_SUCCESS);
876: }
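
/*
   Usage sketch (illustrative only): with a GPU build of hypre (>= 2.23) the SpGEMM backend
   used for the Galerkin products can be chosen through the interface function behind the
   composed "PCMGGalerkinSetMatProductAlgorithm_C" method, presumably

     PetscCall(PCMGGalerkinSetMatProductAlgorithm(pc, "hypre")); // or "cusparse"

   Only "cusparse" and "hypre" are accepted, as checked above.
*/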

878: static const char *HYPREBoomerAMGCycleType[]   = {"", "V", "W"};
879: static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"};
880: static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"};
881: static const char *HYPREBoomerAMGSmoothType[] = {"ILU", "Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"};
882: /* The following corresponds to HYPRE_BoomerAMGSetRelaxType, which has many missing numbers in the enum */
883: static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"};
884: static const char    *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"};
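
/*
   Usage sketch (illustrative only; values are arbitrary): the string tables above list the
   values accepted by the corresponding -pc_hypre_boomeramg_* options handled below, e.g.

     -pc_type hypre -pc_hypre_type boomeramg \
       -pc_hypre_boomeramg_cycle_type W -pc_hypre_boomeramg_strong_threshold 0.5 \
       -pc_hypre_boomeramg_relax_type_all symmetric-SOR/Jacobi -pc_hypre_boomeramg_smooth_type Euclid
*/
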
885: static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject)
886: {
887:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
888:   PetscInt    bs, n, indx, level;
889:   PetscBool   flg, tmp_truth;
890:   PetscReal   tmpdbl, twodbl[2];
891:   const char *symtlist[]           = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
892:   const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"};

894:   PetscFunctionBegin;
895:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options");
896:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg));
897:   if (flg) {
898:     jac->cycletype = indx + 1;
899:     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
900:   }
901:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg, 2));
902:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
903:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg, 1));
904:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
905:   PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg, 0.0));
906:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
907:   bs = 1;
908:   if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs));
909:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg));
910:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);

912:   PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg, 0.0));
913:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);

915:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg, 0));
916:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);

918:   PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels));
919:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);

921:   PetscCall(PetscOptionsBoundedInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg, 1));
922:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);

924:   PetscCall(PetscOptionsBoundedReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg, 0.0));
925:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
926:   PetscCall(PetscOptionsRangeReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg, 0.0, 1.0));
927:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);

929:   /* Grid sweeps */
930:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg));
931:   if (flg) {
932:     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx);
933:     /* modify the jac structure so we can view the updated options with PC_View */
934:     jac->gridsweeps[0] = indx;
935:     jac->gridsweeps[1] = indx;
936:     /*defaults coarse to 1 */
937:     jac->gridsweeps[2] = 1;
938:   }
939:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg));
940:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening);
941:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg));
942:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag);
943:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg));
944:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant);
945:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg));
946:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax);
947:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg));
948:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth);
949:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg));
950:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine);
951:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg));
952:   if (flg) {
953:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1);
954:     jac->gridsweeps[0] = indx;
955:   }
956:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg));
957:   if (flg) {
958:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2);
959:     jac->gridsweeps[1] = indx;
960:   }
961:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg));
962:   if (flg) {
963:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3);
964:     jac->gridsweeps[2] = indx;
965:   }

967:   /* Smooth type */
968:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg));
969:   if (flg) {
970:     jac->smoothtype = indx;
971:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 5);
972:     jac->smoothnumlevels = 25;
973:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25);
974:   }

976:   /* Number of smoothing levels */
977:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg));
978:   if (flg && (jac->smoothtype != -1)) {
979:     jac->smoothnumlevels = indx;
980:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx);
981:   }

983:   /* Smooth num sweeps */
984:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_sweeps", "Set number of smoother sweeps", "None", 1, &indx, &flg));
985:   if (flg && indx > 0) {
986:     jac->smoothsweeps = indx;
987:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumSweeps, jac->hsolver, indx);
988:   }

990:   /* ILU: ILU Type */
991:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_type", "Choose ILU Type", "None", HYPREILUType, PETSC_STATIC_ARRAY_LENGTH(HYPREILUType), HYPREILUType[0], &indx, &flg));
992:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUType, jac->hsolver, indx); }

994:   /* ILU: ILU iterative setup type*/
995:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_ilu_iterative_setup_type", "Set ILU iterative setup type", "None", HYPREILUIterSetup, PETSC_STATIC_ARRAY_LENGTH(HYPREILUIterSetup), HYPREILUIterSetup[0], &indx, &flg));
996:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupType, jac->hsolver, indx); }

998:   /* ILU: ILU iterative setup option*/
999:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_option", "Set ILU iterative setup option", "None", 0, &indx, &flg));
1000:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupOption, jac->hsolver, indx); }

1002:   /* ILU: ILU iterative setup maxiter */
1003:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_iterative_setup_maxiter", "Set ILU iterative setup maximum iteration count", "None", 0, &indx, &flg));
1004:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUIterSetupMaxIter, jac->hsolver, indx); }

1006:   /* ILU: ILU iterative setup tolerance */
1007:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_iterative_setup_tolerance", "Set ILU iterative setup tolerance", "None", 0, &tmpdbl, &flg));
1008:   if (flg) { PetscCallExternal(hypre_BoomerAMGSetILUIterSetupTolerance, jac->hsolver, tmpdbl); }

1010:   /* ILU: ILU Print Level */
1011:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_print_level", "Set ILU print level", "None", 0, &indx, &flg));
1012:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, indx); }

1014:   /* ILU: Logging */
1015:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_logging", "Set ILU logging level", "None", 0, &indx, &flg));
1016:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetLogging, jac->hsolver, indx); }

1018:   /* ILU: ILU Level */
1019:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_level", "Set ILU level", "None", 0, &indx, &flg));
1020:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULevel, jac->hsolver, indx); }

1022:   /* ILU: ILU Max NNZ per row */
1023:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_max_nnz_per_row", "Set maximum NNZ per row", "None", 0, &indx, &flg));
1024:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUMaxRowNnz, jac->hsolver, indx); }

1026:   /* ILU: maximum iteration count */
1027:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_maxiter", "Set ILU max iterations", "None", 0, &indx, &flg));
1028:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUMaxIter, jac->hsolver, indx); }

1030:   /* ILU: drop threshold */
1031:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_ilu_drop_tol", "Drop tolerance for ILU", "None", 0, &tmpdbl, &flg));
1032:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUDroptol, jac->hsolver, tmpdbl); }

1034:   /* ILU: Triangular Solve */
1035:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_tri_solve", "Enable triangular solve", "None", PETSC_FALSE, &tmp_truth, &flg));
1036:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUTriSolve, jac->hsolver, tmp_truth); }

1038:   /* ILU: Lower Jacobi iteration */
1039:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_lower_jacobi_iters", "Set lower Jacobi iteration count", "None", 0, &indx, &flg));
1040:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULowerJacobiIters, jac->hsolver, indx); }

1042:   /* ILU: Upper Jacobi iteration */
1043:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_ilu_upper_jacobi_iters", "Set upper Jacobi iteration count", "None", 0, &indx, &flg));
1044:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILUUpperJacobiIters, jac->hsolver, indx); }

1046:   /* ILU: local reordering */
1047:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_ilu_local_reordering", "Enable local reordering", "None", PETSC_FALSE, &tmp_truth, &flg));
1048:   if (flg) { PetscCallExternal(HYPRE_BoomerAMGSetILULocalReordering, jac->hsolver, tmp_truth); }

1050:   /* Number of levels for ILU(k) for Euclid */
1051:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg));
1052:   if (flg && (jac->smoothtype == 4)) {
1053:     jac->eu_level = indx;
1054:     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx);
1055:   }

1057:   /* Drop tolerance for ILU(k) for Euclid */
1058:   PetscReal droptolerance;
1059:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg));
1060:   if (flg && (jac->smoothtype == 4)) {
1061:     jac->eu_droptolerance = droptolerance;
1062:     PetscCallExternal(HYPRE_BoomerAMGSetEuSparseA, jac->hsolver, droptolerance);
1063:   }

1065:   /* Use Block Jacobi ILU for Euclid */
1066:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg));
1067:   if (flg && (jac->smoothtype == 4)) {
1068:     jac->eu_bj = tmp_truth;
1069:     PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj);
1070:   }

1072:   /* Relax type */
1073:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
1074:   if (flg) {
1075:     jac->relaxtype[0] = jac->relaxtype[1] = indx;
1076:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx);
1077:     /* by default, coarse type set to 9 */
1078:     jac->relaxtype[2] = 9;
1079:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3);
1080:   }
1081:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
1082:   if (flg) {
1083:     jac->relaxtype[0] = indx;
1084:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1);
1085:   }
1086:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg));
1087:   if (flg) {
1088:     jac->relaxtype[1] = indx;
1089:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2);
1090:   }
1091:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg));
1092:   if (flg) {
1093:     jac->relaxtype[2] = indx;
1094:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3);
1095:   }

1097:   /* Relaxation Weight */
1098:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg));
1099:   if (flg) {
1100:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl);
1101:     jac->relaxweight = tmpdbl;
1102:   }

1104:   n         = 2;
1105:   twodbl[0] = twodbl[1] = 1.0;
1106:   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
1107:   if (flg) {
1108:     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
1109:     indx = (int)PetscAbsReal(twodbl[1]);
1110:     PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx);
1111:   }

1113:   /* Outer relaxation Weight */
1114:   PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg));
1115:   if (flg) {
1116:     PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl);
1117:     jac->outerrelaxweight = tmpdbl;
1118:   }

1120:   n         = 2;
1121:   twodbl[0] = twodbl[1] = 1.0;
1122:   PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
1123:   if (flg) {
1124:     PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
1125:     indx = (int)PetscAbsReal(twodbl[1]);
1126:     PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx);
1127:   }

1129:   /* the Relax Order */
1130:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg));

1132:   if (flg && tmp_truth) {
1133:     jac->relaxorder = 0;
1134:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
1135:   }
1136:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg));
1137:   if (flg) {
1138:     jac->measuretype = indx;
1139:     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
1140:   }
1141:   /* update list length 3/07 */
1142:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg));
1143:   if (flg) {
1144:     jac->coarsentype = indx;
1145:     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
1146:   }

1148:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg));
1149:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
1150:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg));
1151:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
1152: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
1153:   // global parameter but is closely associated with BoomerAMG
1154:   PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg));
1155:   #if defined(PETSC_HAVE_HYPRE_DEVICE)
1156:   if (!flg) indx = 0;
1157:   PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx]));
1158:   #else
1159:   PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, "hypre"));
1160:   #endif
1161: #endif
1162:   /* AIR */
1163: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
1164:   PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL));
1165:   PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
1166:   if (jac->Rtype) {
1167:     HYPRE_Int **grid_relax_points = hypre_TAlloc(HYPRE_Int *, 4, HYPRE_MEMORY_HOST);
1168:     char       *prerelax[256];
1169:     char       *postrelax[256];
1170:     char        stringF[2] = "F", stringC[2] = "C", stringA[2] = "A";
1171:     PetscInt    ns_down = 256, ns_up = 256;
1172:     PetscBool   matchF, matchC, matchA;

1174:     jac->interptype = 100; /* no way we can pass this with strings... Set it as default as in MFEM, then users can still customize it back to a different one */

1176:     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL));
1177:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);

1179:     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL));
1180:     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);

1182:     PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL));
1183:     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);

1185:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL));
1186:     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
1187:     PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_prerelax", "Defines prerelax scheme", "None", prerelax, &ns_down, NULL));
1188:     PetscCall(PetscOptionsStringArray("-pc_hypre_boomeramg_postrelax", "Defines postrelax scheme", "None", postrelax, &ns_up, NULL));
1189:     PetscCheck(ns_down == jac->gridsweeps[0], PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_prerelax must match the number passed to -pc_hypre_boomeramg_grid_sweeps_down");
1190:     PetscCheck(ns_up == jac->gridsweeps[1], PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "The number of arguments passed to -pc_hypre_boomeramg_postrelax must match the number passed to -pc_hypre_boomeramg_grid_sweeps_up");

1192:     grid_relax_points[0]    = NULL;
1193:     grid_relax_points[1]    = hypre_TAlloc(HYPRE_Int, ns_down, HYPRE_MEMORY_HOST);
1194:     grid_relax_points[2]    = hypre_TAlloc(HYPRE_Int, ns_up, HYPRE_MEMORY_HOST);
1195:     grid_relax_points[3]    = hypre_TAlloc(HYPRE_Int, jac->gridsweeps[2], HYPRE_MEMORY_HOST);
1196:     grid_relax_points[3][0] = 0;

1198:     // set down relax scheme
1199:     for (PetscInt i = 0; i < ns_down; i++) {
1200:       PetscCall(PetscStrcasecmp(prerelax[i], stringF, &matchF));
1201:       PetscCall(PetscStrcasecmp(prerelax[i], stringC, &matchC));
1202:       PetscCall(PetscStrcasecmp(prerelax[i], stringA, &matchA));
1203:       PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_prerelax are C, F, and A");
1204:       if (matchF) grid_relax_points[1][i] = -1;
1205:       else if (matchC) grid_relax_points[1][i] = 1;
1206:       else if (matchA) grid_relax_points[1][i] = 0;
1207:     }

1209:     // set up relax scheme
1210:     for (PetscInt i = 0; i < ns_up; i++) {
1211:       PetscCall(PetscStrcasecmp(postrelax[i], stringF, &matchF));
1212:       PetscCall(PetscStrcasecmp(postrelax[i], stringC, &matchC));
1213:       PetscCall(PetscStrcasecmp(postrelax[i], stringA, &matchA));
1214:       PetscCheck(matchF || matchC || matchA, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Valid argument options for -pc_hypre_boomeramg_postrelax are C, F, and A");
1215:       if (matchF) grid_relax_points[2][i] = -1;
1216:       else if (matchC) grid_relax_points[2][i] = 1;
1217:       else if (matchA) grid_relax_points[2][i] = 0;
1218:     }

1220:     // set coarse relax scheme
1221:     for (PetscInt i = 0; i < jac->gridsweeps[2]; i++) grid_relax_points[3][i] = 0;

1223:     // Pass relax schemes to hypre
1224:     PetscCallExternal(HYPRE_BoomerAMGSetGridRelaxPoints, jac->hsolver, grid_relax_points);

1226:     // cleanup memory
1227:     for (PetscInt i = 0; i < ns_down; i++) PetscCall(PetscFree(prerelax[i]));
1228:     for (PetscInt i = 0; i < ns_up; i++) PetscCall(PetscFree(postrelax[i]));
1229:   }
1230: #endif

1232: #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9)
1233:   PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl);
1234: #endif

1236:   /* new 3/07 */
1237:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg));
1238:   if (flg || jac->Rtype) {
1239:     if (flg) jac->interptype = indx;
1240:     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
1241:   }

1243:   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg));
1244:   if (flg) {
1245:     level = 3;
1246:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL));

1248:     jac->printstatistics = PETSC_TRUE;
1249:     PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level);
1250:   }

1252:   PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg));
1253:   if (flg) {
1254:     level = 3;
1255:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL));

1257:     jac->printstatistics = PETSC_TRUE;
1258:     PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level);
1259:   }

1261:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg));
1262:   if (flg && tmp_truth) {
1263:     PetscInt tmp_int;
1264:     PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg));
1265:     if (flg) jac->nodal_relax_levels = tmp_int;
1266:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6);
1267:     PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1);
1268:     PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0);
1269:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels);
1270:   }

1272:   PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL));
1273:   PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);

1275:   /* options for ParaSails solvers */
1276:   PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg));
1277:   if (flg) {
1278:     jac->symt = indx;
1279:     PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt);
1280:   }

1282:   PetscOptionsHeadEnd();
1283:   PetscFunctionReturn(PETSC_SUCCESS);
1284: }
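/*
   A minimal sketch of how the AIR options handled above combine on the command line, assuming the
   preconditioner was selected with -pc_type hypre -pc_hypre_type boomeramg. The pre/post relaxation
   strings must contain exactly as many entries as the corresponding grid-sweep counts, and aggressive
   coarsening must be disabled when AIR is enabled (see the checks above):

     -pc_hypre_boomeramg_restriction_type 1 -pc_hypre_boomeramg_agg_nl 0 \
     -pc_hypre_boomeramg_grid_sweeps_down 1 -pc_hypre_boomeramg_prerelax A \
     -pc_hypre_boomeramg_grid_sweeps_up 3 -pc_hypre_boomeramg_postrelax F,F,C
*/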

1286: static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
1287: {
1288:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1289:   HYPRE_Int oits;

1291:   PetscFunctionBegin;
1292:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
1293:   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter);
1294:   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol);
1295:   jac->applyrichardson = PETSC_TRUE;
1296:   PetscCall(PCApply_HYPRE(pc, b, y));
1297:   jac->applyrichardson = PETSC_FALSE;
1298:   PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits);
1299:   *outits = oits;
1300:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
1301:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
1302:   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1303:   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1304:   PetscFunctionReturn(PETSC_SUCCESS);
1305: }
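/*
   Usage sketch: the Richardson path above is taken when BoomerAMG acts as the solver itself rather
   than as a preconditioner, e.g. with standard PETSc options such as

     -ksp_type richardson -pc_type hypre -pc_hypre_type boomeramg

   in which case hypre runs up to its * maxiter BoomerAMG cycles internally at the requested rtol,
   after which the original tolerance and iteration limit are restored.
*/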

1307: static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer)
1308: {
1309:   PC_HYPRE         *jac      = (PC_HYPRE *)pc->data;
1310:   hypre_ParAMGData *amg_data = (hypre_ParAMGData *)jac->hsolver;
1311:   PetscBool         iascii;
1312:   PetscInt          indx;
1313:   PetscReal         val;

1315:   PetscFunctionBegin;
1316:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1317:   if (iascii) {
1318:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE BoomerAMG preconditioning\n"));
1319:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype]));
1320:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels));
1321:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter));
1322:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Convergence tolerance PER hypre call %g\n", (double)jac->tol));
1323:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Threshold for strong coupling %g\n", (double)jac->strongthreshold));
1324:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation truncation factor %g\n", (double)jac->truncfactor));
1325:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax));
1326:     if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine));
1327:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl));
1328:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths));

1330:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum row sums %g\n", (double)jac->maxrowsum));

1332:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps down         %" PetscInt_FMT "\n", jac->gridsweeps[0]));
1333:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps up           %" PetscInt_FMT "\n", jac->gridsweeps[1]));
1334:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Sweeps on coarse    %" PetscInt_FMT "\n", jac->gridsweeps[2]));

1336:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax down          %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]]));
1337:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax up            %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]]));
1338:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax on coarse     %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]]));

1340:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Relax weight  (all)      %g\n", (double)jac->relaxweight));
1341:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Outer relax weight (all) %g\n", (double)jac->outerrelaxweight));

1343:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Maximum size of coarsest grid %" PetscInt_FMT "\n", jac->maxc));
1344:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Minimum size of coarsest grid %" PetscInt_FMT "\n", jac->minc));

1346:     if (jac->relaxorder) {
1347:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using CF-relaxation\n"));
1348:     } else {
1349:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using CF-relaxation\n"));
1350:     }
1351:     if (jac->smoothtype != -1) {
1352:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth type          %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype]));
1353:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth num levels    %" PetscInt_FMT "\n", jac->smoothnumlevels));
1354:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Smooth num sweeps    %" PetscInt_FMT "\n", jac->smoothsweeps));
1355:       if (jac->smoothtype == 0) {
1356:         PetscStackCallExternalVoid("hypre_ParAMGDataILUType", indx = hypre_ParAMGDataILUType(amg_data));
1357:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU type              %s (%" PetscInt_FMT ")\n", HYPREILUType[indx], indx));
1358:         PetscStackCallExternalVoid("hypre_ParAMGDataILULevel", indx = hypre_ParAMGDataILULevel(amg_data));
1359:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU level             %" PetscInt_FMT "\n", indx));
1360:         PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxIter", indx = hypre_ParAMGDataILUMaxIter(amg_data));
1361:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max iterations    %" PetscInt_FMT "\n", indx));
1362:         PetscStackCallExternalVoid("hypre_ParAMGDataILUMaxRowNnz", indx = hypre_ParAMGDataILUMaxRowNnz(amg_data));
1363:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU max NNZ per row   %" PetscInt_FMT "\n", indx));
1364:         PetscStackCallExternalVoid("hypre_ParAMGDataILUTriSolve", indx = hypre_ParAMGDataILUTriSolve(amg_data));
1365:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU triangular solve  %" PetscInt_FMT "\n", indx));
1366:         PetscStackCallExternalVoid("hypre_ParAMGDataTol", val = hypre_ParAMGDataTol(amg_data));
1367:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU tolerance         %e\n", (double)val));
1368:         PetscStackCallExternalVoid("hypre_ParAMGDataILUDroptol", val = hypre_ParAMGDataILUDroptol(amg_data));
1369:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU drop tolerance    %e\n", (double)val));
1370:         PetscStackCallExternalVoid("hypre_ParAMGDataILULocalReordering", indx = hypre_ParAMGDataILULocalReordering(amg_data));
1371:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU local reordering  %" PetscInt_FMT "\n", indx));
1372:         PetscStackCallExternalVoid("hypre_ParAMGDataILULowerJacobiIters", indx = hypre_ParAMGDataILULowerJacobiIters(amg_data));
1373:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU lower Jacobi iterations  %" PetscInt_FMT "\n", indx));
1374:         PetscStackCallExternalVoid("hypre_ParAMGDataILUUpperJacobiIters", indx = hypre_ParAMGDataILUUpperJacobiIters(amg_data));
1375:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU upper Jacobi iterations  %" PetscInt_FMT "\n", indx));
1376:         PetscStackCallExternalVoid("hypre_ParAMGDataPrintLevel", indx = hypre_ParAMGDataPrintLevel(amg_data));
1377:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU print level      %" PetscInt_FMT "\n", indx));
1378:         PetscStackCallExternalVoid("hypre_ParAMGDataLogging", indx = hypre_ParAMGDataLogging(amg_data));
1379:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU logging level    %" PetscInt_FMT "\n", indx));
1380:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupType", indx = hypre_ParAMGDataILUIterSetupType(amg_data));
1381:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup type           %s (%" PetscInt_FMT ")\n", HYPREILUIterSetup[indx], indx));
1382:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupOption", indx = hypre_ParAMGDataILUIterSetupOption(amg_data));
1383:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup option         %" PetscInt_FMT "\n", indx));
1384:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupMaxIter", indx = hypre_ParAMGDataILUIterSetupMaxIter(amg_data));
1385:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup max iterations %" PetscInt_FMT "\n", indx));
1386:         PetscStackCallExternalVoid("hypre_ParAMGDataILUIterSetupTolerance", val = hypre_ParAMGDataILUIterSetupTolerance(amg_data));
1387:         PetscCall(PetscViewerASCIIPrintf(viewer, "    ILU iterative setup tolerance      %e\n", (double)val));
1388:       }
1389:     } else {
1390:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using more complex smoothers.\n"));
1391:     }
1392:     if (jac->smoothtype == 4) {
1393:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level));
1394:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance));
1395:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
1396:     }
1397:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Measure type        %s\n", HYPREBoomerAMGMeasureType[jac->measuretype]));
1398:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Coarsen type        %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype]));
1399:     PetscCall(PetscViewerASCIIPrintf(viewer, "    Interpolation type  %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt"));
1400:     if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal coarsening with HYPRE_BoomerAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening));
1401:     if (jac->vec_interp_variant) {
1402:       PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant));
1403:       PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax));
1404:       PetscCall(PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth));
1405:     }
1406:     if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, "    Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels));
1407: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
1408:     PetscCall(PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", jac->spgemm_type));
1409: #else
1410:     PetscCall(PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", "hypre"));
1411: #endif
1412:     /* AIR */
1413:     if (jac->Rtype) {
1414:       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype));
1415:       PetscCall(PetscViewerASCIIPrintf(viewer, "      Threshold for R %g\n", (double)jac->Rstrongthreshold));
1416:       PetscCall(PetscViewerASCIIPrintf(viewer, "      Filter for R %g\n", (double)jac->Rfilterthreshold));
1417:       PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop tolerance %g\n", (double)jac->Adroptol));
1418:       PetscCall(PetscViewerASCIIPrintf(viewer, "      A drop type %" PetscInt_FMT "\n", jac->Adroptype));
1419:     }
1420:   }
1421:   PetscFunctionReturn(PETSC_SUCCESS);
1422: }

1424: static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject)
1425: {
1426:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1427:   PetscInt    indx;
1428:   PetscBool   flag;
1429:   const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};

1431:   PetscFunctionBegin;
1432:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options");
1433:   PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of levels", "None", jac->nlevels, &jac->nlevels, 0));
1434:   PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag));
1435:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);

1437:   PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag));
1438:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);

1440:   PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag));
1441:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);

1443:   PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag));
1444:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);

1446:   PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag));
1447:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);

1449:   PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag));
1450:   if (flag) {
1451:     jac->symt = indx;
1452:     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1453:   }

1455:   PetscOptionsHeadEnd();
1456:   PetscFunctionReturn(PETSC_SUCCESS);
1457: }
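/*
   A minimal sketch of the ParaSails options handled above, assuming -pc_type hypre -pc_hypre_type parasails
   (parameter values are illustrative only):

     -pc_hypre_parasails_nlevels 2 -pc_hypre_parasails_thresh 0.1 \
     -pc_hypre_parasails_filter 0.05 -pc_hypre_parasails_sym SPD
*/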

1459: static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer)
1460: {
1461:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1462:   PetscBool   iascii;
1463:   const char *symt = 0;

1465:   PetscFunctionBegin;
1466:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1467:   if (iascii) {
1468:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ParaSails preconditioning\n"));
1469:     PetscCall(PetscViewerASCIIPrintf(viewer, "    nlevels %" PetscInt_FMT "\n", jac->nlevels));
1470:     PetscCall(PetscViewerASCIIPrintf(viewer, "    threshold %g\n", (double)jac->threshold));
1471:     PetscCall(PetscViewerASCIIPrintf(viewer, "    filter %g\n", (double)jac->filter));
1472:     PetscCall(PetscViewerASCIIPrintf(viewer, "    load balance %g\n", (double)jac->loadbal));
1473:     PetscCall(PetscViewerASCIIPrintf(viewer, "    reuse nonzero structure %s\n", PetscBools[jac->ruse]));
1474:     PetscCall(PetscViewerASCIIPrintf(viewer, "    print info to screen %s\n", PetscBools[jac->logging]));
1475:     if (!jac->symt) symt = "nonsymmetric matrix and preconditioner";
1476:     else if (jac->symt == 1) symt = "SPD matrix and preconditioner";
1477:     else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner";
1478:     else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt);
1479:     PetscCall(PetscViewerASCIIPrintf(viewer, "    %s\n", symt));
1480:   }
1481:   PetscFunctionReturn(PETSC_SUCCESS);
1482: }

1484: static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject)
1485: {
1486:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1487:   PetscInt  n;
1488:   PetscBool flag, flag2, flag3, flag4;

1490:   PetscFunctionBegin;
1491:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options");
1492:   PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag));
1493:   if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
1494:   PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1495:   if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
1496:   PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag));
1497:   if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
1498:   PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1499:   if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
1500:   PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1501:   PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1502:   PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1503:   PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1504:   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1505:   PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1506:   n = 5;
1507:   PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2));
1508:   if (flag || flag2) {
1509:     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
1510:                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
1511:                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
1512:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
1513:                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
1514:   }
1515:   PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1516:   n = 5;
1517:   PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2));
1518:   if (flag || flag2) {
1519:     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1520:                       jac->as_amg_beta_opts[1],                                           /* AMG agg_levels */
1521:                       jac->as_amg_beta_opts[2],                                           /* AMG relax_type */
1522:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                   /* AMG interp_type */
1523:                       jac->as_amg_beta_opts[4]);                                          /* AMG Pmax */
1524:   }
1525:   PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag));
1526:   if (flag) { /* override HYPRE's default only if the option is used */
1527:     PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq);
1528:   }
1529:   PetscOptionsHeadEnd();
1530:   PetscFunctionReturn(PETSC_SUCCESS);
1531: }
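/*
   A minimal sketch of driving the AMS options above from the command line, assuming -pc_type hypre
   -pc_hypre_type ams and that the discrete gradient has been supplied with PCHYPRESetDiscreteGradient()
   (values are illustrative only):

     -pc_hypre_ams_cycle_type 13 -pc_hypre_ams_relax_times 2 \
     -pc_hypre_ams_amg_alpha_theta 0.25 -pc_hypre_ams_amg_alpha_options 10,1,6,6,4
*/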

1533: static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer)
1534: {
1535:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1536:   PetscBool iascii;

1538:   PetscFunctionBegin;
1539:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1540:   if (iascii) {
1541:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE AMS preconditioning\n"));
1542:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1543:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1544:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
1545:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1546:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1547:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
1548:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
1549:     if (jac->alpha_Poisson) {
1550:       PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (passed in by user)\n"));
1551:     } else {
1552:       PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (computed) \n"));
1553:     }
1554:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1555:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1556:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1557:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1558:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1559:     PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1560:     if (!jac->ams_beta_is_zero) {
1561:       if (jac->beta_Poisson) {
1562:         PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (passed in by user)\n"));
1563:       } else {
1564:         PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (computed) \n"));
1565:       }
1566:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1567:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1568:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1569:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1570:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1571:       PetscCall(PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta));
1572:       if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, "        compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq));
1573:     } else {
1574:       PetscCall(PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver not used (zero-conductivity everywhere) \n"));
1575:     }
1576:   }
1577:   PetscFunctionReturn(PETSC_SUCCESS);
1578: }

1580: static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject)
1581: {
1582:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1583:   PetscInt  n;
1584:   PetscBool flag, flag2, flag3, flag4;

1586:   PetscFunctionBegin;
1587:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options");
1588:   PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag));
1589:   if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
1590:   PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
1591:   if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
1592:   PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag));
1593:   if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type);
1594:   PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
1595:   if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
1596:   PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
1597:   PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
1598:   PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
1599:   PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
1600:   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1601:   PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
1602:   n = 5;
1603:   PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2));
1604:   PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3));
1605:   if (flag || flag2 || flag3) {
1606:     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */
1607:                       jac->as_amg_alpha_opts[0],                                 /* AMG coarsen type */
1608:                       jac->as_amg_alpha_opts[1],                                 /* AMG agg_levels */
1609:                       jac->as_amg_alpha_opts[2],                                 /* AMG relax_type */
1610:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],        /* AMG interp_type */
1611:                       jac->as_amg_alpha_opts[4]);                                /* AMG Pmax */
1612:   }
1613:   PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
1614:   n = 5;
1615:   PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2));
1616:   if (flag || flag2) {
1617:     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1618:                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
1619:                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
1620:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
1621:                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
1622:   }
1623:   PetscOptionsHeadEnd();
1624:   PetscFunctionReturn(PETSC_SUCCESS);
1625: }
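/*
   The ADS options above follow the same pattern, assuming -pc_type hypre -pc_hypre_type ads and that the
   discrete gradient and curl have been supplied with PCHYPRESetDiscreteGradient() and PCHYPRESetDiscreteCurl()
   (values are illustrative only):

     -pc_hypre_ads_relax_times 2 -pc_hypre_ads_ams_cycle_type 13 -pc_hypre_ads_amg_theta 0.25
*/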

1627: static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer)
1628: {
1629:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1630:   PetscBool iascii;

1632:   PetscFunctionBegin;
1633:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1634:   if (iascii) {
1635:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE ADS preconditioning\n"));
1636:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
1637:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type));
1638:     PetscCall(PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol));
1639:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
1640:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
1641:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight));
1642:     PetscCall(PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega));
1643:     PetscCall(PetscViewerASCIIPrintf(viewer, "    AMS solver using boomerAMG\n"));
1644:     PetscCall(PetscViewerASCIIPrintf(viewer, "        subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
1645:     PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
1646:     PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
1647:     PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
1648:     PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
1649:     PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
1650:     PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_alpha_theta));
1651:     PetscCall(PetscViewerASCIIPrintf(viewer, "    vector Poisson solver using boomerAMG\n"));
1652:     PetscCall(PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
1653:     PetscCall(PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
1654:     PetscCall(PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
1655:     PetscCall(PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
1656:     PetscCall(PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
1657:     PetscCall(PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_beta_theta));
1658:   }
1659:   PetscFunctionReturn(PETSC_SUCCESS);
1660: }

1662: static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G)
1663: {
1664:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1665:   PetscBool ishypre;

1667:   PetscFunctionBegin;
1668:   PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre));
1669:   if (ishypre) {
1670:     PetscCall(PetscObjectReference((PetscObject)G));
1671:     PetscCall(MatDestroy(&jac->G));
1672:     jac->G = G;
1673:   } else {
1674:     PetscCall(MatDestroy(&jac->G));
1675:     PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G));
1676:   }
1677:   PetscFunctionReturn(PETSC_SUCCESS);
1678: }

1680: /*@
1681:   PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads

1683:   Collective

1685:   Input Parameters:
1686: + pc - the preconditioning context
1687: - G  - the discrete gradient

1689:   Level: intermediate

1691:   Notes:
1692:   G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh

1694:   Each row of G has 2 nonzeros, with column indexes being the global indexes of the edge's endpoints: matrix entries are +1 and -1 depending on edge orientation
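
  Example Usage:
  A minimal sketch for an AMS setup; the discrete gradient G is assumed to have been assembled by the
  application with the edge/vertex layout described above
.vb
  PetscCall(PCSetType(pc, PCHYPRE));
  PetscCall(PCHYPRESetType(pc, "ams"));
  PetscCall(PCHYPRESetDiscreteGradient(pc, G));
.ve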

1696:   Developer Notes:
1697:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1699: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteCurl()`
1700: @*/
1701: PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G)
1702: {
1703:   PetscFunctionBegin;
1706:   PetscCheckSameComm(pc, 1, G, 2);
1707:   PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G));
1708:   PetscFunctionReturn(PETSC_SUCCESS);
1709: }

1711: static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C)
1712: {
1713:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1714:   PetscBool ishypre;

1716:   PetscFunctionBegin;
1717:   PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre));
1718:   if (ishypre) {
1719:     PetscCall(PetscObjectReference((PetscObject)C));
1720:     PetscCall(MatDestroy(&jac->C));
1721:     jac->C = C;
1722:   } else {
1723:     PetscCall(MatDestroy(&jac->C));
1724:     PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C));
1725:   }
1726:   PetscFunctionReturn(PETSC_SUCCESS);
1727: }

1729: /*@
1730:   PCHYPRESetDiscreteCurl - Set discrete curl matrix for `PCHYPRE` type of ads

1732:   Collective

1734:   Input Parameters:
1735: + pc - the preconditioning context
1736: - C  - the discrete curl

1738:   Level: intermediate

1740:   Notes:
1741:   C should have as many rows as the number of faces and as many columns as the number of edges in the mesh

1743:   Each row of C has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edges: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation
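
  Example Usage:
  A minimal sketch for an ADS setup; G and C are assumed to be the user-assembled discrete gradient and curl
.vb
  PetscCall(PCSetType(pc, PCHYPRE));
  PetscCall(PCHYPRESetType(pc, "ads"));
  PetscCall(PCHYPRESetDiscreteGradient(pc, G));
  PetscCall(PCHYPRESetDiscreteCurl(pc, C));
.ve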

1745:   Developer Notes:
1746:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1748:   If this is only for  `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()`

1750: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`
1751: @*/
1752: PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C)
1753: {
1754:   PetscFunctionBegin;
1757:   PetscCheckSameComm(pc, 1, C, 2);
1758:   PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C));
1759:   PetscFunctionReturn(PETSC_SUCCESS);
1760: }

1762: static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1763: {
1764:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1765:   PetscBool ishypre;
1766:   PetscInt  i;

1768:   PetscFunctionBegin;
1769:   PetscCall(MatDestroy(&jac->RT_PiFull));
1770:   PetscCall(MatDestroy(&jac->ND_PiFull));
1771:   for (i = 0; i < 3; ++i) {
1772:     PetscCall(MatDestroy(&jac->RT_Pi[i]));
1773:     PetscCall(MatDestroy(&jac->ND_Pi[i]));
1774:   }

1776:   jac->dim = dim;
1777:   if (RT_PiFull) {
1778:     PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre));
1779:     if (ishypre) {
1780:       PetscCall(PetscObjectReference((PetscObject)RT_PiFull));
1781:       jac->RT_PiFull = RT_PiFull;
1782:     } else {
1783:       PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull));
1784:     }
1785:   }
1786:   if (RT_Pi) {
1787:     for (i = 0; i < dim; ++i) {
1788:       if (RT_Pi[i]) {
1789:         PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre));
1790:         if (ishypre) {
1791:           PetscCall(PetscObjectReference((PetscObject)RT_Pi[i]));
1792:           jac->RT_Pi[i] = RT_Pi[i];
1793:         } else {
1794:           PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i]));
1795:         }
1796:       }
1797:     }
1798:   }
1799:   if (ND_PiFull) {
1800:     PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre));
1801:     if (ishypre) {
1802:       PetscCall(PetscObjectReference((PetscObject)ND_PiFull));
1803:       jac->ND_PiFull = ND_PiFull;
1804:     } else {
1805:       PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull));
1806:     }
1807:   }
1808:   if (ND_Pi) {
1809:     for (i = 0; i < dim; ++i) {
1810:       if (ND_Pi[i]) {
1811:         PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre));
1812:         if (ishypre) {
1813:           PetscCall(PetscObjectReference((PetscObject)ND_Pi[i]));
1814:           jac->ND_Pi[i] = ND_Pi[i];
1815:         } else {
1816:           PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i]));
1817:         }
1818:       }
1819:     }
1820:   }
1821:   PetscFunctionReturn(PETSC_SUCCESS);
1822: }

1824: /*@
1825:   PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads

1827:   Collective

1829:   Input Parameters:
1830: + pc        - the preconditioning context
1831: . dim       - the dimension of the problem, only used in AMS
1832: . RT_PiFull - Raviart-Thomas interpolation matrix
1833: . RT_Pi     - x/y/z component of Raviart-Thomas interpolation matrix
1834: . ND_PiFull - Nedelec interpolation matrix
1835: - ND_Pi     - x/y/z component of Nedelec interpolation matrix

1837:   Level: intermediate

1839:   Notes:
1840:   For AMS, only the Nedelec interpolation matrices are needed; the Raviart-Thomas interpolation matrices can be set to NULL.

1842:   For ADS, both types of interpolation matrices are needed.
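
  Example Usage:
  A minimal sketch for AMS in 3D, where ND_PiFull is assumed to be the user-assembled full Nedelec
  interpolation matrix and the Raviart-Thomas arguments are passed as NULL
.vb
  PetscCall(PCHYPRESetInterpolations(pc, 3, NULL, NULL, ND_PiFull, NULL));
.ve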

1844:   Developer Notes:
1845:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1847: .seealso: [](ch_ksp), `PCHYPRE`
1848: @*/
1849: PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1850: {
1851:   PetscInt i;

1853:   PetscFunctionBegin;
1855:   if (RT_PiFull) {
1857:     PetscCheckSameComm(pc, 1, RT_PiFull, 3);
1858:   }
1859:   if (RT_Pi) {
1860:     PetscAssertPointer(RT_Pi, 4);
1861:     for (i = 0; i < dim; ++i) {
1862:       if (RT_Pi[i]) {
1864:         PetscCheckSameComm(pc, 1, RT_Pi[i], 4);
1865:       }
1866:     }
1867:   }
1868:   if (ND_PiFull) {
1870:     PetscCheckSameComm(pc, 1, ND_PiFull, 5);
1871:   }
1872:   if (ND_Pi) {
1873:     PetscAssertPointer(ND_Pi, 6);
1874:     for (i = 0; i < dim; ++i) {
1875:       if (ND_Pi[i]) {
1877:         PetscCheckSameComm(pc, 1, ND_Pi[i], 6);
1878:       }
1879:     }
1880:   }
1881:   PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi));
1882:   PetscFunctionReturn(PETSC_SUCCESS);
1883: }

1885: static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha)
1886: {
1887:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1888:   PetscBool ishypre;

1890:   PetscFunctionBegin;
1891:   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1892:   if (ishypre) {
1893:     if (isalpha) {
1894:       PetscCall(PetscObjectReference((PetscObject)A));
1895:       PetscCall(MatDestroy(&jac->alpha_Poisson));
1896:       jac->alpha_Poisson = A;
1897:     } else {
1898:       if (A) {
1899:         PetscCall(PetscObjectReference((PetscObject)A));
1900:       } else {
1901:         jac->ams_beta_is_zero = PETSC_TRUE;
1902:       }
1903:       PetscCall(MatDestroy(&jac->beta_Poisson));
1904:       jac->beta_Poisson = A;
1905:     }
1906:   } else {
1907:     if (isalpha) {
1908:       PetscCall(MatDestroy(&jac->alpha_Poisson));
1909:       PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson));
1910:     } else {
1911:       if (A) {
1912:         PetscCall(MatDestroy(&jac->beta_Poisson));
1913:         PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson));
1914:       } else {
1915:         PetscCall(MatDestroy(&jac->beta_Poisson));
1916:         jac->ams_beta_is_zero = PETSC_TRUE;
1917:       }
1918:     }
1919:   }
1920:   PetscFunctionReturn(PETSC_SUCCESS);
1921: }

1923: /*@
1924:   PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams

1926:   Collective

1928:   Input Parameters:
1929: + pc - the preconditioning context
1930: - A  - the matrix

1932:   Level: intermediate

1934:   Note:
1935:   A should be obtained by discretizing the vector valued Poisson problem with linear finite elements

1937:   Developer Notes:
1938:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1940:   If this is only for the `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()`

1942: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()`
1943: @*/
1944: PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A)
1945: {
1946:   PetscFunctionBegin;
1949:   PetscCheckSameComm(pc, 1, A, 2);
1950:   PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE));
1951:   PetscFunctionReturn(PETSC_SUCCESS);
1952: }

1954: /*@
1955:   PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams

1957:   Collective

1959:   Input Parameters:
1960: + pc - the preconditioning context
1961: - A  - the matrix, or NULL to turn it off

1963:   Level: intermediate

1965:   Note:
1966:   A should be obtained by discretizing the Poisson problem with linear finite elements.

1968:   Developer Notes:
1969:   This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1971:   If this is only for the `PCHYPRE` type of ams it should be called `PCHYPREAMSSetBetaPoissonMatrix()`

1973: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1974: @*/
1975: PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A)
1976: {
1977:   PetscFunctionBegin;
1979:   if (A) {
1981:     PetscCheckSameComm(pc, 1, A, 2);
1982:   }
1983:   PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE));
1984:   PetscFunctionReturn(PETSC_SUCCESS);
1985: }
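/*
   Example usage (sketch): A_alpha and A_beta are assumed to be user-assembled nodal (linear FEM)
   Poisson operators; passing NULL as the beta matrix tells AMS that the beta coefficient is zero.

     PetscCall(PCHYPRESetAlphaPoissonMatrix(pc, A_alpha));
     PetscCall(PCHYPRESetBetaPoissonMatrix(pc, A_beta)); // or NULL when beta == 0
*/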

1987: static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo)
1988: {
1989:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

1991:   PetscFunctionBegin;
1992:   /* throw away any vector if already set */
1993:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
1994:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
1995:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
1996:   PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0]));
1997:   PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0]));
1998:   PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1]));
1999:   PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1]));
2000:   jac->dim = 2;
2001:   if (zzo) {
2002:     PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2]));
2003:     PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2]));
2004:     jac->dim++;
2005:   }
2006:   PetscFunctionReturn(PETSC_SUCCESS);
2007: }

2009: /*@
2010:   PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams

2012:   Collective

2014:   Input Parameters:
2015: + pc  - the preconditioning context
2016: . ozz - vector representing (1,0,0) (or (1,0) in 2D)
2017: . zoz - vector representing (0,1,0) (or (0,1) in 2D)
2018: - zzo - vector representing (0,0,1) (use NULL in 2D)

2020:   Level: intermediate

2022:   Developer Notes:
2023:   If this is only for the `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()`

2025: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2026: @*/
2027: PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo)
2028: {
2029:   PetscFunctionBegin;
2034:   PetscCheckSameComm(pc, 1, ozz, 2);
2035:   PetscCheckSameComm(pc, 1, zoz, 3);
2036:   if (zzo) PetscCheckSameComm(pc, 1, zzo, 4);
2037:   PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo));
2038:   PetscFunctionReturn(PETSC_SUCCESS);
2039: }
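/*
   Example usage (sketch): ozz, zoz and zzo are edge-space vectors holding the representation of
   the constant fields (1,0,0), (0,1,0) and (0,0,1) in the Nedelec basis; how they are computed
   depends on the discretization and is assumed to be done by the application. In 2D pass NULL
   for the third vector.

     PetscCall(PCHYPRESetEdgeConstantVectors(pc, ozz, zoz, zzo)); // 3D
     PetscCall(PCHYPRESetEdgeConstantVectors(pc, oz, zo, NULL));  // 2D
*/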

2041: static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior)
2042: {
2043:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

2045:   PetscFunctionBegin;
2046:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
2047:   PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior));
2048:   PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior));
2049:   jac->ams_beta_is_zero_part = PETSC_TRUE;
2050:   PetscFunctionReturn(PETSC_SUCCESS);
2051: }

2053: /*@
2054:   PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams

2056:   Collective

2058:   Input Parameters:
2059: + pc       - the preconditioning context
2060: - interior - vector; a node is interior if its entry in the vector is 1.0

2062:   Level: intermediate

2064:   Note:
2065:   This calls `HYPRE_AMSSetInteriorNodes()`

2067:   Developer Notes:
2068:   Unlike the other AMS-specific routines above, this one already follows the `PCHYPREAMS` naming convention

2070: .seealso: [](ch_ksp), `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
2071: @*/
2072: PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior)
2073: {
2074:   PetscFunctionBegin;
2077:   PetscCheckSameComm(pc, 1, interior, 2);
2078:   PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior));
2079:   PetscFunctionReturn(PETSC_SUCCESS);
2080: }
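/*
   Example usage (sketch): interior is assumed to be a nodal vector with entries set to 1.0 for
   nodes inside the zero-conductivity region and 0.0 elsewhere.

     PetscCall(VecSet(interior, 0.0));
     // ... set the entries for the zero-conductivity nodes to 1.0 ...
     PetscCall(PCHYPREAMSSetInteriorNodes(pc, interior));
*/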

2082: static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
2083: {
2084:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2085:   Vec       tv;
2086:   PetscInt  i;

2088:   PetscFunctionBegin;
2089:   /* throw away any coordinate vector if already set */
2090:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
2091:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
2092:   PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
2093:   jac->dim = dim;

2095:   /* compute IJ vector for coordinates */
2096:   PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv));
2097:   PetscCall(VecSetType(tv, VECSTANDARD));
2098:   PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE));
2099:   for (i = 0; i < dim; i++) {
2100:     PetscScalar *array;
2101:     PetscInt     j;

2103:     PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i]));
2104:     PetscCall(VecGetArrayWrite(tv, &array));
2105:     for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i];
2106:     PetscCall(VecRestoreArrayWrite(tv, &array));
2107:     PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i]));
2108:   }
2109:   PetscCall(VecDestroy(&tv));
2110:   PetscFunctionReturn(PETSC_SUCCESS);
2111: }
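/*
   Example usage (sketch): the coordinates are passed interleaved by node, coords = {x_0,y_0,z_0,
   x_1,y_1,z_1, ...}, which matches the coords[j * dim + i] access above; nloc is the number of
   locally owned nodes.

     PetscCall(PCSetCoordinates(pc, 3, nloc, coords));
*/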

2113: static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[])
2114: {
2115:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

2117:   PetscFunctionBegin;
2118:   *name = jac->hypre_type;
2119:   PetscFunctionReturn(PETSC_SUCCESS);
2120: }

2122: static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[])
2123: {
2124:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
2125:   PetscBool flag;

2127:   PetscFunctionBegin;
2128:   if (jac->hypre_type) {
2129:     PetscCall(PetscStrcmp(jac->hypre_type, name, &flag));
2130:     PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set");
2131:     PetscFunctionReturn(PETSC_SUCCESS);
2132:   } else {
2133:     PetscCall(PetscStrallocpy(name, &jac->hypre_type));
2134:   }

2136:   jac->maxiter         = PETSC_DEFAULT;
2137:   jac->tol             = PETSC_DEFAULT;
2138:   jac->printstatistics = PetscLogPrintInfo;

2140:   PetscCall(PetscStrcmp("ilu", jac->hypre_type, &flag));
2141:   if (flag) {
2142:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2143:     PetscCallExternal(HYPRE_ILUCreate, &jac->hsolver);
2144:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ILU;
2145:     pc->ops->view           = PCView_HYPRE_ILU;
2146:     jac->destroy            = HYPRE_ILUDestroy;
2147:     jac->setup              = HYPRE_ILUSetup;
2148:     jac->solve              = HYPRE_ILUSolve;
2149:     jac->factorrowsize      = PETSC_DEFAULT;
2150:     PetscFunctionReturn(PETSC_SUCCESS);
2151:   }

2153:   PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag));
2154:   if (flag) {
2155:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2156:     PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver);
2157:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut;
2158:     pc->ops->view           = PCView_HYPRE_Pilut;
2159:     jac->destroy            = HYPRE_ParCSRPilutDestroy;
2160:     jac->setup              = HYPRE_ParCSRPilutSetup;
2161:     jac->solve              = HYPRE_ParCSRPilutSolve;
2162:     jac->factorrowsize      = PETSC_DEFAULT;
2163:     PetscFunctionReturn(PETSC_SUCCESS);
2164:   }
2165:   PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag));
2166:   if (flag) {
2167: #if defined(PETSC_USE_64BIT_INDICES)
2168:     SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64-bit indices");
2169: #endif
2170:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2171:     PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver);
2172:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid;
2173:     pc->ops->view           = PCView_HYPRE_Euclid;
2174:     jac->destroy            = HYPRE_EuclidDestroy;
2175:     jac->setup              = HYPRE_EuclidSetup;
2176:     jac->solve              = HYPRE_EuclidSolve;
2177:     jac->factorrowsize      = PETSC_DEFAULT;
2178:     jac->eu_level           = PETSC_DEFAULT; /* default */
2179:     PetscFunctionReturn(PETSC_SUCCESS);
2180:   }
2181:   PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag));
2182:   if (flag) {
2183:     PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
2184:     PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver);
2185:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails;
2186:     pc->ops->view           = PCView_HYPRE_ParaSails;
2187:     jac->destroy            = HYPRE_ParaSailsDestroy;
2188:     jac->setup              = HYPRE_ParaSailsSetup;
2189:     jac->solve              = HYPRE_ParaSailsSolve;
2190:     /* initialize */
2191:     jac->nlevels   = 1;
2192:     jac->threshold = .1;
2193:     jac->filter    = .1;
2194:     jac->loadbal   = 0;
2195:     if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE;
2196:     else jac->logging = (int)PETSC_FALSE;

2198:     jac->ruse = (int)PETSC_FALSE;
2199:     jac->symt = 0;
2200:     PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
2201:     PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
2202:     PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
2203:     PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
2204:     PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
2205:     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
2206:     PetscFunctionReturn(PETSC_SUCCESS);
2207:   }
2208:   PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag));
2209:   if (flag) {
2210:     PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver);
2211:     pc->ops->setfromoptions  = PCSetFromOptions_HYPRE_BoomerAMG;
2212:     pc->ops->view            = PCView_HYPRE_BoomerAMG;
2213:     pc->ops->applytranspose  = PCApplyTranspose_HYPRE_BoomerAMG;
2214:     pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG;
2215:     pc->ops->matapply        = PCMatApply_HYPRE_BoomerAMG;
2216:     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG));
2217:     PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG));
2218:     jac->destroy         = HYPRE_BoomerAMGDestroy;
2219:     jac->setup           = HYPRE_BoomerAMGSetup;
2220:     jac->solve           = HYPRE_BoomerAMGSolve;
2221:     jac->applyrichardson = PETSC_FALSE;
2222:     /* these defaults match the hypre defaults */
2223:     jac->cycletype       = 1;
2224:     jac->maxlevels       = 25;
2225:     jac->maxiter         = 1;
2226:     jac->tol             = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */
2227:     jac->truncfactor     = 0.0;
2228:     jac->strongthreshold = .25;
2229:     jac->maxrowsum       = .9;
2230:     jac->coarsentype     = 6;
2231:     jac->measuretype     = 0;
2232:     jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1;
2233:     jac->smoothtype                                              = -1; /* Not set by default */
2234:     jac->smoothnumlevels                                         = 25;
2235:     jac->eu_level                                                = 0;
2236:     jac->eu_droptolerance                                        = 0;
2237:     jac->eu_bj                                                   = 0;
2238:     jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */
2239:     jac->relaxtype[2]                     = 9; /* Gaussian elimination on the coarsest level */
2240:     jac->relaxweight                      = 1.0;
2241:     jac->outerrelaxweight                 = 1.0;
2242:     jac->relaxorder                       = 1;
2243:     jac->interptype                       = 0;
2244:     jac->Rtype                            = 0;
2245:     jac->Rstrongthreshold                 = 0.25;
2246:     jac->Rfilterthreshold                 = 0.0;
2247:     jac->Adroptype                        = -1;
2248:     jac->Adroptol                         = 0.0;
2249:     jac->agg_nl                           = 0;
2250:     jac->agg_interptype                   = 4;
2251:     jac->pmax                             = 0;
2252:     jac->truncfactor                      = 0.0;
2253:     jac->agg_num_paths                    = 1;
2254:     jac->maxc                             = 9;
2255:     jac->minc                             = 1;
2256:     jac->nodal_coarsening                 = 0;
2257:     jac->nodal_coarsening_diag            = 0;
2258:     jac->vec_interp_variant               = 0;
2259:     jac->vec_interp_qmax                  = 0;
2260:     jac->vec_interp_smooth                = PETSC_FALSE;
2261:     jac->interp_refine                    = 0;
2262:     jac->nodal_relax                      = PETSC_FALSE;
2263:     jac->nodal_relax_levels               = 1;
2264:     jac->rap2                             = 0;

2266:     /* GPU defaults
2267:          from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options
2268:          and /src/parcsr_ls/par_amg.c */
2269: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2270:     jac->keeptranspose  = PETSC_TRUE;
2271:     jac->mod_rap2       = 1;
2272:     jac->coarsentype    = 8;
2273:     jac->relaxorder     = 0;
2274:     jac->interptype     = 6;
2275:     jac->relaxtype[0]   = 18;
2276:     jac->relaxtype[1]   = 18;
2277:     jac->agg_interptype = 7;
2278: #else
2279:     jac->keeptranspose = PETSC_FALSE;
2280:     jac->mod_rap2      = 0;
2281: #endif
2282:     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
2283:     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
2284:     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
2285:     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
2286:     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
2287:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
2288:     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
2289:     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
2290:     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
2291:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
2292:     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
2293:     PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
2294:     PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype);
2295:     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
2296:     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
2297:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]);  /* defaults coarse to 9 */
2298:     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */
2299:     PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
2300:     PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
2301:     /* GPU */
2302: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2303:     PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
2304:     PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2);
2305:     PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2);
2306: #endif

2308:     /* AIR */
2309: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
2310:     PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
2311:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
2312:     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
2313:     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
2314:     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
2315: #endif
2316:     PetscFunctionReturn(PETSC_SUCCESS);
2317:   }
2318:   PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag));
2319:   if (flag) {
2320:     PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver);
2321:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS;
2322:     pc->ops->view           = PCView_HYPRE_AMS;
2323:     jac->destroy            = HYPRE_AMSDestroy;
2324:     jac->setup              = HYPRE_AMSSetup;
2325:     jac->solve              = HYPRE_AMSSolve;
2326:     jac->coords[0]          = NULL;
2327:     jac->coords[1]          = NULL;
2328:     jac->coords[2]          = NULL;
2329:     jac->interior           = NULL;
2330:     /* solver parameters: these are borrowed from the mfem package and are not the default values from HYPRE AMS */
2331:     jac->as_print       = 0;
2332:     jac->as_max_iter    = 1;  /* used as a preconditioner */
2333:     jac->as_tol         = 0.; /* used as a preconditioner */
2334:     jac->ams_cycle_type = 13;
2335:     /* Smoothing options */
2336:     jac->as_relax_type   = 2;
2337:     jac->as_relax_times  = 1;
2338:     jac->as_relax_weight = 1.0;
2339:     jac->as_omega        = 1.0;
2340:     /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2341:     jac->as_amg_alpha_opts[0] = 10;
2342:     jac->as_amg_alpha_opts[1] = 1;
2343:     jac->as_amg_alpha_opts[2] = 6;
2344:     jac->as_amg_alpha_opts[3] = 6;
2345:     jac->as_amg_alpha_opts[4] = 4;
2346:     jac->as_amg_alpha_theta   = 0.25;
2347:     /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2348:     jac->as_amg_beta_opts[0] = 10;
2349:     jac->as_amg_beta_opts[1] = 1;
2350:     jac->as_amg_beta_opts[2] = 6;
2351:     jac->as_amg_beta_opts[3] = 6;
2352:     jac->as_amg_beta_opts[4] = 4;
2353:     jac->as_amg_beta_theta   = 0.25;
2354:     PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
2355:     PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
2356:     PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2357:     PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
2358:     PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2359:     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
2360:                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
2361:                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
2362:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
2363:                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
2364:     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0],   /* AMG coarsen type */
2365:                       jac->as_amg_beta_opts[1],                                             /* AMG agg_levels */
2366:                       jac->as_amg_beta_opts[2],                                             /* AMG relax_type */
2367:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                     /* AMG interp_type */
2368:                       jac->as_amg_beta_opts[4]);                                            /* AMG Pmax */
2369:     /* Zero conductivity */
2370:     jac->ams_beta_is_zero      = PETSC_FALSE;
2371:     jac->ams_beta_is_zero_part = PETSC_FALSE;
2372:     PetscFunctionReturn(PETSC_SUCCESS);
2373:   }
2374:   PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag));
2375:   if (flag) {
2376:     PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver);
2377:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS;
2378:     pc->ops->view           = PCView_HYPRE_ADS;
2379:     jac->destroy            = HYPRE_ADSDestroy;
2380:     jac->setup              = HYPRE_ADSSetup;
2381:     jac->solve              = HYPRE_ADSSolve;
2382:     jac->coords[0]          = NULL;
2383:     jac->coords[1]          = NULL;
2384:     jac->coords[2]          = NULL;
2385:     /* solver parameters: these are borrowed from the mfem package and are not the default values from HYPRE ADS */
2386:     jac->as_print       = 0;
2387:     jac->as_max_iter    = 1;  /* used as a preconditioner */
2388:     jac->as_tol         = 0.; /* used as a preconditioner */
2389:     jac->ads_cycle_type = 13;
2390:     /* Smoothing options */
2391:     jac->as_relax_type   = 2;
2392:     jac->as_relax_times  = 1;
2393:     jac->as_relax_weight = 1.0;
2394:     jac->as_omega        = 1.0;
2395:     /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */
2396:     jac->ams_cycle_type       = 14;
2397:     jac->as_amg_alpha_opts[0] = 10;
2398:     jac->as_amg_alpha_opts[1] = 1;
2399:     jac->as_amg_alpha_opts[2] = 6;
2400:     jac->as_amg_alpha_opts[3] = 6;
2401:     jac->as_amg_alpha_opts[4] = 4;
2402:     jac->as_amg_alpha_theta   = 0.25;
2403:     /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2404:     jac->as_amg_beta_opts[0] = 10;
2405:     jac->as_amg_beta_opts[1] = 1;
2406:     jac->as_amg_beta_opts[2] = 6;
2407:     jac->as_amg_beta_opts[3] = 6;
2408:     jac->as_amg_beta_opts[4] = 4;
2409:     jac->as_amg_beta_theta   = 0.25;
2410:     PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
2411:     PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
2412:     PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2413:     PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
2414:     PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2415:     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type,      /* AMS cycle type */
2416:                       jac->as_amg_alpha_opts[0],                                      /* AMG coarsen type */
2417:                       jac->as_amg_alpha_opts[1],                                      /* AMG agg_levels */
2418:                       jac->as_amg_alpha_opts[2],                                      /* AMG relax_type */
2419:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],             /* AMG interp_type */
2420:                       jac->as_amg_alpha_opts[4]);                                     /* AMG Pmax */
2421:     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
2422:                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
2423:                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
2424:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
2425:                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
2426:     PetscFunctionReturn(PETSC_SUCCESS);
2427:   }
2428:   PetscCall(PetscFree(jac->hypre_type));

2430:   jac->hypre_type = NULL;
2431:   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are ilu, euclid, pilut, parasails, boomeramg, ams, ads", name);
2432: }

2434: /*
2435:     The hypre type is set here only if it has not already been set before the call to
2436:    ...SetFromOptions(), which in practice is most of the time
2437: */
2438: static PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject)
2439: {
2440:   PetscInt    indx;
2441:   const char *type[] = {"ilu", "euclid", "pilut", "parasails", "boomeramg", "ams", "ads"};
2442:   PetscBool   flg;

2444:   PetscFunctionBegin;
2445:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options");
2446:   PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg));
2447:   if (flg) {
2448:     PetscCall(PCHYPRESetType_HYPRE(pc, type[indx]));
2449:   } else {
2450:     PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg"));
2451:   }
2452:   PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject);
2453:   PetscOptionsHeadEnd();
2454:   PetscFunctionReturn(PETSC_SUCCESS);
2455: }

2457: /*@
2458:   PCHYPRESetType - Sets which hypre preconditioner you wish to use

2460:   Input Parameters:
2461: + pc   - the preconditioner context
2462: - name - one of ilu, euclid, pilut, parasails, boomeramg, ams, ads

2464:   Options Database Key:
2465: . -pc_hypre_type - one of ilu, euclid, pilut, parasails, boomeramg, ams, ads

2467:   Level: intermediate

2469: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE`
2470: @*/
2471: PetscErrorCode PCHYPRESetType(PC pc, const char name[])
2472: {
2473:   PetscFunctionBegin;
2475:   PetscAssertPointer(name, 2);
2476:   PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name));
2477:   PetscFunctionReturn(PETSC_SUCCESS);
2478: }
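/*
   Example usage (sketch): the hypre preconditioner can be selected either in code or from the
   options database; the two forms below are equivalent.

     PetscCall(PCSetType(pc, PCHYPRE));
     PetscCall(PCHYPRESetType(pc, "boomeramg"));

   or on the command line

     -pc_type hypre -pc_hypre_type boomeramg
*/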

2480: /*@
2481:   PCHYPREGetType - Gets which hypre preconditioner you are using

2483:   Input Parameter:
2484: . pc - the preconditioner context

2486:   Output Parameter:
2487: . name - one of ilu, euclid, pilut, parasails, boomeramg, ams, ads

2489:   Level: intermediate

2491: .seealso: [](ch_ksp), `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE`
2492: @*/
2493: PetscErrorCode PCHYPREGetType(PC pc, const char *name[])
2494: {
2495:   PetscFunctionBegin;
2497:   PetscAssertPointer(name, 2);
2498:   PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name));
2499:   PetscFunctionReturn(PETSC_SUCCESS);
2500: }

2502: /*@
2503:   PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs

2505:   Logically Collective

2507:   Input Parameters:
2508: + pc   - the hypre context
2509: - name - one of 'cusparse', 'hypre'

2511:   Options Database Key:
2512: . -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre

2514:   Level: intermediate

2516:   Developer Notes:
2517:   Why does the name start with `PCMG`? Should it not be `PCHYPREBoomerAMG`?

2519: .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()`
2520: @*/
2521: PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[])
2522: {
2523:   PetscFunctionBegin;
2525:   PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name));
2526:   PetscFunctionReturn(PETSC_SUCCESS);
2527: }
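/*
   Example usage (sketch): when hypre runs on a GPU, the SpGEMM backend used for the Galerkin
   triple product can be chosen in code or via the options database.

     PetscCall(PCMGGalerkinSetMatProductAlgorithm(pc, "hypre"));

   or on the command line

     -pc_mg_galerkin_mat_product_algorithm hypre
*/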

2529: /*@
2530:   PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs

2532:   Not Collective

2534:   Input Parameter:
2535: . pc - the multigrid context

2537:   Output Parameter:
2538: . name - one of 'cusparse', 'hypre'

2540:   Level: intermediate

2542: .seealso: [](ch_ksp), `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()`
2543: @*/
2544: PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[])
2545: {
2546:   PetscFunctionBegin;
2548:   PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name));
2549:   PetscFunctionReturn(PETSC_SUCCESS);
2550: }

2552: /*MC
2553:      PCHYPRE - Allows you to use the matrix-element-based preconditioners in the LLNL package hypre as a PETSc `PC`

2555:    Options Database Keys:
2556: +   -pc_hypre_type - One of `euclid`, `pilut`, `parasails`, `boomeramg`, `ams`, or `ads`
2557: .   -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BoomerAMGSetNodal()`)
2558: .   -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`)
2559: -   Many others, run with `-pc_type hypre` `-pc_hypre_type XXX` `-help` to see options for the XXX preconditioner

2561:    Level: intermediate

2563:    Notes:
2564:     Apart from `-pc_hypre_type` (for which there is `PCHYPRESetType()`),
2565:           the many hypre options can ONLY be set via the options database (e.g. the command line
2566:           or with `PetscOptionsSetValue()`); there are no functions to set them

2568:           The options `-pc_hypre_boomeramg_max_iter` and `-pc_hypre_boomeramg_tol` refer to the number of iterations
2569:           (V-cycles) and tolerance that boomerAMG does EACH time it is called. So for example, if
2570:           `-pc_hypre_boomeramg_max_iter` is set to 2 then 2-V-cycles are being used to define the preconditioner
2571:           (`-pc_hypre_boomeramg_tol` should be set to 0.0 - the default - to strictly use a fixed number of
2572:           iterations per hypre call). `-ksp_max_it` and `-ksp_rtol` STILL determine the total number of iterations
2573:           and tolerance for the Krylov solver. For example, if `-pc_hypre_boomeramg_max_iter` is 2 and `-ksp_max_it` is 10
2574:           then AT MOST twenty V-cycles of boomeramg will be used.

2576:            Note that the option `-pc_hypre_boomeramg_relax_type_all` defaults to symmetric relaxation
2577:            (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry.
2578:            Otherwise, you may want to use `-pc_hypre_boomeramg_relax_type_all SOR/Jacobi`.

2580:           `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use
2581:           the following two options: `-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>`

2583:           See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers

2585:           For `PCHYPRE` type of `ams` or `ads` auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`,
2586:           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2587:           `PCHYPREAMSSetInteriorNodes()`

2589:   Sometimes people want to try algebraic multigrid as a "standalone" solver, that is, not accelerating it with a Krylov method. Though we generally do not recommend this
2590:   since it is usually slower, to do so one should use a `KSPType` of `KSPRICHARDSON`
2591:   (or equivalently `-ksp_type richardson`). Using `KSPPREONLY` will not work since it only applies a single cycle of multigrid.

2593:    PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems

2595:    GPU Notes:
2596:      To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda
2597:      Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers.

2599:      To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip
2600:      Then pass `VECHIP` vectors and `MATAIJHIPSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers.

2602: .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`,
2603:           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2604:           `PCHYPREAMSSetInteriorNodes()`
2605: M*/
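/*
   Example usage (sketch): a typical command line that accelerates BoomerAMG with CG; the
   particular option values shown are illustrative only.

     -ksp_type cg -pc_type hypre -pc_hypre_type boomeramg \
       -pc_hypre_boomeramg_strong_threshold 0.7 -pc_hypre_boomeramg_coarsen_type HMIS
*/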

2607: PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc)
2608: {
2609:   PC_HYPRE *jac;

2611:   PetscFunctionBegin;
2612:   PetscCall(PetscNew(&jac));

2614:   pc->data                = jac;
2615:   pc->ops->reset          = PCReset_HYPRE;
2616:   pc->ops->destroy        = PCDestroy_HYPRE;
2617:   pc->ops->setfromoptions = PCSetFromOptions_HYPRE;
2618:   pc->ops->setup          = PCSetUp_HYPRE;
2619:   pc->ops->apply          = PCApply_HYPRE;
2620:   jac->comm_hypre         = MPI_COMM_NULL;
2621:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE));
2622:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE));
2623:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE));
2624:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE));
2625:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE));
2626:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE));
2627:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE));
2628:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE));
2629:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE));
2630:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG));
2631:   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG));
2632: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2633:   #if defined(HYPRE_USING_HIP)
2634:   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
2635:   #endif
2636:   #if defined(HYPRE_USING_CUDA)
2637:   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
2638:   #endif
2639: #endif
2640:   PetscHYPREInitialize();
2641:   PetscFunctionReturn(PETSC_SUCCESS);
2642: }

2644: typedef struct {
2645:   MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
2646:   HYPRE_StructSolver hsolver;

2648:   /* keep copy of PFMG options used so may view them */
2649:   PetscInt  its;
2650:   PetscReal tol;
2651:   PetscInt  relax_type;
2652:   PetscInt  rap_type;
2653:   PetscInt  num_pre_relax, num_post_relax;
2654:   PetscInt  max_levels;
2655:   PetscInt  skip_relax;
2656:   PetscBool print_statistics;
2657: } PC_PFMG;

2659: static PetscErrorCode PCDestroy_PFMG(PC pc)
2660: {
2661:   PC_PFMG *ex = (PC_PFMG *)pc->data;

2663:   PetscFunctionBegin;
2664:   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2665:   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2666:   PetscCall(PetscFree(pc->data));
2667:   PetscFunctionReturn(PETSC_SUCCESS);
2668: }

2670: static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"};
2671: static const char *PFMGRAPType[]   = {"Galerkin", "non-Galerkin"};

2673: static PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer)
2674: {
2675:   PetscBool iascii;
2676:   PC_PFMG  *ex = (PC_PFMG *)pc->data;

2678:   PetscFunctionBegin;
2679:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2680:   if (iascii) {
2681:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE PFMG preconditioning\n"));
2682:     PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
2683:     PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol));
2684:     PetscCall(PetscViewerASCIIPrintf(viewer, "    relax type %s\n", PFMGRelaxType[ex->relax_type]));
2685:     PetscCall(PetscViewerASCIIPrintf(viewer, "    RAP type %s\n", PFMGRAPType[ex->rap_type]));
2686:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2687:     PetscCall(PetscViewerASCIIPrintf(viewer, "    max levels %" PetscInt_FMT "\n", ex->max_levels));
2688:     PetscCall(PetscViewerASCIIPrintf(viewer, "    skip relax %" PetscInt_FMT "\n", ex->skip_relax));
2689:   }
2690:   PetscFunctionReturn(PETSC_SUCCESS);
2691: }

2693: static PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject)
2694: {
2695:   PC_PFMG *ex = (PC_PFMG *)pc->data;

2697:   PetscFunctionBegin;
2698:   PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options");
2699:   PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL));
2700:   PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL));
2701:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2702:   PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2703:   PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2704:   PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2705:   PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);

2707:   PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL));
2708:   PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);

2710:   PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL));
2711:   PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2712:   PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
2713:   PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2714:   PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL));
2715:   PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
2716:   PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL));
2717:   PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax);
2718:   PetscOptionsHeadEnd();
2719:   PetscFunctionReturn(PETSC_SUCCESS);
2720: }

2722: static PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y)
2723: {
2724:   PC_PFMG           *ex = (PC_PFMG *)pc->data;
2725:   PetscScalar       *yy;
2726:   const PetscScalar *xx;
2727:   PetscInt           ilower[3], iupper[3];
2728:   HYPRE_Int          hlower[3], hupper[3];
2729:   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)pc->pmat->data;

2731:   PetscFunctionBegin;
2732:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2733:   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
2734:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit, hence the explicit casts to HYPRE_Int below */
2735:   iupper[0] += ilower[0] - 1;
2736:   iupper[1] += ilower[1] - 1;
2737:   iupper[2] += ilower[2] - 1;
2738:   hlower[0] = (HYPRE_Int)ilower[0];
2739:   hlower[1] = (HYPRE_Int)ilower[1];
2740:   hlower[2] = (HYPRE_Int)ilower[2];
2741:   hupper[0] = (HYPRE_Int)iupper[0];
2742:   hupper[1] = (HYPRE_Int)iupper[1];
2743:   hupper[2] = (HYPRE_Int)iupper[2];

2745:   /* copy x values over to hypre */
2746:   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2747:   PetscCall(VecGetArrayRead(x, &xx));
2748:   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2749:   PetscCall(VecRestoreArrayRead(x, &xx));
2750:   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2751:   PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

2753:   /* copy solution values back to PETSc */
2754:   PetscCall(VecGetArray(y, &yy));
2755:   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2756:   PetscCall(VecRestoreArray(y, &yy));
2757:   PetscFunctionReturn(PETSC_SUCCESS);
2758: }

2760: static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2761: {
2762:   PC_PFMG  *jac = (PC_PFMG *)pc->data;
2763:   HYPRE_Int oits;

2765:   PetscFunctionBegin;
2766:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2767:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its);
2768:   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol);

2770:   PetscCall(PCApply_PFMG(pc, b, y));
2771:   PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits);
2772:   *outits = oits;
2773:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2774:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2775:   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol);
2776:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its);
2777:   PetscFunctionReturn(PETSC_SUCCESS);
2778: }

2780: static PetscErrorCode PCSetUp_PFMG(PC pc)
2781: {
2782:   PC_PFMG         *ex = (PC_PFMG *)pc->data;
2783:   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
2784:   PetscBool        flg;

2786:   PetscFunctionBegin;
2787:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
2788:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");

2790:   /* create the hypre solver object and set its information */
2791:   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2792:   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);

2794:   // Print Hypre statistics about the solve process
2795:   if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3);

2797:   // The hypre options must be repeated here because the StructPFMG was destroyed and recreated
2798:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2799:   PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2800:   PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
2801:   PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);
2802:   PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2803:   PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2804:   PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);

2806:   PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2807:   PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver);
2808:   PetscFunctionReturn(PETSC_SUCCESS);
2809: }

2811: /*MC
2812:      PCPFMG - the hypre PFMG multigrid solver

2814:    Options Database Keys:
2815: + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner
2816: . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve
2817: . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve
2818: . -pc_pfmg_tol <tol> - tolerance of PFMG
2819: . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel
2820: . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin
2821: - -pc_pfmg_skip_relax - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations
2822:                         when the underlying problem is isotropic, one of 0,1

2824:    Level: advanced

2826:    Notes:
2827:    This is for CELL-centered discretizations

2829:    See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG`

2831:    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver

2833:    This must be used with the `MATHYPRESTRUCT` matrix type.

2835:    This provides only some of the functionality of PFMG; it supports only one block per process, defined by a PETSc `DMDA`.

2837: .seealso: [](ch_ksp), `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`
2838: M*/
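/*
   Example usage (sketch): PFMG needs the operator in MATHYPRESTRUCT format, so a DMDA-based code
   could be run with something like

     -dm_mat_type hyprestruct -pc_type pfmg -pc_pfmg_num_pre_relax 2 -pc_pfmg_num_post_relax 2
*/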

2840: PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc)
2841: {
2842:   PC_PFMG *ex;

2844:   PetscFunctionBegin;
2845:   PetscCall(PetscNew(&ex));
2846:   pc->data = ex;

2848:   ex->its              = 1;
2849:   ex->tol              = 1.e-8;
2850:   ex->relax_type       = 1;
2851:   ex->rap_type         = 0;
2852:   ex->num_pre_relax    = 1;
2853:   ex->num_post_relax   = 1;
2854:   ex->max_levels       = 0;
2855:   ex->skip_relax       = 0;
2856:   ex->print_statistics = PETSC_FALSE;

2858:   pc->ops->setfromoptions  = PCSetFromOptions_PFMG;
2859:   pc->ops->view            = PCView_PFMG;
2860:   pc->ops->destroy         = PCDestroy_PFMG;
2861:   pc->ops->apply           = PCApply_PFMG;
2862:   pc->ops->applyrichardson = PCApplyRichardson_PFMG;
2863:   pc->ops->setup           = PCSetUp_PFMG;

2865:   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2866:   PetscHYPREInitialize();
2867:   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
2868:   PetscFunctionReturn(PETSC_SUCCESS);
2869: }

2871: /* we know we are working with a HYPRE_SStructMatrix */
2872: typedef struct {
2873:   MPI_Comm            hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */
2874:   HYPRE_SStructSolver ss_solver;

2876:   /* keep copy of SYSPFMG options used so may view them */
2877:   PetscInt  its;
2878:   PetscReal tol;
2879:   PetscInt  relax_type;
2880:   PetscInt  num_pre_relax, num_post_relax;
2881: } PC_SysPFMG;

2883: static PetscErrorCode PCDestroy_SysPFMG(PC pc)
2884: {
2885:   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;

2887:   PetscFunctionBegin;
2888:   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2889:   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
2890:   PetscCall(PetscFree(pc->data));
2891:   PetscFunctionReturn(PETSC_SUCCESS);
2892: }

2894: static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"};

2896: static PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer)
2897: {
2898:   PetscBool   iascii;
2899:   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;

2901:   PetscFunctionBegin;
2902:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
2903:   if (iascii) {
2904:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SysPFMG preconditioning\n"));
2905:     PetscCall(PetscViewerASCIIPrintf(viewer, "  max iterations %" PetscInt_FMT "\n", ex->its));
2906:     PetscCall(PetscViewerASCIIPrintf(viewer, "  tolerance %g\n", ex->tol));
2907:     PetscCall(PetscViewerASCIIPrintf(viewer, "  relax type %s\n", SysPFMGRelaxType[ex->relax_type]));
2908:     PetscCall(PetscViewerASCIIPrintf(viewer, "  number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
2909:   }
2910:   PetscFunctionReturn(PETSC_SUCCESS);
2911: }

2913: static PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject)
2914: {
2915:   PC_SysPFMG *ex  = (PC_SysPFMG *)pc->data;
2916:   PetscBool   flg = PETSC_FALSE;

2918:   PetscFunctionBegin;
2919:   PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options");
2920:   PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL));
2921:   if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3);
2922:   PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL));
2923:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its);
2924:   PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
2925:   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax);
2926:   PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
2927:   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax);

2929:   PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL));
2930:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol);
2931:   PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
2932:   PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type);
2933:   PetscOptionsHeadEnd();
2934:   PetscFunctionReturn(PETSC_SUCCESS);
2935: }

2937: static PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y)
2938: {
2939:   PC_SysPFMG        *ex = (PC_SysPFMG *)pc->data;
2940:   PetscScalar       *yy;
2941:   const PetscScalar *xx;
2942:   PetscInt           ilower[3], iupper[3];
2943:   HYPRE_Int          hlower[3], hupper[3];
2944:   Mat_HYPRESStruct  *mx       = (Mat_HYPRESStruct *)pc->pmat->data;
2945:   PetscInt           ordering = mx->dofs_order;
2946:   PetscInt           nvars    = mx->nvars;
2947:   PetscInt           part     = 0;
2948:   PetscInt           size;
2949:   PetscInt           i;

2951:   PetscFunctionBegin;
2952:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
2953:   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
2954:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit, hence the explicit casts to HYPRE_Int below */
2955:   iupper[0] += ilower[0] - 1;
2956:   iupper[1] += ilower[1] - 1;
2957:   iupper[2] += ilower[2] - 1;
2958:   hlower[0] = (HYPRE_Int)ilower[0];
2959:   hlower[1] = (HYPRE_Int)ilower[1];
2960:   hlower[2] = (HYPRE_Int)ilower[2];
2961:   hupper[0] = (HYPRE_Int)iupper[0];
2962:   hupper[1] = (HYPRE_Int)iupper[1];
2963:   hupper[2] = (HYPRE_Int)iupper[2];

2965:   size = 1;
2966:   for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1);

2968:   /* copy x values over to hypre for variable ordering */
2969:   if (ordering) {
2970:     PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
2971:     PetscCall(VecGetArrayRead(x, &xx));
2972:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i)));
2973:     PetscCall(VecRestoreArrayRead(x, &xx));
2974:     PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
2975:     PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x);
2976:     PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

2978:     /* copy solution values back to PETSc */
2979:     PetscCall(VecGetArray(y, &yy));
2980:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i)));
2981:     PetscCall(VecRestoreArray(y, &yy));
2982:   } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
2983:     PetscScalar *z;
2984:     PetscInt     j, k;

2986:     PetscCall(PetscMalloc1(nvars * size, &z));
2987:     PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
2988:     PetscCall(VecGetArrayRead(x, &xx));

2990:     /* transform nodal to hypre's variable ordering for sys_pfmg */
2991:     for (i = 0; i < size; i++) {
2992:       k = i * nvars;
2993:       for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j];
2994:     }
2995:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
2996:     PetscCall(VecRestoreArrayRead(x, &xx));
2997:     PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
2998:     PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

3000:     /* copy solution values back to PETSc */
3001:     PetscCall(VecGetArray(y, &yy));
3002:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
3003:     /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
3004:     for (i = 0; i < size; i++) {
3005:       k = i * nvars;
3006:       for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i];
3007:     }
3008:     PetscCall(VecRestoreArray(y, &yy));
3009:     PetscCall(PetscFree(z));
3010:   }
3011:   PetscFunctionReturn(PETSC_SUCCESS);
3012: }

3014: static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
3015: {
3016:   PC_SysPFMG *jac = (PC_SysPFMG *)pc->data;
3017:   HYPRE_Int   oits;

3019:   PetscFunctionBegin;
3020:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3021:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its);
3022:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol);
3023:   PetscCall(PCApply_SysPFMG(pc, b, y));
3024:   PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits);
3025:   *outits = oits;
3026:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
3027:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
3028:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol);
3029:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its);
3030:   PetscFunctionReturn(PETSC_SUCCESS);
3031: }

3033: static PetscErrorCode PCSetUp_SysPFMG(PC pc)
3034: {
3035:   PC_SysPFMG       *ex = (PC_SysPFMG *)pc->data;
3036:   Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)pc->pmat->data;
3037:   PetscBool         flg;

3039:   PetscFunctionBegin;
3040:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg));
3041:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner");

3043:   /* create the hypre sstruct solver object and set its information */
3044:   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
3045:   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
3046:   PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver);
3047:   PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
3048:   PetscFunctionReturn(PETSC_SUCCESS);
3049: }

3051: /*MC
3052:      PCSYSPFMG - the hypre SysPFMG multigrid solver

3054:    Level: advanced

3056:    Options Database Keys:
3057: + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner
3058: . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid
3059: . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid
3060: . -pc_syspfmg_tol <tol> - tolerance of SysPFMG
3061: - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles

3063:    Notes:
3064:    See `PCPFMG` for hypre's PFMG solver, which works for a scalar PDE, and `PCSMG` for hypre's SMG solver

3066:    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver

3068:    This is for CELL-centered discretizations

3070:    This must be used with the `MATHYPRESSTRUCT` matrix type.

3072:    This does not give access to all the functionality of hypre's SysPFMG: it supports only one part, with one block per process, defined by a PETSc `DMDA`.
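
   Example Usage:
   A minimal sketch of using SysPFMG as the preconditioner inside a `KSP`; it assumes the system has already been assembled from a `DMDA` into a `MATHYPRESSTRUCT` matrix A with vectors b and x (these names are placeholders).
.vb
   KSP ksp;
   PC  pc;

   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
   PetscCall(KSPSetOperators(ksp, A, A));
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCSYSPFMG));
   PetscCall(KSPSetFromOptions(ksp)); // picks up, for example, -pc_syspfmg_its 2 -pc_syspfmg_tol 1e-8
   PetscCall(KSPSolve(ksp, b, x));
.ve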

3074: .seealso: [](ch_ksp), `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG`
3075: M*/

3077: PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc)
3078: {
3079:   PC_SysPFMG *ex;

3081:   PetscFunctionBegin;
3082:   PetscCall(PetscNew(&ex));
3083:   pc->data = ex;

3085:   ex->its            = 1;
3086:   ex->tol            = 1.e-8;
3087:   ex->relax_type     = 1;
3088:   ex->num_pre_relax  = 1;
3089:   ex->num_post_relax = 1;

3091:   pc->ops->setfromoptions  = PCSetFromOptions_SysPFMG;
3092:   pc->ops->view            = PCView_SysPFMG;
3093:   pc->ops->destroy         = PCDestroy_SysPFMG;
3094:   pc->ops->apply           = PCApply_SysPFMG;
3095:   pc->ops->applyrichardson = PCApplyRichardson_SysPFMG;
3096:   pc->ops->setup           = PCSetUp_SysPFMG;

3098:   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3099:   PetscHYPREInitialize();
3100:   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
3101:   PetscFunctionReturn(PETSC_SUCCESS);
3102: }

3104: /* PC SMG */
3105: typedef struct {
3106:   MPI_Comm           hcomm; /* does not share the comm with HYPRE_StructMatrix because the solver must be created before the matrix is available */
3107:   HYPRE_StructSolver hsolver;
3108:   PetscInt           its; /* keep a copy of the SMG options used so they may be viewed */
3109:   PetscReal          tol;
3110:   PetscBool          print_statistics;
3111:   PetscInt           num_pre_relax, num_post_relax;
3112: } PC_SMG;

3114: static PetscErrorCode PCDestroy_SMG(PC pc)
3115: {
3116:   PC_SMG *ex = (PC_SMG *)pc->data;

3118:   PetscFunctionBegin;
3119:   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
3120:   PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3121:   PetscCall(PetscFree(pc->data));
3122:   PetscFunctionReturn(PETSC_SUCCESS);
3123: }

3125: static PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer)
3126: {
3127:   PetscBool iascii;
3128:   PC_SMG   *ex = (PC_SMG *)pc->data;

3130:   PetscFunctionBegin;
3131:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
3132:   if (iascii) {
3133:     PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE SMG preconditioning\n"));
3134:     PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
3135:     PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", (double)ex->tol));
3136:     PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
3137:   }
3138:   PetscFunctionReturn(PETSC_SUCCESS);
3139: }

3141: static PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject)
3142: {
3143:   PC_SMG *ex = (PC_SMG *)pc->data;

3145:   PetscFunctionBegin;
3146:   PetscOptionsHeadBegin(PetscOptionsObject, "SMG options");

3148:   PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL));
3149:   PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
3150:   PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
3151:   PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL));

3153:   PetscOptionsHeadEnd();
3154:   PetscFunctionReturn(PETSC_SUCCESS);
3155: }

3157: static PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y)
3158: {
3159:   PC_SMG            *ex = (PC_SMG *)pc->data;
3160:   PetscScalar       *yy;
3161:   const PetscScalar *xx;
3162:   PetscInt           ilower[3], iupper[3];
3163:   HYPRE_Int          hlower[3], hupper[3];
3164:   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)pc->pmat->data;

3166:   PetscFunctionBegin;
3167:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3168:   PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
3169:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit while PetscInt may be 64-bit, so convert the box bounds to HYPRE_Int explicitly */
3170:   iupper[0] += ilower[0] - 1;
3171:   iupper[1] += ilower[1] - 1;
3172:   iupper[2] += ilower[2] - 1;
3173:   hlower[0] = (HYPRE_Int)ilower[0];
3174:   hlower[1] = (HYPRE_Int)ilower[1];
3175:   hlower[2] = (HYPRE_Int)ilower[2];
3176:   hupper[0] = (HYPRE_Int)iupper[0];
3177:   hupper[1] = (HYPRE_Int)iupper[1];
3178:   hupper[2] = (HYPRE_Int)iupper[2];

3180:   /* copy x values over to hypre */
3181:   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
3182:   PetscCall(VecGetArrayRead(x, &xx));
3183:   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
3184:   PetscCall(VecRestoreArrayRead(x, &xx));
3185:   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
3186:   PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

3188:   /* copy solution values back to PETSc */
3189:   PetscCall(VecGetArray(y, &yy));
3190:   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
3191:   PetscCall(VecRestoreArray(y, &yy));
3192:   PetscFunctionReturn(PETSC_SUCCESS);
3193: }

3195: static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
3196: {
3197:   PC_SMG   *jac = (PC_SMG *)pc->data;
3198:   HYPRE_Int oits;

3200:   PetscFunctionBegin;
3201:   PetscCall(PetscCitationsRegister(hypreCitation, &cite));
3202:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its);
3203:   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol);

3205:   PetscCall(PCApply_SMG(pc, b, y));
3206:   PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits);
3207:   *outits = oits;
3208:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
3209:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
3210:   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol);
3211:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its);
3212:   PetscFunctionReturn(PETSC_SUCCESS);
3213: }

3215: static PetscErrorCode PCSetUp_SMG(PC pc)
3216: {
3217:   PetscInt         i, dim;
3218:   PC_SMG          *ex = (PC_SMG *)pc->data;
3219:   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)pc->pmat->data;
3220:   PetscBool        flg;
3221:   DMBoundaryType   p[3];
3222:   PetscInt         M[3];

3224:   PetscFunctionBegin;
3225:   PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
3226:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");

3228:   PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0));
3229:   // SMG requires the number of grid points in each periodic direction to be a power of 2
3230:   for (i = 0; i < dim; i++) {
3231:     if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) {
3232:       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]);
3233:     }
3234:   }

3236:   /* create the hypre solver object and set its information */
3237:   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
3238:   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3239:   // The hypre options must be set here, not in PCSetFromOptions_SMG(), because the solver is (re)created here!
3240:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its);
3241:   PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
3242:   PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
3243:   PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol);

3245:   PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
3246:   PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver);
3247:   PetscFunctionReturn(PETSC_SUCCESS);
3248: }

3250: /*MC
3251:      PCSMG - the hypre (structured grid) SMG multigrid solver

3253:    Level: advanced

3255:    Options Database Keys:
3256: + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner
3257: . -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid
3258: . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid
3259: - -pc_smg_tol <tol> - tolerance of SMG

3261:    Notes:
3262:    This is for CELL-centered discretizations

3264:    This must be used with the `MATHYPRESTRUCT` `MatType`.

3266:    This does not provide all the functionality of hypre's SMG solver: it supports only one block per process, defined by a PETSc `DMDA`.

3268:    See `PCSYSPFMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners
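
   Example Usage:
   A minimal sketch of obtaining the required `MATHYPRESTRUCT` matrix from a `DMDA` and attaching SMG as the preconditioner; the grid sizes are placeholders and the matrix assembly is only indicated by a comment.
.vb
   DM  da;
   Mat A;
   Vec b, x;
   KSP ksp;
   PC  pc;

   PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 32, 32, 32, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, NULL, &da));
   PetscCall(DMSetUp(da));
   PetscCall(DMSetMatType(da, MATHYPRESTRUCT));
   PetscCall(DMCreateMatrix(da, &A));
   // ... fill A with MatSetValuesStencil() and assemble it ...
   PetscCall(DMCreateGlobalVector(da, &b));
   PetscCall(VecDuplicate(b, &x));
   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
   PetscCall(KSPSetOperators(ksp, A, A));
   PetscCall(KSPGetPC(ksp, &pc));
   PetscCall(PCSetType(pc, PCSMG));
   PetscCall(KSPSetFromOptions(ksp));
   PetscCall(KSPSolve(ksp, b, x));
.ve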

3270: .seealso: [](ch_ksp), `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG`
3271: M*/

3273: PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc)
3274: {
3275:   PC_SMG *ex;

3277:   PetscFunctionBegin;
3278:   PetscCall(PetscNew(&ex));
3279:   pc->data = ex;

3281:   ex->its            = 1;
3282:   ex->tol            = 1.e-8;
3283:   ex->num_pre_relax  = 1;
3284:   ex->num_post_relax = 1;

3286:   pc->ops->setfromoptions  = PCSetFromOptions_SMG;
3287:   pc->ops->view            = PCView_SMG;
3288:   pc->ops->destroy         = PCDestroy_SMG;
3289:   pc->ops->apply           = PCApply_SMG;
3290:   pc->ops->applyrichardson = PCApplyRichardson_SMG;
3291:   pc->ops->setup           = PCSetUp_SMG;

3293:   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
3294:   PetscHYPREInitialize();
3295:   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
3296:   PetscFunctionReturn(PETSC_SUCCESS);
3297: }