Actual source code: strumpack.c

  1: #include <../src/mat/impls/aij/seq/aij.h>
  2: #include <../src/mat/impls/aij/mpi/mpiaij.h>
  3: #include <StrumpackSparseSolver.h>

  5: static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v)
  6: {
  7:   SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  8:   return 0;
  9: }

 11: static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
 12: {
 13:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->spptr;
 14:   PetscBool               flg;

 16:   /* Deallocate STRUMPACK storage */
 17:   PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
 18:   PetscFree(A->spptr);
 19:   PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &flg);
 20:   if (flg) {
 21:     MatDestroy_SeqAIJ(A);
 22:   } else {
 23:     MatDestroy_MPIAIJ(A);
 24:   }

 26:   /* clear composed functions */
 27:   PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL);
 28:   PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL);
 29:   PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL);
 30:   PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSRelTol_C", NULL);
 31:   PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSAbsTol_C", NULL);
 32:   PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMaxRank_C", NULL);
 33:   PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSLeafSize_C", NULL);
 34:   PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetHSSMinSepSize_C", NULL);

 36:   return 0;
 37: }

 39: static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering)
 40: {
 41:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;

 43:   PetscStackCallExternalVoid("STRUMPACK_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
 44:   return 0;
 45: }

 47: /*@
 48:   MatSTRUMPACKSetReordering - Set STRUMPACK fill-reducing reordering

 50:    Input Parameters:
 51: +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
 52: -  reordering - the code to be used to find the fill-reducing reordering
 53:       Possible values: NATURAL=0 METIS=1 PARMETIS=2 SCOTCH=3 PTSCOTCH=4 RCM=5

 55:   Options Database Key:
 56: .   -mat_strumpack_reordering <METIS>  - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
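
   Example Usage:
   A minimal sketch, assuming `ksp` is a `KSP` whose operator has already been set and that `MAT_STRUMPACK_METIS` is the corresponding `MatSTRUMPACKReordering` value from petscmat.h
.vb
   PC  pc;
   Mat F;
   KSPGetPC(ksp, &pc);
   PCSetType(pc, PCLU);
   PCFactorSetMatSolverType(pc, MATSOLVERSTRUMPACK);
   PCFactorSetUpMatSolverType(pc);   /* create the factor matrix */
   PCFactorGetMatrix(pc, &F);
   MatSTRUMPACKSetReordering(F, MAT_STRUMPACK_METIS);
.ve
   The same effect is obtained with the option -mat_strumpack_reordering METIS.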

 58:    Level: beginner

 60:    References:
 61: .  * - STRUMPACK manual

 63: .seealso: `MatGetFactor()`
 64: @*/
 65: PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering)
 66: {
 69:   PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
 70:   return 0;
 71: }

 73: static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm)
 74: {
 75:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;

 77:   PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, cperm ? 5 : 0));
 78:   return 0;
 79: }

 81: /*@
 82:   MatSTRUMPACKSetColPerm - Set whether STRUMPACK should try to permute the columns of the matrix in order to get a nonzero diagonal

 84:    Logically Collective

 86:    Input Parameters:
 87: +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
 88: -  cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix

 90:   Options Database Key:
 91: .   -mat_strumpack_colperm <cperm> - true to use the permutation
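
  Example Usage:
  A minimal sketch, assuming `A` is an assembled AIJ matrix and the factor is requested directly with `MatGetFactor()`
.vb
   Mat F;
   MatGetFactor(A, MATSOLVERSTRUMPACK, MAT_FACTOR_LU, &F);
   MatSTRUMPACKSetColPerm(F, PETSC_TRUE);
.ve
   followed by the usual symbolic and numeric factorization calls.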

 93:    Level: beginner

 95:    References:
 96: .  * - STRUMPACK manual

 98: .seealso: `MatGetFactor()`
 99: @*/
100: PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm)
101: {
104:   PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
105:   return 0;
106: }

108: static PetscErrorCode MatSTRUMPACKSetHSSRelTol_STRUMPACK(Mat F, PetscReal rtol)
109: {
110:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;

112:   PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, rtol));
113:   return 0;
114: }

116: /*@
117:   MatSTRUMPACKSetHSSRelTol - Set STRUMPACK relative tolerance for HSS compression

119:   Logically Collective

121:    Input Parameters:
122: +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
123: -  rtol - relative compression tolerance

125:   Options Database Key:
126: .   -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
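
  Example Usage:
  A minimal sketch, assuming `F` is the factored matrix obtained from the PETSc-STRUMPACK interface (for example via `PCFactorGetMatrix()` with `PCILU`); 1e-4 is an illustrative value only
.vb
   MatSTRUMPACKSetHSSRelTol(F, 1.e-4);
.ve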

128:    Level: beginner

130:    References:
131: .  * - STRUMPACK manual

133: .seealso: `MatGetFactor()`
134: @*/
135: PetscErrorCode MatSTRUMPACKSetHSSRelTol(Mat F, PetscReal rtol)
136: {
139:   PetscTryMethod(F, "MatSTRUMPACKSetHSSRelTol_C", (Mat, PetscReal), (F, rtol));
140:   return 0;
141: }

143: static PetscErrorCode MatSTRUMPACKSetHSSAbsTol_STRUMPACK(Mat F, PetscReal atol)
144: {
145:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;

147:   PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, atol));
148:   return 0;
149: }

151: /*@
152:   MatSTRUMPACKSetHSSAbsTol - Set STRUMPACK absolute tolerance for HSS compression

154:    Logically Collective

156:    Input Parameters:
157: +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
158: -  atol - absolute compression tolerance

160:   Options Database Key:
161: .   -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
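
  Example Usage:
  A command-line sketch, where ./app stands for a hypothetical PETSc application; the in-code equivalent is MatSTRUMPACKSetHSSAbsTol(F, 1.e-10)
.vb
   ./app -pc_type ilu -pc_factor_mat_solver_type strumpack -mat_strumpack_hss_abs_tol 1e-10
.ve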

163:    Level: beginner

165:    References:
166: .  * - STRUMPACK manual

168: .seealso: `MatGetFactor()`
169: @*/
170: PetscErrorCode MatSTRUMPACKSetHSSAbsTol(Mat F, PetscReal atol)
171: {
174:   PetscTryMethod(F, "MatSTRUMPACKSetHSSAbsTol_C", (Mat, PetscReal), (F, atol));
175:   return 0;
176: }

178: static PetscErrorCode MatSTRUMPACKSetHSSMaxRank_STRUMPACK(Mat F, PetscInt hssmaxrank)
179: {
180:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;

182:   PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, hssmaxrank));
183:   return 0;
184: }

186: /*@
187:   MatSTRUMPACKSetHSSMaxRank - Set STRUMPACK maximum HSS rank

189:    Logically Collective

191:    Input Parameters:
192: +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
193: -  hssmaxrank - maximum rank used in low-rank approximation

195:   Options Database Key:
 196: .   -mat_strumpack_max_rank - Maximum rank in HSS compression, when using PCILU (None)
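
  Example Usage:
  A minimal sketch, assuming `F` was obtained as described above; 200 is an illustrative rank limit, not a recommended value
.vb
   MatSTRUMPACKSetHSSMaxRank(F, 200);
.ve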

198:    Level: beginner

200:    References:
201: .  * - STRUMPACK manual

203: .seealso: `MatGetFactor()`
204: @*/
205: PetscErrorCode MatSTRUMPACKSetHSSMaxRank(Mat F, PetscInt hssmaxrank)
206: {
209:   PetscTryMethod(F, "MatSTRUMPACKSetHSSMaxRank_C", (Mat, PetscInt), (F, hssmaxrank));
210:   return 0;
211: }

213: static PetscErrorCode MatSTRUMPACKSetHSSLeafSize_STRUMPACK(Mat F, PetscInt leaf_size)
214: {
215:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;

217:   PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, leaf_size));
218:   return 0;
219: }

221: /*@
222:   MatSTRUMPACKSetHSSLeafSize - Set STRUMPACK HSS leaf size

224:    Logically Collective

226:    Input Parameters:
227: +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
228: -  leaf_size - Size of diagonal blocks in HSS approximation

230:   Options Database Key:
 231: .   -mat_strumpack_leaf_size - Size of diagonal blocks in HSS approximation, when using PCILU (None)
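
  Example Usage:
  A minimal sketch, assuming `F` was obtained as described above; 128 is an illustrative block size only
.vb
   MatSTRUMPACKSetHSSLeafSize(F, 128);
.ve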

233:    Level: beginner

235:    References:
236: .  * - STRUMPACK manual

238: .seealso: `MatGetFactor()`
239: @*/
240: PetscErrorCode MatSTRUMPACKSetHSSLeafSize(Mat F, PetscInt leaf_size)
241: {
244:   PetscTryMethod(F, "MatSTRUMPACKSetHSSLeafSize_C", (Mat, PetscInt), (F, leaf_size));
245:   return 0;
246: }

248: static PetscErrorCode MatSTRUMPACKSetHSSMinSepSize_STRUMPACK(Mat F, PetscInt hssminsize)
249: {
250:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;

252:   PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, hssminsize));
253:   return 0;
254: }

256: /*@
257:   MatSTRUMPACKSetHSSMinSepSize - Set STRUMPACK minimum separator size for low-rank approximation

259:    Logically Collective

261:    Input Parameters:
262: +  F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
 263: -  hssminsize - minimum separator size for HSS compression

265:   Options Database Key:
266: .   -mat_strumpack_hss_min_sep_size <hssminsize> - set the minimum separator size
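
  Example Usage:
  A minimal sketch, assuming `F` was obtained as described above; 512 is an illustrative separator size only
.vb
   MatSTRUMPACKSetHSSMinSepSize(F, 512);
.ve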

268:    Level: beginner

270:    References:
271: .  * - STRUMPACK manual

273: .seealso: `MatGetFactor()`
274: @*/
275: PetscErrorCode MatSTRUMPACKSetHSSMinSepSize(Mat F, PetscInt hssminsize)
276: {
279:   PetscTryMethod(F, "MatSTRUMPACKSetHSSMinSepSize_C", (Mat, PetscInt), (F, hssminsize));
280:   return 0;
281: }

283: static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x)
284: {
285:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->spptr;
286:   STRUMPACK_RETURN_CODE   sp_err;
287:   const PetscScalar      *bptr;
288:   PetscScalar            *xptr;

290:   VecGetArray(x, &xptr);
291:   VecGetArrayRead(b_mpi, &bptr);

293:   PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
294:   switch (sp_err) {
295:   case STRUMPACK_SUCCESS:
296:     break;
297:   case STRUMPACK_MATRIX_NOT_SET: {
298:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
299:     break;
300:   }
301:   case STRUMPACK_REORDERING_ERROR: {
302:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
303:     break;
304:   }
305:   default:
306:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
307:   }
308:   VecRestoreArray(x, &xptr);
309:   VecRestoreArrayRead(b_mpi, &bptr);
310:   return 0;
311: }

313: static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X)
314: {
315:   PetscBool flg;

 317:   PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL);
 318:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
 319:   PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL);
 320:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
 321:   SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatMatSolve_STRUMPACK() is not implemented yet");
322:   return 0;
323: }

325: static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer)
326: {
327:   /* check if matrix is strumpack type */
328:   if (A->ops->solve != MatSolve_STRUMPACK) return 0;
329:   PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n");
330:   return 0;
331: }

333: static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer)
334: {
335:   PetscBool         iascii;
336:   PetscViewerFormat format;

338:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
339:   if (iascii) {
340:     PetscViewerGetFormat(viewer, &format);
341:     if (format == PETSC_VIEWER_ASCII_INFO) MatView_Info_STRUMPACK(A, viewer);
342:   }
343:   return 0;
344: }

346: static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info)
347: {
348:   STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->spptr;
349:   STRUMPACK_RETURN_CODE   sp_err;
350:   Mat_SeqAIJ             *A_d, *A_o;
351:   Mat_MPIAIJ             *mat;
352:   PetscInt                M = A->rmap->N, m = A->rmap->n;
353:   PetscBool               flg;

355:   PetscObjectTypeCompare((PetscObject)A, MATMPIAIJ, &flg);
356:   if (flg) { /* A is MATMPIAIJ */
357:     mat = (Mat_MPIAIJ *)A->data;
358:     A_d = (Mat_SeqAIJ *)(mat->A)->data;
359:     A_o = (Mat_SeqAIJ *)(mat->B)->data;
360:     PetscStackCallExternalVoid("STRUMPACK_set_MPIAIJ_matrix", STRUMPACK_set_MPIAIJ_matrix(*S, &m, A_d->i, A_d->j, A_d->a, A_o->i, A_o->j, A_o->a, mat->garray));
361:   } else { /* A is MATSEQAIJ */
362:     A_d = (Mat_SeqAIJ *)A->data;
363:     PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, A_d->i, A_d->j, A_d->a, 0));
364:   }

366:   /* Reorder and Factor the matrix. */
367:   /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
368:   PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
369:   PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
370:   switch (sp_err) {
371:   case STRUMPACK_SUCCESS:
372:     break;
373:   case STRUMPACK_MATRIX_NOT_SET: {
374:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
375:     break;
376:   }
377:   case STRUMPACK_REORDERING_ERROR: {
378:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
379:     break;
380:   }
381:   default:
382:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
383:   }
384:   F->assembled    = PETSC_TRUE;
385:   F->preallocated = PETSC_TRUE;
386:   return 0;
387: }

389: static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
390: {
391:   STRUMPACK_SparseSolver       *S = (STRUMPACK_SparseSolver *)F->spptr;
392:   PetscBool                     flg, set;
393:   PetscReal                     ctol;
394:   PetscInt                      hssminsize, max_rank, leaf_size;
395:   STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
396:   STRUMPACK_KRYLOV_SOLVER       itcurrent, itsolver;
397:   const char *const             STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "STRUMPACKNDTypes", "", 0};
398:   const char *const             SolverTypes[]      = {"AUTO", "NONE", "REFINE", "PREC_GMRES", "GMRES", "PREC_BICGSTAB", "BICGSTAB", "SolverTypes", "", 0};

400:   /* Set options to F */
401:   PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "STRUMPACK Options", "Mat");
402:   PetscStackCallExternalVoid("STRUMPACK_HSS_rel_tol", ctol = (PetscReal)STRUMPACK_HSS_rel_tol(*S));
403:   PetscOptionsReal("-mat_strumpack_hss_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set);
404:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_rel_tol", STRUMPACK_set_HSS_rel_tol(*S, (double)ctol));

406:   PetscStackCallExternalVoid("STRUMPACK_HSS_abs_tol", ctol = (PetscReal)STRUMPACK_HSS_abs_tol(*S));
407:   PetscOptionsReal("-mat_strumpack_hss_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set);
408:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_abs_tol", STRUMPACK_set_HSS_abs_tol(*S, (double)ctol));

410:   PetscStackCallExternalVoid("STRUMPACK_mc64job", flg = (STRUMPACK_mc64job(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
411:   PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set);
412:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_mc64job", STRUMPACK_set_mc64job(*S, flg ? 5 : 0));

414:   PetscStackCallExternalVoid("STRUMPACK_HSS_min_sep_size", hssminsize = (PetscInt)STRUMPACK_HSS_min_sep_size(*S));
415:   PetscOptionsInt("-mat_strumpack_hss_min_sep_size", "Minimum size of separator for HSS compression", "None", hssminsize, &hssminsize, &set);
416:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_min_sep_size", STRUMPACK_set_HSS_min_sep_size(*S, (int)hssminsize));

418:   PetscStackCallExternalVoid("STRUMPACK_HSS_max_rank", max_rank = (PetscInt)STRUMPACK_HSS_max_rank(*S));
419:   PetscOptionsInt("-mat_strumpack_max_rank", "Maximum rank in HSS approximation", "None", max_rank, &max_rank, &set);
420:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_max_rank", STRUMPACK_set_HSS_max_rank(*S, (int)max_rank));

422:   PetscStackCallExternalVoid("STRUMPACK_HSS_leaf_size", leaf_size = (PetscInt)STRUMPACK_HSS_leaf_size(*S));
423:   PetscOptionsInt("-mat_strumpack_leaf_size", "Size of diagonal blocks in HSS approximation", "None", leaf_size, &leaf_size, &set);
424:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_HSS_leaf_size", STRUMPACK_set_HSS_leaf_size(*S, (int)leaf_size));

426:   PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
427:   PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set);
428:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));

430:   /* Disable the outer iterative solver from STRUMPACK.                                       */
431:   /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement.   */
432:   /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling       */
 433: /* low-rank compression), it will use its own preconditioned GMRES. Here we can disable     */
434:   /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP.                   */
435:   PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));

437:   PetscStackCallExternalVoid("STRUMPACK_Krylov_solver", itcurrent = STRUMPACK_Krylov_solver(*S));
438:   PetscOptionsEnum("-mat_strumpack_iterative_solver", "Select iterative solver from STRUMPACK", "None", SolverTypes, (PetscEnum)itcurrent, (PetscEnum *)&itsolver, &set);
439:   if (set) PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, itsolver));
440:   PetscOptionsEnd();

442:   F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
443:   F->ops->solve           = MatSolve_STRUMPACK;
444:   F->ops->matsolve        = MatMatSolve_STRUMPACK;
445:   return 0;
446: }

448: static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type)
449: {
450:   *type = MATSOLVERSTRUMPACK;
451:   return 0;
452: }

454: /*MC
 455:   MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
456:   and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK.

458:   Consult the STRUMPACK-sparse manual for more info.

460:   Use
461:      ./configure --download-strumpack
462:   to have PETSc installed with STRUMPACK

464:   Use
465:     -pc_type lu -pc_factor_mat_solver_type strumpack
466:   to use this as an exact (direct) solver, use
467:     -pc_type ilu -pc_factor_mat_solver_type strumpack
 468:   to enable low-rank compression (i.e., use as a preconditioner).
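
  Example Usage:
  A minimal sketch (illustrative only) of using this package as a direct solver through KSP, assuming `A`, `b`, and `x` are an assembled AIJ matrix and matching vectors
.vb
   KSP ksp;
   PC  pc;
   KSPCreate(PETSC_COMM_WORLD, &ksp);
   KSPSetOperators(ksp, A, A);
   KSPSetType(ksp, KSPPREONLY);
   KSPGetPC(ksp, &pc);
   PCSetType(pc, PCLU);
   PCFactorSetMatSolverType(pc, MATSOLVERSTRUMPACK);
   KSPSetFromOptions(ksp);
   KSPSolve(ksp, b, x);
.ve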

470:   Works with AIJ matrices

472:   Options Database Keys:
473: + -mat_strumpack_verbose                    - verbose info
474: . -mat_strumpack_hss_rel_tol <1e-2>         - Relative compression tolerance (None)
475: . -mat_strumpack_hss_abs_tol <1e-8>         - Absolute compression tolerance (None)
476: . -mat_strumpack_colperm <TRUE>             - Permute matrix to make diagonal nonzeros (None)
477: . -mat_strumpack_hss_min_sep_size <256>     - Minimum size of separator for HSS compression (None)
 478: . -mat_strumpack_max_rank                   - Maximum rank in HSS compression, when using PCILU (None)
 479: . -mat_strumpack_leaf_size                  - Size of diagonal blocks in HSS approximation, when using PCILU (None)
480: . -mat_strumpack_reordering <METIS>         - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM (None)
481: - -mat_strumpack_iterative_solver <DIRECT>  - Select iterative solver from STRUMPACK (choose one of) AUTO DIRECT REFINE PREC_GMRES GMRES PREC_BICGSTAB BICGSTAB (None)

483:  Level: beginner

485: .seealso: `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
486: M*/
487: static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F)
488: {
489:   Mat                       B;
490:   PetscInt                  M = A->rmap->N, N = A->cmap->N;
491:   PetscBool                 verb, flg;
492:   STRUMPACK_SparseSolver   *S;
493:   STRUMPACK_INTERFACE       iface;
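  /* Choose the STRUMPACK precision from the PETSc build: indexed as
     [64-bit vs 32-bit PetscInt][complex vs real PetscScalar][single vs double PetscReal] */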
494:   const STRUMPACK_PRECISION table[2][2][2] = {
495:     {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
496:     {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX},       {STRUMPACK_FLOAT, STRUMPACK_DOUBLE}      }
497:   };
498:   const STRUMPACK_PRECISION prec = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];

500:   /* Create the factorization matrix */
501:   MatCreate(PetscObjectComm((PetscObject)A), &B);
502:   MatSetSizes(B, A->rmap->n, A->cmap->n, M, N);
503:   MatSetType(B, ((PetscObject)A)->type_name);
504:   MatSeqAIJSetPreallocation(B, 0, NULL);
505:   MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL);
506:   B->trivialsymbolic = PETSC_TRUE;
507:   if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
508:     B->ops->lufactorsymbolic  = MatLUFactorSymbolic_STRUMPACK;
509:     B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
510:   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
511:   B->ops->view        = MatView_STRUMPACK;
512:   B->ops->destroy     = MatDestroy_STRUMPACK;
513:   B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
514:   PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack);
515:   PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK);
516:   PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK);
517:   PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSRelTol_C", MatSTRUMPACKSetHSSRelTol_STRUMPACK);
518:   PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSAbsTol_C", MatSTRUMPACKSetHSSAbsTol_STRUMPACK);
519:   PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMaxRank_C", MatSTRUMPACKSetHSSMaxRank_STRUMPACK);
520:   PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSLeafSize_C", MatSTRUMPACKSetHSSLeafSize_STRUMPACK);
521:   PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetHSSMinSepSize_C", MatSTRUMPACKSetHSSMinSepSize_STRUMPACK);
522:   B->factortype = ftype;
523:   PetscNew(&S);
524:   B->spptr = S;

526:   PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &flg);
527:   iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;

529:   PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
530:   verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
531:   PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL);

533:   PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));

535:   if (ftype == MAT_FACTOR_ILU) {
536:     /* When enabling HSS compression, the STRUMPACK solver becomes an incomplete              */
537:     /* (or approximate) LU factorization.                                                     */
538:     PetscStackCallExternalVoid("STRUMPACK_enable_HSS", STRUMPACK_enable_HSS(*S));
539:   }
540:   PetscOptionsEnd();

542:   /* set solvertype */
543:   PetscFree(B->solvertype);
544:   PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype);

546:   *F = B;
547:   return 0;
548: }

550: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
551: {
552:   MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack);
553:   MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack);
554:   MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack);
555:   MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack);
556:   return 0;
557: }