Actual source code: strumpack.c
1: #include <../src/mat/impls/aij/seq/aij.h>
2: #include <../src/mat/impls/aij/mpi/mpiaij.h>
3: #include <StrumpackSparseSolver.h>
/* MatGetDiagonal() is not available on a STRUMPACK factor matrix; always raises PETSC_ERR_SUP */
static PetscErrorCode MatGetDiagonal_STRUMPACK(Mat A, Vec v)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type: STRUMPACK factor");
  PetscFunctionReturn(PETSC_SUCCESS); /* not reached; keeps the function well-formed */
}
/* Destroy the STRUMPACK factor matrix: release the STRUMPACK solver object stored in
   A->data and detach every MatSTRUMPACK* method composed on the factor at creation time
   (composing NULL removes the function from the object) */
static PetscErrorCode MatDestroy_STRUMPACK(Mat A)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;

  PetscFunctionBegin;
  /* Deallocate STRUMPACK storage */
  PetscStackCallExternalVoid("STRUMPACK_destroy", STRUMPACK_destroy(S));
  PetscCall(PetscFree(A->data));

  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetReordering_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetReordering_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetColPerm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetColPerm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricNxyz_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricComponents_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGeometricWidth_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetGPU_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetGPU_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompression_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompression_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompRelTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompRelTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompAbsTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompAbsTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMaxRank_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMaxRank_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLeafSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLeafSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompMinSepSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompMinSepSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompLossyPrecision_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompLossyPrecision_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKSetCompButterflyLevels_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSTRUMPACKGetCompButterflyLevels_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetReordering(): forward the fill-reducing ordering
   choice to the STRUMPACK solver stored in F->data (enum values map one-to-one) */
static PetscErrorCode MatSTRUMPACKSetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering reordering)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, (STRUMPACK_REORDERING_STRATEGY)reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetReordering(): query the current ordering from STRUMPACK */
static PetscErrorCode MatSTRUMPACKGetReordering_STRUMPACK(Mat F, MatSTRUMPACKReordering *reordering)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_reordering_method", *reordering = (MatSTRUMPACKReordering)STRUMPACK_reordering_method(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
68: /*@
69: MatSTRUMPACKSetReordering - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> fill-reducing reordering
71: Logically Collective
73: Input Parameters:
74: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
75: - reordering - the code to be used to find the fill-reducing reordering
77: Options Database Key:
78: . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering, see `MatSTRUMPACKReordering`
80: Level: intermediate
82: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`, `MatSTRUMPACKGetReordering()`
83: @*/
PetscErrorCode MatSTRUMPACKSetReordering(Mat F, MatSTRUMPACKReordering reordering)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetReordering_C", (Mat, MatSTRUMPACKReordering), (F, reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
92: /*@
93: MatSTRUMPACKGetReordering - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> fill-reducing reordering
95: Logically Collective
97: Input Parameters:
98: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
100: Output Parameter:
101: . reordering - the code to be used to find the fill-reducing reordering
103: Level: intermediate
105: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatSTRUMPACKReordering`, `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
106: @*/
PetscErrorCode MatSTRUMPACKGetReordering(Mat F, MatSTRUMPACKReordering *reordering)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetReordering_C", (Mat, MatSTRUMPACKReordering *), (F, reordering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetColPerm(): cperm maps to STRUMPACK's matching option
   (max-diagonal-product scaling when PETSC_TRUE, no matching otherwise) */
static PetscErrorCode MatSTRUMPACKSetColPerm_STRUMPACK(Mat F, PetscBool cperm)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, cperm ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetColPerm(): report PETSC_TRUE iff any matching is enabled */
static PetscErrorCode MatSTRUMPACKGetColPerm_STRUMPACK(Mat F, PetscBool *cperm)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_matching", *cperm = (PetscBool)(STRUMPACK_matching(*S) != STRUMPACK_MATCHING_NONE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
133: /*@
134: MatSTRUMPACKSetColPerm - Set whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
135: should try to permute the columns of the matrix in order to get a nonzero diagonal
137: Logically Collective
139: Input Parameters:
140: + F - the factored matrix obtained by calling `MatGetFactor()`
141: - cperm - `PETSC_TRUE` to permute (internally) the columns of the matrix
143: Options Database Key:
144: . -mat_strumpack_colperm <cperm> - true to use the permutation
146: Level: intermediate
148: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetColPerm()`
149: @*/
PetscErrorCode MatSTRUMPACKSetColPerm(Mat F, PetscBool cperm)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetColPerm_C", (Mat, PetscBool), (F, cperm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
158: /*@
159: MatSTRUMPACKGetColPerm - Get whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
160: will try to permute the columns of the matrix in order to get a nonzero diagonal
162: Logically Collective
164: Input Parameters:
165: . F - the factored matrix obtained by calling `MatGetFactor()`
167: Output Parameter:
168: . cperm - Indicates whether STRUMPACK will permute columns
170: Level: intermediate
172: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `MatSTRUMPACKSetReordering()`, `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetColPerm()`
173: @*/
PetscErrorCode MatSTRUMPACKGetColPerm(Mat F, PetscBool *cperm)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetColPerm_C", (Mat, PetscBool *), (F, cperm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetGPU(): enable/disable GPU offload in STRUMPACK.
   If STRUMPACK was built without CUDA/HIP/SYCL we still call enable (STRUMPACK then
   ignores it) but warn the user first */
static PetscErrorCode MatSTRUMPACKSetGPU_STRUMPACK(Mat F, PetscBool gpu)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  if (gpu) {
#if !(defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL))
    PetscCall(PetscPrintf(PETSC_COMM_SELF, " Warning: strumpack was not configured with GPU support\n"));
#endif
    PetscStackCallExternalVoid("STRUMPACK_enable_gpu", STRUMPACK_enable_gpu(*S));
  } else PetscStackCallExternalVoid("STRUMPACK_disable_gpu", STRUMPACK_disable_gpu(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetGPU(): query whether GPU offload is enabled */
static PetscErrorCode MatSTRUMPACKGetGPU_STRUMPACK(Mat F, PetscBool *gpu)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_use_gpu", *gpu = (PetscBool)STRUMPACK_use_gpu(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
205: /*@
206: MatSTRUMPACKSetGPU - Set whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
207: should enable GPU acceleration (not supported for all compression types)
209: Logically Collective
211: Input Parameters:
212: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
213: - gpu - whether or not to use GPU acceleration
215: Options Database Key:
216: . -mat_strumpack_gpu <gpu> - true to use gpu offload
218: Level: intermediate
220: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetGPU()`
221: @*/
PetscErrorCode MatSTRUMPACKSetGPU(Mat F, PetscBool gpu)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGPU_C", (Mat, PetscBool), (F, gpu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
230: /*@
231: MatSTRUMPACKGetGPU - Get whether STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
232: will try to use GPU acceleration (not supported for all compression types)
234: Logically Collective
236: Input Parameters:
237: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
239: Output Parameter:
240: . gpu - whether or not STRUMPACK will try to use GPU acceleration
242: Level: intermediate
244: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetGPU()`
245: @*/
PetscErrorCode MatSTRUMPACKGetGPU(Mat F, PetscBool *gpu)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetGPU_C", (Mat, PetscBool *), (F, gpu));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetCompression(): validate that the requested scheme is
   supported by this STRUMPACK build (HODLR variants need ButterflyPACK, ZFP variants need
   ZFP), then forward the compression type */
static PetscErrorCode MatSTRUMPACKSetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType comp)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
#if !defined(STRUMPACK_USE_BPACK)
  PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ButterflyPACK, please reconfigure with --download-butterflypack");
#endif
#if !defined(STRUMPACK_USE_ZFP)
  PetscCheck(comp != MAT_STRUMPACK_COMPRESSION_TYPE_ZFP_BLR_HODLR && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSLESS && comp != MAT_STRUMPACK_COMPRESSION_TYPE_LOSSY, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Compression scheme requires ZFP, please reconfigure with --download-zfp");
#endif
  PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, (STRUMPACK_COMPRESSION_TYPE)comp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetCompression(): query the active compression type */
static PetscErrorCode MatSTRUMPACKGetCompression_STRUMPACK(Mat F, MatSTRUMPACKCompressionType *comp)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression", *comp = (MatSTRUMPACKCompressionType)STRUMPACK_compression(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
278: /*@
279: MatSTRUMPACKSetCompression - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> compression type
281: Input Parameters:
282: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
283: - comp - Type of compression to be used in the approximate sparse factorization
285: Options Database Key:
286: . -mat_strumpack_compression <NONE> - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY
288: Level: intermediate
290: Note:
291: Default for `comp` is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR`
292: for `-pc_type ilu`
294: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKGetCompression()`
295: @*/
PetscErrorCode MatSTRUMPACKSetCompression(Mat F, MatSTRUMPACKCompressionType comp)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompression_C", (Mat, MatSTRUMPACKCompressionType), (F, comp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
304: /*@
305: MatSTRUMPACKGetCompression - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> compression type
307: Input Parameters:
308: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
310: Output Parameter:
311: . comp - Type of compression to be used in the approximate sparse factorization
313: Level: intermediate
315: Note:
316: Default is `MAT_STRUMPACK_COMPRESSION_TYPE_NONE` for `-pc_type lu` and `MAT_STRUMPACK_COMPRESSION_TYPE_BLR` for `-pc_type ilu`
318: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetCompression()`
319: @*/
PetscErrorCode MatSTRUMPACKGetCompression(Mat F, MatSTRUMPACKCompressionType *comp)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetCompression_C", (Mat, MatSTRUMPACKCompressionType *), (F, comp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetCompRelTol(): forward the relative compression tolerance */
static PetscErrorCode MatSTRUMPACKSetCompRelTol_STRUMPACK(Mat F, PetscReal rtol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetCompRelTol(): query the relative compression tolerance */
static PetscErrorCode MatSTRUMPACKGetCompRelTol_STRUMPACK(Mat F, PetscReal *rtol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", *rtol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
346: /*@
347: MatSTRUMPACKSetCompRelTol - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> relative tolerance for compression
349: Logically Collective
351: Input Parameters:
352: + F - the factored matrix obtained by calling `MatGetFactor()`
353: - rtol - relative compression tolerance
355: Options Database Key:
. -mat_strumpack_compression_rel_tol <1e-4> - Relative compression tolerance, when using `-pc_type ilu`
358: Level: intermediate
360: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
361: @*/
PetscErrorCode MatSTRUMPACKSetCompRelTol(Mat F, PetscReal rtol)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompRelTol_C", (Mat, PetscReal), (F, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
370: /*@
371: MatSTRUMPACKGetCompRelTol - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> relative tolerance for compression
373: Logically Collective
375: Input Parameters:
376: . F - the factored matrix obtained by calling `MatGetFactor()`
378: Output Parameter:
379: . rtol - relative compression tolerance
381: Level: intermediate
383: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompRelTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
384: @*/
PetscErrorCode MatSTRUMPACKGetCompRelTol(Mat F, PetscReal *rtol)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetCompRelTol_C", (Mat, PetscReal *), (F, rtol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetCompAbsTol(): forward the absolute compression tolerance */
static PetscErrorCode MatSTRUMPACKSetCompAbsTol_STRUMPACK(Mat F, PetscReal atol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetCompAbsTol(): query the absolute compression tolerance */
static PetscErrorCode MatSTRUMPACKGetCompAbsTol_STRUMPACK(Mat F, PetscReal *atol)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", *atol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
411: /*@
412: MatSTRUMPACKSetCompAbsTol - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> absolute tolerance for compression
414: Logically Collective
416: Input Parameters:
417: + F - the factored matrix obtained by calling `MatGetFactor()`
418: - atol - absolute compression tolerance
420: Options Database Key:
. -mat_strumpack_compression_abs_tol <1e-10> - Absolute compression tolerance, when using `-pc_type ilu`
423: Level: intermediate
425: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
426: @*/
PetscErrorCode MatSTRUMPACKSetCompAbsTol(Mat F, PetscReal atol)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompAbsTol_C", (Mat, PetscReal), (F, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
435: /*@
436: MatSTRUMPACKGetCompAbsTol - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> absolute tolerance for compression
438: Logically Collective
440: Input Parameters:
441: . F - the factored matrix obtained by calling `MatGetFactor()`
443: Output Parameter:
444: . atol - absolute compression tolerance
446: Level: intermediate
448: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompAbsTol()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
449: @*/
PetscErrorCode MatSTRUMPACKGetCompAbsTol(Mat F, PetscReal *atol)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetCompAbsTol_C", (Mat, PetscReal *), (F, atol));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetCompLeafSize(): forward the rank-structured leaf size */
static PetscErrorCode MatSTRUMPACKSetCompLeafSize_STRUMPACK(Mat F, PetscInt leaf_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetCompLeafSize(): query the rank-structured leaf size */
static PetscErrorCode MatSTRUMPACKGetCompLeafSize_STRUMPACK(Mat F, PetscInt *leaf_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", *leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
476: /*@
477: MatSTRUMPACKSetCompLeafSize - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> leaf size for HSS, BLR, HODLR...
479: Logically Collective
481: Input Parameters:
482: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
483: - leaf_size - Size of diagonal blocks in rank-structured approximation
485: Options Database Key:
. -mat_strumpack_compression_leaf_size - Size of diagonal blocks in rank-structured approximation, when using `-pc_type ilu`
488: Level: intermediate
490: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKGetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
491: @*/
PetscErrorCode MatSTRUMPACKSetCompLeafSize(Mat F, PetscInt leaf_size)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompLeafSize_C", (Mat, PetscInt), (F, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
500: /*@
501: MatSTRUMPACKGetCompLeafSize - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> leaf size for HSS, BLR, HODLR...
503: Logically Collective
505: Input Parameters:
506: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
508: Output Parameter:
509: . leaf_size - Size of diagonal blocks in rank-structured approximation
511: Level: intermediate
513: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSTRUMPACKSetCompLeafSize()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKSetColPerm()`
514: @*/
PetscErrorCode MatSTRUMPACKGetCompLeafSize(Mat F, PetscInt *leaf_size)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetCompLeafSize_C", (Mat, PetscInt *), (F, leaf_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetGeometricNxyz(): pass the structured-mesh dimensions to
   STRUMPACK for its GEOMETRIC nested-dissection ordering. PETSC_DECIDE/PETSC_DEFAULT are
   accepted for a missing (y or z) dimension and mapped to 1; any other value < 1 is an error */
static PetscErrorCode MatSTRUMPACKSetGeometricNxyz_STRUMPACK(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  if (nx < 1) {
    PetscCheck(nx == PETSC_DECIDE || nx == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nx < 1");
    nx = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_nx", STRUMPACK_set_nx(*S, nx));
  if (ny < 1) {
    PetscCheck(ny == PETSC_DECIDE || ny == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "ny < 1");
    ny = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_ny", STRUMPACK_set_ny(*S, ny));
  if (nz < 1) {
    PetscCheck(nz == PETSC_DECIDE || nz == PETSC_DEFAULT, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "nz < 1");
    nz = 1;
  }
  PetscStackCallExternalVoid("STRUMPACK_set_nz", STRUMPACK_set_nz(*S, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetGeometricComponents(): dof's per grid point for GEOMETRIC ordering */
static PetscErrorCode MatSTRUMPACKSetGeometricComponents_STRUMPACK(Mat F, PetscInt nc)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, nc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetGeometricWidth(): separator width for GEOMETRIC ordering */
static PetscErrorCode MatSTRUMPACKSetGeometricWidth_STRUMPACK(Mat F, PetscInt w)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
563: /*@
564: MatSTRUMPACKSetGeometricNxyz - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> mesh x, y and z dimensions, for use with GEOMETRIC ordering.
566: Logically Collective
568: Input Parameters:
569: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
570: . nx - x dimension of the mesh
571: . ny - y dimension of the mesh
572: - nz - z dimension of the mesh
574: Level: intermediate
576: Note:
577: If the mesh is two (or one) dimensional one can use 1, `PETSC_DECIDE` or `PETSC_DEFAULT`
578: for the missing z (and y) dimensions.
580: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
581: @*/
PetscErrorCode MatSTRUMPACKSetGeometricNxyz(Mat F, PetscInt nx, PetscInt ny, PetscInt nz)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricNxyz_C", (Mat, PetscInt, PetscInt, PetscInt), (F, nx, ny, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
592: /*@
593: MatSTRUMPACKSetGeometricComponents - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
594: number of degrees of freedom per mesh point, for use with GEOMETRIC ordering.
596: Logically Collective
598: Input Parameters:
599: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
600: - nc - Number of components/dof's per grid point
602: Options Database Key:
603: . -mat_strumpack_geometric_components <1> - Number of components per mesh point, for geometric nested dissection ordering
605: Level: intermediate
607: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
608: @*/
PetscErrorCode MatSTRUMPACKSetGeometricComponents(Mat F, PetscInt nc)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricComponents_C", (Mat, PetscInt), (F, nc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
617: /*@
618: MatSTRUMPACKSetGeometricWidth - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> width of the separator, for use with GEOMETRIC ordering.
620: Logically Collective
622: Input Parameters:
623: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
624: - w - width of the separator
626: Options Database Key:
627: . -mat_strumpack_geometric_width <1> - Width of the separator of the mesh, for geometric nested dissection ordering
629: Level: intermediate
631: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`
632: @*/
PetscErrorCode MatSTRUMPACKSetGeometricWidth(Mat F, PetscInt w)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetGeometricWidth_C", (Mat, PetscInt), (F, w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetCompMinSepSize(): minimum separator size for compression */
static PetscErrorCode MatSTRUMPACKSetCompMinSepSize_STRUMPACK(Mat F, PetscInt min_sep_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, min_sep_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetCompMinSepSize(): query the minimum separator size */
static PetscErrorCode MatSTRUMPACKGetCompMinSepSize_STRUMPACK(Mat F, PetscInt *min_sep_size)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", *min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
659: /*@
660: MatSTRUMPACKSetCompMinSepSize - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> minimum separator size for low-rank approximation
662: Logically Collective
664: Input Parameters:
665: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
666: - min_sep_size - minimum dense matrix size for low-rank approximation
668: Options Database Key:
669: . -mat_strumpack_compression_min_sep_size <min_sep_size> - Minimum size of dense sub-block for low-rank compression
671: Level: intermediate
673: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompMinSepSize()`
674: @*/
PetscErrorCode MatSTRUMPACKSetCompMinSepSize(Mat F, PetscInt min_sep_size)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompMinSepSize_C", (Mat, PetscInt), (F, min_sep_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
683: /*@
684: MatSTRUMPACKGetCompMinSepSize - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> minimum separator size for low-rank approximation
686: Logically Collective
688: Input Parameters:
689: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
691: Output Parameter:
692: . min_sep_size - minimum dense matrix size for low-rank approximation
694: Level: intermediate
696: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompMinSepSize()`
697: @*/
PetscErrorCode MatSTRUMPACKGetCompMinSepSize(Mat F, PetscInt *min_sep_size)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetCompMinSepSize_C", (Mat, PetscInt *), (F, min_sep_size));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetCompLossyPrecision(): bitplanes used by ZFP lossy compression */
static PetscErrorCode MatSTRUMPACKSetCompLossyPrecision_STRUMPACK(Mat F, PetscInt lossy_prec)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, lossy_prec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetCompLossyPrecision(): query the lossy-compression precision */
static PetscErrorCode MatSTRUMPACKGetCompLossyPrecision_STRUMPACK(Mat F, PetscInt *lossy_prec)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", *lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
724: /*@
725: MatSTRUMPACKSetCompLossyPrecision - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> precision for lossy compression (requires ZFP support)
727: Logically Collective
729: Input Parameters:
730: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
731: - lossy_prec - Number of bitplanes to use in lossy compression
733: Options Database Key:
734: . -mat_strumpack_compression_lossy_precision <lossy_prec> - Precision when using lossy compression [1-64], when using `-pctype ilu -mat_strumpack_compression MAT_STRUMPACK_COMPRESSION_TYPE_LOSSY`
736: Level: intermediate
738: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompLossyPrecision()`
739: @*/
PetscErrorCode MatSTRUMPACKSetCompLossyPrecision(Mat F, PetscInt lossy_prec)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKSetCompLossyPrecision_C", (Mat, PetscInt), (F, lossy_prec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
748: /*@
749: MatSTRUMPACKGetCompLossyPrecision - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master> precision for lossy compression (requires ZFP support)
751: Logically Collective
753: Input Parameters:
754: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
756: Output Parameter:
757: . lossy_prec - Number of bitplanes to use in lossy compression
759: Level: intermediate
761: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompLossyPrecision()`
762: @*/
PetscErrorCode MatSTRUMPACKGetCompLossyPrecision(Mat F, PetscInt *lossy_prec)
{
  PetscFunctionBegin;
  /* dispatch to the composed implementation; a no-op if F is not a STRUMPACK factor */
  PetscTryMethod(F, "MatSTRUMPACKGetCompLossyPrecision_C", (Mat, PetscInt *), (F, lossy_prec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKSetCompButterflyLevels(): butterfly levels for HODLR compression */
static PetscErrorCode MatSTRUMPACKSetCompButterflyLevels_STRUMPACK(Mat F, PetscInt bfly_lvls)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, bfly_lvls));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Implementation of MatSTRUMPACKGetCompButterflyLevels(): query the butterfly level count */
static PetscErrorCode MatSTRUMPACKGetCompButterflyLevels_STRUMPACK(Mat F, PetscInt *bfly_lvls)
{
  STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;

  PetscFunctionBegin;
  PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", *bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
  PetscFunctionReturn(PETSC_SUCCESS);
}
789: /*@
790: MatSTRUMPACKSetCompButterflyLevels - Set STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
791: number of butterfly levels in HODLR compression (requires ButterflyPACK support)
793: Logically Collective
795: Input Parameters:
796: + F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
797: - bfly_lvls - Number of levels of butterfly compression in HODLR compression
799: Options Database Key:
. -mat_strumpack_compression_butterfly_levels <bfly_lvls> - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly,
                                                            when using `-pc_type ilu`, (BLR_)HODLR compression
803: Level: intermediate
805: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKGetCompButterflyLevels()`
806: @*/
807: PetscErrorCode MatSTRUMPACKSetCompButterflyLevels(Mat F, PetscInt bfly_lvls)
808: {
809: PetscFunctionBegin;
812: PetscTryMethod(F, "MatSTRUMPACKSetButterflyLevels_C", (Mat, PetscInt), (F, bfly_lvls));
813: PetscFunctionReturn(PETSC_SUCCESS);
814: }
815: /*@
816: MatSTRUMPACKGetCompButterflyLevels - Get STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>
817: number of butterfly levels in HODLR compression (requires ButterflyPACK support)
819: Logically Collective
821: Input Parameters:
822: . F - the factored matrix obtained by calling `MatGetFactor()` from PETSc-STRUMPACK interface
824: Output Parameter:
825: . bfly_lvls - Number of levels of butterfly compression in HODLR compression
827: Level: intermediate
829: .seealso: `MATSOLVERSTRUMPACK`, `MatGetFactor()`, `MatSTRUMPACKSetCompButterflyLevels()`
830: @*/
831: PetscErrorCode MatSTRUMPACKGetCompButterflyLevels(Mat F, PetscInt *bfly_lvls)
832: {
833: PetscFunctionBegin;
835: PetscTryMethod(F, "MatSTRUMPACKGetButterflyLevels_C", (Mat, PetscInt *), (F, bfly_lvls));
837: PetscFunctionReturn(PETSC_SUCCESS);
838: }
840: static PetscErrorCode MatSolve_STRUMPACK(Mat A, Vec b_mpi, Vec x)
841: {
842: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
843: STRUMPACK_RETURN_CODE sp_err;
844: const PetscScalar *bptr;
845: PetscScalar *xptr;
847: PetscFunctionBegin;
848: PetscCall(VecGetArray(x, &xptr));
849: PetscCall(VecGetArrayRead(b_mpi, &bptr));
851: PetscStackCallExternalVoid("STRUMPACK_solve", sp_err = STRUMPACK_solve(*S, (PetscScalar *)bptr, xptr, 0));
852: switch (sp_err) {
853: case STRUMPACK_SUCCESS:
854: break;
855: case STRUMPACK_MATRIX_NOT_SET: {
856: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
857: break;
858: }
859: case STRUMPACK_REORDERING_ERROR: {
860: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
861: break;
862: }
863: default:
864: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
865: }
866: PetscCall(VecRestoreArray(x, &xptr));
867: PetscCall(VecRestoreArrayRead(b_mpi, &bptr));
868: PetscFunctionReturn(PETSC_SUCCESS);
869: }
871: static PetscErrorCode MatMatSolve_STRUMPACK(Mat A, Mat B_mpi, Mat X)
872: {
873: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)A->data;
874: STRUMPACK_RETURN_CODE sp_err;
875: PetscBool flg;
876: PetscInt m = A->rmap->n, nrhs;
877: const PetscScalar *bptr;
878: PetscScalar *xptr;
880: PetscFunctionBegin;
881: PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
882: PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
883: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
884: PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
886: PetscCall(MatGetSize(B_mpi, NULL, &nrhs));
887: PetscCall(MatDenseGetArray(X, &xptr));
888: PetscCall(MatDenseGetArrayRead(B_mpi, &bptr));
890: PetscStackCallExternalVoid("STRUMPACK_matsolve", sp_err = STRUMPACK_matsolve(*S, nrhs, bptr, m, xptr, m, 0));
891: switch (sp_err) {
892: case STRUMPACK_SUCCESS:
893: break;
894: case STRUMPACK_MATRIX_NOT_SET: {
895: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
896: break;
897: }
898: case STRUMPACK_REORDERING_ERROR: {
899: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
900: break;
901: }
902: default:
903: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: solve failed");
904: }
905: PetscCall(MatDenseRestoreArrayRead(B_mpi, &bptr));
906: PetscCall(MatDenseRestoreArray(X, &xptr));
907: PetscFunctionReturn(PETSC_SUCCESS);
908: }
910: static PetscErrorCode MatView_Info_STRUMPACK(Mat A, PetscViewer viewer)
911: {
912: PetscFunctionBegin;
913: /* check if matrix is strumpack type */
914: if (A->ops->solve != MatSolve_STRUMPACK) PetscFunctionReturn(PETSC_SUCCESS);
915: PetscCall(PetscViewerASCIIPrintf(viewer, "STRUMPACK sparse solver!\n"));
916: PetscFunctionReturn(PETSC_SUCCESS);
917: }
919: static PetscErrorCode MatView_STRUMPACK(Mat A, PetscViewer viewer)
920: {
921: PetscBool iascii;
922: PetscViewerFormat format;
924: PetscFunctionBegin;
925: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
926: if (iascii) {
927: PetscCall(PetscViewerGetFormat(viewer, &format));
928: if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_STRUMPACK(A, viewer));
929: }
930: PetscFunctionReturn(PETSC_SUCCESS);
931: }
933: static PetscErrorCode MatLUFactorNumeric_STRUMPACK(Mat F, Mat A, const MatFactorInfo *info)
934: {
935: STRUMPACK_SparseSolver *S = (STRUMPACK_SparseSolver *)F->data;
936: STRUMPACK_RETURN_CODE sp_err;
937: Mat Aloc;
938: const PetscScalar *av;
939: const PetscInt *ai = NULL, *aj = NULL;
940: PetscInt M = A->rmap->N, m = A->rmap->n, dummy;
941: PetscBool ismpiaij, isseqaij, flg;
943: PetscFunctionBegin;
944: PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
945: PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
946: if (ismpiaij) {
947: PetscCall(MatMPIAIJGetLocalMat(A, MAT_INITIAL_MATRIX, &Aloc));
948: } else if (isseqaij) {
949: PetscCall(PetscObjectReference((PetscObject)A));
950: Aloc = A;
951: } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)A)->type_name);
953: PetscCall(MatGetRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
954: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "GetRowIJ failed");
955: PetscCall(MatSeqAIJGetArrayRead(Aloc, &av));
957: if (ismpiaij) {
958: const PetscInt *dist = NULL;
959: PetscCall(MatGetOwnershipRanges(A, &dist));
960: PetscStackCallExternalVoid("STRUMPACK_set_distributed_csr_matrix", STRUMPACK_set_distributed_csr_matrix(*S, &m, ai, aj, av, dist, 0));
961: } else if (isseqaij) {
962: PetscStackCallExternalVoid("STRUMPACK_set_csr_matrix", STRUMPACK_set_csr_matrix(*S, &M, ai, aj, av, 0));
963: }
965: PetscCall(MatRestoreRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &dummy, &ai, &aj, &flg));
966: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "RestoreRowIJ failed");
967: PetscCall(MatSeqAIJRestoreArrayRead(Aloc, &av));
968: PetscCall(MatDestroy(&Aloc));
970: /* Reorder and Factor the matrix. */
971: /* TODO figure out how to avoid reorder if the matrix values changed, but the pattern remains the same. */
972: PetscStackCallExternalVoid("STRUMPACK_reorder", sp_err = STRUMPACK_reorder(*S));
973: PetscStackCallExternalVoid("STRUMPACK_factor", sp_err = STRUMPACK_factor(*S));
974: switch (sp_err) {
975: case STRUMPACK_SUCCESS:
976: break;
977: case STRUMPACK_MATRIX_NOT_SET: {
978: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix was not set");
979: break;
980: }
981: case STRUMPACK_REORDERING_ERROR: {
982: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: matrix reordering failed");
983: break;
984: }
985: default:
986: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "STRUMPACK error: factorization failed");
987: }
988: F->assembled = PETSC_TRUE;
989: F->preallocated = PETSC_TRUE;
990: PetscFunctionReturn(PETSC_SUCCESS);
991: }
993: static PetscErrorCode MatLUFactorSymbolic_STRUMPACK(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
994: {
995: PetscFunctionBegin;
996: F->ops->lufactornumeric = MatLUFactorNumeric_STRUMPACK;
997: F->ops->solve = MatSolve_STRUMPACK;
998: F->ops->matsolve = MatMatSolve_STRUMPACK;
999: PetscFunctionReturn(PETSC_SUCCESS);
1000: }
/* Report which solver package produced this factor matrix (always "strumpack" here) */
static PetscErrorCode MatFactorGetSolverType_aij_strumpack(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSTRUMPACK;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1009: /*MC
1010: MATSOLVERSTRUMPACK = "strumpack" - A solver package providing a direct sparse solver (PCLU)
1011: and a preconditioner (PCILU) using low-rank compression via the external package STRUMPACK <https://portal.nersc.gov/project/sparse/strumpack/master>.
1013: Use `./configure --download-strumpack --download-metis` to have PETSc installed with STRUMPACK.
1015: For full functionality, add `--download-slate --download-magma --download-parmetis --download-ptscotch --download-zfp --download-butterflypack`.
1016: SLATE provides GPU support in the multi-GPU setting, providing ScaLAPACK functionality but with GPU acceleration.
  MAGMA can optionally be used for on node GPU support instead of cuBLAS/cuSOLVER, and performs slightly better.
1018: ParMETIS and PTScotch can be used for parallel fill-reducing ordering.
1019: ZFP is used for floating point compression of the sparse factors (LOSSY or LOSSLESS compression).
1020: ButterflyPACK is used for HODLR (Hierarchically Off-Diagonal Low Rank) and HODBF (Hierarchically Off-Diagonal Butterfly) compression of the sparse factors.
1022: Options Database Keys:
1023: + -mat_strumpack_verbose - Enable verbose output
1024: . -mat_strumpack_compression - Type of rank-structured compression in sparse LU factors (choose one of) NONE HSS BLR HODLR BLR_HODLR ZFP_BLR_HODLR LOSSLESS LOSSY
. -mat_strumpack_compression_rel_tol - Relative compression tolerance, when using `-pc_type ilu`
. -mat_strumpack_compression_abs_tol - Absolute compression tolerance, when using `-pc_type ilu`
. -mat_strumpack_compression_min_sep_size - Minimum size of separator for rank-structured compression, when using `-pc_type ilu`
. -mat_strumpack_compression_leaf_size - Size of diagonal blocks in rank-structured approximation, when using `-pc_type ilu`
. -mat_strumpack_compression_lossy_precision - Precision when using lossy compression [1-64], when using `-pc_type ilu`, compression LOSSY (requires ZFP support)
. -mat_strumpack_compression_butterfly_levels - Number of levels in the hierarchically off-diagonal matrix for which to use butterfly, when using `-pc_type ilu`, (BLR_)HODLR compression (requires ButterflyPACK support)
1031: . -mat_strumpack_gpu - Enable GPU acceleration in numerical factorization (not supported for all compression types)
1032: . -mat_strumpack_colperm <TRUE> - Permute matrix to make diagonal nonzeros
1033: . -mat_strumpack_reordering <METIS> - Sparsity reducing matrix reordering (choose one of) NATURAL METIS PARMETIS SCOTCH PTSCOTCH RCM GEOMETRIC AMD MMD AND MLF SPECTRAL
1034: . -mat_strumpack_geometric_xyz <1,1,1> - Mesh x,y,z dimensions, for use with GEOMETRIC ordering
1035: . -mat_strumpack_geometric_components <1> - Number of components per mesh point, for geometric nested dissection ordering
1036: . -mat_strumpack_geometric_width <1> - Width of the separator of the mesh, for geometric nested dissection ordering
1037: - -mat_strumpack_metis_nodeNDP - Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree
1039: Level: beginner
1041: Notes:
1042: Recommended use is 1 MPI process per GPU.
1044: Use `-pc_type lu` `-pc_factor_mat_solver_type strumpack` to use this as an exact (direct) solver.
1046: Use `-pc_type ilu` `-pc_factor_mat_solver_type strumpack` to enable low-rank compression (i.e, use as a preconditioner), by default using block low rank (BLR).
1048: Works with `MATAIJ` matrices
  HODLR, BLR_HODLR and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ButterflyPACK support (`--download-butterflypack`).
1052: LOSSY, LOSSLESS and ZFP_BLR_HODLR compression require STRUMPACK to be configured with ZFP support (`--download-zfp`).
1054: .seealso: `MATSOLVERSTRUMPACK`, [](ch_matrices), `Mat`, `PCLU`, `PCILU`, `MATSOLVERSUPERLU_DIST`, `MATSOLVERMUMPS`, `PCFactorSetMatSolverType()`, `MatSolverType`,
1055: `MatGetFactor()`, `MatSTRUMPACKSetReordering()`, `MatSTRUMPACKReordering`, `MatSTRUMPACKCompressionType`, `MatSTRUMPACKSetColPerm()`.
1056: M*/
1057: static PetscErrorCode MatGetFactor_aij_strumpack(Mat A, MatFactorType ftype, Mat *F)
1058: {
1059: Mat B;
1060: PetscInt M = A->rmap->N, N = A->cmap->N;
1061: PetscBool verb, flg, set;
1062: PetscReal ctol;
1063: PetscInt min_sep_size, leaf_size, nxyz[3], nrdims, nc, w;
1064: #if defined(STRUMPACK_USE_ZFP)
1065: PetscInt lossy_prec;
1066: #endif
1067: #if defined(STRUMPACK_USE_BPACK)
1068: PetscInt bfly_lvls;
1069: #endif
1070: #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1071: PetscMPIInt mpithreads;
1072: #endif
1073: STRUMPACK_SparseSolver *S;
1074: STRUMPACK_INTERFACE iface;
1075: STRUMPACK_REORDERING_STRATEGY ndcurrent, ndvalue;
1076: STRUMPACK_COMPRESSION_TYPE compcurrent, compvalue;
1077: const STRUMPACK_PRECISION table[2][2][2] = {
1078: {{STRUMPACK_FLOATCOMPLEX_64, STRUMPACK_DOUBLECOMPLEX_64}, {STRUMPACK_FLOAT_64, STRUMPACK_DOUBLE_64}},
1079: {{STRUMPACK_FLOATCOMPLEX, STRUMPACK_DOUBLECOMPLEX}, {STRUMPACK_FLOAT, STRUMPACK_DOUBLE} }
1080: };
1081: const STRUMPACK_PRECISION prec = table[(sizeof(PetscInt) == 8) ? 0 : 1][(PETSC_SCALAR == PETSC_COMPLEX) ? 0 : 1][(PETSC_REAL == PETSC_FLOAT) ? 0 : 1];
1082: const char *const STRUMPACKNDTypes[] = {"NATURAL", "METIS", "PARMETIS", "SCOTCH", "PTSCOTCH", "RCM", "GEOMETRIC", "AMD", "MMD", "AND", "MLF", "SPECTRAL", "STRUMPACKNDTypes", "", 0};
1083: const char *const CompTypes[] = {"NONE", "HSS", "BLR", "HODLR", "BLR_HODLR", "ZFP_BLR_HODLR", "LOSSLESS", "LOSSY", "CompTypes", "", 0};
1085: PetscFunctionBegin;
1086: #if defined(STRUMPACK_USE_SLATE_SCALAPACK)
1087: PetscCallMPI(MPI_Query_thread(&mpithreads));
1088: PetscCheck(mpithreads == MPI_THREAD_MULTIPLE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP_SYS, "SLATE requires MPI_THREAD_MULTIPLE");
1089: #endif
1090: /* Create the factorization matrix */
1091: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
1092: PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
1093: PetscCall(PetscStrallocpy("strumpack", &((PetscObject)B)->type_name));
1094: PetscCall(MatSetUp(B));
1095: PetscCall(MatSeqAIJSetPreallocation(B, 0, NULL));
1096: PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
1097: B->trivialsymbolic = PETSC_TRUE;
1098: if (ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_ILU) {
1099: B->ops->lufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
1100: B->ops->ilufactorsymbolic = MatLUFactorSymbolic_STRUMPACK;
1101: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported");
1102: B->ops->getinfo = MatGetInfo_External;
1103: B->ops->view = MatView_STRUMPACK;
1104: B->ops->destroy = MatDestroy_STRUMPACK;
1105: B->ops->getdiagonal = MatGetDiagonal_STRUMPACK;
1106: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_strumpack));
1107: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetReordering_C", MatSTRUMPACKSetReordering_STRUMPACK));
1108: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetReordering_C", MatSTRUMPACKGetReordering_STRUMPACK));
1109: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetColPerm_C", MatSTRUMPACKSetColPerm_STRUMPACK));
1110: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetColPerm_C", MatSTRUMPACKGetColPerm_STRUMPACK));
1111: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricNxyz_C", MatSTRUMPACKSetGeometricNxyz_STRUMPACK));
1112: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricComponents_C", MatSTRUMPACKSetGeometricComponents_STRUMPACK));
1113: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGeometricWidth_C", MatSTRUMPACKSetGeometricWidth_STRUMPACK));
1114: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetGPU_C", MatSTRUMPACKSetGPU_STRUMPACK));
1115: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetGPU_C", MatSTRUMPACKGetGPU_STRUMPACK));
1116: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompression_C", MatSTRUMPACKSetCompression_STRUMPACK));
1117: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompression_C", MatSTRUMPACKGetCompression_STRUMPACK));
1118: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompRelTol_C", MatSTRUMPACKSetCompRelTol_STRUMPACK));
1119: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompRelTol_C", MatSTRUMPACKGetCompRelTol_STRUMPACK));
1120: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompAbsTol_C", MatSTRUMPACKSetCompAbsTol_STRUMPACK));
1121: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompAbsTol_C", MatSTRUMPACKGetCompAbsTol_STRUMPACK));
1122: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLeafSize_C", MatSTRUMPACKSetCompLeafSize_STRUMPACK));
1123: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLeafSize_C", MatSTRUMPACKGetCompLeafSize_STRUMPACK));
1124: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompMinSepSize_C", MatSTRUMPACKSetCompMinSepSize_STRUMPACK));
1125: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompMinSepSize_C", MatSTRUMPACKGetCompMinSepSize_STRUMPACK));
1126: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompLossyPrecision_C", MatSTRUMPACKSetCompLossyPrecision_STRUMPACK));
1127: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompLossyPrecision_C", MatSTRUMPACKGetCompLossyPrecision_STRUMPACK));
1128: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKSetCompButterflyLevels_C", MatSTRUMPACKSetCompButterflyLevels_STRUMPACK));
1129: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSTRUMPACKGetCompButterflyLevels_C", MatSTRUMPACKGetCompButterflyLevels_STRUMPACK));
1130: B->factortype = ftype;
1132: /* set solvertype */
1133: PetscCall(PetscFree(B->solvertype));
1134: PetscCall(PetscStrallocpy(MATSOLVERSTRUMPACK, &B->solvertype));
1136: PetscCall(PetscNew(&S));
1137: B->data = S;
1139: PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg)); /* A might be MATSEQAIJCUSPARSE */
1140: iface = flg ? STRUMPACK_MT : STRUMPACK_MPI_DIST;
1142: PetscOptionsBegin(PetscObjectComm((PetscObject)B), ((PetscObject)B)->prefix, "STRUMPACK Options", "Mat");
1143: verb = PetscLogPrintInfo ? PETSC_TRUE : PETSC_FALSE;
1144: PetscCall(PetscOptionsBool("-mat_strumpack_verbose", "Print STRUMPACK information", "None", verb, &verb, NULL));
1146: PetscStackCallExternalVoid("STRUMPACK_init", STRUMPACK_init(S, PetscObjectComm((PetscObject)A), prec, iface, 0, NULL, verb));
1148: /* By default, no compression is done. Compression is enabled when the user enables it with */
1149: /* -mat_strumpack_compression with anything else than NONE, or when selecting ilu */
1150: /* preconditioning, in which case we default to STRUMPACK_BLR compression. */
1151: /* When compression is enabled, the STRUMPACK solver becomes an incomplete */
1152: /* (or approximate) LU factorization. */
1153: PetscStackCallExternalVoid("STRUMPACK_compression", compcurrent = STRUMPACK_compression(*S));
1154: PetscCall(PetscOptionsEnum("-mat_strumpack_compression", "Rank-structured compression type", "None", CompTypes, (PetscEnum)compcurrent, (PetscEnum *)&compvalue, &set));
1155: if (set) {
1156: PetscCall(MatSTRUMPACKSetCompression(B, (MatSTRUMPACKCompressionType)compvalue));
1157: } else {
1158: if (ftype == MAT_FACTOR_ILU) { PetscStackCallExternalVoid("STRUMPACK_set_compression", STRUMPACK_set_compression(*S, STRUMPACK_BLR)); }
1159: }
1161: PetscStackCallExternalVoid("STRUMPACK_compression_rel_tol", ctol = (PetscReal)STRUMPACK_compression_rel_tol(*S));
1162: PetscCall(PetscOptionsReal("-mat_strumpack_compression_rel_tol", "Relative compression tolerance", "None", ctol, &ctol, &set));
1163: if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_rel_tol", STRUMPACK_set_compression_rel_tol(*S, (double)ctol));
1165: PetscStackCallExternalVoid("STRUMPACK_compression_abs_tol", ctol = (PetscReal)STRUMPACK_compression_abs_tol(*S));
1166: PetscCall(PetscOptionsReal("-mat_strumpack_compression_abs_tol", "Absolute compression tolerance", "None", ctol, &ctol, &set));
1167: if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_abs_tol", STRUMPACK_set_compression_abs_tol(*S, (double)ctol));
1169: PetscStackCallExternalVoid("STRUMPACK_compression_min_sep_size", min_sep_size = (PetscInt)STRUMPACK_compression_min_sep_size(*S));
1170: PetscCall(PetscOptionsInt("-mat_strumpack_compression_min_sep_size", "Minimum size of separator for compression", "None", min_sep_size, &min_sep_size, &set));
1171: if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_min_sep_size", STRUMPACK_set_compression_min_sep_size(*S, (int)min_sep_size));
1173: PetscStackCallExternalVoid("STRUMPACK_compression_leaf_size", leaf_size = (PetscInt)STRUMPACK_compression_leaf_size(*S));
1174: PetscCall(PetscOptionsInt("-mat_strumpack_compression_leaf_size", "Size of diagonal blocks in rank-structured approximation", "None", leaf_size, &leaf_size, &set));
1175: if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_leaf_size", STRUMPACK_set_compression_leaf_size(*S, (int)leaf_size));
1177: #if defined(STRUMPACK_USE_ZFP)
1178: PetscStackCallExternalVoid("STRUMPACK_compression_lossy_precision", lossy_prec = (PetscInt)STRUMPACK_compression_lossy_precision(*S));
1179: PetscCall(PetscOptionsInt("-mat_strumpack_compression_lossy_precision", "Number of bitplanes to use in lossy compression", "None", lossy_prec, &lossy_prec, &set));
1180: if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_lossy_precision", STRUMPACK_set_compression_lossy_precision(*S, (int)lossy_prec));
1181: #endif
1183: #if defined(STRUMPACK_USE_BPACK)
1184: PetscStackCallExternalVoid("STRUMPACK_compression_butterfly_levels", bfly_lvls = (PetscInt)STRUMPACK_compression_butterfly_levels(*S));
1185: PetscCall(PetscOptionsInt("-mat_strumpack_compression_butterfly_levels", "Number of levels in the HODLR matrix for which to use butterfly compression", "None", bfly_lvls, &bfly_lvls, &set));
1186: if (set) PetscStackCallExternalVoid("STRUMPACK_set_compression_butterfly_levels", STRUMPACK_set_compression_butterfly_levels(*S, (int)bfly_lvls));
1187: #endif
1189: #if defined(STRUMPACK_USE_CUDA) || defined(STRUMPACK_USE_HIP) || defined(STRUMPACK_USE_SYCL)
1190: PetscStackCallExternalVoid("STRUMPACK_use_gpu", flg = (STRUMPACK_use_gpu(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1191: PetscCall(PetscOptionsBool("-mat_strumpack_gpu", "Enable GPU acceleration (not supported for all compression types)", "None", flg, &flg, &set));
1192: if (set) MatSTRUMPACKSetGPU(B, flg);
1193: #endif
1195: PetscStackCallExternalVoid("STRUMPACK_matching", flg = (STRUMPACK_matching(*S) == STRUMPACK_MATCHING_NONE) ? PETSC_FALSE : PETSC_TRUE);
1196: PetscCall(PetscOptionsBool("-mat_strumpack_colperm", "Find a col perm to get nonzero diagonal", "None", flg, &flg, &set));
1197: if (set) PetscStackCallExternalVoid("STRUMPACK_set_matching", STRUMPACK_set_matching(*S, flg ? STRUMPACK_MATCHING_MAX_DIAGONAL_PRODUCT_SCALING : STRUMPACK_MATCHING_NONE));
1199: PetscStackCallExternalVoid("STRUMPACK_reordering_method", ndcurrent = STRUMPACK_reordering_method(*S));
1200: PetscCall(PetscOptionsEnum("-mat_strumpack_reordering", "Sparsity reducing matrix reordering", "None", STRUMPACKNDTypes, (PetscEnum)ndcurrent, (PetscEnum *)&ndvalue, &set));
1201: if (set) PetscStackCallExternalVoid("STRUMPACK_set_reordering_method", STRUMPACK_set_reordering_method(*S, ndvalue));
1203: /* geometric ordering, for a regular 1D/2D/3D mesh in the natural ordering, */
1204: /* with nc DOF's per gridpoint, and possibly a wider stencil */
1205: nrdims = 3;
1206: nxyz[0] = nxyz[1] = nxyz[2] = 1;
1207: PetscCall(PetscOptionsIntArray("-mat_strumpack_geometric_xyz", "Mesh sizes nx,ny,nz (Use 1 for default)", "", nxyz, &nrdims, &set));
1208: if (set) {
1209: PetscCheck(nrdims == 1 || nrdims == 2 || nrdims == 3, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "'-mat_strumpack_geometric_xyz' requires 1, 2, or 3 values.");
1210: PetscCall(MatSTRUMPACKSetGeometricNxyz(B, (int)nxyz[0], (int)nxyz[1], (int)nxyz[2]));
1211: }
1212: PetscCall(PetscOptionsInt("-mat_strumpack_geometric_components", "Number of components per mesh point, for geometric nested dissection ordering", "None", 1, &nc, &set));
1213: if (set) PetscStackCallExternalVoid("STRUMPACK_set_components", STRUMPACK_set_components(*S, (int)nc));
1214: PetscCall(PetscOptionsInt("-mat_strumpack_geometric_width", "Width of the separator (for instance a 1D 3-point wide stencil needs a 1 point wide separator, a 1D 5-point stencil needs a 2 point wide separator), for geometric nested dissection ordering", "None", 1, &w, &set));
1215: if (set) PetscStackCallExternalVoid("STRUMPACK_set_separator_width", STRUMPACK_set_separator_width(*S, (int)w));
1217: PetscStackCallExternalVoid("STRUMPACK_use_METIS_NodeNDP", flg = (STRUMPACK_use_METIS_NodeNDP(*S) == 0) ? PETSC_FALSE : PETSC_TRUE);
1218: PetscCall(PetscOptionsBool("-mat_strumpack_metis_nodeNDP", "Use METIS_NodeNDP instead of METIS_NodeND, for a more balanced tree", "None", flg, &flg, &set));
1219: if (set) {
1220: if (flg) {
1221: PetscStackCallExternalVoid("STRUMPACK_enable_METIS_NodeNDP", STRUMPACK_enable_METIS_NodeNDP(*S));
1222: } else {
1223: PetscStackCallExternalVoid("STRUMPACK_disable_METIS_NodeNDP", STRUMPACK_disable_METIS_NodeNDP(*S));
1224: }
1225: }
1227: /* Disable the outer iterative solver from STRUMPACK. */
1228: /* When STRUMPACK is used as a direct solver, it will by default do iterative refinement. */
1229: /* When STRUMPACK is used as an approximate factorization preconditioner (by enabling */
1230: /* low-rank compression), it will use it's own preconditioned GMRES. Here we can disable */
1231: /* the outer iterative solver, as PETSc uses STRUMPACK from within a KSP. */
1232: PetscStackCallExternalVoid("STRUMPACK_set_Krylov_solver", STRUMPACK_set_Krylov_solver(*S, STRUMPACK_DIRECT));
1234: PetscOptionsEnd();
1236: *F = B;
1237: PetscFunctionReturn(PETSC_SUCCESS);
1238: }
1240: PETSC_INTERN PetscErrorCode MatSolverTypeRegister_STRUMPACK(void)
1241: {
1242: PetscFunctionBegin;
1243: PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1244: PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_strumpack));
1245: PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATMPIAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1246: PetscCall(MatSolverTypeRegister(MATSOLVERSTRUMPACK, MATSEQAIJ, MAT_FACTOR_ILU, MatGetFactor_aij_strumpack));
1247: PetscFunctionReturn(PETSC_SUCCESS);
1248: }