Actual source code: pastix.c
/*
  Provides an interface to the PaStiX sparse solver
*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_USE_COMPLEX)
  #define _H_COMPLEX
#endif

#include <pastix.h>

#if defined(PETSC_USE_COMPLEX)
  #if defined(PETSC_USE_REAL_SINGLE)
    #define SPM_FLTTYPE SpmComplex32
  #else
    #define SPM_FLTTYPE SpmComplex64
  #endif
#else /* PETSC_USE_COMPLEX */
  #if defined(PETSC_USE_REAL_SINGLE)
    #define SPM_FLTTYPE SpmFloat
  #else
    #define SPM_FLTTYPE SpmDouble
  #endif
#endif /* PETSC_USE_COMPLEX */

typedef PetscScalar PastixScalar;

typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;       /* PaStiX data storage structure */
  MPI_Comm       comm;              /* MPI communicator used to initialize PaStiX */
  spmatrix_t    *spm;               /* SPM matrix structure */
  MatStructure   matstruc;          /* DIFFERENT_NONZERO_PATTERN if uninitialized, SAME_NONZERO_PATTERN otherwise */
  PetscScalar   *rhs;               /* Right-hand side values */
  PetscInt       rhsnbr;            /* Number of right-hand sides */
  pastix_int_t   iparm[IPARM_SIZE]; /* Integer parameters */
  double         dparm[DPARM_SIZE]; /* Floating point parameters */
} Mat_Pastix;

/*
  Convert a PETSc matrix into the SPM structure used by PaStiX

  Input:
    A     - matrix in aij, baij or sbaij format
    reuse - MAT_INITIAL_MATRIX: the SPM structure is allocated and all entries are set
            MAT_REUSE_MATRIX: only the numerical values are updated
  Output:
    spm - the SPM built from A, stored in pastix->spm
*/
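
/*
  For reference, a small illustrative example (not taken from the code below) of the 0-based CSR
  layout that MatGetRowIJ() returns and that is copied into the SPM arrays:

      [ a . b ]           row    = { 0, 2, 3, 5 }    (n + 1 entries)
      [ . c . ]   --->    col    = { 0, 2, 1, 0, 2 } (nnz entries)
      [ d . e ]           values = { a, b, c, d, e }
*/
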
static PetscErrorCode MatConvertToSPM(Mat A, MatReuse reuse, Mat_Pastix *pastix)
{
  Mat                A_loc, A_aij;
  const PetscInt    *row, *col;
  PetscInt           n, i;
  const PetscScalar *val;
  PetscBool          ismpiaij, isseqaij, ismpisbaij, isseqsbaij;
  PetscBool          flag;
  spmatrix_t         spm2, *spm = NULL;
  int                spm_err;

  PetscFunctionBegin;
  /* Determine the type of A */
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  /* TODO: Block Aij should be handled with dof in spm */
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQSBAIJ, &isseqsbaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPISBAIJ, &ismpisbaij));

  if (isseqsbaij || ismpisbaij) PetscCall(MatConvert(A, MATAIJ, reuse, &A_aij));
  else A_aij = A;

  if (ismpiaij || ismpisbaij) PetscCall(MatMPIAIJGetLocalMat(A_aij, MAT_INITIAL_MATRIX, &A_loc));
  else if (isseqaij || isseqsbaij) A_loc = A_aij;
  else SETERRQ(PetscObjectComm((PetscObject)A_aij), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)A)->type_name);

  /* Use MatGetRowIJ() together with the CSR-as-CSC trick below, instead of MatGetColumnIJ(), for performance */
  PetscCall(MatGetRowIJ(A_loc, 0, PETSC_FALSE, PETSC_FALSE, &n, &row, &col, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_SUP, "GetRowIJ failed");
  PetscCall(MatSeqAIJGetArrayRead(A_loc, &val));

  PetscCall(PetscMalloc1(1, &spm));
  PetscStackCallExternalVoid("spmInitDist", spmInitDist(spm, pastix->comm));
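
  /* Describe the local block of A: n local rows, row[n] local nonzeros, CSR layout and the PETSc
     scalar type; the matrix is marked replicated when every process holds all of the rows */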
  spm->n          = n;
  spm->nnz        = row[n];
  spm->fmttype    = SpmCSR;
  spm->flttype    = SPM_FLTTYPE;
  spm->replicated = (A->rmap->n == A->rmap->N);

  PetscStackCallExternalVoid("spmUpdateComputedFields", spmUpdateComputedFields(spm));
  PetscStackCallExternalVoid("spmAlloc", spmAlloc(spm));

  /* Set the local-to-global row mapping in the distributed case */
  if (!spm->replicated) {
    for (i = A->rmap->rstart; i < A->rmap->rend; i++) spm->loc2glob[i - A->rmap->rstart] = i;
  }

  /* Copy arrays */
  PetscCall(PetscArraycpy(spm->colptr, col, spm->nnz));
  PetscCall(PetscArraycpy(spm->rowptr, row, spm->n + 1));
  PetscCall(PetscArraycpy((PetscScalar *)spm->values, val, spm->nnzexp));
  PetscCall(MatRestoreRowIJ(A_loc, 0, PETSC_FALSE, PETSC_FALSE, &n, &row, &col, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_SUP, "RestoreRowIJ failed");
  PetscCall(MatSeqAIJRestoreArrayRead(A_loc, &val));
  if (ismpiaij || ismpisbaij) PetscCall(MatDestroy(&A_loc));

  /*
    PaStiX currently supports only CSC matrices, so present the PETSc CSR arrays as CSC and
    compensate by requesting a transpose solve.
    Note that this is not available yet for Hermitian matrices and LL^h/LDL^h factorizations.
  */
  {
    spm_int_t *tmp;
    spm->fmttype = SpmCSC;
    tmp          = spm->colptr;
    spm->colptr  = spm->rowptr;
    spm->rowptr  = tmp;
    pastix->iparm[IPARM_TRANSPOSE_SOLVE] = PastixTrans;
  }

  /* Update matrix to be in PaStiX format */
  PetscStackCallExternalVoid("spmCheckAndCorrect", spm_err = spmCheckAndCorrect(spm, &spm2));
  if (spm_err != 0) {
    PetscStackCallExternalVoid("spmExit", spmExit(spm));
    *spm = spm2;
  }

  if (isseqsbaij || ismpisbaij) PetscCall(MatDestroy(&A_aij));

  pastix->spm = spm;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Call the PaStiX clean-up step if PaStiX was initialized:
  free the SPM matrix and the PaStiX instance.
*/
static PetscErrorCode MatDestroy_PaStiX(Mat A)
{
  Mat_Pastix *pastix = (Mat_Pastix *)A->data;

  PetscFunctionBegin;
  /* Finalize SPM (matrix handler of PaStiX) */
  if (pastix->spm) {
    PetscStackCallExternalVoid("spmExit", spmExit(pastix->spm));
    PetscCall(PetscFree(pastix->spm));
  }

  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));

  /* Finalize PaStiX */
  if (pastix->pastix_data) pastixFinalize(&pastix->pastix_data);

  /* Deallocate PaStiX structure */
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Copy the right-hand side, call the PaStiX solve and iterative refinement steps,
  and return the solution in x.
*/
static PetscErrorCode MatSolve_PaStiX(Mat A, Vec b, Vec x)
{
  Mat_Pastix        *pastix = (Mat_Pastix *)A->data;
  const PetscScalar *bptr;
  PetscInt           ldrhs;

  PetscFunctionBegin;
  pastix->rhsnbr = 1;
  ldrhs          = pastix->spm->n;
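
  /* PaStiX solves in place: copy b into x and hand x's array to pastix_task_solve() as the
     combined right-hand-side/solution buffer, while the original b is kept for the refinement step */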
  PetscCall(VecCopy(b, x));
  PetscCall(VecGetArray(x, &pastix->rhs));
  PetscCall(VecGetArrayRead(b, &bptr));

  /* solve phase */
  /*-------------*/
  PetscCheck(pastix->pastix_data, PETSC_COMM_SELF, PETSC_ERR_SUP, "PaStiX hasn't been initialized");
  PetscCallExternal(pastix_task_solve, pastix->pastix_data, ldrhs, pastix->rhsnbr, pastix->rhs, ldrhs);
  PetscCallExternal(pastix_task_refine, pastix->pastix_data, ldrhs, pastix->rhsnbr, (PetscScalar *)bptr, ldrhs, pastix->rhs, ldrhs);

  PetscCall(VecRestoreArray(x, &pastix->rhs));
  PetscCall(VecRestoreArrayRead(b, &bptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Numerical factorization using the PaStiX solver

  Input:
    F - PETSc matrix that contains the PaStiX interface
    A - PETSc matrix in aij, baij or sbaij format
*/
static PetscErrorCode MatFactorNumeric_PaStiX(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_Pastix *pastix = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  F->ops->solve = MatSolve_PaStiX;

  /* Perform Numerical Factorization */
  PetscCheck(pastix->pastix_data, PETSC_COMM_SELF, PETSC_ERR_SUP, "PaStiX hasn't been initialized");
  PetscCallExternal(pastix_task_numfact, pastix->pastix_data, pastix->spm);

  F->assembled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatLUFactorNumeric_PaStiX(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_Pastix *pastix = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  PetscCheck(pastix->iparm[IPARM_FACTORIZATION] == PastixFactGETRF, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Incorrect factorization type for symbolic and numerical factorization by PaStiX");
  pastix->iparm[IPARM_FACTORIZATION] = PastixFactGETRF;
  PetscCall(MatFactorNumeric_PaStiX(F, A, info));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCholeskyFactorNumeric_PaStiX(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_Pastix *pastix = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  PetscCheck(pastix->iparm[IPARM_FACTORIZATION] == PastixFactSYTRF, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Incorrect factorization type for symbolic and numerical factorization by PaStiX");
  pastix->iparm[IPARM_FACTORIZATION] = PastixFactSYTRF;
  PetscCall(MatFactorNumeric_PaStiX(F, A, info));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Perform the ordering step and the symbolic factorization step

  Note: the PETSc r and c permutations are ignored
  Input:
    F    - PETSc matrix that contains the PaStiX interface
    A    - matrix in aij, baij or sbaij format
    r    - row permutation (ignored)
    c    - column permutation (ignored)
    info - information about the factorization to perform
  Output:
    pastix_data - this instance is updated with the allocated SolverMatrix
*/
static PetscErrorCode MatFactorSymbolic_PaStiX(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  Mat_Pastix *pastix = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  pastix->matstruc = DIFFERENT_NONZERO_PATTERN;

  /* Initialise SPM structure */
  PetscCall(MatConvertToSPM(A, MAT_INITIAL_MATRIX, pastix));

  /* Ordering - Symbolic factorization - Build SolverMatrix */
  PetscCheck(pastix->pastix_data, PETSC_COMM_SELF, PETSC_ERR_SUP, "PaStiX hasn't been initialized");
  PetscCallExternal(pastix_task_analyze, pastix->pastix_data, pastix->spm);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatLUFactorSymbolic_PaStiX(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  Mat_Pastix *pastix = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  pastix->iparm[IPARM_FACTORIZATION] = PastixFactGETRF;
  PetscCall(MatFactorSymbolic_PaStiX(F, A, r, c, info));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Note: the PETSc r permutation is ignored */
static PetscErrorCode MatCholeskyFactorSymbolic_PaStiX(Mat F, Mat A, IS r, const MatFactorInfo *info)
{
  Mat_Pastix *pastix = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  /* Warning: Cholesky in the PETSc wrapper does not handle (complex) Hermitian matrices.
     The factorization type can be forced using the parameter
     -mat_pastix_factorization (see enum pastix_factotype_e in
     https://solverstack.gitlabpages.inria.fr/pastix/group__pastix__api.html). */
  pastix->iparm[IPARM_FACTORIZATION] = PastixFactSYTRF;
  PetscCall(MatFactorSymbolic_PaStiX(F, A, r, NULL, info));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_PaStiX(Mat A, PetscViewer viewer)
{
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *pastix = (Mat_Pastix *)A->data;
      spmatrix_t *spm    = pastix->spm;
      PetscCheck(spm, PETSC_COMM_SELF, PETSC_ERR_SUP, "Sparse matrix isn't initialized");

      PetscCall(PetscViewerASCIIPrintf(viewer, "PaStiX run parameters:\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Matrix type : %s \n", ((spm->mtxtype == SpmSymmetric) ? "Symmetric" : "Unsymmetric")));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Level of printing (0,1,2): %ld \n", (long)pastix->iparm[IPARM_VERBOSE]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of refinement iterations : %ld \n", (long)pastix->iparm[IPARM_NBITER]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Error : %e \n", pastix->dparm[DPARM_RELATIVE_ERROR]));
      if (pastix->iparm[IPARM_VERBOSE] > 0) spmPrintInfo(spm, stdout);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATSOLVERPASTIX - A solver package providing direct solvers (LU and Cholesky) for distributed
  and sequential matrices via the external package PaStiX.

  Use `./configure --download-hwloc --download-metis --download-ptscotch --download-pastix --download-netlib-lapack [or MKL, for example --with-blaslapack-dir=${MKLROOT}]`
  to have PETSc installed with PaStiX.

  Use `-pc_type lu` `-pc_factor_mat_solver_type pastix` to use this direct solver.
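
  For example, a hypothetical application `./app` that sets up a `KSP` linear solve could be run with
.vb
  mpiexec -n 4 ./app -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type pastix -mat_pastix_verbose 1 -mat_pastix_thread_nbr 2
.ve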

  Options Database Keys:
  -mat_pastix_verbose <0,1,2>             - Print level of information messages from PaStiX
  -mat_pastix_factorization <0,1,2,3,4>   - Factorization algorithm (Cholesky, LDL^t, LU, LL^t, LDL^h)
  -mat_pastix_itermax <integer>           - Maximum number of iterations during refinement
  -mat_pastix_epsilon_refinement <double> - Epsilon for refinement
  -mat_pastix_epsilon_magn_ctrl <double>  - Epsilon for magnitude control
  -mat_pastix_ordering <0,1>              - Ordering (Scotch or Metis)
  -mat_pastix_thread_nbr <integer>        - Set the number of threads for each MPI process
  -mat_pastix_scheduler <0,1,2,3,4>       - Scheduler (sequential, static, parsec, starpu, dynamic)
  -mat_pastix_compress_when <0,1,2,3>     - When to compress (never, minimal-theory, just-in-time, supernodes)
  -mat_pastix_compress_tolerance <double> - Tolerance for low-rank kernels

  Notes:
  This only works for matrices with a symmetric nonzero structure; if you pass it a matrix with a
  nonsymmetric structure, PaStiX (and hence PETSc) returns an error.

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
M*/
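
/*
  A minimal usage sketch from the C API (assumes a previously assembled Mat A and Vecs b and x,
  inside a function returning PetscErrorCode):

    KSP ksp;
    PC  pc;

    PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
    PetscCall(KSPSetOperators(ksp, A, A));
    PetscCall(KSPSetType(ksp, KSPPREONLY));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCLU));
    PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERPASTIX));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPDestroy(&ksp));
*/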

static PetscErrorCode MatGetInfo_PaStiX(Mat A, MatInfoType flag, MatInfo *info)
{
  Mat_Pastix *pastix = (Mat_Pastix *)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_allocated      = pastix->iparm[IPARM_ALLOCATED_TERMS];
  info->nz_used           = pastix->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorGetSolverType_PaStiX(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERPASTIX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Sets PaStiX options from the options database */
static PetscErrorCode MatSetFromOptions_PaStiX(Mat A)
{
  Mat_Pastix   *pastix = (Mat_Pastix *)A->data;
  pastix_int_t *iparm  = pastix->iparm;
  double       *dparm  = pastix->dparm;
  PetscInt      icntl;
  PetscReal     dcntl;
  PetscBool     set;

  PetscFunctionBegin;
  PetscOptionsBegin(PetscObjectComm((PetscObject)A), ((PetscObject)A)->prefix, "PaStiX Options", "Mat");

  iparm[IPARM_VERBOSE] = 0;
  iparm[IPARM_ITERMAX] = 20;

  PetscCall(PetscOptionsRangeInt("-mat_pastix_verbose", "iparm[IPARM_VERBOSE] : level of printing (0 to 2)", "None", iparm[IPARM_VERBOSE], &icntl, &set, 0, 2));
  if (set) iparm[IPARM_VERBOSE] = (pastix_int_t)icntl;

  PetscCall(PetscOptionsRangeInt("-mat_pastix_factorization", "iparm[IPARM_FACTORIZATION]: Factorization algorithm", "None", iparm[IPARM_FACTORIZATION], &icntl, &set, 0, 4));
  if (set) iparm[IPARM_FACTORIZATION] = (pastix_int_t)icntl;

  PetscCall(PetscOptionsBoundedInt("-mat_pastix_itermax", "iparm[IPARM_ITERMAX]: Max iterations", "None", iparm[IPARM_ITERMAX], &icntl, &set, 1));
  if (set) iparm[IPARM_ITERMAX] = (pastix_int_t)icntl;

  PetscCall(PetscOptionsBoundedReal("-mat_pastix_epsilon_refinement", "dparm[DPARM_EPSILON_REFINEMENT]: Epsilon refinement", "None", dparm[DPARM_EPSILON_REFINEMENT], &dcntl, &set, -1.));
  if (set) dparm[DPARM_EPSILON_REFINEMENT] = (double)dcntl;

  PetscCall(PetscOptionsReal("-mat_pastix_epsilon_magn_ctrl", "dparm[DPARM_EPSILON_MAGN_CTRL]: Epsilon magnitude control", "None", dparm[DPARM_EPSILON_MAGN_CTRL], &dcntl, &set));
  if (set) dparm[DPARM_EPSILON_MAGN_CTRL] = (double)dcntl;

  PetscCall(PetscOptionsRangeInt("-mat_pastix_ordering", "iparm[IPARM_ORDERING]: Ordering algorithm", "None", iparm[IPARM_ORDERING], &icntl, &set, 0, 2));
  if (set) iparm[IPARM_ORDERING] = (pastix_int_t)icntl;

  PetscCall(PetscOptionsBoundedInt("-mat_pastix_thread_nbr", "iparm[IPARM_THREAD_NBR]: Number of threads per MPI process", "None", iparm[IPARM_THREAD_NBR], &icntl, &set, -1));
  if (set) iparm[IPARM_THREAD_NBR] = (pastix_int_t)icntl;

  PetscCall(PetscOptionsRangeInt("-mat_pastix_scheduler", "iparm[IPARM_SCHEDULER]: Scheduler", "None", iparm[IPARM_SCHEDULER], &icntl, &set, 0, 4));
  if (set) iparm[IPARM_SCHEDULER] = (pastix_int_t)icntl;

  PetscCall(PetscOptionsRangeInt("-mat_pastix_compress_when", "iparm[IPARM_COMPRESS_WHEN]: When to compress", "None", iparm[IPARM_COMPRESS_WHEN], &icntl, &set, 0, 3));
  if (set) iparm[IPARM_COMPRESS_WHEN] = (pastix_int_t)icntl;

  PetscCall(PetscOptionsBoundedReal("-mat_pastix_compress_tolerance", "dparm[DPARM_COMPRESS_TOLERANCE]: Tolerance for low-rank kernels", "None", dparm[DPARM_COMPRESS_TOLERANCE], &dcntl, &set, 0.));
  if (set) dparm[DPARM_COMPRESS_TOLERANCE] = (double)dcntl;

  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_pastix(Mat A, MatFactorType ftype, Mat *F, const char *mattype)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  PetscCheck(ftype == MAT_FACTOR_LU || ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Factor type not supported by PaStiX");

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  B->ops->lufactorsymbolic       = MatLUFactorSymbolic_PaStiX;
  B->ops->lufactornumeric        = MatLUFactorNumeric_PaStiX;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_PaStiX;
  B->ops->choleskyfactornumeric  = MatCholeskyFactorNumeric_PaStiX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  B->ops->destroy                = MatDestroy_PaStiX;

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_PaStiX));

  B->factortype = ftype;

  /* Create the pastix structure */
  PetscCall(PetscNew(&pastix));
  B->data = (void *)pastix;

  /* Call to set default pastix options */
  PetscStackCallExternalVoid("pastixInitParam", pastixInitParam(pastix->iparm, pastix->dparm));
  PetscCall(MatSetFromOptions_PaStiX(B));

  /* Get PETSc communicator */
  PetscCall(PetscObjectGetComm((PetscObject)A, &pastix->comm));

  /* Initialise PaStiX structure */
  pastix->iparm[IPARM_SCOTCH_MT] = 0;
  PetscStackCallExternalVoid("pastixInit", pastixInit(&pastix->pastix_data, pastix->comm, pastix->iparm, pastix->dparm));

  /* Warning: Cholesky in the PETSc wrapper does not handle (complex) Hermitian matrices.
     The factorization type can be forced using the parameter
     -mat_pastix_factorization (see enum pastix_factotype_e in
     https://solverstack.gitlabpages.inria.fr/pastix/group__pastix__api.html). */
  pastix->iparm[IPARM_FACTORIZATION] = ftype == MAT_FACTOR_CHOLESKY ? PastixFactSYTRF : PastixFactGETRF;

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  PetscCall(MatGetFactor_pastix(A, ftype, F, MATMPIAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_seqaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  PetscCall(MatGetFactor_pastix(A, ftype, F, MATSEQAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  PetscCall(MatGetFactor_pastix(A, ftype, F, MATMPISBAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  PetscCall(MatGetFactor_pastix(A, ftype, F, MATSEQSBAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Pastix(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPISBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_mpisbaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQSBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_seqsbaij_pastix));
  PetscFunctionReturn(PETSC_SUCCESS);
}