Actual source code: superlu_dist.c
/*
Provides an interface to the SuperLU_DIST sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscpkg_version.h>

PETSC_PRAGMA_DIAGNOSTIC_IGNORED_BEGIN("-Wundef")
EXTERN_C_BEGIN
// To suppress compiler warnings
#define DISABLE_CUSPARSE_DEPRECATED
#if defined(PETSC_USE_COMPLEX)
#define CASTDOUBLECOMPLEX     (doublecomplex *)
#define CASTDOUBLECOMPLEXSTAR (doublecomplex **)
#include <superlu_zdefs.h>
#define LUstructInit                  zLUstructInit
#define ScalePermstructInit           zScalePermstructInit
#define ScalePermstructFree           zScalePermstructFree
#define LUstructFree                  zLUstructFree
#define Destroy_LU                    zDestroy_LU
#define ScalePermstruct_t             zScalePermstruct_t
#define LUstruct_t                    zLUstruct_t
#define SOLVEstruct_t                 zSOLVEstruct_t
#define SolveFinalize                 zSolveFinalize
#define pGetDiagU                     pzGetDiagU
#define pgssvx                        pzgssvx
#define allocateA_dist                zallocateA_dist
#define Create_CompRowLoc_Matrix_dist zCreate_CompRowLoc_Matrix_dist
#define SLU                           SLU_Z
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
#define DeAllocLlu_3d              zDeAllocLlu_3d
#define DeAllocGlu_3d              zDeAllocGlu_3d
#define Destroy_A3d_gathered_on_2d zDestroy_A3d_gathered_on_2d
#define pgssvx3d                   pzgssvx3d
#endif
#elif defined(PETSC_USE_REAL_SINGLE)
#define CASTDOUBLECOMPLEX
#define CASTDOUBLECOMPLEXSTAR
#include <superlu_sdefs.h>
#define LUstructInit                  sLUstructInit
#define ScalePermstructInit           sScalePermstructInit
#define ScalePermstructFree           sScalePermstructFree
#define LUstructFree                  sLUstructFree
#define Destroy_LU                    sDestroy_LU
#define ScalePermstruct_t             sScalePermstruct_t
#define LUstruct_t                    sLUstruct_t
#define SOLVEstruct_t                 sSOLVEstruct_t
#define SolveFinalize                 sSolveFinalize
#define pGetDiagU                     psGetDiagU
#define pgssvx                        psgssvx
#define allocateA_dist                sallocateA_dist
#define Create_CompRowLoc_Matrix_dist sCreate_CompRowLoc_Matrix_dist
#define SLU                           SLU_S
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
#define DeAllocLlu_3d              sDeAllocLlu_3d
#define DeAllocGlu_3d              sDeAllocGlu_3d
#define Destroy_A3d_gathered_on_2d sDestroy_A3d_gathered_on_2d
#define pgssvx3d                   psgssvx3d
#endif
#else
#define CASTDOUBLECOMPLEX
#define CASTDOUBLECOMPLEXSTAR
#include <superlu_ddefs.h>
#define LUstructInit                  dLUstructInit
#define ScalePermstructInit           dScalePermstructInit
#define ScalePermstructFree           dScalePermstructFree
#define LUstructFree                  dLUstructFree
#define Destroy_LU                    dDestroy_LU
#define ScalePermstruct_t             dScalePermstruct_t
#define LUstruct_t                    dLUstruct_t
#define SOLVEstruct_t                 dSOLVEstruct_t
#define SolveFinalize                 dSolveFinalize
#define pGetDiagU                     pdGetDiagU
#define pgssvx                        pdgssvx
#define allocateA_dist                dallocateA_dist
#define Create_CompRowLoc_Matrix_dist dCreate_CompRowLoc_Matrix_dist
#define SLU                           SLU_D
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
#define DeAllocLlu_3d              dDeAllocLlu_3d
#define DeAllocGlu_3d              dDeAllocGlu_3d
#define Destroy_A3d_gathered_on_2d dDestroy_A3d_gathered_on_2d
#define pgssvx3d                   pdgssvx3d
#endif
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
#include <superlu_sdefs.h>
#endif
EXTERN_C_END
PETSC_PRAGMA_DIAGNOSTIC_IGNORED_END()

typedef struct {
  int_t nprow, npcol, *row, *col;
  gridinfo_t grid;
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
  PetscBool use3d;
  int_t npdep; /* replication factor, must be a power of two */
  gridinfo3d_t grid3d;
#endif
  superlu_dist_options_t options;
  SuperMatrix A_sup;
  ScalePermstruct_t ScalePermstruct;
  LUstruct_t LUstruct;
  int StatPrint;
  SOLVEstruct_t SOLVEstruct;
  fact_t FactPattern;
  MPI_Comm comm_superlu;
  PetscScalar *val;
  PetscBool matsolve_iscalled, matmatsolve_iscalled;
  PetscBool CleanUpSuperLU_Dist; /* flag to clean up (non-global) SuperLU_DIST objects during Destroy */
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  sScalePermstruct_t sScalePermstruct;
  sLUstruct_t sLUstruct;
  sSOLVEstruct_t sSOLVEstruct;
  float *sval;
  PetscBool singleprecision; /* use single precision SuperLU_DIST from double precision PETSc */
  float *sbptr;
#endif
} Mat_SuperLU_DIST;

static PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F, PetscScalar *diagU)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)F->data;

  PetscFunctionBegin;
  PetscCallExternalVoid("SuperLU_DIST:pGetDiagU", pGetDiagU(F->rmap->N, &lu->LUstruct, &lu->grid, CASTDOUBLECOMPLEX diagU));
  PetscFunctionReturn(PETSC_SUCCESS);
}

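/* MatSuperluDistGetDiagU - gets the diagonal entries of the factor U computed by SuperLU_DIST;
   diagU must be a caller-allocated array whose length is the global dimension of F on every rank.
   See MatGetInertia_SuperLU_DIST() below for a usage example. */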
PetscErrorCode MatSuperluDistGetDiagU(Mat F, PetscScalar *diagU)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F, MAT_CLASSID, 1);
  PetscTryMethod(F, "MatSuperluDistGetDiagU_C", (Mat, PetscScalar *), (F, diagU));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* This allows reusing the SuperLU_DIST communicator and grid when only a single SuperLU_DIST matrix is used at a time */
typedef struct {
  MPI_Comm comm;
  PetscBool busy;
  gridinfo_t grid;
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
  PetscBool use3d;
  gridinfo3d_t grid3d;
#endif
} PetscSuperLU_DIST;

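/* The first factorization on a communicator duplicates the communicator, creates the grid, and caches
   both in this attribute, marking it busy; a concurrent second factorization on the same communicator
   finds the attribute busy and falls back to a communicator and grid of its own (see
   MatLUFactorSymbolic_SuperLU_DIST() and MatDestroy_SuperLU_DIST() below) */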
static PetscMPIInt Petsc_Superlu_dist_keyval = MPI_KEYVAL_INVALID;

PETSC_EXTERN PetscMPIInt MPIAPI Petsc_Superlu_dist_keyval_DeleteFn(MPI_Comm comm, PetscMPIInt keyval, void *attr_val, void *extra_state)
{
  PetscSuperLU_DIST *context = (PetscSuperLU_DIST *)attr_val;

  PetscFunctionBegin;
  PetscCheckReturnMPI(keyval == Petsc_Superlu_dist_keyval, PETSC_COMM_SELF, PETSC_ERR_ARG_CORRUPT, "Unexpected keyval");
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
  if (context->use3d) {
    PetscCallExternalVoid("SuperLU_DIST:superlu_gridexit3d", superlu_gridexit3d(&context->grid3d));
  } else
#endif
    PetscCallExternalVoid("SuperLU_DIST:superlu_gridexit", superlu_gridexit(&context->grid));
  PetscCallMPIReturnMPI(MPI_Comm_free(&context->comm));
  PetscCallReturnMPI(PetscFree(context));
  PetscFunctionReturn(MPI_SUCCESS);
}

/*
  Performs MPI_Comm_free_keyval() on Petsc_Superlu_dist_keyval but keeps the global variable for
  users who do not destroy all PETSc objects before PetscFinalize().

  The value Petsc_Superlu_dist_keyval is retained so that Petsc_Superlu_dist_keyval_DeleteFn()
  can still check that the keyval associated with the MPI communicator is correct when the MPI
  communicator is destroyed.

  This is called in PetscFinalize()
*/
static PetscErrorCode Petsc_Superlu_dist_keyval_free(void)
{
  PetscMPIInt Petsc_Superlu_dist_keyval_temp = Petsc_Superlu_dist_keyval;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_free_keyval(&Petsc_Superlu_dist_keyval_temp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)A->data;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  PetscCall(PetscFree(lu->sbptr));
#endif
  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    PetscCallExternalVoid("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist", Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
    if (lu->options.SolveInitialized) {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
      if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:SolveFinalize", sSolveFinalize(&lu->options, &lu->sSOLVEstruct));
      else
#endif
        PetscCallExternalVoid("SuperLU_DIST:SolveFinalize", SolveFinalize(&lu->options, &lu->SOLVEstruct));
    }
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
    if (lu->use3d) {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
      if (lu->singleprecision) {
        PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->sLUstruct));
        PetscCallExternalVoid("SuperLU_DIST:Destroy_A3d_gathered_on_2d", sDestroy_A3d_gathered_on_2d(&lu->sSOLVEstruct, &lu->grid3d));
        PetscCallExternalVoid("SuperLU_DIST:ScalePermstructFree", sScalePermstructFree(&lu->sScalePermstruct));
        PetscCallExternalVoid("SuperLU_DIST:LUstructFree", sLUstructFree(&lu->sLUstruct));
      } else
#endif
      {
        PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->LUstruct));
        PetscCallExternalVoid("SuperLU_DIST:Destroy_A3d_gathered_on_2d", Destroy_A3d_gathered_on_2d(&lu->SOLVEstruct, &lu->grid3d));
        PetscCallExternalVoid("SuperLU_DIST:ScalePermstructFree", ScalePermstructFree(&lu->ScalePermstruct));
        PetscCallExternalVoid("SuperLU_DIST:LUstructFree", LUstructFree(&lu->LUstruct));
      }
    } else
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
      if (lu->singleprecision) {
      PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->cmap->N, &lu->grid, &lu->sLUstruct));
      PetscCallExternalVoid("SuperLU_DIST:ScalePermstructFree", sScalePermstructFree(&lu->sScalePermstruct));
      PetscCallExternalVoid("SuperLU_DIST:LUstructFree", sLUstructFree(&lu->sLUstruct));
    } else
#endif
    {
      PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
      PetscCallExternalVoid("SuperLU_DIST:ScalePermstructFree", ScalePermstructFree(&lu->ScalePermstruct));
      PetscCallExternalVoid("SuperLU_DIST:LUstructFree", LUstructFree(&lu->LUstruct));
    }
    /* Release the SuperLU_DIST process grid only if the matrix has its own copy, that is, it is not stored in the communicator attribute */
    if (lu->comm_superlu) {
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
      if (lu->use3d) {
        PetscCallExternalVoid("SuperLU_DIST:superlu_gridexit3d", superlu_gridexit3d(&lu->grid3d));
      } else
#endif
        PetscCallExternalVoid("SuperLU_DIST:superlu_gridexit", superlu_gridexit(&lu->grid));
    }
  }
  /*
   * We always need to release the communicator that was obtained in MatGetFactor_aij_superlu_dist.
   * lu->CleanUpSuperLU_Dist is turned on in MatLUFactorSymbolic_SuperLU_DIST. In some use cases a
   * factor matrix is created but never used to factor or solve; then lu->CleanUpSuperLU_Dist stays
   * off, and the old code neither released the communicator nor marked it as "not busy". Here we
   * release the communicator regardless.
   */
  if (lu->comm_superlu) {
    PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &lu->comm_superlu));
  } else {
    PetscSuperLU_DIST *context;
    MPI_Comm comm;
    PetscMPIInt iflg;

    PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
    PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Superlu_dist_keyval, &context, &iflg));
    if (iflg) context->busy = PETSC_FALSE;
  }

  PetscCall(PetscFree(A->data));
  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSuperluDistGetDiagU_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSolve_SuperLU_DIST(Mat A, Vec b_mpi, Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)A->data;
  PetscInt m = A->rmap->n;
  SuperLUStat_t stat;
  PetscReal berr[1];
  PetscScalar *bptr = NULL;
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  float sberr[1];
#endif
  int info; /* SuperLU_DIST info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCheck(lu->options.Fact == FACTORED, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "SuperLU_DIST options.Fact must equal FACTORED");
  PetscCall(PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM "
                                   "Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",
                                   &cite));

  if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
    /* see comments in MatMatSolve() */
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:SolveFinalize", sSolveFinalize(&lu->options, &lu->sSOLVEstruct));
    else
#endif
      PetscCallExternalVoid("SuperLU_DIST:SolveFinalize", SolveFinalize(&lu->options, &lu->SOLVEstruct));
    lu->options.SolveInitialized = NO;
  }
  PetscCall(VecCopy(b_mpi, x));
  PetscCall(VecGetArray(x, &bptr));
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  if (lu->singleprecision) {
    PetscInt n;
    PetscCall(VecGetLocalSize(x, &n));
    if (!lu->sbptr) PetscCall(PetscMalloc1(n, &lu->sbptr));
    for (PetscInt i = 0; i < n; i++) lu->sbptr[i] = PetscRealPart(bptr[i]); /* PetscRealPart() is a no-op to allow compiling with complex */
  }
#endif

  PetscCallExternalVoid("SuperLU_DIST:PStatInit", PStatInit(&stat)); /* Initialize the statistics variables. */
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
  if (lu->use3d) {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:pgssvx3d", psgssvx3d(&lu->options, &lu->A_sup, &lu->sScalePermstruct, lu->sbptr, (int)m, 1, &lu->grid3d, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &info));
    else
#endif
      PetscCallExternalVoid("SuperLU_DIST:pgssvx3d", pgssvx3d(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, (int)m, 1, &lu->grid3d, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info));
    PetscCheck(!info, PETSC_COMM_SELF, PETSC_ERR_LIB, "pgssvx3d fails, info: %d", info);
  } else
#endif
  {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:pgssvx", psgssvx(&lu->options, &lu->A_sup, &lu->sScalePermstruct, lu->sbptr, (int)m, 1, &lu->grid, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &info));
    else
#endif
      PetscCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, (int)m, 1, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info));
    PetscCheck(!info, PETSC_COMM_SELF, PETSC_ERR_LIB, "pgssvx fails, info: %d", info);
  }
  if (lu->options.PrintStat) PetscCallExternalVoid("SuperLU_DIST:PStatPrint", PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  PetscCallExternalVoid("SuperLU_DIST:PStatFree", PStatFree(&stat));
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  if (lu->singleprecision) {
    PetscInt n;
    PetscCall(VecGetLocalSize(x, &n));
    for (PetscInt i = 0; i < n; i++) bptr[i] = lu->sbptr[i];
  }
#endif
  PetscCall(VecRestoreArray(x, &bptr));
  lu->matsolve_iscalled = PETSC_TRUE;
  lu->matmatsolve_iscalled = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A, Mat B_mpi, Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)A->data;
  PetscInt m = A->rmap->n, nrhs;
  SuperLUStat_t stat;
  PetscReal berr[1];
  PetscScalar *bptr;
  int info; /* SuperLU_DIST info code is ALWAYS an int, even with long long indices */
  PetscBool flg;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  PetscCheck(!lu->singleprecision, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for -pc_precision single");
#endif
  PetscCheck(lu->options.Fact == FACTORED, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "SuperLU_DIST options.Fact must equal FACTORED");
  PetscCall(PetscObjectTypeCompareAny((PetscObject)B_mpi, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
  PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix B must be MATDENSE matrix");
  if (X != B_mpi) {
    PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &flg, MATSEQDENSE, MATMPIDENSE, NULL));
    PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix X must be MATDENSE matrix");
  }

  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* the communication pattern in SOLVEstruct was probably not built for a MatMatSolve(),
       so destroy it and create a new SOLVEstruct; otherwise the solve may corrupt memory
       or produce an incorrect solution. See src/mat/tests/ex125.c */
    PetscCallExternalVoid("SuperLU_DIST:SolveFinalize", SolveFinalize(&lu->options, &lu->SOLVEstruct));
    lu->options.SolveInitialized = NO;
  }
  if (X != B_mpi) PetscCall(MatCopy(B_mpi, X, SAME_NONZERO_PATTERN));

  PetscCall(MatGetSize(B_mpi, NULL, &nrhs));

  PetscCallExternalVoid("SuperLU_DIST:PStatInit", PStatInit(&stat)); /* Initialize the statistics variables. */
  PetscCall(MatDenseGetArray(X, &bptr));

#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
  if (lu->use3d) PetscCallExternalVoid("SuperLU_DIST:pgssvx3d", pgssvx3d(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, (int)m, (int)nrhs, &lu->grid3d, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info));
  else
#endif
    PetscCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, CASTDOUBLECOMPLEX bptr, (int)m, (int)nrhs, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &info));
  PetscCheck(!info, PETSC_COMM_SELF, PETSC_ERR_LIB, "pgssvx fails, info: %d", info);
  PetscCall(MatDenseRestoreArray(X, &bptr));

  if (lu->options.PrintStat) PetscCallExternalVoid("SuperLU_DIST:PStatPrint", PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  PetscCallExternalVoid("SuperLU_DIST:PStatFree", PStatFree(&stat));
  lu->matsolve_iscalled = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  input:
    F:     numeric Cholesky factor
  output:
    nneg:  total number of negative pivots
    nzero: total number of zero pivots
    npos:  (global dimension of F) - nneg - nzero
*/
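/* Reached through MatGetInertia() only when the matrix was flagged symmetric or Hermitian (see
   MatLUFactorSymbolic_SuperLU_DIST() below); it requires factoring with -mat_superlu_dist_rowperm
   NOROWPERM, which the PetscCheck() below enforces */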
static PetscErrorCode MatGetInertia_SuperLU_DIST(Mat F, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)F->data;
  PetscScalar *diagU = NULL;
  PetscInt M, i, neg = 0, zero = 0, pos = 0;
  PetscReal r;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  PetscCheck(!lu->singleprecision, PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Not for -pc_precision single");
#endif
  PetscCheck(F->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Matrix factor F is not assembled");
  PetscCheck(lu->options.RowPerm == NOROWPERM, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Must set NOROWPERM");
  PetscCall(MatGetSize(F, &M, NULL));
  PetscCall(PetscMalloc1(M, &diagU));
  PetscCall(MatSuperluDistGetDiagU(F, diagU));
  for (i = 0; i < M; i++) {
#if defined(PETSC_USE_COMPLEX)
    r = PetscAbsReal(PetscImaginaryPart(diagU[i]));
    PetscCheck(r < 1000 * PETSC_MACHINE_EPSILON, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "diagU[%" PetscInt_FMT "]=%g + i %g is non-real", i, (double)PetscRealPart(diagU[i]), (double)PetscImaginaryPart(diagU[i]));
    r = PetscRealPart(diagU[i]);
#else
    r = diagU[i];
#endif
    if (r > 0) {
      pos++;
    } else if (r < 0) {
      neg++;
    } else zero++;
  }

  PetscCall(PetscFree(diagU));
  if (nneg) *nneg = neg;
  if (nzero) *nzero = zero;
  if (npos) *npos = pos;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)F->data;
  Mat Aloc;
  const PetscScalar *av;
  const PetscInt *ai = NULL, *aj = NULL;
  PetscInt nz, unused;
  int sinfo; /* SuperLU_DIST info flag is always an int even with long long indices */
  SuperLUStat_t stat;
  PetscReal *berr = NULL;
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  float *sberr = NULL;
#endif
  PetscBool ismpiaij, isseqaij, flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetLocalMat(A, MAT_INITIAL_MATRIX, &Aloc));
  else if (isseqaij) {
    PetscCall(PetscObjectReference((PetscObject)A));
    Aloc = A;
  } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)A)->type_name);

  PetscCall(MatGetRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &unused, &ai, &aj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "GetRowIJ failed");
  PetscCall(MatSeqAIJGetArrayRead(Aloc, &av));
  nz = ai[Aloc->rmap->n];

  /* Allocations for A_sup */
  if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:allocateA_dist", sallocateA_dist(Aloc->rmap->n, nz, &lu->sval, &lu->col, &lu->row));
    else
#endif
      PetscCallExternalVoid("SuperLU_DIST:allocateA_dist", allocateA_dist(Aloc->rmap->n, nz, CASTDOUBLECOMPLEXSTAR & lu->val, &lu->col, &lu->row));
  } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
    if (lu->FactPattern == SamePattern_SameRowPerm) {
      lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
    } else if (lu->FactPattern == SamePattern) {
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
      if (lu->use3d) {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
        if (lu->singleprecision) {
          PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->sLUstruct));
          PetscCallExternalVoid("SuperLU_DIST:SolveFinalize", sSolveFinalize(&lu->options, &lu->sSOLVEstruct));
        } else
#endif
        {
          PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->cmap->N, &lu->grid3d.grid2d, &lu->LUstruct));
          PetscCallExternalVoid("SuperLU_DIST:SolveFinalize", SolveFinalize(&lu->options, &lu->SOLVEstruct));
        }
      } else
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
        if (lu->singleprecision)
        PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->rmap->N, &lu->grid, &lu->sLUstruct));
      else
#endif
        PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->rmap->N, &lu->grid, &lu->LUstruct));
      lu->options.Fact = SamePattern;
    } else if (lu->FactPattern == DOFACT) {
      PetscCallExternalVoid("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist", Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
      if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", sDestroy_LU(A->rmap->N, &lu->grid, &lu->sLUstruct));
      else
#endif
        PetscCallExternalVoid("SuperLU_DIST:Destroy_LU", Destroy_LU(A->rmap->N, &lu->grid, &lu->LUstruct));
      lu->options.Fact = DOFACT;
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
      if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:allocateA_dist", sallocateA_dist(Aloc->rmap->n, nz, &lu->sval, &lu->col, &lu->row));
      else
#endif
        PetscCallExternalVoid("SuperLU_DIST:allocateA_dist", allocateA_dist(Aloc->rmap->n, nz, CASTDOUBLECOMPLEXSTAR & lu->val, &lu->col, &lu->row));
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "options.Fact must be one of SamePattern SamePattern_SameRowPerm DOFACT");
  }

  /* Copy AIJ matrix to superlu_dist matrix */
  PetscCall(PetscArraycpy(lu->row, ai, Aloc->rmap->n + 1));
  PetscCall(PetscArraycpy(lu->col, aj, nz));
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  if (lu->singleprecision)
    for (PetscInt i = 0; i < nz; i++) lu->sval[i] = PetscRealPart(av[i]); /* PetscRealPart() is a no-op to allow compiling with complex */
  else
#endif
    PetscCall(PetscArraycpy(lu->val, av, nz));
  PetscCall(MatRestoreRowIJ(Aloc, 0, PETSC_FALSE, PETSC_FALSE, &unused, &ai, &aj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "RestoreRowIJ failed");
  PetscCall(MatSeqAIJRestoreArrayRead(Aloc, &av));
  PetscCall(MatDestroy(&Aloc));

  /* Create and setup A_sup */
  if (lu->options.Fact == DOFACT) {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    if (lu->singleprecision)
      PetscCallExternalVoid("SuperLU_DIST:Create_CompRowLoc_Matrix_dist", sCreate_CompRowLoc_Matrix_dist(&lu->A_sup, A->rmap->N, A->cmap->N, nz, A->rmap->n, A->rmap->rstart, lu->sval, lu->col, lu->row, SLU_NR_loc, SLU_S, SLU_GE));
    else
#endif
      PetscCallExternalVoid("SuperLU_DIST:Create_CompRowLoc_Matrix_dist", Create_CompRowLoc_Matrix_dist(&lu->A_sup, A->rmap->N, A->cmap->N, nz, A->rmap->n, A->rmap->rstart, CASTDOUBLECOMPLEX lu->val, lu->col, lu->row, SLU_NR_loc, SLU, SLU_GE));
  }

  /* Factor the matrix. */
  PetscCallExternalVoid("SuperLU_DIST:PStatInit", PStatInit(&stat)); /* Initialize the statistics variables. */
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
  if (lu->use3d) {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:pgssvx3d", psgssvx3d(&lu->options, &lu->A_sup, &lu->sScalePermstruct, 0, (int)A->rmap->n, 0, &lu->grid3d, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &sinfo));
    else
#endif
      PetscCallExternalVoid("SuperLU_DIST:pgssvx3d", pgssvx3d(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, (int)A->rmap->n, 0, &lu->grid3d, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
  } else
#endif
  {
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    if (lu->singleprecision) PetscCallExternalVoid("SuperLU_DIST:pgssvx", psgssvx(&lu->options, &lu->A_sup, &lu->sScalePermstruct, 0, (int)A->rmap->n, 0, &lu->grid, &lu->sLUstruct, &lu->sSOLVEstruct, sberr, &stat, &sinfo));
    else
#endif
      PetscCallExternalVoid("SuperLU_DIST:pgssvx", pgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, (int)A->rmap->n, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
  }
  if (sinfo > 0) {
    PetscCheck(!A->erroriffailure, PETSC_COMM_SELF, PETSC_ERR_MAT_LU_ZRPVT, "Zero pivot in row %d", sinfo);
    if (sinfo <= lu->A_sup.ncol) {
      F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscInfo(F, "U(i,i) is exactly zero, i = %d\n", sinfo));
    } else if (sinfo > lu->A_sup.ncol) {
      /* sinfo is the number of bytes allocated when the memory allocation failure occurred, plus A->ncol */
      F->factorerrortype = MAT_FACTOR_OUTMEMORY;
      PetscCall(PetscInfo(F, "Number of bytes allocated when memory allocation fails %d\n", sinfo));
    }
  } else PetscCheck(sinfo >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "info = %d, argument in p*gssvx() had an illegal value", sinfo);

  if (lu->options.PrintStat) PetscCallExternalVoid("SuperLU_DIST:PStatPrint", PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  PetscCallExternalVoid("SuperLU_DIST:PStatFree", PStatFree(&stat));
  F->assembled = PETSC_TRUE;
  F->preallocated = PETSC_TRUE;
  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this function only */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Note the PETSc r and c permutations are ignored */
static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)F->data;
  PetscInt M = A->rmap->N, N = A->cmap->N, indx;
  PetscMPIInt size, iflg;
  PetscBool flg, set;
  const char *colperm[] = {"NATURAL", "MMD_AT_PLUS_A", "MMD_ATA", "METIS_AT_PLUS_A", "PARMETIS"};
  const char *rowperm[] = {"NOROWPERM", "LargeDiag_MC64", "LargeDiag_AWPM", "MY_PERMR"};
  const char *factPattern[] = {"SamePattern", "SamePattern_SameRowPerm", "DOFACT"};
  MPI_Comm comm;
  PetscSuperLU_DIST *context = NULL;
  PetscPrecision precision = PETSC_PRECISION_INVALID;

  PetscFunctionBegin;
  /* Set options to F */
  PetscCall(PetscObjectGetComm((PetscObject)F, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "SuperLU_DIST Options", "Mat");
  PetscCall(PetscOptionsBool("-mat_superlu_dist_equil", "Equilibrate matrix", "None", lu->options.Equil ? PETSC_TRUE : PETSC_FALSE, &flg, &set));
  if (set && !flg) lu->options.Equil = NO;

  PetscCall(PetscOptionsEnum("-pc_precision", "Precision used by SuperLU_DIST", "MATSOLVERSUPERLU_DIST", PetscPrecisionTypes, (PetscEnum)precision, (PetscEnum *)&precision, &flg));
  if (flg) {
    PetscCheck(precision == PETSC_PRECISION_SINGLE || precision == PETSC_PRECISION_DOUBLE, PetscObjectComm((PetscObject)A), PETSC_ERR_USER_INPUT, "-pc_precision only accepts single or double as option for SuperLU_DIST");
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
    lu->singleprecision = (PetscBool)(precision == PETSC_PRECISION_SINGLE); // true means single precision SuperLU_DIST is used from double precision PETSc, not merely that SuperLU_DIST itself runs in single
#endif
  }

  PetscCall(PetscOptionsEList("-mat_superlu_dist_rowperm", "Row permutation", "None", rowperm, 4, rowperm[1], &indx, &flg));
  if (flg) {
    switch (indx) {
    case 0:
      lu->options.RowPerm = NOROWPERM;
      break;
    case 1:
      lu->options.RowPerm = LargeDiag_MC64;
      break;
    case 2:
      lu->options.RowPerm = LargeDiag_AWPM;
      break;
    case 3:
      lu->options.RowPerm = MY_PERMR;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown row permutation");
    }
  }

  PetscCall(PetscOptionsEList("-mat_superlu_dist_colperm", "Column permutation", "None", colperm, 5, colperm[3], &indx, &flg));
  if (flg) {
    switch (indx) {
    case 0:
      lu->options.ColPerm = NATURAL;
      break;
    case 1:
      lu->options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      lu->options.ColPerm = MMD_ATA;
      break;
    case 3:
      lu->options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      lu->options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown column permutation");
    }
  }

  lu->options.ReplaceTinyPivot = NO;
  PetscCall(PetscOptionsBool("-mat_superlu_dist_replacetinypivot", "Replace tiny pivots", "None", lu->options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE, &flg, &set));
  if (set && flg) lu->options.ReplaceTinyPivot = YES;

  lu->options.ParSymbFact = NO;
  PetscCall(PetscOptionsBool("-mat_superlu_dist_parsymbfact", "Parallel symbolic factorization", "None", PETSC_FALSE, &flg, &set));
  if (set && flg && size > 1) {
#if defined(PETSC_HAVE_PARMETIS)
    lu->options.ParSymbFact = YES;
    lu->options.ColPerm = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "parsymbfact needs PARMETIS");
#endif
  }

  lu->FactPattern = SamePattern;
  PetscCall(PetscOptionsEList("-mat_superlu_dist_fact", "Sparsity pattern for repeated matrix factorization", "None", factPattern, 3, factPattern[0], &indx, &flg));
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    case 2:
      lu->FactPattern = DOFACT;
      break;
    }
  }

  lu->options.IterRefine = NOREFINE;
  PetscCall(PetscOptionsBool("-mat_superlu_dist_iterrefine", "Use iterative refinement", "None", lu->options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE, &flg, &set));
  if (set && flg) lu->options.IterRefine = SLU_DOUBLE;

  if (PetscLogPrintInfo) lu->options.PrintStat = YES;
  else lu->options.PrintStat = NO;
  PetscCall(PetscOptionsDeprecated("-mat_superlu_dist_statprint", "-mat_superlu_dist_printstat", "3.19", NULL));
  PetscCall(PetscOptionsBool("-mat_superlu_dist_printstat", "Print factorization information", "None", (PetscBool)lu->options.PrintStat, (PetscBool *)&lu->options.PrintStat, NULL));

#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(8, 0, 0)
  lu->options.superlu_acc_offload = 1;
  PetscCall(PetscOptionsBool("-mat_superlu_dist_gpuoffload", "Offload factorization onto the GPUs", "None", (PetscBool)lu->options.superlu_acc_offload, (PetscBool *)&lu->options.superlu_acc_offload, NULL));
#endif

  PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Superlu_dist_keyval, &context, &iflg));
  if (!iflg || context->busy) { /* no cached grid for this communicator, or the cached one is busy: process the grid-shape options and create a new grid */
    if (!iflg) {
      PetscCall(PetscNew(&context));
      context->busy = PETSC_TRUE;
      PetscCallMPI(MPI_Comm_dup(comm, &context->comm));
      PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Superlu_dist_keyval, context));
    } else {
      PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)A), &lu->comm_superlu));
    }

    /* Default number of process rows and columns */
    lu->nprow = (int_t)(0.5 + PetscSqrtReal((PetscReal)size));
    if (!lu->nprow) lu->nprow = 1;
    while (lu->nprow > 0) {
      lu->npcol = (int_t)(size / lu->nprow);
      if (size == lu->nprow * lu->npcol) break;
      lu->nprow--;
    }
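    /* the loop picks the most nearly square nprow x npcol factorization of size, e.g., 6 ranks give
       nprow = 2, npcol = 3; a prime number of ranks falls back to a 1 x size grid */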
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
    lu->use3d = PETSC_FALSE;
    lu->npdep = 1;
#endif

#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
    PetscCall(PetscOptionsBool("-mat_superlu_dist_3d", "Use SuperLU_DIST 3D distribution", "None", lu->use3d, &lu->use3d, NULL));
    if (lu->use3d) {
      PetscInt t;
      PetscCall(PetscOptionsInt("-mat_superlu_dist_d", "Number of z entries in processor partition", "None", lu->npdep, (PetscInt *)&lu->npdep, NULL));
      t = (PetscInt)PetscLog2Real((PetscReal)lu->npdep);
      PetscCheck(PetscPowInt(2, t) == lu->npdep, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "-mat_superlu_dist_d %lld must be a power of 2", (long long)lu->npdep);
      if (lu->npdep > 1) {
        lu->nprow = (int_t)(0.5 + PetscSqrtReal((PetscReal)(size / lu->npdep)));
        if (!lu->nprow) lu->nprow = 1;
        while (lu->nprow > 0) {
          lu->npcol = (int_t)(size / (lu->npdep * lu->nprow));
          if (size == lu->nprow * lu->npcol * lu->npdep) break;
          lu->nprow--;
        }
      }
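      /* e.g., 8 ranks with -mat_superlu_dist_d 2 yield a 2 x 2 x 2 (nprow x npcol x npdep) grid */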
    }
#endif
    PetscCall(PetscOptionsInt("-mat_superlu_dist_r", "Number of rows in processor partition", "None", lu->nprow, (PetscInt *)&lu->nprow, NULL));
    PetscCall(PetscOptionsInt("-mat_superlu_dist_c", "Number of columns in processor partition", "None", lu->npcol, (PetscInt *)&lu->npcol, NULL));
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
    PetscCheck(size == lu->nprow * lu->npcol * lu->npdep, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of processes %d must equal nprow %lld * npcol %lld * npdep %lld", size, (long long)lu->nprow, (long long)lu->npcol, (long long)lu->npdep);
#else
    PetscCheck(size == lu->nprow * lu->npcol, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of processes %d must equal nprow %lld * npcol %lld", size, (long long)lu->nprow, (long long)lu->npcol);
#endif
    /* end of processing the grid-shape options */

#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
    if (lu->use3d) {
      PetscCallExternalVoid("SuperLU_DIST:superlu_gridinit3d", superlu_gridinit3d(context ? context->comm : lu->comm_superlu, (int)lu->nprow, (int)lu->npcol, (int)lu->npdep, &lu->grid3d));
      if (context) {
        context->grid3d = lu->grid3d;
        context->use3d = lu->use3d;
      }
    } else {
#endif
      PetscCallExternalVoid("SuperLU_DIST:superlu_gridinit", superlu_gridinit(context ? context->comm : lu->comm_superlu, (int)lu->nprow, (int)lu->npcol, &lu->grid));
      if (context) context->grid = lu->grid;
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
    }
#endif
    PetscCall(PetscInfo(NULL, "Duplicating a communicator for SuperLU_DIST and calling superlu_gridinit()\n"));
    if (iflg) {
      PetscCall(PetscInfo(NULL, "Communicator attribute already in use so not saving communicator and SuperLU_DIST grid in communicator attribute\n"));
    } else {
      PetscCall(PetscInfo(NULL, "Storing communicator and SuperLU_DIST grid in communicator attribute\n"));
    }
  } else { /* (iflg && !context->busy) */
    PetscCall(PetscInfo(NULL, "Reusing communicator and superlu_gridinit() for SuperLU_DIST from communicator attribute.\n"));
    context->busy = PETSC_TRUE;
    lu->grid = context->grid;
  }
  PetscOptionsEnd();

  /* Initialize ScalePermstruct and LUstruct. */
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  if (lu->singleprecision) {
    PetscCallExternalVoid("SuperLU_DIST:ScalePermstructInit", sScalePermstructInit(M, N, &lu->sScalePermstruct));
    PetscCallExternalVoid("SuperLU_DIST:LUstructInit", sLUstructInit(N, &lu->sLUstruct));
  } else
#endif
  {
    PetscCallExternalVoid("SuperLU_DIST:ScalePermstructInit", ScalePermstructInit(M, N, &lu->ScalePermstruct));
    PetscCallExternalVoid("SuperLU_DIST:LUstructInit", LUstructInit(N, &lu->LUstruct));
  }
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve = MatSolve_SuperLU_DIST;
  F->ops->matsolve = MatMatSolve_SuperLU_DIST;
  F->ops->getinertia = NULL;

  if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) F->ops->getinertia = MatGetInertia_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCholeskyFactorSymbolic_SuperLU_DIST(Mat F, Mat A, IS r, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscCall(MatLUFactorSymbolic_SuperLU_DIST(F, A, r, r, info));
  F->ops->choleskyfactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorGetSolverType_aij_superlu_dist(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A, PetscViewer viewer)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST *)A->data;
  superlu_dist_options_t options;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(PETSC_SUCCESS);

  options = lu->options;
  PetscCall(PetscViewerASCIIPrintf(viewer, "SuperLU_DIST run parameters:\n"));
  /* the SuperLU 'IFMT' macro would be the natural format spec here, but it is applied
     inconsistently: for int64_t it is set to %d, so print through long long instead */
  PetscCall(PetscViewerASCIIPrintf(viewer, "  Process grid nprow %lld x npcol %lld \n", (long long)lu->nprow, (long long)lu->npcol));
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(9, 0, 0)
  if (lu->use3d) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using 3d decomposition with npdep %lld \n", (long long)lu->npdep));
#endif

  PetscCall(PetscViewerASCIIPrintf(viewer, "  Equilibrate matrix %s \n", PetscBools[options.Equil != NO]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  Replace tiny pivots %s \n", PetscBools[options.ReplaceTinyPivot != NO]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  Use iterative refinement %s \n", PetscBools[options.IterRefine == SLU_DOUBLE]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  Processors in row %lld col partition %lld \n", (long long)lu->nprow, (long long)lu->npcol));

  switch (options.RowPerm) {
  case NOROWPERM:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Row permutation NOROWPERM\n"));
    break;
  case LargeDiag_MC64:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Row permutation LargeDiag_MC64\n"));
    break;
  case LargeDiag_AWPM:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Row permutation LargeDiag_AWPM\n"));
    break;
  case MY_PERMR:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Row permutation MY_PERMR\n"));
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown row permutation");
  }

  switch (options.ColPerm) {
  case NATURAL:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Column permutation NATURAL\n"));
    break;
  case MMD_AT_PLUS_A:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Column permutation MMD_AT_PLUS_A\n"));
    break;
  case MMD_ATA:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Column permutation MMD_ATA\n"));
    break;
  /* Even though this is called METIS, the SuperLU_DIST code sets this by default if PARMETIS is defined, not METIS */
  case METIS_AT_PLUS_A:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Column permutation METIS_AT_PLUS_A\n"));
    break;
  case PARMETIS:
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Column permutation PARMETIS\n"));
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown column permutation");
  }

  PetscCall(PetscViewerASCIIPrintf(viewer, "  Parallel symbolic factorization %s \n", PetscBools[options.ParSymbFact != NO]));

  if (lu->FactPattern == SamePattern) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Repeated factorization SamePattern\n"));
  } else if (lu->FactPattern == SamePattern_SameRowPerm) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Repeated factorization SamePattern_SameRowPerm\n"));
  } else if (lu->FactPattern == DOFACT) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Repeated factorization DOFACT\n"));
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown factorization pattern");
#if defined(PETSC_HAVE_SUPERLU_DIST_SINGLE)
  if (lu->singleprecision) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using SuperLU_DIST in single precision\n"));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_SuperLU_DIST(Mat A, PetscViewer viewer)
{
  PetscBool isascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_SuperLU_DIST(A, viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A, MatFactorType ftype, Mat *F)
{
  Mat B;
  Mat_SuperLU_DIST *lu;
  PetscInt M = A->rmap->N, N = A->cmap->N;
  PetscMPIInt size;
  superlu_dist_options_t options;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, M, N));
  PetscCall(PetscStrallocpy("superlu_dist", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));
  B->ops->getinfo = MatGetInfo_External;
  B->ops->view = MatView_SuperLU_DIST;
  B->ops->destroy = MatDestroy_SuperLU_DIST;

  /* set_default_options_dist() sets the default input options to the following values:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag_MC64;
     options.ReplaceTinyPivot  = NO;
     options.IterRefine        = SLU_DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; (holds the communication pattern used by MatSolve() and MatMatSolve())
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
     options.superlu_acc_offload = 1;
     options.SymPattern        = NO;
  */
  set_default_options_dist(&options);

  B->trivialsymbolic = PETSC_TRUE;
  if (ftype == MAT_FACTOR_LU) {
    B->factortype = MAT_FACTOR_LU;
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  } else {
    B->factortype = MAT_FACTOR_CHOLESKY;
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SuperLU_DIST;
    options.SymPattern = YES;
  }

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERSUPERLU_DIST, &B->solvertype));

  PetscCall(PetscNew(&lu));
  B->data = lu;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));

  lu->options = options;
  lu->options.Fact = DOFACT;
  lu->matsolve_iscalled = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_aij_superlu_dist));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSuperluDistGetDiagU_C", MatSuperluDistGetDiagU_SuperLU_DIST));

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERSUPERLU_DIST, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_aij_superlu_dist));
  PetscCall(MatSolverTypeRegister(MATSOLVERSUPERLU_DIST, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_aij_superlu_dist));
  PetscCall(MatSolverTypeRegister(MATSOLVERSUPERLU_DIST, MATMPIAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_superlu_dist));
  PetscCall(MatSolverTypeRegister(MATSOLVERSUPERLU_DIST, MATSEQAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_aij_superlu_dist));
  if (Petsc_Superlu_dist_keyval == MPI_KEYVAL_INVALID) {
    PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, Petsc_Superlu_dist_keyval_DeleteFn, &Petsc_Superlu_dist_keyval, NULL));
    PetscCall(PetscRegisterFinalize(Petsc_Superlu_dist_keyval_free));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use `./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch` to have PETSc installed with SuperLU_DIST

  Use `-pc_type lu` `-pc_factor_mat_solver_type superlu_dist` to use this direct solver

  Works with `MATAIJ` matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_3d - use 3d partition, requires SuperLU_DIST 9.0.0 or later
. -mat_superlu_dist_d <n> - depth in 3d partition (valid only if `-mat_superlu_dist_3d` is provided)
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <NOROWPERM,LargeDiag_MC64,LargeDiag_AWPM,MY_PERMR> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern> - (choose one of) `SamePattern`, `SamePattern_SameRowPerm`, `DOFACT`
. -mat_superlu_dist_iterrefine - use iterative refinement
. -mat_superlu_dist_printstat - print factorization information
. -mat_superlu_dist_gpuoffload - offload factorization onto the GPUs, requires SuperLU_DIST 8.0.0 or later
- -pc_precision single - use SuperLU_DIST single precision with PETSc double precision

  Level: beginner

  Note:
  If PETSc was configured with `--with-cuda` then this solver will use the GPUs by default.
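
  Example Usage:
  A minimal sketch of selecting this solver programmatically (assumes an assembled `MATAIJ` matrix A and vectors b and x):
.vb
  KSP ksp;
  PC  pc;
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCLU));
  PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERSUPERLU_DIST));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, b, x));
.ve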

.seealso: [](ch_matrices), `Mat`, `PCLU`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
M*/