Actual source code: sbaijcholmod.c

  1: /*
  2:    Provides an interface to the CHOLMOD sparse solver available through SuiteSparse version 4.2.1

   When built with PETSC_USE_64BIT_INDICES this will use SuiteSparse_long as the
   integer type in CHOLMOD, otherwise it will use int. This means
   all integers in this file are simply declared as PetscInt. Also it means
   that one cannot use 64BIT_INDICES on 32-bit pointer systems [as SuiteSparse_long is 32-bit only]

  9: */

 11: #include <../src/mat/impls/sbaij/seq/sbaij.h>
 12: #include <../src/mat/impls/sbaij/seq/cholmod/cholmodimpl.h>

 14: /*
 15:    This is a terrible hack, but it allows the error handler to retain a context.
 16:    Note that this hack really cannot be made both reentrant and concurrent.
 17: */
 18: static Mat static_F;

 20: static void CholmodErrorHandler(int status, const char *file, int line, const char *message)
 21: {
 22:   PetscFunctionBegin;
 23:   if (status > CHOLMOD_OK) {
 24:     PetscCallVoid(PetscInfo(static_F, "CHOLMOD warning %d at %s:%d: %s\n", status, file, line, message));
 25:   } else if (status == CHOLMOD_OK) { /* Documentation says this can happen, but why? */
 26:     PetscCallVoid(PetscInfo(static_F, "CHOLMOD OK at %s:%d: %s\n", file, line, message));
 27:   } else {
 28:     PetscCallVoid(PetscErrorPrintf("CHOLMOD error %d at %s:%d: %s\n", status, file, line, message));
 29:   }
 30:   PetscFunctionReturnVoid();
 31: }

/* Expose a double member of cholmod_common (named `name`, accessible via the local
   variable `c`) as the options-database key -mat_cholmod_<name>. */
#define CHOLMOD_OPTION_DOUBLE(name, help) \
  do { \
    PetscReal tmp = (PetscReal)c->name; \
    PetscCall(PetscOptionsReal("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL)); \
    c->name = (double)tmp; \
  } while (0)

/* Expose an int member of cholmod_common (named `name`, accessible via the local
   variable `c`) as the options-database key -mat_cholmod_<name>. */
#define CHOLMOD_OPTION_INT(name, help) \
  do { \
    PetscInt tmp = (PetscInt)c->name; \
    PetscCall(PetscOptionsInt("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL)); \
    c->name = (int)tmp; \
  } while (0)

 47: #define CHOLMOD_OPTION_SIZE_T(name, help) \
 48:   do { \
 49:     PetscReal tmp = (PetscInt)c->name; \
 50:     PetscCall(PetscOptionsBoundedReal("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL, 0.0)); \
 51:     c->name = (size_t)tmp; \
 52:   } while (0)

/* Expose an int member of cholmod_common used as a flag (named `name`, accessible
   via the local variable `c`) as the boolean options-database key -mat_cholmod_<name>.
   The !! normalizes any nonzero value to 1 before converting to PetscBool. */
#define CHOLMOD_OPTION_BOOL(name, help) \
  do { \
    PetscBool tmp = (PetscBool)!!c->name; \
    PetscCall(PetscOptionsBool("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL)); \
    c->name = (int)tmp; \
  } while (0)

 61: static PetscErrorCode CholmodSetOptions(Mat F)
 62: {
 63:   Mat_CHOLMOD    *chol = (Mat_CHOLMOD *)F->data;
 64:   cholmod_common *c    = chol->common;
 65:   PetscBool       flg;

 67:   PetscFunctionBegin;
 68:   PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "CHOLMOD Options", "Mat");
 69:   CHOLMOD_OPTION_INT(nmethods, "Number of different ordering methods to try");

 71: #if defined(PETSC_USE_SUITESPARSE_GPU)
 72:   c->useGPU = 1;
 73:   CHOLMOD_OPTION_INT(useGPU, "Use GPU for BLAS 1, otherwise 0");
 74:   CHOLMOD_OPTION_SIZE_T(maxGpuMemBytes, "Maximum memory to allocate on the GPU");
 75:   CHOLMOD_OPTION_DOUBLE(maxGpuMemFraction, "Fraction of available GPU memory to allocate");
 76: #endif

 78:   /* CHOLMOD handles first-time packing and refactor-packing separately, but we usually want them to be the same. */
 79:   chol->pack = (PetscBool)c->final_pack;
 80:   PetscCall(PetscOptionsBool("-mat_cholmod_pack", "Pack factors after factorization [disable for frequent repeat factorization]", "None", chol->pack, &chol->pack, NULL));
 81:   c->final_pack = (int)chol->pack;

 83:   CHOLMOD_OPTION_DOUBLE(dbound, "Minimum absolute value of diagonal entries of D");
 84:   CHOLMOD_OPTION_DOUBLE(grow0, "Global growth ratio when factors are modified");
 85:   CHOLMOD_OPTION_DOUBLE(grow1, "Column growth ratio when factors are modified");
 86:   CHOLMOD_OPTION_SIZE_T(grow2, "Affine column growth constant when factors are modified");
 87:   CHOLMOD_OPTION_SIZE_T(maxrank, "Max rank of update, larger values are faster but use more memory [2,4,8]");
 88:   {
 89:     static const char *const list[] = {"SIMPLICIAL", "AUTO", "SUPERNODAL", "MatCholmodFactorType", "MAT_CHOLMOD_FACTOR_", 0};
 90:     PetscCall(PetscOptionsEnum("-mat_cholmod_factor", "Factorization method", "None", list, (PetscEnum)c->supernodal, (PetscEnum *)&c->supernodal, NULL));
 91:   }
 92:   if (c->supernodal) CHOLMOD_OPTION_DOUBLE(supernodal_switch, "flop/nnz_L threshold for switching to supernodal factorization");
 93:   CHOLMOD_OPTION_BOOL(final_asis, "Leave factors \"as is\"");
 94:   CHOLMOD_OPTION_BOOL(final_pack, "Pack the columns when finished (use FALSE if the factors will be updated later)");
 95:   if (!c->final_asis) {
 96:     CHOLMOD_OPTION_BOOL(final_super, "Leave supernodal factors instead of converting to simplicial");
 97:     CHOLMOD_OPTION_BOOL(final_ll, "Turn LDL' factorization into LL'");
 98:     CHOLMOD_OPTION_BOOL(final_monotonic, "Ensure columns are monotonic when done");
 99:     CHOLMOD_OPTION_BOOL(final_resymbol, "Remove numerically zero values resulting from relaxed supernodal amalgamation");
100:   }
101:   {
102:     PetscReal tmp[] = {(PetscReal)c->zrelax[0], (PetscReal)c->zrelax[1], (PetscReal)c->zrelax[2]};
103:     PetscInt  n     = 3;
104:     PetscCall(PetscOptionsRealArray("-mat_cholmod_zrelax", "3 real supernodal relaxed amalgamation parameters", "None", tmp, &n, &flg));
105:     PetscCheck(!flg || n == 3, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "must provide exactly 3 parameters to -mat_cholmod_zrelax");
106:     if (flg)
107:       while (n--) c->zrelax[n] = (double)tmp[n];
108:   }
109:   {
110:     PetscInt n, tmp[] = {(PetscInt)c->nrelax[0], (PetscInt)c->nrelax[1], (PetscInt)c->nrelax[2]};
111:     PetscCall(PetscOptionsIntArray("-mat_cholmod_nrelax", "3 size_t supernodal relaxed amalgamation parameters", "None", tmp, &n, &flg));
112:     PetscCheck(!flg || n == 3, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "must provide exactly 3 parameters to -mat_cholmod_nrelax");
113:     if (flg)
114:       while (n--) c->nrelax[n] = (size_t)tmp[n];
115:   }
116:   CHOLMOD_OPTION_BOOL(prefer_upper, "Work with upper triangular form [faster when using fill-reducing ordering, slower in natural ordering]");
117:   CHOLMOD_OPTION_BOOL(default_nesdis, "Use NESDIS instead of METIS for nested dissection");
118:   CHOLMOD_OPTION_INT(print, "Verbosity level");
119:   PetscOptionsEnd();
120:   PetscFunctionReturn(PETSC_SUCCESS);
121: }

123: PetscErrorCode CholmodStart(Mat F)
124: {
125:   Mat_CHOLMOD    *chol = (Mat_CHOLMOD *)F->data;
126:   cholmod_common *c;

128:   PetscFunctionBegin;
129:   if (chol->common) PetscFunctionReturn(PETSC_SUCCESS);
130:   PetscCall(PetscMalloc1(1, &chol->common));
131:   PetscCallExternal(!cholmod_X_start, chol->common);

133:   c                = chol->common;
134:   c->error_handler = CholmodErrorHandler;
135:   PetscFunctionReturn(PETSC_SUCCESS);
136: }

/* Fill a cholmod_sparse view C of the SeqSBAIJ matrix A without copying the
   structure arrays. On return *aijalloc/*valloc tell the caller whether the
   structure/value arrays were freshly allocated (and must be freed by it). */
static PetscErrorCode MatWrapCholmod_seqsbaij(Mat A, PetscBool values, cholmod_sparse *C, PetscBool *aijalloc, PetscBool *valloc)
{
  Mat_SeqSBAIJ *sbaij    = (Mat_SeqSBAIJ *)A->data;
  PetscBool     vallocin = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscMemzero(C, sizeof(*C)));
  /* CHOLMOD uses column alignment, SBAIJ stores the upper factor, so we pass it on as a lower factor, swapping the meaning of row and column */
  C->nrow  = (size_t)A->cmap->n;
  C->ncol  = (size_t)A->rmap->n;
  C->nzmax = (size_t)sbaij->maxnz;
  C->p     = sbaij->i; /* row pointers of SBAIJ serve as CHOLMOD column pointers */
  C->i     = sbaij->j;
  if (values) {
#if defined(PETSC_USE_COMPLEX)
    /* we need to pass CHOLMOD the conjugate matrix */
    PetscScalar *v;
    PetscInt     i;

    PetscCall(PetscMalloc1(sbaij->maxnz, &v));
    for (i = 0; i < sbaij->maxnz; i++) v[i] = PetscConj(sbaij->a[i]);
    C->x     = v;
    vallocin = PETSC_TRUE; /* caller must free this conjugated copy */
#else
    C->x = sbaij->a; /* real case: share the SBAIJ value array directly */
#endif
  }
  C->stype  = -1; /* symmetric, lower triangle stored (see row/column swap above) */
  C->itype  = CHOLMOD_INT_TYPE;
  C->xtype  = values ? CHOLMOD_SCALAR_TYPE : CHOLMOD_PATTERN;
  C->dtype  = CHOLMOD_DOUBLE;
  C->sorted = 1;
  C->packed = 1;
  *aijalloc = PETSC_FALSE; /* p and i always alias SBAIJ storage here */
  *valloc   = vallocin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

176: #define GET_ARRAY_READ  0
177: #define GET_ARRAY_WRITE 1

179: PetscErrorCode VecWrapCholmod(Vec X, PetscInt rw, cholmod_dense *Y)
180: {
181:   PetscScalar *x;
182:   PetscInt     n;

184:   PetscFunctionBegin;
185:   PetscCall(PetscMemzero(Y, sizeof(*Y)));
186:   switch (rw) {
187:   case GET_ARRAY_READ:
188:     PetscCall(VecGetArrayRead(X, (const PetscScalar **)&x));
189:     break;
190:   case GET_ARRAY_WRITE:
191:     PetscCall(VecGetArrayWrite(X, &x));
192:     break;
193:   default:
194:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
195:     break;
196:   }
197:   PetscCall(VecGetSize(X, &n));

199:   Y->x     = x;
200:   Y->nrow  = n;
201:   Y->ncol  = 1;
202:   Y->nzmax = n;
203:   Y->d     = n;
204:   Y->xtype = CHOLMOD_SCALAR_TYPE;
205:   Y->dtype = CHOLMOD_DOUBLE;
206:   PetscFunctionReturn(PETSC_SUCCESS);
207: }

209: PetscErrorCode VecUnWrapCholmod(Vec X, PetscInt rw, cholmod_dense *Y)
210: {
211:   PetscFunctionBegin;
212:   switch (rw) {
213:   case GET_ARRAY_READ:
214:     PetscCall(VecRestoreArrayRead(X, (const PetscScalar **)&Y->x));
215:     break;
216:   case GET_ARRAY_WRITE:
217:     PetscCall(VecRestoreArrayWrite(X, (PetscScalar **)&Y->x));
218:     break;
219:   default:
220:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
221:     break;
222:   }
223:   PetscFunctionReturn(PETSC_SUCCESS);
224: }

226: PetscErrorCode MatDenseWrapCholmod(Mat X, PetscInt rw, cholmod_dense *Y)
227: {
228:   PetscScalar *x;
229:   PetscInt     m, n, lda;

231:   PetscFunctionBegin;
232:   PetscCall(PetscMemzero(Y, sizeof(*Y)));
233:   switch (rw) {
234:   case GET_ARRAY_READ:
235:     PetscCall(MatDenseGetArrayRead(X, (const PetscScalar **)&x));
236:     break;
237:   case GET_ARRAY_WRITE:
238:     PetscCall(MatDenseGetArrayWrite(X, &x));
239:     break;
240:   default:
241:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
242:     break;
243:   }
244:   PetscCall(MatDenseGetLDA(X, &lda));
245:   PetscCall(MatGetLocalSize(X, &m, &n));

247:   Y->x     = x;
248:   Y->nrow  = m;
249:   Y->ncol  = n;
250:   Y->nzmax = lda * n;
251:   Y->d     = lda;
252:   Y->xtype = CHOLMOD_SCALAR_TYPE;
253:   Y->dtype = CHOLMOD_DOUBLE;
254:   PetscFunctionReturn(PETSC_SUCCESS);
255: }

257: PetscErrorCode MatDenseUnWrapCholmod(Mat X, PetscInt rw, cholmod_dense *Y)
258: {
259:   PetscFunctionBegin;
260:   switch (rw) {
261:   case GET_ARRAY_READ:
262:     PetscCall(MatDenseRestoreArrayRead(X, (const PetscScalar **)&Y->x));
263:     break;
264:   case GET_ARRAY_WRITE:
265:     /* we don't have MatDenseRestoreArrayWrite */
266:     PetscCall(MatDenseRestoreArray(X, (PetscScalar **)&Y->x));
267:     break;
268:   default:
269:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
270:     break;
271:   }
272:   PetscFunctionReturn(PETSC_SUCCESS);
273: }

/* Release all CHOLMOD-owned resources attached to the factor matrix F:
   the SPQR factorization (if any), the Cholesky factor, the cholmod_common
   workspace (finished with the int or long variant matching its itype),
   and the Mat_CHOLMOD context itself, then detach the composed methods. */
PETSC_INTERN PetscErrorCode MatDestroy_CHOLMOD(Mat F)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;

  PetscFunctionBegin;
  if (chol->spqrfact) PetscCallExternal(!SuiteSparseQR_C_free, &chol->spqrfact, chol->common);
  if (chol->factor) PetscCallExternal(!cholmod_X_free_factor, &chol->factor, chol->common);
  /* cholmod_finish/cholmod_l_finish must match the integer flavor the common was started with */
  if (chol->common->itype == CHOLMOD_INT) PetscCallExternal(!cholmod_finish, chol->common);
  else PetscCallExternal(!cholmod_l_finish, chol->common);
  PetscCall(PetscFree(chol->common));
  PetscCall(PetscFree(chol->matrix));
  PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatQRFactorSymbolic_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatQRFactorNumeric_C", NULL));
  PetscCall(PetscFree(F->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

293: static PetscErrorCode MatSolve_CHOLMOD(Mat, Vec, Vec);
294: static PetscErrorCode MatMatSolve_CHOLMOD(Mat, Mat, Mat);

296: /*static const char *const CholmodOrderingMethods[] = {"User","AMD","METIS","NESDIS(default)","Natural","NESDIS(small=20000)","NESDIS(small=4,no constrained)","NESDIS()"};*/

/* Print the CHOLMOD run parameters and post-factorization statistics stored in
   cholmod_common. Skipped until a numeric factorization has installed
   MatSolve_CHOLMOD (i.e. before the factor exists there is nothing to report). */
static PetscErrorCode MatView_Info_CHOLMOD(Mat F, PetscViewer viewer)
{
  Mat_CHOLMOD          *chol = (Mat_CHOLMOD *)F->data;
  const cholmod_common *c    = chol->common;
  PetscInt              i;

  PetscFunctionBegin;
  if (F->ops->solve != MatSolve_CHOLMOD) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscViewerASCIIPrintf(viewer, "CHOLMOD run parameters:\n"));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Pack factors after symbolic factorization: %s\n", chol->pack ? "TRUE" : "FALSE"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.dbound            %g  (Smallest absolute value of diagonal entries of D)\n", c->dbound));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.grow0             %g\n", c->grow0));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.grow1             %g\n", c->grow1));
  /* size_t members are cast to unsigned for portable printing */
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.grow2             %u\n", (unsigned)c->grow2));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.maxrank           %u\n", (unsigned)c->maxrank));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.supernodal_switch %g\n", c->supernodal_switch));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.supernodal        %d\n", c->supernodal));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_asis        %d\n", c->final_asis));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_super       %d\n", c->final_super));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_ll          %d\n", c->final_ll));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_pack        %d\n", c->final_pack));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_monotonic   %d\n", c->final_monotonic));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_resymbol    %d\n", c->final_resymbol));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.zrelax            [%g,%g,%g]\n", c->zrelax[0], c->zrelax[1], c->zrelax[2]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.nrelax            [%u,%u,%u]\n", (unsigned)c->nrelax[0], (unsigned)c->nrelax[1], (unsigned)c->nrelax[2]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.prefer_upper      %d\n", c->prefer_upper));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.print             %d\n", c->print));
  /* Per-method ordering statistics; c->selected marks the method that was used */
  for (i = 0; i < c->nmethods; i++) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "Ordering method %" PetscInt_FMT "%s:\n", i, i == c->selected ? " [SELECTED]" : ""));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  lnz %g, fl %g, prune_dense %g, prune_dense2 %g\n", c->method[i].lnz, c->method[i].fl, c->method[i].prune_dense, c->method[i].prune_dense2));
  }
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.postorder         %d\n", c->postorder));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.default_nesdis    %d (use NESDIS instead of METIS for nested dissection)\n", c->default_nesdis));
  /* Statistics */
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.fl                %g (flop count from most recent analysis)\n", c->fl));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.lnz               %g (fundamental nz in L)\n", c->lnz));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.anz               %g\n", c->anz));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.modfl             %g (flop count from most recent update)\n", c->modfl));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.malloc_count      %g (number of live objects)\n", (double)c->malloc_count));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.memory_usage      %g (peak memory usage in bytes)\n", (double)c->memory_usage));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.memory_inuse      %g (current memory usage in bytes)\n", (double)c->memory_inuse));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.nrealloc_col      %g (number of column reallocations)\n", c->nrealloc_col));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.nrealloc_factor   %g (number of factor reallocations due to column reallocations)\n", c->nrealloc_factor));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.ndbounds_hit      %g (number of times diagonal was modified by dbound)\n", c->ndbounds_hit));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.rowfacfl          %g (number of flops in last call to cholmod_rowfac)\n", c->rowfacfl));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.aatfl             %g (number of flops to compute A(:,f)*A(:,f)')\n", c->aatfl));
#if defined(PETSC_USE_SUITESPARSE_GPU)
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.useGPU            %d\n", c->useGPU));
#endif
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

352: PETSC_INTERN PetscErrorCode MatView_CHOLMOD(Mat F, PetscViewer viewer)
353: {
354:   PetscBool         isascii;
355:   PetscViewerFormat format;

357:   PetscFunctionBegin;
358:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
359:   if (isascii) {
360:     PetscCall(PetscViewerGetFormat(viewer, &format));
361:     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_CHOLMOD(F, viewer));
362:   }
363:   PetscFunctionReturn(PETSC_SUCCESS);
364: }

/* Solve A x = b using the CHOLMOD factorization held in F, wrapping the PETSc
   vectors as cholmod_dense without copying. Uses cholmod_solve2, which returns
   internal workspaces Y/E that must be freed here on every call. */
static PetscErrorCode MatSolve_CHOLMOD(Mat F, Vec B, Vec X)
{
  Mat_CHOLMOD  *chol = (Mat_CHOLMOD *)F->data;
  cholmod_dense cholB, cholX, *X_handle, *Y_handle = NULL, *E_handle = NULL;

  PetscFunctionBegin;
  /* Empty system: nothing to solve */
  if (!F->rmap->n) PetscFunctionReturn(PETSC_SUCCESS);
  static_F = F; /* give the error handler its (non-reentrant) context */
  PetscCall(VecWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(VecWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  X_handle = &cholX; /* solve2 writes the solution in place through this handle */
  PetscCallExternal(!cholmod_X_solve2, CHOLMOD_A, chol->factor, &cholB, NULL, &X_handle, NULL, &Y_handle, &E_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &Y_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &E_handle, chol->common);
  PetscCall(VecUnWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(VecUnWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  /* Two triangular solves: roughly 4 flops per nonzero of L */
  PetscCall(PetscLogFlops(4.0 * chol->common->lnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Solve A X = B for dense right-hand-side matrix B, wrapping both dense PETSc
   matrices as cholmod_dense without copying. Mirrors MatSolve_CHOLMOD.
   NOTE(review): unlike MatSolve_CHOLMOD, there is no early return for an
   empty factor here — presumably solve2 handles 0-row input; verify. */
static PetscErrorCode MatMatSolve_CHOLMOD(Mat F, Mat B, Mat X)
{
  Mat_CHOLMOD  *chol = (Mat_CHOLMOD *)F->data;
  cholmod_dense cholB, cholX, *X_handle, *Y_handle = NULL, *E_handle = NULL;

  PetscFunctionBegin;
  static_F = F; /* context for the error handler; not reentrant */
  PetscCall(MatDenseWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(MatDenseWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  X_handle = &cholX;
  PetscCallExternal(!cholmod_X_solve2, CHOLMOD_A, chol->factor, &cholB, NULL, &X_handle, NULL, &Y_handle, &E_handle, chol->common);
  /* Free the workspaces solve2 allocated for this call */
  PetscCallExternal(!cholmod_X_free_dense, &Y_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &E_handle, chol->common);
  PetscCall(MatDenseUnWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(MatDenseUnWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  /* ~4 flops per nonzero of L for each of the ncol right-hand sides */
  PetscCall(PetscLogFlops(4.0 * B->cmap->n * chol->common->lnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Numeric Cholesky factorization: wrap A (with values) and run cholmod_factorize
   into the symbolic factor created earlier. Installs the solve methods on
   success so that MatView_Info_CHOLMOD knows the factor is ready. */
static PetscErrorCode MatCholeskyFactorNumeric_CHOLMOD(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_CHOLMOD   *chol = (Mat_CHOLMOD *)F->data;
  cholmod_sparse cholA;
  PetscBool      aijalloc, valloc;
  int            err;

  PetscFunctionBegin;
  if (!A->rmap->n) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall((*chol->Wrap)(A, PETSC_TRUE, &cholA, &aijalloc, &valloc));
  static_F = F; /* context for the error handler; not reentrant */
  err      = !cholmod_X_factorize(&cholA, chol->factor, chol->common);
  PetscCheck(!err, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD factorization failed with status %d", chol->common->status);
  /* factorize can "succeed" yet flag an indefinite matrix; report the failing column */
  PetscCheck(chol->common->status != CHOLMOD_NOT_POSDEF, PetscObjectComm((PetscObject)F), PETSC_ERR_MAT_CH_ZRPVT, "CHOLMOD detected that the matrix is not positive definite, failure at column %u", (unsigned)chol->factor->minor);

  PetscCall(PetscLogFlops(chol->common->fl));
  /* Free any temporary arrays the Wrap callback allocated */
  if (aijalloc) PetscCall(PetscFree2(cholA.p, cholA.i));
  if (valloc) PetscCall(PetscFree(cholA.x));
#if defined(PETSC_USE_SUITESPARSE_GPU)
  PetscCall(PetscLogGpuTimeAdd(chol->common->CHOLMOD_GPU_GEMM_TIME + chol->common->CHOLMOD_GPU_SYRK_TIME + chol->common->CHOLMOD_GPU_TRSM_TIME + chol->common->CHOLMOD_GPU_POTRF_TIME));
#endif

  F->ops->solve             = MatSolve_CHOLMOD;
  F->ops->solvetranspose    = MatSolve_CHOLMOD; /* A is symmetric/Hermitian, so solve == solvetranspose */
  F->ops->matsolve          = MatMatSolve_CHOLMOD;
  F->ops->matsolvetranspose = MatMatSolve_CHOLMOD;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic Cholesky factorization: analyze the pattern of A (no values) with a
   user-supplied permutation (perm), CHOLMOD's internal ordering, or — when a
   factor already exists — a resymbol on the existing factor. */
PETSC_INTERN PetscErrorCode MatCholeskyFactorSymbolic_CHOLMOD(Mat F, Mat A, IS perm, const MatFactorInfo *info)
{
  Mat_CHOLMOD   *chol = (Mat_CHOLMOD *)F->data;
  int            err;
  cholmod_sparse cholA;
  PetscBool      aijalloc, valloc;
  PetscInt      *fset  = 0; /* no column subset: factor all of A */
  size_t         fsize = 0;

  PetscFunctionBegin;
  F->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_CHOLMOD;
  if (!A->rmap->n) PetscFunctionReturn(PETSC_SUCCESS);
  /* Set options to F */
  PetscCall(CholmodSetOptions(F));

  PetscCall((*chol->Wrap)(A, PETSC_FALSE, &cholA, &aijalloc, &valloc));
  static_F = F; /* context for the error handler; not reentrant */
  if (chol->factor) {
    /* Re-do symbolic analysis on the existing factor (pattern changed, ordering kept) */
    err = !cholmod_X_resymbol(&cholA, fset, fsize, (int)chol->pack, chol->factor, chol->common);
    PetscCheck(!err, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed with status %d", chol->common->status);
  } else if (perm) {
    /* Analyze using the ordering PETSc supplied */
    const PetscInt *ip;
    PetscCall(ISGetIndices(perm, &ip));
    chol->factor = cholmod_X_analyze_p(&cholA, (PetscInt *)ip, fset, fsize, chol->common);
    PetscCheck(chol->factor, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed using PETSc ordering with status %d", chol->common->status);
    PetscCall(ISRestoreIndices(perm, &ip));
  } else {
    /* Let CHOLMOD choose the ordering */
    chol->factor = cholmod_X_analyze(&cholA, chol->common);
    PetscCheck(chol->factor, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed using internal ordering with status %d", chol->common->status);
  }

  /* Free any temporary arrays the Wrap callback allocated */
  if (aijalloc) PetscCall(PetscFree2(cholA.p, cholA.i));
  if (valloc) PetscCall(PetscFree(cholA.x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Composed-method callback: report MATSOLVERCHOLMOD as this factor's solver type. */
static PetscErrorCode MatFactorGetSolverType_seqsbaij_cholmod(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERCHOLMOD;
  PetscFunctionReturn(PETSC_SUCCESS);
}

477: PETSC_INTERN PetscErrorCode MatGetInfo_CHOLMOD(Mat F, MatInfoType flag, MatInfo *info)
478: {
479:   Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;

481:   PetscFunctionBegin;
482:   info->block_size        = 1.0;
483:   info->nz_allocated      = chol->common->lnz;
484:   info->nz_used           = chol->common->lnz;
485:   info->nz_unneeded       = 0.0;
486:   info->assemblies        = 0.0;
487:   info->mallocs           = 0.0;
488:   info->memory            = chol->common->memory_inuse;
489:   info->fill_ratio_given  = 0;
490:   info->fill_ratio_needed = 0;
491:   info->factor_mallocs    = chol->common->malloc_count;
492:   PetscFunctionReturn(PETSC_SUCCESS);
493: }

495: /*MC
496:   MATSOLVERCHOLMOD

498:   A matrix type providing direct solvers (Cholesky) for sequential matrices
499:   via the external package CHOLMOD.

501:   Use `./configure --download-suitesparse` to install PETSc to use CHOLMOD

503:   Use `-pc_type cholesky` `-pc_factor_mat_solver_type cholmod` to use this direct solver

505:   Consult CHOLMOD documentation for more information about the common parameters
506:   which correspond to the options database keys below.

508:   Options Database Keys:
509: + -mat_cholmod_dbound 0             - Minimum absolute value of diagonal entries of D (None)
510: . -mat_cholmod_grow0 1.2            - Global growth ratio when factors are modified (None)
511: . -mat_cholmod_grow1 1.2            - Column growth ratio when factors are modified (None)
512: . -mat_cholmod_grow2 5              - Affine column growth constant when factors are modified (None)
513: . -mat_cholmod_maxrank 8            - Max rank of update, larger values are faster but use more memory [2,4,8] (None)
514: . -mat_cholmod_factor AUTO          - (choose one of) `SIMPLICIAL`, `AUTO`, `SUPERNODAL`
515: . -mat_cholmod_supernodal_switch 40 - flop/nnz_L threshold for switching to supernodal factorization (None)
516: . -mat_cholmod_final_asis TRUE      - Leave factors "as is" (None)
517: . -mat_cholmod_final_pack TRUE      - Pack the columns when finished (use FALSE if the factors will be updated later) (None)
518: . -mat_cholmod_zrelax 0.8           - 3 real supernodal relaxed amalgamation parameters (None)
519: . -mat_cholmod_nrelax 4             - 3 size_t supernodal relaxed amalgamation parameters (None)
520: . -mat_cholmod_prefer_upper TRUE    - Work with upper triangular form (faster when using fill-reducing ordering, slower in natural ordering) (None)
521: . -mat_cholmod_print 3              - Verbosity level (None)
522: - -mat_ordering_type internal       - Use the ordering provided by Cholmod

524:    Level: beginner

526:    Note:
527:    CHOLMOD is part of SuiteSparse <http://faculty.cse.tamu.edu/davis/suitesparse.html>

529: .seealso: [](ch_matrices), `Mat`, `PCCHOLESKY`, `PCFactorSetMatSolverType()`, `MatSolverType`
530: M*/

/* Factory routine: create a Cholesky factor matrix F of type "cholmod" for the
   SeqSBAIJ matrix A. Returns *F == NULL (with success) when CHOLMOD cannot
   handle A (block size != 1, or non-Hermitian in complex builds), which tells
   the caller to try another solver. */
PETSC_INTERN PetscErrorCode MatGetFactor_seqsbaij_cholmod(Mat A, MatFactorType ftype, Mat *F)
{
  Mat          B;
  Mat_CHOLMOD *chol;
  PetscInt     m = A->rmap->n, n = A->cmap->n, bs;

  PetscFunctionBegin;
  PetscCall(MatGetBlockSize(A, &bs));
  *F = NULL;
  if (bs != 1) {
    PetscCall(PetscInfo(A, "CHOLMOD only supports block size=1.\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (PetscDefined(USE_COMPLEX) && A->hermitian != PETSC_BOOL3_TRUE) {
    PetscCall(PetscInfo(A, "Only for Hermitian matrices.\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Create the factorization matrix F */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, PETSC_DECIDE, PETSC_DECIDE, m, n));
  PetscCall(PetscStrallocpy("cholmod", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));
  PetscCall(PetscNew(&chol));

  /* Wrap callback converts the SBAIJ matrix to CHOLMOD's sparse format */
  chol->Wrap = MatWrapCholmod_seqsbaij;
  B->data    = chol;

  B->ops->getinfo                = MatGetInfo_CHOLMOD;
  B->ops->view                   = MatView_CHOLMOD;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_CHOLMOD;
  B->ops->destroy                = MatDestroy_CHOLMOD;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_seqsbaij_cholmod));
  B->factortype   = MAT_FACTOR_CHOLESKY;
  B->assembled    = PETSC_TRUE;
  B->preallocated = PETSC_TRUE;

  /* Allocate and initialize the cholmod_common workspace */
  PetscCall(CholmodStart(B));

  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERCHOLMOD, &B->solvertype));
  B->canuseordering = PETSC_TRUE;
  PetscCall(PetscStrallocpy(MATORDERINGEXTERNAL, (char **)&B->preferredordering[MAT_FACTOR_CHOLESKY]));
  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}