Actual source code: ex52.c

  1: static char help[] = "Solves a linear system in parallel with KSP. Modified from ex2.c \n\
  2:                       Illustrates how to use the external packages MUMPS, SuperLU and STRUMPACK \n\
  3: Input parameters include:\n\
  4:   -random_exact_sol : use a random exact solution vector\n\
  5:   -view_exact_sol   : write exact solution vector to stdout\n\
  6:   -m <mesh_x>       : number of mesh points in x-direction\n\
  7:   -n <mesh_y>       : number of mesh points in y-direction\n\n";

  9: #include <petscksp.h>

 11: #if defined(PETSC_HAVE_MUMPS)
 12: /* Subroutine contributed by Varun Hiremath */
 13: PetscErrorCode printMumpsMemoryInfo(Mat F)
 14: {
 15:   PetscInt maxMem, sumMem;

 17:   PetscFunctionBeginUser;
 18:   PetscCall(MatMumpsGetInfog(F, 16, &maxMem));
 19:   PetscCall(MatMumpsGetInfog(F, 17, &sumMem));
 20:   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n MUMPS INFOG(16) :: Max memory in MB = %" PetscInt_FMT, maxMem));
 21:   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n MUMPS INFOG(17) :: Sum memory in MB = %" PetscInt_FMT "\n", sumMem));
 22:   PetscFunctionReturn(PETSC_SUCCESS);
 23: }
 24: #endif

 26: int main(int argc, char **args)
 27: {
 28:   Vec         x, b, u; /* approx solution, RHS, exact solution */
 29:   Mat         A, F;
 30:   KSP         ksp; /* linear solver context */
 31:   PC          pc;
 32:   PetscRandom rctx; /* random number generator context */
 33:   PetscReal   norm; /* norm of solution error */
 34:   PetscInt    i, j, Ii, J, Istart, Iend, m = 8, n = 7, its;
 35:   PetscBool   flg = PETSC_FALSE, flg_ilu = PETSC_FALSE, flg_ch = PETSC_FALSE;
 36: #if defined(PETSC_HAVE_MUMPS)
 37:   PetscBool flg_mumps = PETSC_FALSE, flg_mumps_ch = PETSC_FALSE;
 38: #endif
 39: #if defined(PETSC_HAVE_SUPERLU) || defined(PETSC_HAVE_SUPERLU_DIST)
 40:   PetscBool flg_superlu = PETSC_FALSE;
 41: #endif
 42: #if defined(PETSC_HAVE_STRUMPACK)
 43:   PetscBool flg_strumpack = PETSC_FALSE;
 44: #endif
 45:   PetscScalar   v;
 46:   PetscMPIInt   rank, size;
 47:   PetscLogStage stage;

 49: #if defined(PETSC_HAVE_STRUMPACK) && defined(PETSC_HAVE_SLATE)
 50:   PETSC_MPI_THREAD_REQUIRED = MPI_THREAD_MULTIPLE;
 51: #endif
 52:   PetscFunctionBeginUser;
 53:   PetscCall(PetscInitialize(&argc, &args, NULL, help));
 54:   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
 55:   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
 56:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
 57:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
 58:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 59:          Compute the matrix and right-hand-side vector that define
 60:          the linear system, Ax = b.
 61:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 62:   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
 63:   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
 64:   PetscCall(MatSetFromOptions(A));
 65:   PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 5, NULL));
 66:   PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));
 67:   PetscCall(MatSetUp(A));

 69:   /*
 70:      Currently, all PETSc parallel matrix formats are partitioned by
 71:      contiguous chunks of rows across the processors.  Determine which
 72:      rows of the matrix are locally owned.
 73:   */
 74:   PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
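       /* For illustration: with the default m = 8, n = 7 there are 56 global rows, so on 3 MPI
          ranks the default layout gives each rank a contiguous block of roughly 56/3 rows
          (typically 19, 19 and 18), and [Istart, Iend) delimits the block owned by this rank. */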

 76:   /*
 77:      Set matrix elements for the 2-D, five-point stencil in parallel.
 78:       - Each processor needs to insert only elements that it owns
 79:         locally (but any non-local elements will be sent to the
 80:         appropriate processor during matrix assembly).
 81:       - Always specify global rows and columns of matrix entries.

 83:      Note: this uses the less common natural ordering that first orders
 84:      all the unknowns for x = h, then for x = 2h, etc.; hence you see J = Ii +- n
 85:      instead of J = Ii +- m as you might expect. The more standard ordering
 86:      would first do all the variables for y = h, then for y = 2h, etc.

 88:    */
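       /* A small worked example of this ordering (with the default m = 8, n = 7): the unknown at
          mesh indices (i, j) has global row Ii = i*n + j, so row Ii = 10 corresponds to i = 1,
          j = 3 and couples to rows 3 and 17 (Ii -+ n, its x-direction neighbors), to rows 9 and 11
          (Ii -+ 1, its y-direction neighbors), and to itself through the diagonal entry 4.0. */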
 89:   PetscCall(PetscLogStageRegister("Assembly", &stage));
 90:   PetscCall(PetscLogStagePush(stage));
 91:   for (Ii = Istart; Ii < Iend; Ii++) {
 92:     v = -1.0;
 93:     i = Ii / n;
 94:     j = Ii - i * n;
 95:     if (i > 0) {
 96:       J = Ii - n;
 97:       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
 98:     }
 99:     if (i < m - 1) {
100:       J = Ii + n;
101:       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
102:     }
103:     if (j > 0) {
104:       J = Ii - 1;
105:       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
106:     }
107:     if (j < n - 1) {
108:       J = Ii + 1;
109:       PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
110:     }
111:     v = 4.0;
112:     PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
113:   }

115:   /*
116:      Assemble matrix, using the 2-step process:
117:        MatAssemblyBegin(), MatAssemblyEnd()
118:      Computations can be done while messages are in transit
119:      by placing code between these two statements.
120:   */
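       /* For instance, independent setup work (such as creating the work vectors used later) could
          be placed between the two assembly calls; here there is none, so they are called back to back. */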
121:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
122:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
123:   PetscCall(PetscLogStagePop());

125:   /* A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner */
126:   PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));

128:   /* Create parallel vectors */
129:   PetscCall(MatCreateVecs(A, &u, &b));
130:   PetscCall(VecDuplicate(u, &x));

132:   /*
133:      Set exact solution; then compute right-hand-side vector.
134:      By default we use an exact solution vector with all
135:      elements equal to 1.0. Alternatively, the runtime option
136:      -random_exact_sol forms a solution vector with random components.
137:   */
138:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-random_exact_sol", &flg, NULL));
139:   if (flg) {
140:     PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
141:     PetscCall(PetscRandomSetFromOptions(rctx));
142:     PetscCall(VecSetRandom(u, rctx));
143:     PetscCall(PetscRandomDestroy(&rctx));
144:   } else {
145:     PetscCall(VecSet(u, 1.0));
146:   }
147:   PetscCall(MatMult(A, u, b));

149:   /*
150:      View the exact solution vector if desired
151:   */
152:   flg = PETSC_FALSE;
153:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-view_exact_sol", &flg, NULL));
154:   if (flg) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));

156:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
157:                 Create the linear solver and set various options
158:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

160:   /*
161:      Create linear solver context
162:   */
163:   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
164:   PetscCall(KSPSetOperators(ksp, A, A));

166:   /*
167:     Example of how to use the external package MUMPS
168:     Note: runtime options
169:           '-ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_7 3 -mat_mumps_icntl_1 0.0'
170:           are equivalent to these procedural calls
171:   */
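       /* For example, assuming PETSc was configured with MUMPS (e.g. --download-mumps), a run such as
              mpiexec -n 3 ./ex52 -use_mumps_lu -m 50 -n 50 -print_mumps_memory
          takes the procedural path below, while
              mpiexec -n 3 ./ex52 -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -mat_mumps_icntl_7 3
          configures MUMPS purely through runtime options. */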
172: #if defined(PETSC_HAVE_MUMPS)
173:   flg_mumps    = PETSC_FALSE;
174:   flg_mumps_ch = PETSC_FALSE;
175:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_mumps_lu", &flg_mumps, NULL));
176:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_mumps_ch", &flg_mumps_ch, NULL));
177:   if (flg_mumps || flg_mumps_ch) {
178:     PetscCall(KSPSetType(ksp, KSPPREONLY));
179:     PetscInt  ival, icntl;
180:     PetscReal val;
181:     PetscCall(KSPGetPC(ksp, &pc));
182:     if (flg_mumps) {
183:       PetscCall(PCSetType(pc, PCLU));
184:     } else if (flg_mumps_ch) {
185:       PetscCall(MatSetOption(A, MAT_SPD, PETSC_TRUE)); /* set MUMPS id%SYM=1 */
186:       PetscCall(PCSetType(pc, PCCHOLESKY));
187:     }
188:     PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERMUMPS));
189:     PetscCall(PCFactorSetUpMatSolverType(pc)); /* call MatGetFactor() to create F */
190:     PetscCall(PCFactorGetMatrix(pc, &F));
191:     PetscCall(MatMumpsSetIcntl(F, 24, 1));
192:     PetscCall(MatMumpsGetIcntl(F, 24, &ival));
193:     PetscCheck(ival == 1, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "ICNTL(24) = %" PetscInt_FMT " (!= 1)", ival);
194:     PetscCall(MatMumpsSetCntl(F, 3, 1e-6));
195:     PetscCall(MatMumpsGetCntl(F, 3, &val));
196:     PetscCheck(PetscEqualReal(val, 1e-6), PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CNTL(3) = %g (!= %g)", (double)val, 1e-6);
197:     if (flg_mumps) {
198:       /* Zero the first and last rows owned by this rank; they should then show up in the null pivot rows reported by
199:          MatMumpsGetNullPivots() */
200:       flg = PETSC_FALSE;
201:       PetscCall(PetscOptionsGetBool(NULL, NULL, "-zero_first_and_last_rows", &flg, NULL));
202:       if (flg) {
203:         PetscInt rows[2];
204:         rows[0] = Istart;   /* first row of the rank */
205:         rows[1] = Iend - 1; /* last row of the rank */
206:         PetscCall(MatZeroRows(A, 2, rows, 0.0, NULL, NULL));
207:       }
208:       /* Get memory estimates from MUMPS' symbolic factorization, e.g. INFOG(16) and INFOG(17).
209:          Since the symbolic factorization is done here, the KSPSetUp() below will do no work inside MatLUFactorSymbolic() */
210:       MatFactorInfo info;
211:       PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
212:       flg = PETSC_FALSE;
213:       PetscCall(PetscOptionsGetBool(NULL, NULL, "-print_mumps_memory", &flg, NULL));
214:       if (flg) PetscCall(printMumpsMemoryInfo(F));
215:     }

217:     /* sequential ordering */
218:     icntl = 7;
219:     ival  = 2;
220:     PetscCall(MatMumpsSetIcntl(F, icntl, ival));

222:     /* threshold for row pivot detection */
223:     PetscCall(MatMumpsGetIcntl(F, 24, &ival));
224:     PetscCheck(ival == 1, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "ICNTL(24) = %" PetscInt_FMT " (!= 1)", ival);
225:     icntl = 3;
226:     PetscCall(MatMumpsGetCntl(F, icntl, &val));
227:     PetscCheck(PetscEqualReal(val, 1e-6), PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CNTL(3) = %g (!= %g)", (double)val, 1e-6);

229:     /* compute determinant of A */
230:     PetscCall(MatMumpsSetIcntl(F, 33, 1));
231:   }
232: #endif

234:   /*
235:     Example of how to use the external package SuperLU
236:     Note: runtime options
237:           '-ksp_type preonly -pc_type ilu -pc_factor_mat_solver_type superlu -mat_superlu_ilu_droptol 1.e-8'
238:           are equivalent to these procedural calls
239:   */
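       /* For example, assuming SuperLU and SuperLU_DIST support, typical runs are
              ./ex52 -use_superlu_ilu                (sequential SuperLU ILU, set up below)
              mpiexec -n 2 ./ex52 -use_superlu_lu    (SuperLU_DIST LU on two ranks) */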
240: #if defined(PETSC_HAVE_SUPERLU) || defined(PETSC_HAVE_SUPERLU_DIST)
241:   flg_ilu     = PETSC_FALSE;
242:   flg_superlu = PETSC_FALSE;
243:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_superlu_lu", &flg_superlu, NULL));
244:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_superlu_ilu", &flg_ilu, NULL));
245:   if (flg_superlu || flg_ilu) {
246:     PetscCall(KSPSetType(ksp, KSPPREONLY));
247:     PetscCall(KSPGetPC(ksp, &pc));
248:     if (flg_superlu) PetscCall(PCSetType(pc, PCLU));
249:     else if (flg_ilu) PetscCall(PCSetType(pc, PCILU));
250:     if (size == 1) {
251:   #if !defined(PETSC_HAVE_SUPERLU)
252:       SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "This test requires SUPERLU");
253:   #else
254:       PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERSUPERLU));
255:   #endif
256:     } else {
257:   #if !defined(PETSC_HAVE_SUPERLU_DIST)
258:       SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "This test requires SUPERLU_DIST");
259:   #else
260:       PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERSUPERLU_DIST));
261:   #endif
262:     }
263:     PetscCall(PCFactorSetUpMatSolverType(pc)); /* call MatGetFactor() to create F */
264:     PetscCall(PCFactorGetMatrix(pc, &F));
265:   #if defined(PETSC_HAVE_SUPERLU)
266:     if (size == 1) PetscCall(MatSuperluSetILUDropTol(F, 1.e-8));
267:   #endif
268:   }
269: #endif

271:   /*
272:     Example of how to use the external package STRUMPACK
273:     Note: runtime options
274:           '-pc_type lu/ilu \
275:            -pc_factor_mat_solver_type strumpack \
276:            -mat_strumpack_reordering GEOMETRIC \
277:            -mat_strumpack_geometric_xyz n,m \
278:            -mat_strumpack_colperm 0 \
279:            -mat_strumpack_compression_rel_tol 1.e-3 \
280:            -mat_strumpack_compression_min_sep_size 15 \
281:            -mat_strumpack_leaf_size 4'
282:        are equivalent to these procedural calls

284:     We refer to the STRUMPACK manual for more information on
285:     how to tune the preconditioner; see for instance:
286:      https://portal.nersc.gov/project/sparse/strumpack/master/prec.html
287:   */
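       /* For example, assuming STRUMPACK support, runs such as
              mpiexec -n 2 ./ex52 -use_strumpack_lu
              mpiexec -n 2 ./ex52 -use_strumpack_ilu
          take the procedural path below. */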
288: #if defined(PETSC_HAVE_STRUMPACK)
289:   flg_ilu       = PETSC_FALSE;
290:   flg_strumpack = PETSC_FALSE;
291:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_strumpack_lu", &flg_strumpack, NULL));
292:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_strumpack_ilu", &flg_ilu, NULL));
293:   if (flg_strumpack || flg_ilu) {
294:     PetscCall(KSPSetType(ksp, KSPPREONLY));
295:     PetscCall(KSPGetPC(ksp, &pc));
296:     if (flg_strumpack) PetscCall(PCSetType(pc, PCLU));
297:     else if (flg_ilu) PetscCall(PCSetType(pc, PCILU));
298:   #if !defined(PETSC_HAVE_STRUMPACK)
299:     SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "This test requires STRUMPACK");
300:   #endif
301:     PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERSTRUMPACK));
302:     PetscCall(PCFactorSetUpMatSolverType(pc)); /* call MatGetFactor() to create F */
303:     PetscCall(PCFactorGetMatrix(pc, &F));

305:     /* Set the fill-reducing reordering. MAT_STRUMPACK_METIS is       */
306:     /* always supported, but is sequential. Parallel alternatives are */
307:     /* MAT_STRUMPACK_PARMETIS and MAT_STRUMPACK_PTSCOTCH, but         */
308:     /* strumpack needs to be configured with support for these.       */
309:     /*PetscCall(MatSTRUMPACKSetReordering(F, MAT_STRUMPACK_METIS));   */
310:     /* However, since this is a problem on a regular grid, we can use */
311:     /* a simple geometric nested dissection implementation, which     */
312:     /* requires passing the grid dimensions to strumpack.             */
313:     PetscCall(MatSTRUMPACKSetReordering(F, MAT_STRUMPACK_GEOMETRIC));
314:     PetscCall(MatSTRUMPACKSetGeometricNxyz(F, n, m, PETSC_DECIDE));
315:     /* These are optional, defaults are 1                             */
316:     PetscCall(MatSTRUMPACKSetGeometricComponents(F, 1));
317:     PetscCall(MatSTRUMPACKSetGeometricWidth(F, 1));

319:     /* Since this is a simple discretization, the diagonal is always  */
320:     /* nonzero, and there is no need for the extra MC64 permutation.  */
321:     PetscCall(MatSTRUMPACKSetColPerm(F, PETSC_FALSE));

323:     if (flg_ilu) {
324:       /* The compression tolerance used when doing low-rank compression */
325:       /* in the preconditioner. This is problem specific!               */
326:       PetscCall(MatSTRUMPACKSetCompRelTol(F, 1.e-3));

328:       /* Set a small minimum (dense) matrix size for compression to     */
329:       /* demonstrate the preconditioner on small problems.              */
330:       /* For performance the default value should be better.            */
331:       /* This size corresponds to the size of separators in the graph.  */
332:       /* For instance on an m x n mesh, the top level separator is of   */
333:       /* size m (if m <= n)                                             */
334:       /*PetscCall(MatSTRUMPACKSetCompMinSepSize(F,15));*/

336:       /* Set the size of the diagonal blocks (the leaves) in the HSS    */
337:       /* approximation. The default value should be better for real     */
338:       /* problems. This is mostly for illustration on a small problem.  */
339:       /*PetscCall(MatSTRUMPACKSetCompLeafSize(F,4));*/
340:     }
341:   }
342: #endif

344:   /*
345:     Example of how to use procedural calls that are equivalent to
346:           '-ksp_type preonly -pc_type lu/ilu -pc_factor_mat_solver_type petsc'
347:   */
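       /* For example, ./ex52 -use_petsc_ch (on a single rank) takes the Cholesky branch below using
          PETSc's built-in factorization; no external package is required. */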
348:   flg     = PETSC_FALSE;
349:   flg_ilu = PETSC_FALSE;
350:   flg_ch  = PETSC_FALSE;
351:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_petsc_lu", &flg, NULL));
352:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_petsc_ilu", &flg_ilu, NULL));
353:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_petsc_ch", &flg_ch, NULL));
354:   if (flg || flg_ilu || flg_ch) {
355:     Vec diag;

357:     PetscCall(KSPSetType(ksp, KSPPREONLY));
358:     PetscCall(KSPGetPC(ksp, &pc));
359:     if (flg) PetscCall(PCSetType(pc, PCLU));
360:     else if (flg_ilu) PetscCall(PCSetType(pc, PCILU));
361:     else if (flg_ch) PetscCall(PCSetType(pc, PCCHOLESKY));
362:     PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERPETSC));
363:     PetscCall(PCFactorSetUpMatSolverType(pc)); /* call MatGetFactor() to create F */
364:     PetscCall(PCFactorGetMatrix(pc, &F));

366:     /* Test MatGetDiagonal() */
367:     PetscCall(KSPSetUp(ksp));
368:     PetscCall(VecDuplicate(x, &diag));
369:     PetscCall(MatGetDiagonal(F, diag));
370:     /* PetscCall(VecView(diag,PETSC_VIEWER_STDOUT_WORLD)); */
371:     PetscCall(VecDestroy(&diag));
372:   }

374:   PetscCall(KSPSetFromOptions(ksp));

376:   /* Get info from matrix factors */
377:   PetscCall(KSPSetUp(ksp));

379: #if defined(PETSC_HAVE_MUMPS)
380:   if (flg_mumps || flg_mumps_ch) {
381:     PetscInt  icntl, infog34, num_null_pivots, *null_pivots;
382:     PetscReal cntl, rinfo12, rinfo13;
383:     icntl = 3;
384:     PetscCall(MatMumpsGetCntl(F, icntl, &cntl));

386:     /* compute determinant and check for any null pivots */
387:     if (rank == 0) {
388:       PetscCall(MatMumpsGetInfog(F, 34, &infog34));
389:       PetscCall(MatMumpsGetRinfog(F, 12, &rinfo12));
390:       PetscCall(MatMumpsGetRinfog(F, 13, &rinfo13));
391:       PetscCall(MatMumpsGetNullPivots(F, &num_null_pivots, &null_pivots));
392:       PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Mumps row pivot threshold = %g\n", (double)cntl));
393:       PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Mumps determinant = (%g, %g) * 2^%" PetscInt_FMT " \n", (double)rinfo12, (double)rinfo13, infog34));
394:       if (num_null_pivots > 0) {
395:         PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Mumps num of null pivots detected = %" PetscInt_FMT "\n", num_null_pivots));
396:         PetscCall(PetscSortInt(num_null_pivots, null_pivots)); /* just make the printf deterministic */
397:         for (j = 0; j < num_null_pivots; j++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Mumps row with null pivots is = %" PetscInt_FMT "\n", null_pivots[j]));
398:       }
399:       PetscCall(PetscFree(null_pivots));
400:     }
401:   }
402: #endif

404:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
405:                       Solve the linear system
406:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
407:   PetscCall(KSPSolve(ksp, b, x));

409:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
410:                       Check solution and clean up
411:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
412:   PetscCall(VecAXPY(x, -1.0, u));
413:   PetscCall(VecNorm(x, NORM_2, &norm));
414:   PetscCall(KSPGetIterationNumber(ksp, &its));

416:   /*
417:      Print convergence information.  PetscPrintf() produces a single
418:      print statement from all processes that share a communicator.
419:      An alternative is PetscFPrintf(), which prints to a file.
420:   */
421:   if (norm < 1.e-12) {
422:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error < 1.e-12 iterations %" PetscInt_FMT "\n", its));
423:   } else {
424:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));
425:   }

427:   /*
428:      Free work space.  All PETSc objects should be destroyed when they
429:      are no longer needed.
430:   */
431:   PetscCall(KSPDestroy(&ksp));
432:   PetscCall(VecDestroy(&u));
433:   PetscCall(VecDestroy(&x));
434:   PetscCall(VecDestroy(&b));
435:   PetscCall(MatDestroy(&A));

437:   /*
438:      Always call PetscFinalize() before exiting a program.  This routine
439:        - finalizes the PETSc libraries as well as MPI
440:        - provides summary and diagnostic information if certain runtime
441:          options are chosen (e.g., -log_view).
442:   */
443:   PetscCall(PetscFinalize());
444:   return 0;
445: }

447: /*TEST

449:    test:
450:       args: -use_petsc_lu
451:       output_file: output/ex52_2.out

453:    test:
454:       suffix: mumps
455:       nsize: 3
456:       requires: mumps
457:       args: -use_mumps_lu
458:       output_file: output/ex52_1.out

460:    test:
461:       suffix: mumps_2
462:       nsize: 3
463:       requires: mumps
464:       args: -use_mumps_ch
465:       output_file: output/ex52_1.out

467:    test:
468:       suffix: mumps_3
469:       nsize: 3
470:       requires: mumps
471:       args: -use_mumps_ch -mat_type sbaij
472:       output_file: output/ex52_1.out

474:    test:
475:       suffix: mumps_4
476:       nsize: 3
477:       requires: mumps !complex !single
478:       args: -use_mumps_lu -m 50 -n 50 -print_mumps_memory
479:       output_file: output/ex52_4.out

481:    test:
482:       suffix: mumps_5
483:       nsize: 3
484:       requires: mumps !complex !single
485:       args: -use_mumps_lu -m 50 -n 50 -zero_first_and_last_rows
486:       output_file: output/ex52_5.out

488:    test:
489:       suffix: mumps_omp_2
490:       nsize: 4
491:       requires: mumps hwloc openmp pthread defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)
492:       args: -use_mumps_lu -mat_mumps_use_omp_threads 2
493:       output_file: output/ex52_1.out

495:    test:
496:       suffix: mumps_omp_3
497:       nsize: 4
498:       requires: mumps hwloc openmp pthread defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)
499:       args: -use_mumps_ch -mat_mumps_use_omp_threads 3 -mat_mumps_icntl_48 0
500:       # Ignore the warning since we are intentionally testing the imbalanced case
501:       filter: grep -v "Warning: number of OpenMP threads"
502:       output_file: output/ex52_1.out

504:    test:
505:       suffix: mumps_omp_4
506:       nsize: 4
507:       requires: mumps hwloc openmp pthread defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)
508:       # let petsc guess a proper number for threads
509:       args: -use_mumps_ch -mat_type sbaij -mat_mumps_use_omp_threads
510:       output_file: output/ex52_1.out

512:    testset:
513:       suffix: strumpack_2
514:       nsize: {{1 2}}
515:       requires: strumpack
516:       args: -use_strumpack_lu
517:       output_file: output/ex52_3.out

519:       test:
520:         suffix: aij
521:         args: -mat_type aij

523:       test:
524:         requires: kokkos_kernels
525:         suffix: kok
526:         args: -mat_type aijkokkos

528:       test:
529:         requires: cuda
530:         suffix: cuda
531:         args: -mat_type aijcusparse

533:       test:
534:         requires: hip
535:         suffix: hip
536:         args: -mat_type aijhipsparse

538:    test:
539:       suffix: strumpack_ilu
540:       nsize: {{1 2}}
541:       requires: strumpack
542:       args: -use_strumpack_ilu
543:       output_file: output/ex52_3.out

545:    testset:
546:       suffix: superlu_dist
547:       nsize: {{1 2}}
548:       requires: superlu superlu_dist
549:       args: -use_superlu_lu
550:       output_file: output/ex52_2.out

552:       test:
553:         suffix: aij
554:         args: -mat_type aij

556:       test:
557:         requires: kokkos_kernels
558:         suffix: kok
559:         args: -mat_type aijkokkos

561:       test:
562:         requires: cuda
563:         suffix: cuda
564:         args: -mat_type aijcusparse

566:       test:
567:         requires: hip
568:         suffix: hip
569:         args: -mat_type aijhipsparse

571:    test:
572:       suffix: superlu_ilu
573:       requires: superlu superlu_dist
574:       args: -use_superlu_ilu
575:       output_file: output/ex52_2.out

577: TEST*/