Actual source code: ex72.c

  1: static char help[] = "Reads a PETSc matrix and vector from a file and solves a linear system.\n\
  2: This version first preloads and solves a small system, then loads \n\
  3: another (larger) system and solves it as well.  This example illustrates\n\
  4: preloading of instructions with the smaller system so that more accurate\n\
  5: performance monitoring can be done with the larger one (that actually\n\
  6: is the system of interest).  See the 'Performance Hints' chapter of the\n\
  7: users manual for a discussion of preloading.  Input parameters include\n\
  8:   -f0 <input_file> : first file to load (small system)\n\
  9:   -f1 <input_file> : second file to load (larger system)\n\
 10:   -nearnulldim <0> : number of vectors in the near-null space immediately following matrix\n\
 11:   -trans  : solve transpose system instead\n\n";
 12: /*
 13:   This code can be used to test the PETSc interface to other packages.
 14:   Examples of command line options:
 15:    ./ex72 -f0 <datafile> -ksp_type preonly
 16:         -help -ksp_view
 17:         -num_numfac <num_numfac> -num_rhs <num_rhs>
 18:         -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu or superlu_dist or mumps
 19:         -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps
 20:    mpiexec -n <np> ./ex72 -f0 <datafile> -ksp_type cg -pc_type asm -pc_asm_type basic -sub_pc_type icc -mat_type sbaij
 22: */
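
/*
  For reference, a binary data file usable with -f/-f0/-f1 can be produced by a small companion
  program along the following lines. This is a minimal sketch, not part of ex72.c: the output
  name "system.dat", the matrix size n, and the 1-D Laplacian stencil are arbitrary choices.
  The matrix is written first and the right-hand side immediately after it, which is the
  layout MatLoad()/VecLoad() below expect.

  #include <petscksp.h>

  int main(int argc, char **args)
  {
    Mat         A;
    Vec         b;
    PetscViewer viewer;
    PetscInt    i, rstart, rend, n = 100;

    PetscFunctionBeginUser;
    PetscCall(PetscInitialize(&argc, &args, NULL, NULL));
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n));
    PetscCall(MatSetFromOptions(A));
    PetscCall(MatSetUp(A));
    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    for (i = rstart; i < rend; i++) { // assemble a 1-D Laplacian, one locally owned row at a time
      if (i > 0) PetscCall(MatSetValue(A, i, i - 1, -1.0, INSERT_VALUES));
      if (i < n - 1) PetscCall(MatSetValue(A, i, i + 1, -1.0, INSERT_VALUES));
      PetscCall(MatSetValue(A, i, i, 2.0, INSERT_VALUES));
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatCreateVecs(A, NULL, &b));
    PetscCall(VecSet(b, 1.0)); // right-hand side of all ones
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "system.dat", FILE_MODE_WRITE, &viewer));
    PetscCall(MatView(A, viewer)); // matrix first ...
    PetscCall(VecView(b, viewer)); // ... then the right-hand side
    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(MatDestroy(&A));
    PetscCall(VecDestroy(&b));
    PetscCall(PetscFinalize());
    return 0;
  }
*/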

 24: /*
 25:   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
 26:   automatically includes:
 27:      petscsys.h       - base PETSc routines   petscvec.h - vectors
 28:      petscmat.h - matrices
 29:      petscis.h     - index sets            petscksp.h - Krylov subspace methods
 30:      petscviewer.h - viewers               petscpc.h  - preconditioners
 31: */
 32: #include <petscksp.h>

 34: int main(int argc, char **args)
 35: {
 36:   KSP         ksp;                         /* linear solver context */
 37:   Mat         A;                           /* matrix */
 38:   Vec         x, b, u;                     /* approx solution, RHS, exact solution */
 39:   PetscViewer viewer;                      /* viewer */
 40:   char        file[4][PETSC_MAX_PATH_LEN]; /* input file name */
 41:   PetscBool   table = PETSC_FALSE, flg, trans = PETSC_FALSE, initialguess = PETSC_FALSE;
 42:   PetscBool   outputSoln = PETSC_FALSE, constantnullspace = PETSC_FALSE;
 43:   PetscInt    its, num_numfac, m, n, M, p, nearnulldim = 0;
 44:   PetscReal   norm;
 45:   PetscBool   preload = PETSC_TRUE, isSymmetric, cknorm = PETSC_FALSE, initialguessfile = PETSC_FALSE;
 46:   PetscMPIInt rank;
 47:   char        initialguessfilename[PETSC_MAX_PATH_LEN];
 48:   char        mtype[PETSC_MAX_PATH_LEN];

 50:   PetscFunctionBeginUser;
 51:   PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
 52:   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
 53:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-table", &table, NULL));
 54:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-constantnullspace", &constantnullspace, NULL));
 55:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-trans", &trans, NULL));
 56:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-initialguess", &initialguess, NULL));
 57:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-output_solution", &outputSoln, NULL));
 58:   PetscCall(PetscOptionsGetString(NULL, NULL, "-initialguessfilename", initialguessfilename, sizeof(initialguessfilename), &initialguessfile));
 59:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-nearnulldim", &nearnulldim, NULL));

 61:   /*
 62:      Determine files from which we read the two linear systems
 63:      (matrix and right-hand-side vector).
 64:   */
 65:   PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file[0], sizeof(file[0]), &flg));
 66:   if (flg) {
 67:     PetscCall(PetscStrncpy(file[1], file[0], sizeof(file[1])));
 68:     preload = PETSC_FALSE;
 69:   } else {
 70:     PetscCall(PetscOptionsGetString(NULL, NULL, "-f0", file[0], sizeof(file[0]), &flg));
 71:     PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must indicate binary file with the -f0 or -f option");
 72:     PetscCall(PetscOptionsGetString(NULL, NULL, "-f1", file[1], sizeof(file[1]), &flg));
 73:     if (!flg) preload = PETSC_FALSE; /* don't bother with second system */
 74:   }

 76:   /* -----------------------------------------------------------
 77:                   Beginning of linear solver loop
 78:      ----------------------------------------------------------- */
 79:   /*
 80:      Loop through the linear solve 2 times.
 81:       - The intention here is to preload and solve a small system;
 82:         then load another (larger) system and solve it as well.
 83:         This process preloads the instructions with the smaller
 84:         system so that more accurate performance monitoring (via
 85:         -log_view) can be done with the larger one (that actually
 86:         is the system of interest).
 87:   */
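  /*
     The three preload macros used in this routine are far apart, so here is a
     self-contained toy showing the same pattern in one place (a sketch; the vector
     workload stands in for the load/solve done by this example):

       #include <petscksp.h>

       int main(int argc, char **args)
       {
         Vec      v;
         PetscInt n;

         PetscFunctionBeginUser;
         PetscCall(PetscInitialize(&argc, &args, NULL, NULL));
         PetscPreLoadBegin(PETSC_TRUE, "Create");  // body runs twice; PetscPreLoadIt is 0, then 1
         n = PetscPreLoadIt ? 1000000 : 100;       // small problem on the preload pass, larger one after
         PetscCall(VecCreate(PETSC_COMM_WORLD, &v));
         PetscCall(VecSetSizes(v, PETSC_DECIDE, n));
         PetscCall(VecSetFromOptions(v));
         PetscPreLoadStage("Compute");             // begin the next -log_view stage
         PetscCall(VecSet(v, 1.0));
         PetscCall(VecDestroy(&v));
         PetscPreLoadEnd();                        // end the stage and close the loop
         PetscCall(PetscFinalize());
         return 0;
       }
  */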
 88:   PetscPreLoadBegin(preload, "Load system");

 90:   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
 91:                          Load system
 92:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

 94:   /*
 95:      Open binary file.  Note that we use FILE_MODE_READ to indicate
 96:      reading from this file.
 97:   */
 98:   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[PetscPreLoadIt], FILE_MODE_READ, &viewer));

100:   /*
101:      Load the matrix and vector; then destroy the viewer.
102:   */
103:   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
104:   PetscCall(MatSetFromOptions(A));
105:   PetscCall(MatLoad(A, viewer));

107:   PetscCall(PetscOptionsGetString(NULL, NULL, "-mat_convert_type", mtype, sizeof(mtype), &flg));
108:   if (flg) PetscCall(MatConvert(A, mtype, MAT_INPLACE_MATRIX, &A));

110:   if (nearnulldim) {
111:     MatNullSpace nullsp;
112:     Vec         *nullvecs;
113:     PetscInt     i;
114:     PetscCall(PetscMalloc1(nearnulldim, &nullvecs));
115:     for (i = 0; i < nearnulldim; i++) {
116:       PetscCall(VecCreate(PETSC_COMM_WORLD, &nullvecs[i]));
117:       PetscCall(VecLoad(nullvecs[i], viewer));
118:     }
119:     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_FALSE, nearnulldim, nullvecs, &nullsp));
120:     PetscCall(MatSetNearNullSpace(A, nullsp));
121:     for (i = 0; i < nearnulldim; i++) PetscCall(VecDestroy(&nullvecs[i]));
122:     PetscCall(PetscFree(nullvecs));
123:     PetscCall(MatNullSpaceDestroy(&nullsp));
124:   }
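
  /*
     Writer-side counterpart (a sketch, assuming nullvecs[] already holds the assembled
     near-null-space basis): the vectors are stored in the same binary file, immediately
     after the matrix and before the right-hand side, matching the reads above and below.

       PetscCall(MatView(A, viewer));
       for (i = 0; i < nearnulldim; i++) PetscCall(VecView(nullvecs[i], viewer));
       PetscCall(VecView(b, viewer));
  */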
125:   if (constantnullspace) {
126:     MatNullSpace constant;
127:     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &constant));
128:     PetscCall(MatSetNullSpace(A, constant));
129:     PetscCall(MatNullSpaceDestroy(&constant));
130:   }
131:   flg = PETSC_FALSE;
132:   PetscCall(PetscOptionsGetString(NULL, NULL, "-rhs", file[2], sizeof(file[2]), &flg));
133:   PetscCall(VecCreate(PETSC_COMM_WORLD, &b));
 134:   if (flg) { /* rhs is stored in a separate file, or "-rhs 0" requests a right-hand side of all ones */
135:     if (file[2][0] == '0' || file[2][0] == 0) {
136:       PetscInt    m;
137:       PetscScalar one = 1.0;
138:       PetscCall(PetscInfo(0, "Using vector of ones for RHS\n"));
139:       PetscCall(MatGetLocalSize(A, &m, NULL));
140:       PetscCall(VecSetSizes(b, m, PETSC_DECIDE));
141:       PetscCall(VecSetFromOptions(b));
142:       PetscCall(VecSet(b, one));
143:     } else {
144:       PetscCall(PetscViewerDestroy(&viewer));
145:       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[2], FILE_MODE_READ, &viewer));
146:       PetscCall(VecSetFromOptions(b));
147:       PetscCall(VecLoad(b, viewer));
148:     }
149:   } else { /* rhs is stored in the same file as matrix */
150:     PetscCall(VecSetFromOptions(b));
151:     PetscCall(VecLoad(b, viewer));
152:   }
153:   PetscCall(PetscViewerDestroy(&viewer));

155:   /* Make A singular for testing zero-pivot of ilu factorization */
156:   /* Example: ./ex72 -f0 <datafile> -test_zeropivot -pc_factor_shift_type <shift_type> */
157:   flg = PETSC_FALSE;
158:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_zeropivot", &flg, NULL));
159:   if (flg) { /* set a row as zeros */
160:     PetscInt row = 0;
161:     PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
162:     PetscCall(MatZeroRows(A, 1, &row, 0.0, NULL, NULL));
163:   }

165:   /* Check whether A is symmetric, then set A->symmetric option */
166:   flg = PETSC_FALSE;
167:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_symmetry", &flg, NULL));
168:   if (flg) {
169:     PetscCall(MatIsSymmetric(A, 0.0, &isSymmetric));
170:     if (!isSymmetric) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Warning: A is non-symmetric \n"));
171:   }

173:   /*
174:      If the loaded matrix is larger than the vector (due to being padded
175:      to match the block size of the system), then create a new padded vector.
176:   */

178:   PetscCall(MatGetLocalSize(A, NULL, &n));
179:   PetscCall(MatGetSize(A, &M, NULL));
180:   PetscCall(VecGetSize(b, &m));
181:   PetscCall(VecGetLocalSize(b, &p));
182:   preload = (PetscBool)(M != m || p != n); /* Global or local dimension mismatch */
183:   PetscCall(MPIU_Allreduce(&preload, &flg, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
184:   if (flg) { /* Create a new vector b by padding the old one */
185:     PetscInt     j, mvec, start, end, indx;
186:     Vec          tmp;
187:     PetscScalar *bold;

189:     PetscCall(VecCreate(PETSC_COMM_WORLD, &tmp));
190:     PetscCall(VecSetSizes(tmp, n, PETSC_DECIDE));
191:     PetscCall(VecSetFromOptions(tmp));
192:     PetscCall(VecGetOwnershipRange(b, &start, &end));
193:     PetscCall(VecGetLocalSize(b, &mvec));
194:     PetscCall(VecGetArray(b, &bold));
195:     for (j = 0; j < mvec; j++) {
196:       indx = start + j;
197:       PetscCall(VecSetValues(tmp, 1, &indx, bold + j, INSERT_VALUES));
198:     }
199:     PetscCall(VecRestoreArray(b, &bold));
200:     PetscCall(VecDestroy(&b));
201:     PetscCall(VecAssemblyBegin(tmp));
202:     PetscCall(VecAssemblyEnd(tmp));
203:     b = tmp;
204:   }

206:   PetscCall(MatCreateVecs(A, &x, NULL));
207:   PetscCall(VecDuplicate(b, &u));
208:   if (initialguessfile) {
209:     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, initialguessfilename, FILE_MODE_READ, &viewer));
210:     PetscCall(VecLoad(x, viewer));
211:     PetscCall(PetscViewerDestroy(&viewer));
212:     initialguess = PETSC_TRUE;
213:   } else if (initialguess) {
214:     PetscCall(VecSet(x, 1.0));
215:   } else {
216:     PetscCall(VecSet(x, 0.0));
217:   }

219:   /* Check scaling in A */
220:   flg = PETSC_FALSE;
221:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_scaling", &flg, NULL));
222:   if (flg) {
223:     Vec       max, min, l1;
224:     PetscInt  idx;
225:     PetscReal val;

227:     PetscCall(VecDuplicate(x, &max));
228:     PetscCall(VecDuplicate(x, &min));
229:     PetscCall(VecDuplicate(x, &l1));
230:     PetscCall(MatGetRowMaxAbs(A, max, NULL));
231:     PetscCall(MatGetRowMinAbs(A, min, NULL));
232:     PetscCall(MatGetRowSumAbs(A, l1));
 233:     if (PETSC_FALSE) { /* flip to PETSC_TRUE to dump the row statistics to ASCII files */
234:       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "max.data", &viewer));
235:       PetscCall(VecView(max, viewer));
236:       PetscCall(PetscViewerDestroy(&viewer));
237:       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "min.data", &viewer));
238:       PetscCall(VecView(min, viewer));
239:       PetscCall(PetscViewerDestroy(&viewer));
240:       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "l1.data", &viewer));
241:       PetscCall(VecView(l1, viewer));
242:       PetscCall(PetscViewerDestroy(&viewer));
243:     }
244:     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
245:     PetscCall(VecMax(max, &idx, &val));
246:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
247:     PetscCall(VecView(min, PETSC_VIEWER_DRAW_WORLD));
248:     PetscCall(VecMin(min, &idx, &val));
249:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest min row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
250:     PetscCall(VecMin(max, &idx, &val));
251:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
252:     PetscCall(VecPointwiseDivide(max, max, min));
253:     PetscCall(VecMax(max, &idx, &val));
254:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
255:     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
256:     PetscCall(VecMax(max, &idx, &val));
257:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
258:     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
259:     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
260:     PetscCall(VecMax(l1, &idx, &val));
261:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest l1 row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
262:     PetscCall(VecDestroy(&max));
263:     PetscCall(VecDestroy(&min));
264:     PetscCall(VecDestroy(&l1));
265:   }

267:   /*  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
268:   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
269:                     Setup solve for system
270:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
271:   /*
272:      Conclude profiling last stage; begin profiling next stage.
273:   */
274:   PetscPreLoadStage("KSPSetUpSolve");

276:   /*
277:      Create linear solver; set operators; set runtime options.
278:   */
279:   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
280:   PetscCall(KSPSetInitialGuessNonzero(ksp, initialguess));
281:   num_numfac = 1;
282:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-num_numfac", &num_numfac, NULL));
283:   while (num_numfac--) {
284:     PC        pc;
285:     PetscBool lsqr, isbddc, ismatis;
286:     char      str[32];

288:     PetscCall(PetscOptionsGetString(NULL, NULL, "-ksp_type", str, sizeof(str), &lsqr));
289:     if (lsqr) PetscCall(PetscStrcmp("lsqr", str, &lsqr));
 290:     if (lsqr) { /* with KSPLSQR the preconditioner is built from the normal-equations matrix A^T A */
291:       Mat BtB;
292:       PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, 4, &BtB));
293:       PetscCall(KSPSetOperators(ksp, A, BtB));
294:       PetscCall(MatDestroy(&BtB));
295:     } else {
296:       PetscCall(KSPSetOperators(ksp, A, A));
297:     }
298:     PetscCall(KSPSetFromOptions(ksp));

300:     /* if we test BDDC, make sure pmat is of type MATIS */
301:     PetscCall(KSPGetPC(ksp, &pc));
302:     PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBDDC, &isbddc));
303:     PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis));
304:     if (isbddc && !ismatis) {
305:       Mat J;

307:       PetscCall(MatConvert(A, MATIS, MAT_INITIAL_MATRIX, &J));
308:       PetscCall(KSPSetOperators(ksp, A, J));
309:       PetscCall(MatDestroy(&J));
310:     }

312:     /*
313:      Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
314:      enable more precise profiling of setting up the preconditioner.
315:      These calls are optional, since both will be called within
316:      KSPSolve() if they haven't been called already.
317:     */
318:     PetscCall(KSPSetUp(ksp));
319:     PetscCall(KSPSetUpOnBlocks(ksp));

321:     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
322:                          Solve system
323:       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

325:     /*
326:      Solve linear system;
327:     */
328:     if (trans) {
329:       PetscCall(KSPSolveTranspose(ksp, b, x));
330:       PetscCall(KSPGetIterationNumber(ksp, &its));
331:     } else {
332:       PetscInt num_rhs = 1;
333:       PetscCall(PetscOptionsGetInt(NULL, NULL, "-num_rhs", &num_rhs, NULL));
334:       cknorm = PETSC_FALSE;
335:       PetscCall(PetscOptionsGetBool(NULL, NULL, "-cknorm", &cknorm, NULL));
336:       while (num_rhs--) {
337:         if (num_rhs == 1) PetscCall(VecSet(x, 0.0));
338:         PetscCall(KSPSolve(ksp, b, x));
339:       }
340:       PetscCall(KSPGetIterationNumber(ksp, &its));
341:       if (cknorm) { /* Check error for each rhs */
342:         if (trans) {
343:           PetscCall(MatMultTranspose(A, x, u));
344:         } else {
345:           PetscCall(MatMult(A, x, u));
346:         }
347:         PetscCall(VecAXPY(u, -1.0, b));
348:         PetscCall(VecNorm(u, NORM_2, &norm));
349:         PetscCall(PetscPrintf(PETSC_COMM_WORLD, "  Number of iterations = %3" PetscInt_FMT "\n", its));
350:         if (!PetscIsNanScalar(norm)) {
351:           if (norm < 1.e-12) {
352:             PetscCall(PetscPrintf(PETSC_COMM_WORLD, "  Residual norm < 1.e-12\n"));
353:           } else {
354:             PetscCall(PetscPrintf(PETSC_COMM_WORLD, "  Residual norm %g\n", (double)norm));
355:           }
356:         }
357:       }
 358:     } /* end of if (trans) ... else */

360:     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
361:           Check error, print output, free data structures.
362:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

364:     /*
365:        Check error
366:     */
367:     if (trans) {
368:       PetscCall(MatMultTranspose(A, x, u));
369:     } else {
370:       PetscCall(MatMult(A, x, u));
371:     }
372:     PetscCall(VecAXPY(u, -1.0, b));
373:     PetscCall(VecNorm(u, NORM_2, &norm));
374:     /*
375:      Write output (optionally using table for solver details).
376:       - PetscPrintf() handles output for multiprocessor jobs
377:         by printing from only one processor in the communicator.
378:       - KSPView() prints information about the linear solver.
379:     */
380:     if (table) {
381:       char *matrixname = NULL, kspinfo[120];

383:       /*
384:        Open a string viewer; then write info to it.
385:       */
386:       PetscCall(PetscViewerStringOpen(PETSC_COMM_WORLD, kspinfo, sizeof(kspinfo), &viewer));
387:       PetscCall(KSPView(ksp, viewer));
388:       PetscCall(PetscStrrchr(file[PetscPreLoadIt], '/', &matrixname));
389:       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%-8.8s %3" PetscInt_FMT " %2.0e %s \n", matrixname, its, (double)norm, kspinfo));

391:       /*
392:         Destroy the viewer
393:       */
394:       PetscCall(PetscViewerDestroy(&viewer));
395:     } else {
396:       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %3" PetscInt_FMT "\n", its));
397:       if (!PetscIsNanScalar(norm)) {
 398:         if (norm < 1.e-12) {
399:           PetscCall(PetscPrintf(PETSC_COMM_WORLD, "  Residual norm < 1.e-12\n"));
400:         } else {
401:           PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));
402:         }
403:       }
404:     }
405:     PetscCall(PetscOptionsGetString(NULL, NULL, "-solution", file[3], sizeof(file[3]), &flg));
406:     if (flg) {
407:       Vec       xstar;
408:       PetscReal norm;

410:       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[3], FILE_MODE_READ, &viewer));
411:       PetscCall(VecCreate(PETSC_COMM_WORLD, &xstar));
412:       PetscCall(VecLoad(xstar, viewer));
413:       PetscCall(VecAXPY(xstar, -1.0, x));
414:       PetscCall(VecNorm(xstar, NORM_2, &norm));
415:       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
416:       PetscCall(VecDestroy(&xstar));
417:       PetscCall(PetscViewerDestroy(&viewer));
418:     }
419:     if (outputSoln) {
420:       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "solution.petsc", FILE_MODE_WRITE, &viewer));
421:       PetscCall(VecView(x, viewer));
422:       PetscCall(PetscViewerDestroy(&viewer));
423:     }

425:     flg = PETSC_FALSE;
426:     PetscCall(PetscOptionsGetBool(NULL, NULL, "-ksp_reason", &flg, NULL));
427:     if (flg) {
428:       KSPConvergedReason reason;
429:       PetscCall(KSPGetConvergedReason(ksp, &reason));
430:       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "KSPConvergedReason: %s\n", KSPConvergedReasons[reason]));
431:     }

433:   } /* while (num_numfac--) */

435:   /*
436:      Free work space.  All PETSc objects should be destroyed when they
437:      are no longer needed.
438:   */
439:   PetscCall(MatDestroy(&A));
440:   PetscCall(VecDestroy(&b));
441:   PetscCall(VecDestroy(&u));
442:   PetscCall(VecDestroy(&x));
443:   PetscCall(KSPDestroy(&ksp));
444:   PetscPreLoadEnd();
445:   /* -----------------------------------------------------------
446:                       End of linear solver loop
447:      ----------------------------------------------------------- */

449:   PetscCall(PetscFinalize());
450:   return 0;
451: }

453: /*TEST

455:    build:
456:       requires: !complex

458:    testset:
459:       suffix: 1
460:       nsize: 2
461:       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
462:       requires: !__float128

464:    testset:
465:       suffix: 1a
466:       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
467:       requires: !__float128

469:    testset:
470:       nsize: 2
471:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
472:       args: -f0 ${DATAFILESPATH}/matrices/medium
473:       args: -ksp_type bicg
474:       test:
475:          suffix: 2

477:    testset:
478:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
479:       args: -f0 ${DATAFILESPATH}/matrices/medium
480:       args: -ksp_type bicg
481:       test:
482:          suffix: 4
483:          args: -pc_type lu
484:       test:
485:          suffix: 5

487:    testset:
488:       suffix: 6
489:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
490:       args: -f0 ${DATAFILESPATH}/matrices/fem1
491:       args: -pc_factor_levels 2 -pc_factor_fill 1.73 -ksp_gmres_cgs_refinement_type refine_always

493:    testset:
494:       TODO: Matrix row/column sizes are not compatible with block size
495:       suffix: 7
496:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
497:       args: -f0 ${DATAFILESPATH}/matrices/medium
498:       args: -viewer_binary_skip_info -mat_type seqbaij
499:       args: -matload_block_size {{2 3 4 5 6 7 8}separate output}
500:       args: -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always
501:       args: -ksp_rtol 1.0e-15 -ksp_monitor_short
502:       test:
503:          suffix: a
504:       test:
505:          suffix: b
506:          args: -pc_factor_mat_ordering_type nd
507:       test:
508:          suffix: c
509:          args: -pc_factor_levels 1
510:       test:
511:          requires: metis
512:          suffix: d
513:          args: -pc_factor_mat_ordering_type metisnd

515:    testset:
516:       TODO: Matrix row/column sizes are not compatible with block size
517:       suffix: 7_d
518:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
519:       args: -f0 ${DATAFILESPATH}/matrices/medium
520:       args: -viewer_binary_skip_info -mat_type seqbaij
521:       args: -matload_block_size {{2 3 4 5 6 7 8}shared output}
522:       args: -ksp_type preonly -pc_type lu

524:    testset:
525:       suffix: 8
526:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
527:       args: -f0 ${DATAFILESPATH}/matrices/medium
528:       args: -ksp_diagonal_scale -pc_type eisenstat -ksp_monitor_short -ksp_diagonal_scale_fix -ksp_gmres_cgs_refinement_type refine_always -mat_no_inode

530:    testset:
531:       TODO: Matrix row/column sizes are not compatible with block size
532:       suffix: 9
533:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
534:       args: -f0 ${DATAFILESPATH}/matrices/medium
535:       args: -viewer_binary_skip_info -matload_block_size {{1 2 3 4 5 6 7}separate output} -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always -ksp_rtol 1.0e-15 -ksp_monitor_short
536:       test:
537:          suffix: a
538:          args: -mat_type seqbaij
539:       test:
540:          suffix: b
541:          args: -mat_type seqbaij -trans
542:       test:
543:          suffix: c
544:          nsize: 2
545:          args: -mat_type mpibaij
546:       test:
547:          suffix: d
548:          nsize: 2
549:          args: -mat_type mpibaij -trans
550:       test:
551:          suffix: e
552:          nsize: 3
553:          args: -mat_type mpibaij
554:       test:
555:          suffix: f
556:          nsize: 3
557:          args: -mat_type mpibaij -trans

559:    testset:
560:       suffix: 10
561:       nsize: 2
562:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
563:       args: -ksp_type fgmres -pc_type ksp -f0 ${DATAFILESPATH}/matrices/medium -ksp_fgmres_modifypcksp -ksp_monitor_short

565:    testset:
566:       suffix: 12
567:       requires: datafilespath matlab
568:       args: -pc_type lu -pc_factor_mat_solver_type matlab -f0 ${DATAFILESPATH}/matrices/arco1

570:    testset:
571:       suffix: 13
572:       requires: datafilespath lusol
573:       args: -f0 ${DATAFILESPATH}/matrices/arco1
574:       args: -mat_type lusol -pc_type lu

576:    testset:
577:       nsize: 3
578:       args: -f0 ${DATAFILESPATH}/matrices/medium
579:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
580:       test:
581:          suffix: 14
582:          requires: spai
583:          args: -pc_type spai
584:       test:
585:          suffix: 15
586:          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
587:          args: -pc_type hypre -pc_hypre_type pilut
588:       test:
589:          suffix: 16
590:          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
591:          args: -pc_type hypre -pc_hypre_type parasails
592:       test:
593:          suffix: 17
594:          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
595:          args: -pc_type hypre -pc_hypre_type boomeramg
596:       test:
597:          suffix: 18
598:          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
599:          args: -pc_type hypre -pc_hypre_type euclid

601:    testset:
602:       suffix: 19
603:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
604:       args: -f0 ${DATAFILESPATH}/matrices/poisson1
605:       args: -ksp_type cg -pc_type icc
606:       args: -pc_factor_levels {{0 2 4}separate output}
607:       test:
608:       test:
609:          args: -mat_type seqsbaij

611:    testset:
612:       suffix: ILU
613:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
614:       args: -f0 ${DATAFILESPATH}/matrices/small
615:       args: -pc_factor_levels 1
616:       test:
617:       test:
618:          # This is tested against regular ILU (used to be denoted ILUBAIJ)
619:          args: -mat_type baij

621:    testset:
622:       suffix: aijcusparse
623:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) cuda
624:       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info -mat_type aijcusparse -pc_factor_mat_solver_type cusparse -pc_type ilu -vec_type cuda

626:    testset:
627:       TODO: No output file. Need to determine if deprecated
628:       suffix: asm_viennacl
629:       nsize: 2
630:       requires: viennacl
631:       args: -pc_type asm -pc_asm_sub_mat_type aijviennacl -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int${PETSC_INDEX_SIZE}-float${PETSC_SCALAR_SIZE}

633:    testset:
634:       nsize: 2
635:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
636:       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz -ksp_monitor_short -ksp_rtol 1.E-9 -pc_type hypre -pc_hypre_type boomeramg
637:       test:
638:          suffix: boomeramg_euclid
639:          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01
640:       test:
641:          suffix: boomeramg_euclid_bj
642:          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01 -pc_hypre_boomeramg_eu_bj
643:       test:
644:          suffix: boomeramg_parasails
645:          args: -pc_hypre_boomeramg_smooth_type ParaSails -pc_hypre_boomeramg_smooth_num_levels 2
646:       test:
647:          suffix: boomeramg_pilut
648:          args: -pc_hypre_boomeramg_smooth_type Pilut -pc_hypre_boomeramg_smooth_num_levels 2
649:       test:
650:          suffix: boomeramg_schwarz
651:          args: -pc_hypre_boomeramg_smooth_type Schwarz-smoothers

653:    testset:
654:       suffix: cg_singlereduction
655:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
656:       args: -f0 ${DATAFILESPATH}/matrices/small
657:       args: -mat_type mpisbaij -ksp_type cg -pc_type eisenstat -ksp_monitor_short -ksp_converged_reason
658:       test:
659:       test:
660:          args: -ksp_cg_single_reduction

662:    testset:
663:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
664:       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz
665:       args: -ksp_monitor_short -pc_type icc
666:       test:
667:          suffix: cr
668:          args: -ksp_type cr
669:       test:
670:          suffix: lcd
671:          args: -ksp_type lcd

673:    testset:
674:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
675:       args: -f0 ${DATAFILESPATH}/matrices/small
676:       args: -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info
677:       test:
678:          suffix: seqaijcrl
679:          args: -mat_type seqaijcrl
680:       test:
681:          suffix: seqaijperm
682:          args: -mat_type seqaijperm

684:    testset:
685:       nsize: 2
686:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
687:       args: -f0 ${DATAFILESPATH}/matrices/small
688:       args: -ksp_monitor_short -ksp_view
689:       # Different output files
690:       test:
691:          suffix: mpiaijcrl
692:          args: -mat_type mpiaijcrl
693:       test:
694:          suffix: mpiaijperm
695:          args: -mat_type mpiaijperm

697:    testset:
698:       nsize: 4
699:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI_NUMVERSION)
700:       args: -ksp_monitor_short -ksp_view
701:       test:
702:          suffix: xxt
703:          args: -f0 ${DATAFILESPATH}/matrices/poisson1 -check_symmetry -check_scaling -ksp_type cg -pc_type tfs
704:       test:
705:          suffix: xyt
706:          args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type gmres -pc_type tfs

708:    testset:
709:       # The output file here is the same as mumps
710:       suffix: mumps_cholesky
711:       output_file: output/ex72_mumps.out
712:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
713:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
714:       nsize: {{1 2}}
715:       test:
716:          args: -mat_type sbaij -mat_ignore_lower_triangular
717:       test:
718:          args: -mat_type aij
719:       test:
720:          args: -mat_type aij -matload_spd

722:    testset:
723:       # The output file here is the same as mumps
724:       suffix: mumps_lu
725:       output_file: output/ex72_mumps.out
726:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
727:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
728:       test:
729:          args: -mat_type seqaij
730:       test:
731:          nsize: 2
732:          args: -mat_type mpiaij
733:       test:
734:          args: -mat_type seqbaij -matload_block_size 2
735:       test:
736:          nsize: 2
737:          args: -mat_type mpibaij -matload_block_size 2
738:       test:
739:          args: -mat_type aij -mat_mumps_icntl_7 5
740:          TODO: Need to determine if deprecated

742:    test:
743:       suffix: mumps_lu_parmetis
744:       output_file: output/ex72_mumps.out
745:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps parmetis
746:       nsize: 2
747:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2

749:    test:
750:       suffix: mumps_lu_ptscotch
751:       output_file: output/ex72_mumps.out
752:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps ptscotch
753:       nsize: 2
754:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 1

756:    testset:
757:       # The output file here is the same as mumps
758:       suffix: mumps_redundant
759:       output_file: output/ex72_mumps_redundant.out
760:       nsize: 8
761:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
762:       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2

764:    testset:
765:       suffix: pastix_cholesky
766:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
767:       output_file: output/ex72_mumps.out
768:       nsize: {{1 2}}
769:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -pc_type cholesky -mat_type sbaij -mat_ignore_lower_triangular

771:    testset:
772:       suffix: pastix_lu
773:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
774:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
775:       output_file: output/ex72_mumps.out
776:       test:
777:          args: -mat_type seqaij
778:       test:
779:          nsize: 2
780:          args: -mat_type mpiaij

782:    testset:
783:       suffix: pastix_redundant
784:       output_file: output/ex72_mumps_redundant.out
785:       nsize: 8
786:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
787:       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2

789:    testset:
790:       suffix: superlu_dist_lu
791:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
792:       output_file: output/ex72_mumps.out
793:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
794:       nsize: {{1 2}}

796:    testset:
797:       suffix: superlu_dist_redundant
798:       nsize: 8
799:       output_file: output/ex72_mumps_redundant.out
800:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
801:       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2

803:    testset:
804:       suffix: superlu_lu
805:       output_file: output/ex72_mumps.out
806:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu
807:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu -num_numfac 2 -num_rhs 2

809:    testset:
810:       suffix: umfpack
811:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) suitesparse
812:       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -mat_type seqaij -pc_factor_mat_solver_type umfpack -num_numfac 2 -num_rhs 2

814:    testset:
815:       suffix: zeropivot
816:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
817:       args: -f0 ${DATAFILESPATH}/matrices/small -test_zeropivot -ksp_converged_reason -ksp_type fgmres -pc_type ksp -fp_trap 0
818:       test:
819:          nsize: 3
820:          args: -ksp_pc_type bjacobi
821:       test:
822:          nsize: 2
823:          args: -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_pc_bjacobi_blocks 1
824:       #test:
825:          #nsize: 3
826:          #args: -ksp_ksp_converged_reason -ksp_pc_type bjacobi -ksp_sub_ksp_converged_reason
827:          #TODO: Need to determine if deprecated

829:    testset:
830:       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
831:       args: -mat_convert_type is -f0 ${DATAFILESPATH}/matrices/medium -ksp_type fgmres
832:       test:
833:          suffix: aij_gdsw
834:          nsize: 4
835:          args: -mat_convert_type aij -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
836:       test:
837:          output_file: output/ex72_aij_gdsw.out
838:          suffix: is_gdsw
839:          nsize: 4
840:          args: -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
841:       test:
842:          suffix: is_asm
843:          nsize: {{1 2}separate output}
844:          args: -pc_type asm
845:       test:
846:          suffix: bddc_seq
847:          nsize: 1
848:          args: -pc_type bddc
849:       test:
850:          suffix: bddc_par
851:          nsize: 2
852:          args: -pc_type bddc
853:       test:
854:          requires: parmetis
855:          suffix: bddc_par_nd_parmetis
856:          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
857:          nsize: 4
858:          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type parmetis
859:       test:
860:          requires: ptscotch defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
861:          suffix: bddc_par_nd_ptscotch
862:          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
863:          nsize: 4
864:          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type ptscotch

866:    testset:
867:       requires: !__float128 hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
868:       test:
869:          suffix: hpddm_mat
870:          output_file: output/ex72_bddc_seq.out
871:          filter: sed -e "s/Number of iterations =   2/Number of iterations =   1/g"
872:          nsize: 2
873:          args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_pc_type mat
874:       test:
875:          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
876:          suffix: hpddm_gen_non_hermitian
877:          output_file: output/ex72_2.out
878:          nsize: 4
879:          args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.7 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right
880:       test:
881:          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps !defined(PETSCTEST_VALGRIND)
882:          suffix: hpddm_gen_non_hermitian_baij
883:          output_file: output/ex72_10.out
884:          nsize: 4
885:          timeoutfactor: 2
886:          args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.8 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2 -ksp_monitor_short
887: TEST*/