/* ex72.c */
/* Usage/help text printed by -help; also registered with PetscInitialize(). */
static char help[] = "Reads a PETSc matrix and vector from a file and solves a linear system.\n\
This version first preloads and solves a small system, then loads \n\
another (larger) system and solves it as well. This example illustrates\n\
preloading of instructions with the smaller system so that more accurate\n\
performance monitoring can be done with the larger one (that actually\n\
is the system of interest). See the 'Performance Hints' chapter of the\n\
users manual for a discussion of preloading. Input parameters include\n\
-f0 <input_file> : first file to load (small system)\n\
-f1 <input_file> : second file to load (larger system)\n\n\
-nearnulldim <0> : number of vectors in the near-null space immediately following matrix\n\n\
-trans : solve transpose system instead\n\n";
/*
  This code can be used to test the PETSc interface to other packages.
  Examples of command line options:
    ./ex72 -f0 <datafile> -ksp_type preonly
           -help -ksp_view
           -num_numfac <num_numfac> -num_rhs <num_rhs>
           -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu or superlu_dist or mumps
           -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps
    mpiexec -n <np> ./ex72 -f0 <datafile> -ksp_type cg -pc_type asm -pc_asm_type basic -sub_pc_type icc -mat_type sbaij
*/
/*
  Include "petscksp.h" so that we can use KSP solvers. Note that this file
  automatically includes:
    petscsys.h    - base PETSc routines    petscvec.h - vectors
    petscmat.h    - matrices
    petscis.h     - index sets             petscksp.h - Krylov subspace methods
    petscviewer.h - viewers                petscpc.h  - preconditioners
*/
#include <petscksp.h>
34: int main(int argc, char **args)
35: {
36: KSP ksp; /* linear solver context */
37: Mat A; /* matrix */
38: Vec x, b, u; /* approx solution, RHS, exact solution */
39: PetscViewer viewer; /* viewer */
40: char file[4][PETSC_MAX_PATH_LEN]; /* input file name */
41: PetscBool table = PETSC_FALSE, flg, trans = PETSC_FALSE, initialguess = PETSC_FALSE;
42: PetscBool outputSoln = PETSC_FALSE, constantnullspace = PETSC_FALSE;
43: PetscInt its, num_numfac, m, n, M, p, nearnulldim = 0;
44: PetscReal norm;
45: PetscBool preload = PETSC_TRUE, isSymmetric, cknorm = PETSC_FALSE, initialguessfile = PETSC_FALSE;
46: PetscMPIInt rank;
47: char initialguessfilename[PETSC_MAX_PATH_LEN];
48: char mtype[PETSC_MAX_PATH_LEN];
50: PetscFunctionBeginUser;
51: PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
52: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
53: PetscCall(PetscOptionsGetBool(NULL, NULL, "-table", &table, NULL));
54: PetscCall(PetscOptionsGetBool(NULL, NULL, "-constantnullspace", &constantnullspace, NULL));
55: PetscCall(PetscOptionsGetBool(NULL, NULL, "-trans", &trans, NULL));
56: PetscCall(PetscOptionsGetBool(NULL, NULL, "-initialguess", &initialguess, NULL));
57: PetscCall(PetscOptionsGetBool(NULL, NULL, "-output_solution", &outputSoln, NULL));
58: PetscCall(PetscOptionsGetString(NULL, NULL, "-initialguessfilename", initialguessfilename, sizeof(initialguessfilename), &initialguessfile));
59: PetscCall(PetscOptionsGetInt(NULL, NULL, "-nearnulldim", &nearnulldim, NULL));
61: /*
62: Determine files from which we read the two linear systems
63: (matrix and right-hand-side vector).
64: */
65: PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file[0], sizeof(file[0]), &flg));
66: if (flg) {
67: PetscCall(PetscStrncpy(file[1], file[0], sizeof(file[1])));
68: preload = PETSC_FALSE;
69: } else {
70: PetscCall(PetscOptionsGetString(NULL, NULL, "-f0", file[0], sizeof(file[0]), &flg));
71: PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must indicate binary file with the -f0 or -f option");
72: PetscCall(PetscOptionsGetString(NULL, NULL, "-f1", file[1], sizeof(file[1]), &flg));
73: if (!flg) preload = PETSC_FALSE; /* don't bother with second system */
74: }
76: /* -----------------------------------------------------------
77: Beginning of linear solver loop
78: ----------------------------------------------------------- */
79: /*
80: Loop through the linear solve 2 times.
81: - The intention here is to preload and solve a small system;
82: then load another (larger) system and solve it as well.
83: This process preloads the instructions with the smaller
84: system so that more accurate performance monitoring (via
85: -log_view) can be done with the larger one (that actually
86: is the system of interest).
87: */
88: PetscPreLoadBegin(preload, "Load system");
90: /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
91: Load system
92: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
94: /*
95: Open binary file. Note that we use FILE_MODE_READ to indicate
96: reading from this file.
97: */
98: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[PetscPreLoadIt], FILE_MODE_READ, &viewer));
100: /*
101: Load the matrix and vector; then destroy the viewer.
102: */
103: PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
104: PetscCall(MatSetFromOptions(A));
105: PetscCall(MatLoad(A, viewer));
107: PetscCall(PetscOptionsGetString(NULL, NULL, "-mat_convert_type", mtype, sizeof(mtype), &flg));
108: if (flg) PetscCall(MatConvert(A, mtype, MAT_INPLACE_MATRIX, &A));
110: if (nearnulldim) {
111: MatNullSpace nullsp;
112: Vec *nullvecs;
113: PetscInt i;
114: PetscCall(PetscMalloc1(nearnulldim, &nullvecs));
115: for (i = 0; i < nearnulldim; i++) {
116: PetscCall(VecCreate(PETSC_COMM_WORLD, &nullvecs[i]));
117: PetscCall(VecLoad(nullvecs[i], viewer));
118: }
119: PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_FALSE, nearnulldim, nullvecs, &nullsp));
120: PetscCall(MatSetNearNullSpace(A, nullsp));
121: for (i = 0; i < nearnulldim; i++) PetscCall(VecDestroy(&nullvecs[i]));
122: PetscCall(PetscFree(nullvecs));
123: PetscCall(MatNullSpaceDestroy(&nullsp));
124: }
125: if (constantnullspace) {
126: MatNullSpace constant;
127: PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &constant));
128: PetscCall(MatSetNullSpace(A, constant));
129: PetscCall(MatNullSpaceDestroy(&constant));
130: }
131: flg = PETSC_FALSE;
132: PetscCall(PetscOptionsGetString(NULL, NULL, "-rhs", file[2], sizeof(file[2]), &flg));
133: PetscCall(VecCreate(PETSC_COMM_WORLD, &b));
134: if (flg) { /* rhs is stored in a separate file */
135: if (file[2][0] == '0' || file[2][0] == 0) {
136: PetscInt m;
137: PetscScalar one = 1.0;
138: PetscCall(PetscInfo(0, "Using vector of ones for RHS\n"));
139: PetscCall(MatGetLocalSize(A, &m, NULL));
140: PetscCall(VecSetSizes(b, m, PETSC_DECIDE));
141: PetscCall(VecSetFromOptions(b));
142: PetscCall(VecSet(b, one));
143: } else {
144: PetscCall(PetscViewerDestroy(&viewer));
145: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[2], FILE_MODE_READ, &viewer));
146: PetscCall(VecSetFromOptions(b));
147: PetscCall(VecLoad(b, viewer));
148: }
149: } else { /* rhs is stored in the same file as matrix */
150: PetscCall(VecSetFromOptions(b));
151: PetscCall(VecLoad(b, viewer));
152: }
153: PetscCall(PetscViewerDestroy(&viewer));
155: /* Make A singular for testing zero-pivot of ilu factorization */
156: /* Example: ./ex72 -f0 <datafile> -test_zeropivot -pc_factor_shift_type <shift_type> */
157: flg = PETSC_FALSE;
158: PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_zeropivot", &flg, NULL));
159: if (flg) { /* set a row as zeros */
160: PetscInt row = 0;
161: PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
162: PetscCall(MatZeroRows(A, 1, &row, 0.0, NULL, NULL));
163: }
165: /* Check whether A is symmetric, then set A->symmetric option */
166: flg = PETSC_FALSE;
167: PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_symmetry", &flg, NULL));
168: if (flg) {
169: PetscCall(MatIsSymmetric(A, 0.0, &isSymmetric));
170: if (!isSymmetric) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Warning: A is non-symmetric \n"));
171: }
173: /*
174: If the loaded matrix is larger than the vector (due to being padded
175: to match the block size of the system), then create a new padded vector.
176: */
178: PetscCall(MatGetLocalSize(A, NULL, &n));
179: PetscCall(MatGetSize(A, &M, NULL));
180: PetscCall(VecGetSize(b, &m));
181: PetscCall(VecGetLocalSize(b, &p));
182: preload = (PetscBool)(M != m || p != n); /* Global or local dimension mismatch */
183: PetscCall(MPIU_Allreduce(&preload, &flg, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
184: if (flg) { /* Create a new vector b by padding the old one */
185: PetscInt j, mvec, start, end, indx;
186: Vec tmp;
187: PetscScalar *bold;
189: PetscCall(VecCreate(PETSC_COMM_WORLD, &tmp));
190: PetscCall(VecSetSizes(tmp, n, PETSC_DECIDE));
191: PetscCall(VecSetFromOptions(tmp));
192: PetscCall(VecGetOwnershipRange(b, &start, &end));
193: PetscCall(VecGetLocalSize(b, &mvec));
194: PetscCall(VecGetArray(b, &bold));
195: for (j = 0; j < mvec; j++) {
196: indx = start + j;
197: PetscCall(VecSetValues(tmp, 1, &indx, bold + j, INSERT_VALUES));
198: }
199: PetscCall(VecRestoreArray(b, &bold));
200: PetscCall(VecDestroy(&b));
201: PetscCall(VecAssemblyBegin(tmp));
202: PetscCall(VecAssemblyEnd(tmp));
203: b = tmp;
204: }
206: PetscCall(MatCreateVecs(A, &x, NULL));
207: PetscCall(VecDuplicate(b, &u));
208: if (initialguessfile) {
209: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, initialguessfilename, FILE_MODE_READ, &viewer));
210: PetscCall(VecLoad(x, viewer));
211: PetscCall(PetscViewerDestroy(&viewer));
212: initialguess = PETSC_TRUE;
213: } else if (initialguess) {
214: PetscCall(VecSet(x, 1.0));
215: } else {
216: PetscCall(VecSet(x, 0.0));
217: }
219: /* Check scaling in A */
220: flg = PETSC_FALSE;
221: PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_scaling", &flg, NULL));
222: if (flg) {
223: Vec max, min;
224: PetscInt idx;
225: PetscReal val;
227: PetscCall(VecDuplicate(x, &max));
228: PetscCall(VecDuplicate(x, &min));
229: PetscCall(MatGetRowMaxAbs(A, max, NULL));
230: PetscCall(MatGetRowMinAbs(A, min, NULL));
231: {
232: PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "max.data", &viewer));
233: PetscCall(VecView(max, viewer));
234: PetscCall(PetscViewerDestroy(&viewer));
235: PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "min.data", &viewer));
236: PetscCall(VecView(min, viewer));
237: PetscCall(PetscViewerDestroy(&viewer));
238: }
239: PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
240: PetscCall(VecMax(max, &idx, &val));
241: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
242: PetscCall(VecView(min, PETSC_VIEWER_DRAW_WORLD));
243: PetscCall(VecMin(min, &idx, &val));
244: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest min row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
245: PetscCall(VecMin(max, &idx, &val));
246: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
247: PetscCall(VecPointwiseDivide(max, max, min));
248: PetscCall(VecMax(max, &idx, &val));
249: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
250: PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
251: PetscCall(VecDestroy(&max));
252: PetscCall(VecDestroy(&min));
253: }
255: /* PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
256: /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
257: Setup solve for system
258: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
259: /*
260: Conclude profiling last stage; begin profiling next stage.
261: */
262: PetscPreLoadStage("KSPSetUpSolve");
264: /*
265: Create linear solver; set operators; set runtime options.
266: */
267: PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
268: PetscCall(KSPSetInitialGuessNonzero(ksp, initialguess));
269: num_numfac = 1;
270: PetscCall(PetscOptionsGetInt(NULL, NULL, "-num_numfac", &num_numfac, NULL));
271: while (num_numfac--) {
272: PC pc;
273: PetscBool lsqr, isbddc, ismatis;
274: char str[32];
276: PetscCall(PetscOptionsGetString(NULL, NULL, "-ksp_type", str, sizeof(str), &lsqr));
277: if (lsqr) PetscCall(PetscStrcmp("lsqr", str, &lsqr));
278: if (lsqr) {
279: Mat BtB;
280: PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, 4, &BtB));
281: PetscCall(KSPSetOperators(ksp, A, BtB));
282: PetscCall(MatDestroy(&BtB));
283: } else {
284: PetscCall(KSPSetOperators(ksp, A, A));
285: }
286: PetscCall(KSPSetFromOptions(ksp));
288: /* if we test BDDC, make sure pmat is of type MATIS */
289: PetscCall(KSPGetPC(ksp, &pc));
290: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBDDC, &isbddc));
291: PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis));
292: if (isbddc && !ismatis) {
293: Mat J;
295: PetscCall(MatConvert(A, MATIS, MAT_INITIAL_MATRIX, &J));
296: PetscCall(KSPSetOperators(ksp, A, J));
297: PetscCall(MatDestroy(&J));
298: }
300: /*
301: Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
302: enable more precise profiling of setting up the preconditioner.
303: These calls are optional, since both will be called within
304: KSPSolve() if they haven't been called already.
305: */
306: PetscCall(KSPSetUp(ksp));
307: PetscCall(KSPSetUpOnBlocks(ksp));
309: /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
310: Solve system
311: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
313: /*
314: Solve linear system;
315: */
316: if (trans) {
317: PetscCall(KSPSolveTranspose(ksp, b, x));
318: PetscCall(KSPGetIterationNumber(ksp, &its));
319: } else {
320: PetscInt num_rhs = 1;
321: PetscCall(PetscOptionsGetInt(NULL, NULL, "-num_rhs", &num_rhs, NULL));
322: cknorm = PETSC_FALSE;
323: PetscCall(PetscOptionsGetBool(NULL, NULL, "-cknorm", &cknorm, NULL));
324: while (num_rhs--) {
325: if (num_rhs == 1) PetscCall(VecSet(x, 0.0));
326: PetscCall(KSPSolve(ksp, b, x));
327: }
328: PetscCall(KSPGetIterationNumber(ksp, &its));
329: if (cknorm) { /* Check error for each rhs */
330: if (trans) {
331: PetscCall(MatMultTranspose(A, x, u));
332: } else {
333: PetscCall(MatMult(A, x, u));
334: }
335: PetscCall(VecAXPY(u, -1.0, b));
336: PetscCall(VecNorm(u, NORM_2, &norm));
337: PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Number of iterations = %3" PetscInt_FMT "\n", its));
338: if (!PetscIsNanScalar(norm)) {
339: if (norm < 1.e-12) {
340: PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Residual norm < 1.e-12\n"));
341: } else {
342: PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Residual norm %g\n", (double)norm));
343: }
344: }
345: }
346: } /* while (num_rhs--) */
348: /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
349: Check error, print output, free data structures.
350: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
352: /*
353: Check error
354: */
355: if (trans) {
356: PetscCall(MatMultTranspose(A, x, u));
357: } else {
358: PetscCall(MatMult(A, x, u));
359: }
360: PetscCall(VecAXPY(u, -1.0, b));
361: PetscCall(VecNorm(u, NORM_2, &norm));
362: /*
363: Write output (optionally using table for solver details).
364: - PetscPrintf() handles output for multiprocessor jobs
365: by printing from only one processor in the communicator.
366: - KSPView() prints information about the linear solver.
367: */
368: if (table) {
369: char *matrixname = NULL, kspinfo[120];
371: /*
372: Open a string viewer; then write info to it.
373: */
374: PetscCall(PetscViewerStringOpen(PETSC_COMM_WORLD, kspinfo, sizeof(kspinfo), &viewer));
375: PetscCall(KSPView(ksp, viewer));
376: PetscCall(PetscStrrchr(file[PetscPreLoadIt], '/', &matrixname));
377: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%-8.8s %3" PetscInt_FMT " %2.0e %s \n", matrixname, its, (double)norm, kspinfo));
379: /*
380: Destroy the viewer
381: */
382: PetscCall(PetscViewerDestroy(&viewer));
383: } else {
384: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %3" PetscInt_FMT "\n", its));
385: if (!PetscIsNanScalar(norm)) {
386: if (norm < 1.e-12 && !PetscIsNanScalar((PetscScalar)norm)) {
387: PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Residual norm < 1.e-12\n"));
388: } else {
389: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));
390: }
391: }
392: }
393: PetscCall(PetscOptionsGetString(NULL, NULL, "-solution", file[3], sizeof(file[3]), &flg));
394: if (flg) {
395: Vec xstar;
396: PetscReal norm;
398: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[3], FILE_MODE_READ, &viewer));
399: PetscCall(VecCreate(PETSC_COMM_WORLD, &xstar));
400: PetscCall(VecLoad(xstar, viewer));
401: PetscCall(VecAXPY(xstar, -1.0, x));
402: PetscCall(VecNorm(xstar, NORM_2, &norm));
403: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
404: PetscCall(VecDestroy(&xstar));
405: PetscCall(PetscViewerDestroy(&viewer));
406: }
407: if (outputSoln) {
408: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "solution.petsc", FILE_MODE_WRITE, &viewer));
409: PetscCall(VecView(x, viewer));
410: PetscCall(PetscViewerDestroy(&viewer));
411: }
413: flg = PETSC_FALSE;
414: PetscCall(PetscOptionsGetBool(NULL, NULL, "-ksp_reason", &flg, NULL));
415: if (flg) {
416: KSPConvergedReason reason;
417: PetscCall(KSPGetConvergedReason(ksp, &reason));
418: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "KSPConvergedReason: %s\n", KSPConvergedReasons[reason]));
419: }
421: } /* while (num_numfac--) */
423: /*
424: Free work space. All PETSc objects should be destroyed when they
425: are no longer needed.
426: */
427: PetscCall(MatDestroy(&A));
428: PetscCall(VecDestroy(&b));
429: PetscCall(VecDestroy(&u));
430: PetscCall(VecDestroy(&x));
431: PetscCall(KSPDestroy(&ksp));
432: PetscPreLoadEnd();
433: /* -----------------------------------------------------------
434: End of linear solver loop
435: ----------------------------------------------------------- */
437: PetscCall(PetscFinalize());
438: return 0;
439: }
441: /*TEST
443: build:
444: requires: !complex
446: testset:
447: suffix: 1
448: nsize: 2
449: args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
450: requires: !__float128
452: testset:
453: suffix: 1a
454: args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
455: requires: !__float128
457: testset:
458: nsize: 2
459: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
460: args: -f0 ${DATAFILESPATH}/matrices/medium
461: args: -ksp_type bicg
462: test:
463: suffix: 2
465: testset:
466: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
467: args: -f0 ${DATAFILESPATH}/matrices/medium
468: args: -ksp_type bicg
469: test:
470: suffix: 4
471: args: -pc_type lu
472: test:
473: suffix: 5
475: testset:
476: suffix: 6
477: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
478: args: -f0 ${DATAFILESPATH}/matrices/fem1
479: args: -pc_factor_levels 2 -pc_factor_fill 1.73 -ksp_gmres_cgs_refinement_type refine_always
481: testset:
482: TODO: Matrix row/column sizes are not compatible with block size
483: suffix: 7
484: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
485: args: -f0 ${DATAFILESPATH}/matrices/medium
486: args: -viewer_binary_skip_info -mat_type seqbaij
487: args: -matload_block_size {{2 3 4 5 6 7 8}separate output}
488: args: -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always
489: args: -ksp_rtol 1.0e-15 -ksp_monitor_short
490: test:
491: suffix: a
492: test:
493: suffix: b
494: args: -pc_factor_mat_ordering_type nd
495: test:
496: suffix: c
497: args: -pc_factor_levels 1
498: test:
499: requires: metis
500: suffix: d
501: args: -pc_factor_mat_ordering_type metisnd
503: testset:
504: TODO: Matrix row/column sizes are not compatible with block size
505: suffix: 7_d
506: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
507: args: -f0 ${DATAFILESPATH}/matrices/medium
508: args: -viewer_binary_skip_info -mat_type seqbaij
509: args: -matload_block_size {{2 3 4 5 6 7 8}shared output}
510: args: -ksp_type preonly -pc_type lu
512: testset:
513: suffix: 8
514: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
515: args: -f0 ${DATAFILESPATH}/matrices/medium
516: args: -ksp_diagonal_scale -pc_type eisenstat -ksp_monitor_short -ksp_diagonal_scale_fix -ksp_gmres_cgs_refinement_type refine_always -mat_no_inode
518: testset:
519: TODO: Matrix row/column sizes are not compatible with block size
520: suffix: 9
521: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
522: args: -f0 ${DATAFILESPATH}/matrices/medium
523: args: -viewer_binary_skip_info -matload_block_size {{1 2 3 4 5 6 7}separate output} -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always -ksp_rtol 1.0e-15 -ksp_monitor_short
524: test:
525: suffix: a
526: args: -mat_type seqbaij
527: test:
528: suffix: b
529: args: -mat_type seqbaij -trans
530: test:
531: suffix: c
532: nsize: 2
533: args: -mat_type mpibaij
534: test:
535: suffix: d
536: nsize: 2
537: args: -mat_type mpibaij -trans
538: test:
539: suffix: e
540: nsize: 3
541: args: -mat_type mpibaij
542: test:
543: suffix: f
544: nsize: 3
545: args: -mat_type mpibaij -trans
547: testset:
548: suffix: 10
549: nsize: 2
550: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
551: args: -ksp_type fgmres -pc_type ksp -f0 ${DATAFILESPATH}/matrices/medium -ksp_fgmres_modifypcksp -ksp_monitor_short
553: testset:
554: suffix: 12
555: requires: datafilespath matlab
556: args: -pc_type lu -pc_factor_mat_solver_type matlab -f0 ${DATAFILESPATH}/matrices/arco1
558: testset:
559: suffix: 13
560: requires: datafilespath lusol
561: args: -f0 ${DATAFILESPATH}/matrices/arco1
562: args: -mat_type lusol -pc_type lu
564: testset:
565: nsize: 3
566: args: -f0 ${DATAFILESPATH}/matrices/medium
567: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
568: test:
569: suffix: 14
570: requires: spai
571: args: -pc_type spai
572: test:
573: suffix: 15
574: requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
575: args: -pc_type hypre -pc_hypre_type pilut
576: test:
577: suffix: 16
578: requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
579: args: -pc_type hypre -pc_hypre_type parasails
580: test:
581: suffix: 17
582: requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
583: args: -pc_type hypre -pc_hypre_type boomeramg
584: test:
585: suffix: 18
586: requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
587: args: -pc_type hypre -pc_hypre_type euclid
589: testset:
590: suffix: 19
591: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
592: args: -f0 ${DATAFILESPATH}/matrices/poisson1
593: args: -ksp_type cg -pc_type icc
594: args: -pc_factor_levels {{0 2 4}separate output}
595: test:
596: test:
597: args: -mat_type seqsbaij
599: testset:
600: suffix: ILU
601: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
602: args: -f0 ${DATAFILESPATH}/matrices/small
603: args: -pc_factor_levels 1
604: test:
605: test:
606: # This is tested against regular ILU (used to be denoted ILUBAIJ)
607: args: -mat_type baij
609: testset:
610: suffix: aijcusparse
611: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) cuda
612: args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info -mat_type aijcusparse -pc_factor_mat_solver_type cusparse -pc_type ilu -vec_type cuda
614: testset:
615: TODO: No output file. Need to determine if deprecated
616: suffix: asm_viennacl
617: nsize: 2
618: requires: viennacl
619: args: -pc_type asm -pc_asm_sub_mat_type aijviennacl -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int${PETSC_INDEX_SIZE}-float${PETSC_SCALAR_SIZE}
621: testset:
622: nsize: 2
623: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
624: args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz -ksp_monitor_short -ksp_rtol 1.E-9 -pc_type hypre -pc_hypre_type boomeramg
625: test:
626: suffix: boomeramg_euclid
627: args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01
628: test:
629: suffix: boomeramg_euclid_bj
630: args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01 -pc_hypre_boomeramg_eu_bj
631: test:
632: suffix: boomeramg_parasails
633: args: -pc_hypre_boomeramg_smooth_type ParaSails -pc_hypre_boomeramg_smooth_num_levels 2
634: test:
635: suffix: boomeramg_pilut
636: args: -pc_hypre_boomeramg_smooth_type Pilut -pc_hypre_boomeramg_smooth_num_levels 2
637: test:
638: suffix: boomeramg_schwarz
639: args: -pc_hypre_boomeramg_smooth_type Schwarz-smoothers
641: testset:
642: suffix: cg_singlereduction
643: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
644: args: -f0 ${DATAFILESPATH}/matrices/small
645: args: -mat_type mpisbaij -ksp_type cg -pc_type eisenstat -ksp_monitor_short -ksp_converged_reason
646: test:
647: test:
648: args: -ksp_cg_single_reduction
650: testset:
651: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
652: args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz
653: args: -ksp_monitor_short -pc_type icc
654: test:
655: suffix: cr
656: args: -ksp_type cr
657: test:
658: suffix: lcd
659: args: -ksp_type lcd
661: testset:
662: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
663: args: -f0 ${DATAFILESPATH}/matrices/small
664: args: -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info
665: test:
666: suffix: seqaijcrl
667: args: -mat_type seqaijcrl
668: test:
669: suffix: seqaijperm
670: args: -mat_type seqaijperm
672: testset:
673: nsize: 2
674: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
675: args: -f0 ${DATAFILESPATH}/matrices/small
676: args: -ksp_monitor_short -ksp_view
677: # Different output files
678: test:
679: suffix: mpiaijcrl
680: args: -mat_type mpiaijcrl
681: test:
682: suffix: mpiaijperm
683: args: -mat_type mpiaijperm
685: testset:
686: nsize: 4
687: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI_NUMVERSION)
688: args: -ksp_monitor_short -ksp_view
689: test:
690: suffix: xxt
691: args: -f0 ${DATAFILESPATH}/matrices/poisson1 -check_symmetry -ksp_type cg -pc_type tfs
692: test:
693: suffix: xyt
694: args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type gmres -pc_type tfs
696: testset:
697: # The output file here is the same as mumps
698: suffix: mumps_cholesky
699: output_file: output/ex72_mumps.out
700: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
701: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
702: nsize: {{1 2}}
703: test:
704: args: -mat_type sbaij -mat_ignore_lower_triangular
705: test:
706: args: -mat_type aij
707: test:
708: args: -mat_type aij -matload_spd
710: testset:
711: # The output file here is the same as mumps
712: suffix: mumps_lu
713: output_file: output/ex72_mumps.out
714: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
715: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
716: test:
717: args: -mat_type seqaij
718: test:
719: nsize: 2
720: args: -mat_type mpiaij
721: test:
722: args: -mat_type seqbaij -matload_block_size 2
723: test:
724: nsize: 2
725: args: -mat_type mpibaij -matload_block_size 2
726: test:
727: args: -mat_type aij -mat_mumps_icntl_7 5
728: TODO: Need to determine if deprecated
730: test:
731: suffix: mumps_lu_parmetis
732: output_file: output/ex72_mumps.out
733: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps parmetis
734: nsize: 2
735: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2
737: test:
738: suffix: mumps_lu_ptscotch
739: output_file: output/ex72_mumps.out
740: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps ptscotch
741: nsize: 2
742: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 1
744: testset:
745: # The output file here is the same as mumps
746: suffix: mumps_redundant
747: output_file: output/ex72_mumps_redundant.out
748: nsize: 8
749: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
750: args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
752: testset:
753: suffix: pastix_cholesky
754: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
755: output_file: output/ex72_mumps.out
756: nsize: {{1 2}}
757: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -pc_type cholesky -mat_type sbaij -mat_ignore_lower_triangular
759: testset:
760: suffix: pastix_lu
761: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
762: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
763: output_file: output/ex72_mumps.out
764: test:
765: args: -mat_type seqaij
766: test:
767: nsize: 2
768: args: -mat_type mpiaij
770: testset:
771: suffix: pastix_redundant
772: output_file: output/ex72_mumps_redundant.out
773: nsize: 8
774: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
775: args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
777: testset:
778: suffix: superlu_dist_lu
779: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
780: output_file: output/ex72_mumps.out
781: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
782: nsize: {{1 2}}
784: testset:
785: suffix: superlu_dist_redundant
786: nsize: 8
787: output_file: output/ex72_mumps_redundant.out
788: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
789: args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
791: testset:
792: suffix: superlu_lu
793: output_file: output/ex72_mumps.out
794: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu
795: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu -num_numfac 2 -num_rhs 2
797: testset:
798: suffix: umfpack
799: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) suitesparse
800: args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -mat_type seqaij -pc_factor_mat_solver_type umfpack -num_numfac 2 -num_rhs 2
802: testset:
803: suffix: zeropivot
804: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
805: args: -f0 ${DATAFILESPATH}/matrices/small -test_zeropivot -ksp_converged_reason -ksp_type fgmres -pc_type ksp -fp_trap 0
806: test:
807: nsize: 3
808: args: -ksp_pc_type bjacobi
809: test:
810: nsize: 2
811: args: -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_pc_bjacobi_blocks 1
812: #test:
813: #nsize: 3
814: #args: -ksp_ksp_converged_reason -ksp_pc_type bjacobi -ksp_sub_ksp_converged_reason
815: #TODO: Need to determine if deprecated
817: testset:
818: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
819: args: -mat_convert_type is -f0 ${DATAFILESPATH}/matrices/medium -ksp_type fgmres
820: test:
821: suffix: aij_gdsw
822: nsize: 4
823: args: -mat_convert_type aij -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
824: test:
825: output_file: output/ex72_aij_gdsw.out
826: suffix: is_gdsw
827: nsize: 4
828: args: -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
829: test:
830: suffix: is_asm
831: nsize: {{1 2}separate output}
832: args: -pc_type asm
833: test:
834: suffix: bddc_seq
835: nsize: 1
836: args: -pc_type bddc
837: test:
838: suffix: bddc_par
839: nsize: 2
840: args: -pc_type bddc
841: test:
842: requires: parmetis
843: suffix: bddc_par_nd_parmetis
844: filter: sed -e "s/Number of iterations = [0-9]/Number of iterations = 9/g"
845: nsize: 4
846: args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type parmetis
847: test:
848: requires: ptscotch defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
849: suffix: bddc_par_nd_ptscotch
850: filter: sed -e "s/Number of iterations = [0-9]/Number of iterations = 9/g"
851: nsize: 4
852: args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type ptscotch
854: testset:
855: requires: !__float128 hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
856: test:
857: suffix: hpddm_mat
858: output_file: output/ex72_bddc_seq.out
859: filter: sed -e "s/Number of iterations = 2/Number of iterations = 1/g"
860: nsize: 2
861: args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_pc_type mat
862: test:
863: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
864: suffix: hpddm_gen_non_hermitian
865: output_file: output/ex72_2.out
866: nsize: 4
867: args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.7 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right
868: test:
869: requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps !defined(PETSCTEST_VALGRIND)
870: suffix: hpddm_gen_non_hermitian_baij
871: output_file: output/ex72_10.out
872: nsize: 4
873: timeoutfactor: 2
874: args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.8 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2 -ksp_monitor_short
875: TEST*/