Actual source code: ex27.c
static char help[] = "Reads a PETSc matrix and vector from a file and solves the normal equations.\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
    petscsys.h    - base PETSc routines   petscvec.h - vectors
    petscmat.h    - matrices
    petscis.h     - index sets            petscksp.h - Krylov subspace methods
    petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>
#include <petscviewerhdf5.h>
/* Load a vector from the viewer if it is present; report availability in 'has'. */
static PetscErrorCode VecLoadIfExists_Private(Vec b, PetscViewer fd, PetscBool *has)
{
  PetscBool hdf5 = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectTypeCompare((PetscObject)fd, PETSCVIEWERHDF5, &hdf5));
  if (hdf5) {
#if defined(PETSC_HAVE_HDF5)
    /* HDF5 viewers can be queried for the named object directly */
    PetscCall(PetscViewerHDF5HasObject(fd, (PetscObject)b, has));
    if (*has) PetscCall(VecLoad(b, fd));
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "PETSc must be configured with HDF5 to use this feature");
#endif
  } else {
    /* Binary viewers cannot be queried, so attempt the load with an error
       handler that returns instead of aborting, and treat failure as absence */
    PetscErrorCode ierrp;

    PetscCall(PetscPushErrorHandler(PetscReturnErrorHandler, NULL));
    ierrp = VecLoad(b, fd);
    PetscCall(PetscPopErrorHandler());
    *has = ierrp ? PETSC_FALSE : PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
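
/*
  The same probe-without-abort pattern generalizes to other loadable objects.
  A minimal sketch (hypothetical helper, not used by this example):

    static PetscErrorCode MatLoadIfExists_Private(Mat A, PetscViewer fd, PetscBool *has)
    {
      PetscErrorCode ierrp;

      PetscFunctionBeginUser;
      PetscCall(PetscPushErrorHandler(PetscReturnErrorHandler, NULL));
      ierrp = MatLoad(A, fd);
      PetscCall(PetscPopErrorHandler());
      *has = ierrp ? PETSC_FALSE : PETSC_TRUE;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
*/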
int main(int argc, char **args)
{
  KSP         ksp;                       /* linear solver context */
  Mat         A, N;                      /* system matrix and normal equations operator A^H A */
  Vec         x, b, r, Ab, v[2];         /* approx solution, RHS, residual, A^H b, nested vectors for the augmented solve */
  PetscViewer fd;                        /* viewer */
  char        file[PETSC_MAX_PATH_LEN] = "";    /* input file name */
  char        file_x0[PETSC_MAX_PATH_LEN] = ""; /* name of input file with initial guess */
  char        A_name[128] = "A", b_name[128] = "b", x0_name[128] = "x0"; /* names of the matrix, RHS, and initial guess */
  KSPType     ksptype;
  PetscBool   has;
  PetscInt    its, n, m;
  PetscReal   norm;
  PetscBool   nonzero_guess = PETSC_TRUE;
  PetscBool   solve_normal = PETSC_FALSE;
  PetscBool   solve_augmented = PETSC_FALSE;
  PetscBool   truncate = PETSC_FALSE;
  PetscBool   explicit_transpose = PETSC_FALSE;
  PetscBool   hdf5 = PETSC_FALSE;
  PetscBool   test_custom_layout = PETSC_FALSE;
  PetscMPIInt rank, size;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  /*
    Determine the files from which we read the linear system
    (matrix, right-hand side, and initial guess vector).
  */
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-truncate", &truncate, NULL));
  if (!truncate) PetscCall(PetscOptionsGetString(NULL, NULL, "-f_x0", file_x0, sizeof(file_x0), NULL));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-A_name", A_name, sizeof(A_name), NULL));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-b_name", b_name, sizeof(b_name), NULL));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-x0_name", x0_name, sizeof(x0_name), NULL));
  /*
    Decide whether to solve the original system (-solve_normal 0), the normal
    equations (-solve_normal 1), or the equivalent augmented system (-solve_augmented 1).
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-solve_normal", &solve_normal, NULL));
  if (!solve_normal) PetscCall(PetscOptionsGetBool(NULL, NULL, "-solve_augmented", &solve_augmented, NULL));
  if (solve_augmented) PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit_transpose", &explicit_transpose, NULL));
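  /*
    For a (possibly rectangular) system A x = b the three formulations are:
      default:          KSPLSQR applied to A directly, with the normal
                        operator N = A^H A available as preconditioning matrix;
      -solve_normal:    solve the normal equations  A^H A x = A^H b;
      -solve_augmented: solve the block system
                          [ -I   A ] [ r ]   [ b ]
                          [ A^H  0 ] [ x ] = [ 0 ]
                        whose second row enforces A^H (A x - b) = 0.
  */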
  /*
    Decide whether to use the HDF5 reader.
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-hdf5", &hdf5, NULL));
  /*
    Decide whether a custom matrix layout will be tested.
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_custom_layout", &test_custom_layout, NULL));
  /* -----------------------------------------------------------
                    Beginning of linear solver loop
     ----------------------------------------------------------- */
  /*
    Loop through the linear solve twice (the loop body runs twice only when
    preloading is enabled at runtime with -preload; it defaults to off here).
    - The intention is to preload and solve a small system, then load
      another (larger) system and solve it as well.  This process preloads
      the instructions with the smaller system so that more accurate
      performance monitoring (via -log_view) can be done with the larger
      one (that actually is the system of interest).
  */
  PetscPreLoadBegin(PETSC_FALSE, "Load system");

  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
                          Load system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
    Open the input file.  Note that we use FILE_MODE_READ to indicate
    reading from this file.
  */
  if (hdf5) {
#if defined(PETSC_HAVE_HDF5)
    PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
    PetscCall(PetscViewerPushFormat(fd, PETSC_VIEWER_HDF5_MAT));
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "PETSc must be configured with HDF5 to use this feature");
#endif
  } else {
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  }
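
  /*
    With -hdf5 the input is expected in MATLAB Version 7.3 MAT-File layout
    (an HDF5 container, e.g. written by MATLAB's "save -v7.3"), which is what
    the PETSC_VIEWER_HDF5_MAT format selects; several tests below load such files.
  */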
  /*
    Load the matrix.
    The matrix type is set automatically, but you can override it with MatSetType()
    prior to MatLoad().  Do that only if you really insist on the given type.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscObjectSetName((PetscObject)A, A_name));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatLoad(A, fd));
  if (truncate) {
    /* Replace A by A*P, where P selects (up to scaling) roughly the leading
       two thirds of the columns, so a rectangular least-squares problem results */
    Mat      P, B;
    PetscInt M, N;

    PetscCall(MatGetLocalSize(A, &m, &n));
    PetscCall(MatGetSize(A, &M, &N));
    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, m, PETSC_DECIDE, M, N / 1.5, 1, NULL, 1, NULL, &P));
    PetscCall(MatGetOwnershipRangeColumn(P, &m, &n));
    for (; m < n; ++m) PetscCall(MatSetValue(P, m, m, 1.0, INSERT_VALUES));
    PetscCall(MatAssemblyBegin(P, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(P, MAT_FINAL_ASSEMBLY));
    PetscCall(MatShift(P, 1.0));
    PetscCall(MatMatMult(A, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &B));
    PetscCall(MatDestroy(&P));
    PetscCall(MatDestroy(&A));
    A = B;
  }
  if (test_custom_layout && size > 1) {
    /* Perturb the local sizes and create the matrix anew */
    PetscInt m1, n1;

    PetscCall(MatGetLocalSize(A, &m, &n));
    m = rank ? m - 1 : m + size - 1;
    n = (rank == size - 1) ? n + size - 1 : n - 1;
    PetscCall(MatDestroy(&A));
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(PetscObjectSetName((PetscObject)A, A_name));
    PetscCall(MatSetSizes(A, m, n, PETSC_DECIDE, PETSC_DECIDE));
    PetscCall(MatSetFromOptions(A));
    PetscCall(MatLoad(A, fd));
    PetscCall(MatGetLocalSize(A, &m1, &n1));
    PetscCheck(m1 == m && n1 == n, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "resulting sizes differ from requested ones: %" PetscInt_FMT " %" PetscInt_FMT " != %" PetscInt_FMT " %" PetscInt_FMT, m1, n1, m, n);
  }
  PetscCall(MatGetLocalSize(A, &m, &n));
  /*
    Load the RHS vector if it is present in the file; otherwise use a vector of all ones.
  */
  PetscCall(MatCreateVecs(A, &x, &b));
  PetscCall(PetscObjectSetName((PetscObject)b, b_name));
  PetscCall(VecSetFromOptions(b));
  PetscCall(VecLoadIfExists_Private(b, fd, &has));
  if (!has) {
    PetscScalar one = 1.0;

    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Failed to load RHS, so use a vector of all ones.\n"));
    PetscCall(VecSetFromOptions(b));
    PetscCall(VecSet(b, one));
  }
  /*
    Load the initial guess vector if it is present in the file; otherwise use a vector of all zeros.
  */
  PetscCall(PetscObjectSetName((PetscObject)x, x0_name));
  PetscCall(VecSetFromOptions(x));
  if (!truncate) {
    /* load file_x0 if it is specified, otherwise try to reuse file */
    if (file_x0[0]) {
      PetscCall(PetscViewerDestroy(&fd));
      if (hdf5) {
#if defined(PETSC_HAVE_HDF5)
        PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, file_x0, FILE_MODE_READ, &fd));
#endif
      } else {
        PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file_x0, FILE_MODE_READ, &fd));
      }
    }
    PetscCall(VecLoadIfExists_Private(x, fd, &has));
  } else has = PETSC_FALSE;
  if (truncate || !has) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Failed to load initial guess, so use a vector of all zeros.\n"));
    PetscCall(VecSet(x, 0.0));
    nonzero_guess = PETSC_FALSE;
  }
  PetscCall(PetscViewerDestroy(&fd));

  PetscCall(VecDuplicate(x, &Ab));
  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
                     Setup solve for system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Conclude profiling last stage; begin profiling next stage.
  */
  PetscPreLoadStage("KSPSetUp");

  PetscCall(MatCreateNormalHermitian(A, &N));
  PetscCall(MatMultHermitianTranspose(A, b, Ab));
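
  /*
    N applies the Hermitian normal operator A^H A without forming the product
    explicitly (each MatMult with N triggers one multiply with A and one with
    A^H), and Ab = A^H b is the corresponding right-hand side of the normal
    equations.
  */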
  /*
    Create linear solver; set operators; set runtime options.
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  if (solve_normal) {
    PetscCall(KSPSetOperators(ksp, N, N));
  } else if (solve_augmented) {
    Mat       array[4], C;
    Vec       view;
    PetscInt  M, n;
    PetscReal diag;
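
    /*
      Build the 2x2 MatNest operator

        C = [ -I    A  ]
            [ A^H  t I ]

      with t given by -nonzero_A11 (the bottom-right (1,1) block stays empty
      when the option is absent), and later solve C [r; x] = [b; 0].  The A^H
      block is applied matrix-free unless -explicit_transpose is set.
    */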
    PetscCall(MatDestroy(&N));
    PetscCall(MatGetSize(A, &M, NULL));
    PetscCall(MatGetLocalSize(A, NULL, &n));
    PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, m, m, M, M, -1.0, array));
    array[1] = A;
    if (!explicit_transpose) PetscCall(MatCreateHermitianTranspose(A, array + 2));
    else PetscCall(MatHermitianTranspose(A, MAT_INITIAL_MATRIX, array + 2));
    PetscCall(PetscOptionsGetReal(NULL, NULL, "-nonzero_A11", &diag, &has));
    if (has) PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, n, n, PETSC_DECIDE, PETSC_DECIDE, diag, array + 3));
    else array[3] = NULL;
    PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, array, &C));
    PetscCall(MatNestSetVecType(C, VECNEST));
    PetscCall(MatCreateVecs(C, v + 1, v)); /* v[1]: right (solution) vector, v[0]: left (RHS) vector */
    PetscCall(VecSet(v[0], 0.0));
    PetscCall(VecSet(v[1], 0.0));
    PetscCall(VecNestGetSubVec(v[0], 0, &view));
    PetscCall(VecCopy(b, view));
    PetscCall(VecNestGetSubVec(v[1], 1, &view));
    PetscCall(VecCopy(x, view));
    PetscCall(KSPSetOperators(ksp, C, C));
    PetscCall(MatDestroy(&C));
    PetscCall(MatDestroy(array));
    PetscCall(MatDestroy(array + 2));
    PetscCall(MatDestroy(array + 3));
  } else {
    /* default: LSQR on A, with the normal operator N as preconditioning matrix */
    PC pc;

    PetscCall(KSPSetType(ksp, KSPLSQR));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCNONE));
    PetscCall(KSPSetOperators(ksp, A, N));
  }
  PetscCall(KSPSetInitialGuessNonzero(ksp, nonzero_guess));
  PetscCall(KSPSetFromOptions(ksp));
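
  /*
    Typical invocations, mirroring the tests below (file names are placeholders):

      ./ex27 -f A.dat -solve_normal -ksp_type cg
      mpiexec -n 4 ./ex27 -f A.dat -ksp_type lsqr -pc_type none
      mpiexec -n 4 ./ex27 -f A.mat -hdf5 -ksp_type lsqr -test_custom_layout 1
  */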
  /*
    Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
    enable more precise profiling of setting up the preconditioner.
    These calls are optional, since both will be called within
    KSPSolve() if they haven't been called already.
  */
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));
  /*
                          Solve system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Begin profiling next stage
  */
  PetscPreLoadStage("KSPSolve");

  /*
    Solve the linear system; for the augmented formulation, extract the x
    block of the nested solution vector afterwards.
  */
  if (solve_normal) {
    PetscCall(KSPSolve(ksp, Ab, x));
  } else if (solve_augmented) {
    Vec view;

    PetscCall(KSPSolve(ksp, v[0], v[1]));
    PetscCall(VecNestGetSubVec(v[1], 1, &view));
    PetscCall(VecCopy(view, x));
  } else {
    PetscCall(KSPSolve(ksp, b, x));
  }
  PetscCall(PetscObjectSetName((PetscObject)x, "x"));

  /*
    Conclude profiling this stage
  */
  PetscPreLoadStage("Cleanup");
  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
        Check error, print output, free data structures.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Check error
  */
  PetscCall(VecDuplicate(b, &r));
  PetscCall(MatMult(A, x, r));
  PetscCall(VecAXPY(r, -1.0, b));
  PetscCall(VecNorm(r, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(KSPGetType(ksp, &ksptype));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "KSP type: %s\n", ksptype));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %3" PetscInt_FMT "\n", its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));
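
  /*
    The norm printed above is the true residual ||A x - b||_2 of the original
    system, so the three formulations can be compared directly; for an
    inconsistent least-squares problem it approaches the nonzero minimal
    residual rather than zero.
  */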
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&N));
  PetscCall(VecDestroy(&Ab));
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  if (solve_augmented) {
    PetscCall(VecDestroy(v));
    PetscCall(VecDestroy(v + 1));
  }
  PetscCall(KSPDestroy(&ksp));
  PetscPreLoadEnd();
  /* -----------------------------------------------------------
                       End of linear solver loop
     ----------------------------------------------------------- */

  PetscCall(PetscFinalize());
  return 0;
}
/*TEST

   test:
      suffix: 1
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/medium -ksp_view -ksp_monitor_short -ksp_max_it 100 -solve_normal

   test:
      suffix: 2
      nsize: 2
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -ksp_view -ksp_monitor_short -ksp_max_it 100 -solve_normal -pc_type none

   # Test handling a failing VecLoad without abort
   testset:
      requires: double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -ksp_type cg -ksp_view -ksp_converged_reason -ksp_monitor_short -ksp_max_it 10
      test:
         suffix: 3
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system
         args: -f_x0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system_x0
      test:
         suffix: 3a
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system
         args: -f_x0 NONEXISTING_FILE
      test:
         suffix: 3b
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system_with_x0 # this file includes A, b, and x0
      test:
         # Load square matrix, RHS, and initial guess from HDF5 (Version 7.3 MAT-File)
         suffix: 3b_hdf5
         requires: hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system_with_x0.mat -hdf5

   # Test least-squares algorithms
   testset:
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/rectangular_ultrasound_4889x841
      test:
         suffix: 4
         nsize: {{1 2 4}}
         args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
         args: -solve_normal -ksp_type cg
      test:
         suffix: 4a
         nsize: {{1 2 4}}
         args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
         args: -ksp_type {{cgls lsqr}separate output}
      test:
         # Test KSPLSQR-specific options
         suffix: 4b
         nsize: 2
         args: -ksp_converged_reason -ksp_rtol 1e-3 -ksp_max_it 200 -ksp_view
         args: -ksp_type lsqr -ksp_convergence_test lsqr -ksp_lsqr_monitor -ksp_lsqr_compute_standard_error -ksp_lsqr_exact_mat_norm {{0 1}separate output}
      test:
         suffix: 4c
         nsize: 4
         requires: hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         filter: grep -v "shared subdomain KSP between SLEPc and PETSc" | grep -v "total: nonzeros="
         args: -ksp_converged_reason -ksp_rtol 1e-5 -ksp_max_it 100 -ksp_view
         args: -ksp_type lsqr -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output}
         args: -pc_hpddm_levels_1_pc_asm_sub_mat_type aij -pc_hpddm_levels_1_pc_asm_type basic -pc_hpddm_levels_1_sub_pc_type cholesky
      test:
         suffix: 4d
         nsize: 4
         requires: hpddm slepc suitesparse defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         filter: grep -v "shared subdomain KSP between SLEPc and PETSc"
         args: -ksp_converged_reason -ksp_rtol 1e-5 -ksp_max_it 100 -ksp_view
         args: -ksp_type lsqr -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output} -pc_hpddm_levels_1_st_pc_type qr
         args: -pc_hpddm_levels_1_pc_asm_sub_mat_type normalh -pc_hpddm_levels_1_pc_asm_type basic -pc_hpddm_levels_1_sub_pc_type qr
      test:
         suffix: 4e
         nsize: 4
         requires: hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         args: -solve_augmented -ksp_type gmres
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type cholesky -prefix_pop -fieldsplit_1_mat_schur_complement_ainv_type {{diag lump}shared output}
      test:
         suffix: 4f
         nsize: 4
         requires: hpddm slepc suitesparse defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         filter: sed -e "s/(1,0) : type=mpiaij/(1,0) : type=transpose/g" -e "s/hermitiantranspose/transpose/g"
         args: -solve_augmented -ksp_type gmres -ksp_view -explicit_transpose {{false true}shared output}
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type qr -prefix_pop
      test:
         suffix: 4f_nonzero
         nsize: 4
         requires: hpddm slepc suitesparse defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         args: -solve_augmented -nonzero_A11 {{0.0 1e-14}shared output} -ksp_type gmres
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type qr -prefix_pop
      test:
         suffix: 4f_nonzero_shift
         nsize: 4
         output_file: output/ex27_4f_nonzero.out
         requires: hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         filter: sed -e "s/Number of iterations = 6/Number of iterations = 5/g"
         args: -solve_augmented -nonzero_A11 {{0.0 1e-6}shared output} -ksp_type gmres
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_gen_non_hermitian -prefix_pop
      test:
         suffix: 4g
         nsize: 4
         requires: hypre
         args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
         args: -ksp_type lsqr -pc_type hypre

   test:
      # Load rectangular matrix from HDF5 (Version 7.3 MAT-File)
      suffix: 4a_lsqr_hdf5
      nsize: {{1 2 4 8}}
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
      args: -f ${DATAFILESPATH}/matrices/matlab/rectangular_ultrasound_4889x841.mat -hdf5
      args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
      args: -ksp_type lsqr
      args: -test_custom_layout {{0 1}}

   # Test for correct cgls convergence reason
   test:
      suffix: 5
      nsize: 1
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/rectangular_ultrasound_4889x841
      args: -ksp_converged_reason -ksp_rtol 1e-2 -ksp_max_it 100
      args: -ksp_type cgls

   # Load a matrix, RHS, and solution from HDF5 (Version 7.3 MAT-File). Test immediate convergence.
   testset:
      nsize: {{1 2 4 8}}
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
      args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 10
      args: -ksp_type lsqr
      args: -test_custom_layout {{0 1}}
      args: -hdf5 -x0_name x
      test:
         suffix: 6_hdf5
         args: -f ${DATAFILESPATH}/matrices/matlab/small.mat
      test:
         suffix: 6_hdf5_rect
         args: -f ${DATAFILESPATH}/matrices/matlab/small_rect.mat
      test:
         suffix: 6_hdf5_dense
         args: -f ${DATAFILESPATH}/matrices/matlab/small_dense.mat -mat_type dense
      test:
         suffix: 6_hdf5_rect_dense
         args: -f ${DATAFILESPATH}/matrices/matlab/small_rect_dense.mat -mat_type dense

   # Test correct handling of local dimensions in PCApply
   testset:
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      requires: hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
      nsize: 3
      suffix: 7
      args: -f ${DATAFILESPATH}/matrices/matlab/small.mat -hdf5 -test_custom_layout 1 -ksp_type lsqr -pc_type jacobi

   # Test complex matrices
   testset:
      requires: double complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/nh-complex-int32-float64
      output_file: output/ex27_8.out
      filter: grep -v "KSP type"
      test:
         suffix: 8
         args: -solve_normal 0 -ksp_type {{lsqr cgls}}
      test:
         suffix: 8_normal
         args: -solve_normal 1 -ksp_type {{cg bicg}}

   testset:
      requires: double suitesparse !defined(PETSC_USE_64BIT_INDICES)
      args: -solve_normal {{0 1}shared output} -pc_type qr
      output_file: output/ex27_9.out
      filter: grep -v "KSP type"
      test:
         suffix: 9_real
         requires: !complex
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64
      test:
         suffix: 9_complex
         requires: complex
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/nh-complex-int32-float64

   test:
      suffix: 10
      requires: !complex double suitesparse !defined(PETSC_USE_64BIT_INDICES)
      nsize: 2
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -pc_type bjacobi -sub_pc_type qr

   test:
      suffix: 11
      nsize: 4
      requires: datafilespath double complex !defined(PETSC_USE_64BIT_INDICES) hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      args: -f ${DATAFILESPATH}/matrices/farzad_B_rhs -truncate
      args: -ksp_converged_reason -ksp_rtol 1e-5 -ksp_max_it 100
      args: -ksp_type lsqr -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_eps_threshold 1e-6
      args: -pc_hpddm_levels_1_pc_asm_sub_mat_type aij -pc_hpddm_levels_1_pc_asm_type basic -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_coarse_pc_type lu

TEST*/