Actual source code: ex76.c
1: #include <petscksp.h>
2: #include <petsc/private/petscimpl.h>
4: static char help[] = "Solves a linear system using PCHPDDM.\n\n";
6: int main(int argc, char **args)
7: {
8: Vec b; /* computed solution and RHS */
9: Mat A, aux, X, B; /* linear system matrix, local auxiliary matrix, and work matrices */
10: KSP ksp; /* linear solver context */
11: PC pc;
12: IS is, sizes;
13: const PetscInt *idx;
14: PetscMPIInt rank, size;
15: PetscInt m, N = 1;
16: PetscLayout map;
17: PetscViewer viewer;
18: char dir[PETSC_MAX_PATH_LEN], name[PETSC_MAX_PATH_LEN], type[256];
19: PetscBool3 share = PETSC_BOOL3_UNKNOWN;
20: PetscBool flg, set;
22: PetscFunctionBeginUser;
23: PetscCall(PetscInitialize(&argc, &args, NULL, help));
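/* logging must be active for the factorization-count checks performed near the end of this example */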
24: PetscCall(PetscLogDefaultBegin());
25: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
26: PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_USER, "This example requires 4 processes");
27: PetscCall(PetscOptionsGetInt(NULL, NULL, "-rhs", &N, NULL));
28: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
29: PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
30: PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
31: PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
32: /* loading matrices */
33: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/sizes_%d.dat", dir, size));
34: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
35: PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
36: PetscCall(ISLoad(sizes, viewer));
37: PetscCall(ISGetIndices(sizes, &idx));
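/* idx[0] through idx[3]: local and global dimensions of A; idx[4]: local dimension of the Neumann matrix loaded below into X */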
38: PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]));
39: PetscCall(MatCreate(PETSC_COMM_WORLD, &X));
40: PetscCall(MatSetSizes(X, idx[4], idx[4], PETSC_DETERMINE, PETSC_DETERMINE));
41: PetscCall(MatSetUp(X));
42: PetscCall(ISRestoreIndices(sizes, &idx));
43: PetscCall(ISDestroy(&sizes));
44: PetscCall(PetscViewerDestroy(&viewer));
45: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/A.dat", dir));
46: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
47: PetscCall(MatLoad(A, viewer));
48: PetscCall(PetscViewerDestroy(&viewer));
49: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/is_%d.dat", dir, size));
50: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
51: PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
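/* give the IS the same parallel layout as X so that ISLoad() distributes the indices accordingly */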
52: PetscCall(MatGetLayouts(X, &map, NULL));
53: PetscCall(ISSetLayout(sizes, map));
54: PetscCall(ISLoad(sizes, viewer));
55: PetscCall(ISGetLocalSize(sizes, &m));
56: PetscCall(ISGetIndices(sizes, &idx));
57: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_COPY_VALUES, &is));
58: PetscCall(ISRestoreIndices(sizes, &idx));
59: PetscCall(ISDestroy(&sizes));
60: PetscCall(ISSetBlockSize(is, 2));
61: PetscCall(PetscViewerDestroy(&viewer));
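/* is holds the global numbering of this process' overlapping subdomain; the block size of 2 presumably reflects two unknowns per node in the supplied data set */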
62: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/Neumann_%d.dat", dir, size));
63: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
64: PetscCall(MatLoad(X, viewer));
65: PetscCall(PetscViewerDestroy(&viewer));
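/* the local Neumann matrix of the overlapping subdomain is the diagonal block of the parallel matrix just loaded */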
66: PetscCall(MatGetDiagonalBlock(X, &B));
67: PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &aux));
68: PetscCall(MatDestroy(&X));
69: flg = PETSC_FALSE;
70: PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_levels_1_st_share_sub_ksp", &flg, &set));
71: if (flg) { /* PETSc LU/Cholesky struggles numerically for bs > 1 */
72: /* only set the proper bs for the geneo_share_* tests, 1 otherwise */
73: PetscCall(MatSetBlockSizesFromMats(aux, A, A));
74: share = PETSC_BOOL3_TRUE;
75: } else if (set) share = PETSC_BOOL3_FALSE;
76: PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
77: PetscCall(MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE));
78: /* ready for testing */
79: PetscOptionsBegin(PETSC_COMM_WORLD, "", "", "");
80: PetscCall(PetscStrncpy(type, MATAIJ, sizeof(type)));
81: PetscCall(PetscOptionsFList("-mat_type", "Matrix type", "MatSetType", MatList, type, type, 256, &flg));
82: PetscOptionsEnd();
83: PetscCall(MatConvert(A, type, MAT_INPLACE_MATRIX, &A));
84: PetscCall(MatConvert(aux, type, MAT_INPLACE_MATRIX, &aux));
85: PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
86: PetscCall(KSPSetOperators(ksp, A, A));
87: PetscCall(KSPGetPC(ksp, &pc));
88: PetscCall(PCSetType(pc, PCHPDDM));
89: #if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
90: flg = PETSC_FALSE;
91: PetscCall(PetscOptionsGetBool(NULL, NULL, "-reset", &flg, NULL));
92: if (flg) {
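/* with -reset, the PC is first set up with an automatic block-splitting decomposition; attaching the auxiliary Mat below then exercises the PCHPDDM reset path */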
93: PetscCall(PetscOptionsSetValue(NULL, "-pc_hpddm_block_splitting", "true"));
94: PetscCall(PCSetFromOptions(pc));
95: PetscCall(PCSetUp(pc));
96: PetscCall(PetscOptionsClearValue(NULL, "-pc_hpddm_block_splitting"));
97: }
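/* attach the local Neumann matrix and its global numbering, from which PCHPDDM builds its (GenEO-like) coarse space */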
98: PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
99: PetscCall(PCHPDDMHasNeumannMat(pc, PETSC_FALSE)); /* PETSC_TRUE is fine as well, just testing */
100: if (share == PETSC_BOOL3_UNKNOWN) PetscCall(PCHPDDMSetSTShareSubKSP(pc, PetscBool3ToBool(share)));
101: flg = PETSC_FALSE;
102: PetscCall(PetscOptionsGetBool(NULL, NULL, "-set_rhs", &flg, NULL));
103: if (flg) { /* user-provided RHS for concurrent generalized eigenvalue problems */
104: Mat a, c, P; /* usually assembled automatically in PCHPDDM, this is solely for testing PCHPDDMSetRHSMat() */
105: PetscInt rstart, rend, location;
107: PetscCall(MatDuplicate(aux, MAT_DO_NOT_COPY_VALUES, &B)); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
108: PetscCall(MatGetDiagonalBlock(A, &a));
109: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
110: PetscCall(ISGetLocalSize(is, &m));
111: PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, rend - rstart, m, 1, NULL, &P));
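/* build a boolean matrix P mapping each locally owned row of A to its position in the overlapping subdomain numbering */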
112: for (m = rstart; m < rend; ++m) {
113: PetscCall(ISLocate(is, m, &location));
114: PetscCheck(location >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS of the auxiliary Mat does not include all local rows of A");
115: PetscCall(MatSetValue(P, m - rstart, location, 1.0, INSERT_VALUES));
116: }
117: PetscCall(MatAssemblyBegin(P, MAT_FINAL_ASSEMBLY));
118: PetscCall(MatAssemblyEnd(P, MAT_FINAL_ASSEMBLY));
119: PetscCall(PetscObjectTypeCompare((PetscObject)a, MATSEQAIJ, &flg));
120: if (flg) PetscCall(MatPtAP(a, P, MAT_INITIAL_MATRIX, 1.0, &X)); // MatPtAP() is used to extend diagonal blocks with zeros on the overlap
121: else { // workaround for MatPtAP() limitations with some types
122: PetscCall(MatConvert(a, MATSEQAIJ, MAT_INITIAL_MATRIX, &c));
123: PetscCall(MatPtAP(c, P, MAT_INITIAL_MATRIX, 1.0, &X));
124: PetscCall(MatDestroy(&c));
125: }
126: PetscCall(MatDestroy(&P));
127: PetscCall(MatAXPY(B, 1.0, X, SUBSET_NONZERO_PATTERN));
128: PetscCall(MatDestroy(&X));
129: PetscCall(MatSetOption(B, MAT_SYMMETRIC, PETSC_TRUE));
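/* B is used as the right-hand side matrix of the concurrent generalized eigenvalue problems solved on each subdomain */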
130: PetscCall(PCHPDDMSetRHSMat(pc, B));
131: PetscCall(MatDestroy(&B));
132: }
133: #else
134: (void)share;
135: #endif
136: PetscCall(MatDestroy(&aux));
137: PetscCall(KSPSetFromOptions(ksp));
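/* if -pc_type asm was selected at the command line, hand PCASM the same overlapping subdomain together with the locally owned rows */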
138: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &flg));
139: if (flg) {
140: flg = PETSC_FALSE;
141: PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_define_subdomains", &flg, NULL));
142: if (flg) {
143: IS rows;
145: PetscCall(MatGetOwnershipIS(A, &rows, NULL));
146: PetscCall(PCASMSetLocalSubdomains(pc, 1, &is, &rows));
147: PetscCall(ISDestroy(&rows));
148: }
149: }
150: PetscCall(ISDestroy(&is));
151: PetscCall(MatCreateVecs(A, NULL, &b));
152: PetscCall(VecSet(b, 1.0));
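/* solve in place: the right-hand side Vec is overwritten with the solution */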
153: PetscCall(KSPSolve(ksp, b, b));
154: PetscCall(VecGetLocalSize(b, &m));
155: PetscCall(VecDestroy(&b));
156: if (N > 1) {
157: KSPType type;
159: PetscCall(PetscOptionsClearValue(NULL, "-ksp_converged_reason"));
160: PetscCall(KSPSetFromOptions(ksp));
161: PetscCall(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &B));
162: PetscCall(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &X));
163: PetscCall(MatSetRandom(B, NULL));
164: /* this is algorithmically optimal in the sense that blocks of vectors are coarsened or interpolated using matrix--matrix operations */
165: /* PCHPDDM however heavily relies on MPI[S]BAIJ format for which there is no efficient MatProduct implementation */
166: PetscCall(KSPMatSolve(ksp, B, X));
167: PetscCall(KSPGetType(ksp, &type));
168: PetscCall(PetscStrcmp(type, KSPHPDDM, &flg));
169: #if defined(PETSC_HAVE_HPDDM)
170: if (flg) {
171: PetscReal norm;
172: KSPHPDDMType type;
174: PetscCall(KSPHPDDMGetType(ksp, &type));
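/* for these pseudo-block KSPHPDDMType values, solving the right-hand sides one at a time must match the blocked solve up to a small multiple of machine precision */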
175: if (type == KSP_HPDDM_TYPE_PREONLY || type == KSP_HPDDM_TYPE_CG || type == KSP_HPDDM_TYPE_GMRES || type == KSP_HPDDM_TYPE_GCRODR) {
176: Mat C;
178: PetscCall(MatDuplicate(X, MAT_DO_NOT_COPY_VALUES, &C));
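/* force a batch size of 1 so that the right-hand sides are treated one after the other */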
179: PetscCall(KSPSetMatSolveBatchSize(ksp, 1));
180: PetscCall(KSPMatSolve(ksp, B, C));
181: PetscCall(MatAYPX(C, -1.0, X, SAME_NONZERO_PATTERN));
182: PetscCall(MatNorm(C, NORM_INFINITY, &norm));
183: PetscCall(MatDestroy(&C));
184: PetscCheck(norm <= 100 * PETSC_MACHINE_EPSILON, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "KSPMatSolve() and KSPSolve() difference has nonzero norm %g with pseudo-block KSPHPDDMType %s", (double)norm, KSPHPDDMTypes[type]);
185: }
186: }
187: #endif
188: PetscCall(MatDestroy(&X));
189: PetscCall(MatDestroy(&B));
190: }
191: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg));
192: #if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
193: if (flg) PetscCall(PCHPDDMGetSTShareSubKSP(pc, &flg));
194: #endif
195: if (flg && PetscDefined(USE_LOG)) {
196: PetscCall(PetscOptionsHasName(NULL, NULL, "-pc_hpddm_harmonic_overlap", &flg));
197: if (!flg) {
198: PetscLogEvent event;
199: PetscEventPerfInfo info1, info2;
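/* when the sub KSP is shared, the symbolic factorization is reused, so the numerical factorization must have been called more times than the symbolic one */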
201: PetscCall(PetscLogEventRegister("MatLUFactorSym", PC_CLASSID, &event));
202: PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1));
203: PetscCall(PetscLogEventRegister("MatLUFactorNum", PC_CLASSID, &event));
204: PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2));
205: if (!info1.count && !info2.count) {
206: PetscCall(PetscLogEventRegister("MatCholFctrSym", PC_CLASSID, &event));
207: PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1));
208: PetscCall(PetscLogEventRegister("MatCholFctrNum", PC_CLASSID, &event));
209: PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2));
210: PetscCheck(info2.count > info1.count, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cholesky numerical factorization (%d) not called more times than Cholesky symbolic factorization (%d), broken -pc_hpddm_levels_1_st_share_sub_ksp", info2.count, info1.count);
211: } else PetscCheck(info2.count > info1.count, PETSC_COMM_SELF, PETSC_ERR_PLIB, "LU numerical factorization (%d) not called more times than LU symbolic factorization (%d), broken -pc_hpddm_levels_1_st_share_sub_ksp", info2.count, info1.count);
212: }
213: }
214: #if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
215: if (N == 1) {
216: flg = PETSC_FALSE;
217: PetscCall(PetscOptionsGetBool(NULL, NULL, "-successive_solves", &flg, NULL));
218: if (flg) {
219: KSPConvergedReason reason[2];
220: PetscInt iterations[3];
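/* after resetting the auxiliary Mat, the next two solves must converge for the same reason and in roughly the same number of iterations as the first solve */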
222: PetscCall(KSPGetConvergedReason(ksp, reason));
223: PetscCall(KSPGetTotalIterations(ksp, iterations));
224: PetscCall(PetscOptionsClearValue(NULL, "-ksp_converged_reason"));
225: PetscCall(KSPSetFromOptions(ksp));
226: flg = PETSC_FALSE;
227: PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_block_splitting", &flg, NULL));
228: if (!flg) {
229: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/sizes_%d.dat", dir, size));
230: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
231: PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
232: PetscCall(ISLoad(sizes, viewer));
233: PetscCall(ISGetIndices(sizes, &idx));
234: PetscCall(MatCreate(PETSC_COMM_WORLD, &X));
235: PetscCall(MatSetSizes(X, idx[4], idx[4], PETSC_DETERMINE, PETSC_DETERMINE));
236: PetscCall(MatSetUp(X));
237: PetscCall(ISRestoreIndices(sizes, &idx));
238: PetscCall(ISDestroy(&sizes));
239: PetscCall(PetscViewerDestroy(&viewer));
240: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/is_%d.dat", dir, size));
241: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
242: PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
243: PetscCall(MatGetLayouts(X, &map, NULL));
244: PetscCall(ISSetLayout(sizes, map));
245: PetscCall(ISLoad(sizes, viewer));
246: PetscCall(ISGetLocalSize(sizes, &m));
247: PetscCall(ISGetIndices(sizes, &idx));
248: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_COPY_VALUES, &is));
249: PetscCall(ISRestoreIndices(sizes, &idx));
250: PetscCall(ISDestroy(&sizes));
251: PetscCall(ISSetBlockSize(is, 2));
252: PetscCall(PetscViewerDestroy(&viewer));
253: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/Neumann_%d.dat", dir, size));
254: PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
255: PetscCall(MatLoad(X, viewer));
256: PetscCall(PetscViewerDestroy(&viewer));
257: PetscCall(MatGetDiagonalBlock(X, &B));
258: PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &aux));
259: PetscCall(MatDestroy(&X));
260: PetscCall(MatSetBlockSizesFromMats(aux, A, A));
261: PetscCall(MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE));
262: PetscCall(MatConvert(aux, type, MAT_INPLACE_MATRIX, &aux));
263: }
264: PetscCall(MatCreateVecs(A, NULL, &b));
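/* bump the state of A so that the preconditioner is set up again during the next KSPSolve() instead of being reused as is */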
265: PetscCall(PetscObjectStateIncrease((PetscObject)A));
266: if (!flg) PetscCall(PCHPDDMSetAuxiliaryMat(pc, NULL, aux, NULL, NULL));
267: PetscCall(VecSet(b, 1.0));
268: PetscCall(KSPSolve(ksp, b, b));
269: PetscCall(KSPGetConvergedReason(ksp, reason + 1));
270: PetscCall(KSPGetTotalIterations(ksp, iterations + 1));
271: iterations[1] -= iterations[0];
272: PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[1]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[1]);
273: PetscCall(PetscObjectStateIncrease((PetscObject)A));
274: if (!flg) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
275: PetscCall(PCSetFromOptions(pc));
276: PetscCall(VecSet(b, 1.0));
277: PetscCall(KSPSolve(ksp, b, b));
278: PetscCall(KSPGetConvergedReason(ksp, reason + 1));
279: PetscCall(KSPGetTotalIterations(ksp, iterations + 2));
280: iterations[2] -= iterations[0] + iterations[1];
281: PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[2]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[2]);
282: PetscCall(VecDestroy(&b));
283: PetscCall(ISDestroy(&is));
284: PetscCall(MatDestroy(&aux));
285: }
286: }
287: PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", &flg, NULL));
288: if (flg) {
289: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg));
290: if (flg) {
291: PetscCall(PetscStrncpy(dir, "XXXXXX", sizeof(dir)));
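/* rank 0 creates a temporary directory from the XXXXXX template and broadcasts its 6-character name so that every process refers to the same path */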
292: if (rank == 0) PetscCall(PetscMkdtemp(dir));
293: PetscCallMPI(MPI_Bcast(dir, 6, MPI_CHAR, 0, PETSC_COMM_WORLD));
294: for (PetscInt i = 0; i < 2; ++i) {
295: PetscCall(PetscSNPrintf(name, sizeof(name), "%s/%s", dir, i == 0 ? "A" : "A.dat"));
296: PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, name, &viewer));
297: PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL));
298: PetscCall(PCView(pc, viewer));
299: PetscCall(PetscViewerPopFormat(viewer));
300: PetscCall(PetscViewerDestroy(&viewer));
301: }
302: PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
303: if (rank == 0) PetscCall(PetscRMTree(dir));
304: }
305: }
306: #endif
307: PetscCall(KSPDestroy(&ksp));
308: PetscCall(MatDestroy(&A));
309: PetscCall(PetscFinalize());
310: return 0;
311: }
313: /*TEST
315: test:
316: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
317: nsize: 4
318: args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_type {{bjacobi hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
320: testset:
321: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
322: suffix: define_subdomains
323: nsize: 4
324: args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_hpddm_define_subdomains -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
325: test:
326: args: -pc_type {{asm hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -viewer
327: test:
328: args: -pc_type hpddm -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_coarse_sub_pc_type lu -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_coarse_correction none
330: testset:
331: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
332: nsize: 4
333: args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
334: test:
335: suffix: geneo
336: args: -pc_hpddm_coarse_p {{1 2}shared output} -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev {{5 15}separate output} -mat_type {{aij baij sbaij}shared output}
337: test:
338: suffix: geneo_block_splitting
339: output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-15.out
340: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[6-9]/Linear solve converged due to CONVERGED_RTOL iterations 11/g"
341: args: -pc_hpddm_coarse_p 2 -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_block_splitting -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_gen_non_hermitian -mat_type {{aij baij}shared output} -successive_solves
342: test:
343: suffix: geneo_share
344: output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
345: args: -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_share_sub_ksp -reset {{false true}shared output}
346: test:
347: suffix: harmonic_overlap_1_define_false
348: output_file: output/ex76_geneo_share.out
349: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
350: args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_pc_type lu -pc_hpddm_define_subdomains false -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_pc_asm_overlap 2 -mat_type baij
351: test:
352: suffix: harmonic_overlap_1
353: output_file: output/ex76_geneo_share.out
354: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
355: args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_pc_type lu -mat_type baij
356: test:
357: suffix: harmonic_overlap_1_share_petsc
358: output_file: output/ex76_geneo_share.out
359: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
360: args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -pc_hpddm_levels_1_eps_pc_type lu -mat_type baij
361: test:
362: requires: mumps
363: suffix: harmonic_overlap_1_share_mumps
364: output_file: output/ex76_geneo_share.out
365: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
366: args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps
367: test:
368: requires: mumps
369: suffix: harmonic_overlap_1_share_mumps_not_set_explicitly
370: output_file: output/ex76_geneo_share.out
371: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
372: args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type baij
373: test:
374: requires: mkl_pardiso
375: suffix: harmonic_overlap_1_share_mkl_pardiso
376: output_file: output/ex76_geneo_share.out
377: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations [12][0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
378: args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type shell -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mkl_pardiso
379: test:
380: requires: mkl_pardiso !mumps
381: suffix: harmonic_overlap_1_share_mkl_pardiso_no_set_explicitly
382: output_file: output/ex76_geneo_share.out
383: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations [12][0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
384: args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type shell
385: test:
386: suffix: harmonic_overlap_2_relative_threshold
387: output_file: output/ex76_geneo_share.out
388: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 9/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
389: args: -pc_hpddm_harmonic_overlap 2 -pc_hpddm_levels_1_svd_nsv 15 -pc_hpddm_levels_1_svd_relative_threshold 1e-1 -pc_hpddm_levels_1_st_share_sub_ksp -mat_type sbaij
390: test:
391: suffix: harmonic_overlap_2
392: output_file: output/ex76_geneo_share.out
393: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 9/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
394: args: -pc_hpddm_harmonic_overlap 2 -pc_hpddm_levels_1_svd_nsv 12 -pc_hpddm_levels_1_st_share_sub_ksp -mat_type sbaij
396: testset:
397: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
398: nsize: 4
399: args: -ksp_converged_reason -ksp_max_it 150 -pc_type hpddm -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_coarse_p 1 -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains
400: test:
401: suffix: geneo_share_cholesky
402: output_file: output/ex76_geneo_share.out
403: # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
404: args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -mat_type {{aij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output} -successive_solves
405: test:
406: suffix: geneo_share_cholesky_matstructure
407: output_file: output/ex76_geneo_share.out
408: # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
409: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 14/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
410: args: -pc_hpddm_levels_1_sub_pc_type cholesky -mat_type {{baij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure same -set_rhs {{false true}shared output}
411: test:
412: requires: mumps
413: suffix: geneo_share_lu
414: output_file: output/ex76_geneo_share.out
415: # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
416: args: -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_pc_type lu -mat_type baij -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output}
417: test:
418: requires: mumps
419: suffix: geneo_share_lu_matstructure
420: output_file: output/ex76_geneo_share.out
421: # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
422: args: -pc_hpddm_levels_1_sub_pc_type lu -mat_type aij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure {{same different}shared output} -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps -successive_solves -pc_hpddm_levels_1_eps_target 1e-5
423: test:
424: suffix: geneo_share_not_asm
425: output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
426: # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
427: args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp true -pc_hpddm_levels_1_pc_type gasm -successive_solves
429: test:
430: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
431: suffix: fgmres_geneo_20_p_2
432: nsize: 4
433: args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_pc_type redundant -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -pc_hpddm_log_separate {{false true}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
435: testset:
436: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
437: output_file: output/ex76_fgmres_geneo_20_p_2.out
438: nsize: 4
439: args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type {{baij sbaij}shared output} -pc_hpddm_levels_2_eps_nev {{5 20}shared output} -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_type gmres -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
440: test:
441: suffix: fgmres_geneo_20_p_2_geneo
442: args: -mat_type {{aij sbaij}shared output}
443: test:
444: suffix: fgmres_geneo_20_p_2_geneo_algebraic
445: args: -pc_hpddm_levels_2_st_pc_type mat
446: # PCHPDDM + KSPHPDDM test to exercise multilevel + multiple RHS in one go
447: test:
448: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
449: suffix: fgmres_geneo_20_p_2_geneo_rhs
450: output_file: output/ex76_fgmres_geneo_20_p_2.out
451: # for -pc_hpddm_coarse_correction additive
452: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 37/Linear solve converged due to CONVERGED_RTOL iterations 25/g"
453: nsize: 4
454: args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type baij -pc_hpddm_levels_2_eps_nev 5 -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_max_it 10 -pc_hpddm_levels_2_ksp_type hpddm -pc_hpddm_levels_2_ksp_hpddm_type gmres -ksp_type hpddm -ksp_hpddm_variant flexible -pc_hpddm_coarse_mat_type baij -mat_type aij -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output}
456: testset:
457: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) mumps defined(PETSC_HAVE_OPENMP_SUPPORT)
458: filter: grep -E -e "Linear solve" -e " executing" | sed -e "s/MPI = 1/MPI = 2/g" -e "s/OMP = 1/OMP = 2/g"
459: nsize: 4
460: args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_coarse_p {{1 2}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_coarse_pc_factor_mat_solver_type mumps -pc_hpddm_coarse_mat_mumps_icntl_4 2 -pc_hpddm_coarse_mat_mumps_use_omp_threads {{1 2}shared output}
461: test:
462: suffix: geneo_mumps_use_omp_threads_1
463: output_file: output/ex76_geneo_mumps_use_omp_threads.out
464: args: -pc_hpddm_coarse_mat_type {{baij sbaij}shared output}
465: test:
466: suffix: geneo_mumps_use_omp_threads_2
467: output_file: output/ex76_geneo_mumps_use_omp_threads.out
468: args: -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_1_eps_threshold 0.4 -pc_hpddm_coarse_pc_type cholesky -pc_hpddm_coarse_mat_filter 1e-12
470: testset: # converge really poorly because of a tiny -pc_hpddm_levels_1_eps_threshold, but needed for proper code coverage where some subdomains don't call EPSSolve()
471: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
472: nsize: 4
473: args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_threshold 0.005 -pc_hpddm_levels_1_eps_use_inertia -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_define_subdomains -pc_hpddm_has_neumann -ksp_rtol 0.9
474: filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1/Linear solve converged due to CONVERGED_RTOL iterations 141/g"
475: test:
476: suffix: inertia_petsc
477: output_file: output/ex76_1.out
478: args: -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc
479: test:
480: suffix: inertia_mumps
481: output_file: output/ex76_1.out
482: requires: mumps
484: test:
485: requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
486: suffix: reuse_symbolic
487: output_file: output/ex77_preonly.out
488: nsize: 4
489: args: -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output} -ksp_pc_side {{left right}shared output} -ksp_max_it 20 -ksp_type hpddm -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains -ksp_error_if_not_converged
491: TEST*/