Actual source code: ex76.c

#include <petscksp.h>
#include <petsc/private/petscimpl.h>

static char help[] = "Solves a linear system using PCHPDDM.\n\n";
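
/* This example loads a distributed matrix A, one index set (IS) per process defining
   an overlapping subdomain, and one local auxiliary matrix per process from the
   directory given by -load_dir, then solves A x = b with PCHPDDM. A typical
   invocation, adapted from the first test below:

     mpiexec -n 4 ./ex76 -ksp_converged_reason -pc_type hpddm \
       -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu \
       -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO */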

int main(int argc, char **args)
{
  Vec             b;            /* computed solution and RHS */
  Mat             A, aux, X, B; /* linear system matrix, local auxiliary matrix, and work matrices */
  KSP             ksp;          /* linear solver context */
  PC              pc;
  IS              is, sizes;
  const PetscInt *idx;
  PetscMPIInt     rank, size;
  PetscInt        m, N = 1;
  PetscViewer     viewer;
  char            dir[PETSC_MAX_PATH_LEN], name[PETSC_MAX_PATH_LEN], type[256];
  PetscBool3      share = PETSC_BOOL3_UNKNOWN;
  PetscBool       flg, set;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCall(PetscLogDefaultBegin());
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_USER, "This example requires 4 processes");
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-rhs", &N, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatCreate(PETSC_COMM_SELF, &aux));
  PetscCall(ISCreate(PETSC_COMM_SELF, &is));
  PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
  /* loading matrices */
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/sizes_%d_%d.dat", dir, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(ISCreate(PETSC_COMM_SELF, &sizes));
  PetscCall(ISLoad(sizes, viewer));
  PetscCall(ISGetIndices(sizes, &idx));
  PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3])); /* local row and column sizes, then global row and column sizes */
  PetscCall(MatSetUp(A));
  PetscCall(ISRestoreIndices(sizes, &idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/A.dat", dir));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/is_%d_%d.dat", dir, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(ISLoad(is, viewer));
  PetscCall(ISSetBlockSize(is, 2));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s/Neumann_%d_%d.dat", dir, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(aux, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_levels_1_st_share_sub_ksp", &flg, &set));
  if (flg) { /* PETSc LU/Cholesky is struggling numerically for bs > 1          */
             /* only set the proper bs for the geneo_share_* tests, 1 otherwise */
    PetscCall(MatSetBlockSizesFromMats(aux, A, A));
    share = PETSC_BOOL3_TRUE;
  } else if (set) share = PETSC_BOOL3_FALSE;
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
  PetscCall(MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE));
  /* ready for testing */
  PetscOptionsBegin(PETSC_COMM_WORLD, "", "", "");
  PetscCall(PetscStrncpy(type, MATAIJ, sizeof(type)));
  PetscCall(PetscOptionsFList("-mat_type", "Matrix type", "MatSetType", MatList, type, type, 256, &flg));
  PetscOptionsEnd();
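  /* convert A and the auxiliary Mat to the format requested with -mat_type (MATAIJ by default) */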
  PetscCall(MatConvert(A, type, MAT_INPLACE_MATRIX, &A));
  PetscCall(MatConvert(aux, type, MAT_INPLACE_MATRIX, &aux));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCHPDDM));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-reset", &flg, NULL));
  if (flg) { /* exercise the reset path: set up the PC with block splitting first, then change its input below */
    PetscCall(PetscOptionsSetValue(NULL, "-pc_hpddm_block_splitting", "true"));
    PetscCall(PCSetFromOptions(pc));
    PetscCall(PCSetUp(pc));
    PetscCall(PetscOptionsClearValue(NULL, "-pc_hpddm_block_splitting"));
  }
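  /* the auxiliary Mat is the local matrix from the file Neumann_<rank>_<size>.dat,
     assembled on the overlapping subdomain defined by the IS; PCHPDDM uses the pair
     to set up the concurrent generalized eigenvalue problems of GenEO */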
  PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
  PetscCall(PCHPDDMHasNeumannMat(pc, PETSC_FALSE)); /* PETSC_TRUE is fine as well, just testing */
  if (share == PETSC_BOOL3_UNKNOWN) PetscCall(PCHPDDMSetSTShareSubKSP(pc, PetscBool3ToBool(share))); /* exercise the API with the default value when the option was not supplied explicitly */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-set_rhs", &flg, NULL));
  if (flg) {          /* user-provided RHS for concurrent generalized eigenvalue problems                          */
    Mat      a, c, P; /* usually assembled automatically in PCHPDDM, this is solely for testing PCHPDDMSetRHSMat() */
    PetscInt rstart, rend, location;

    PetscCall(MatDuplicate(aux, MAT_DO_NOT_COPY_VALUES, &B)); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
    PetscCall(MatGetDiagonalBlock(A, &a));
    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    PetscCall(ISGetLocalSize(is, &m));
    PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, rend - rstart, m, 1, NULL, &P));
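    /* P is a boolean injection Mat with a single unit entry per row, mapping each
       locally owned row of A to its position in the overlapping numbering of the IS */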
    for (m = rstart; m < rend; ++m) {
      PetscCall(ISLocate(is, m, &location));
      PetscCheck(location >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS of the auxiliary Mat does not include all local rows of A");
      PetscCall(MatSetValue(P, m - rstart, location, 1.0, INSERT_VALUES));
    }
    PetscCall(MatAssemblyBegin(P, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(P, MAT_FINAL_ASSEMBLY));
    PetscCall(PetscObjectTypeCompare((PetscObject)a, MATSEQAIJ, &flg));
    if (flg) PetscCall(MatPtAP(a, P, MAT_INITIAL_MATRIX, 1.0, &X)); /* MatPtAP() is used to extend diagonal blocks with zeros on the overlap */
    else { /* workaround for MatPtAP() limitations with some types */
      PetscCall(MatConvert(a, MATSEQAIJ, MAT_INITIAL_MATRIX, &c));
      PetscCall(MatPtAP(c, P, MAT_INITIAL_MATRIX, 1.0, &X));
      PetscCall(MatDestroy(&c));
    }
    PetscCall(MatDestroy(&P));
    PetscCall(MatAXPY(B, 1.0, X, SUBSET_NONZERO_PATTERN));
    PetscCall(MatDestroy(&X));
    PetscCall(MatSetOption(B, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(PCHPDDMSetRHSMat(pc, B));
    PetscCall(MatDestroy(&B));
  }
#else
  (void)share;
#endif
  PetscCall(MatDestroy(&aux));
  PetscCall(KSPSetFromOptions(ksp));
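  /* -pc_type at the command line may have replaced PCHPDDM, e.g., by PCASM; in that
     case, pass the same overlapping subdomains to PCASM so that both preconditioners
     are defined on identical decompositions */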
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCASM, &flg));
  if (flg) {
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_define_subdomains", &flg, NULL));
    if (flg) {
      IS rows;

      PetscCall(MatGetOwnershipIS(A, &rows, NULL));
      PetscCall(PCASMSetLocalSubdomains(pc, 1, &is, &rows));
      PetscCall(ISDestroy(&rows));
    }
  }
  PetscCall(ISDestroy(&is));
  PetscCall(MatCreateVecs(A, NULL, &b));
  PetscCall(VecSet(b, 1.0));
  PetscCall(KSPSolve(ksp, b, b));
  PetscCall(VecGetLocalSize(b, &m));
  PetscCall(VecDestroy(&b));
  if (N > 1) {
    KSPType type;

    PetscCall(PetscOptionsClearValue(NULL, "-ksp_converged_reason"));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &B));
    PetscCall(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, &X));
    PetscCall(MatSetRandom(B, NULL));
    /* this is algorithmically optimal in the sense that blocks of vectors are coarsened or interpolated using matrix--matrix operations */
    /* PCHPDDM however heavily relies on MPI[S]BAIJ format for which there is no efficient MatProduct implementation */
    PetscCall(KSPMatSolve(ksp, B, X));
    PetscCall(KSPGetType(ksp, &type));
    PetscCall(PetscStrcmp(type, KSPHPDDM, &flg));
#if defined(PETSC_HAVE_HPDDM)
    if (flg) {
      PetscReal    norm;
      KSPHPDDMType type;

      PetscCall(KSPHPDDMGetType(ksp, &type));
      if (type == KSP_HPDDM_TYPE_PREONLY || type == KSP_HPDDM_TYPE_CG || type == KSP_HPDDM_TYPE_GMRES || type == KSP_HPDDM_TYPE_GCRODR) {
        Mat C;

        PetscCall(MatDuplicate(X, MAT_DO_NOT_COPY_VALUES, &C));
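        /* a batch size of 1 forces KSPMatSolve() to process the right-hand sides one
           column at a time (pseudo-block), which must match the previous all-at-once
           solve up to a small multiple of machine precision for these KSPHPDDMTypes */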
        PetscCall(KSPSetMatSolveBatchSize(ksp, 1));
        PetscCall(KSPMatSolve(ksp, B, C));
        PetscCall(MatAYPX(C, -1.0, X, SAME_NONZERO_PATTERN));
        PetscCall(MatNorm(C, NORM_INFINITY, &norm));
        PetscCall(MatDestroy(&C));
        PetscCheck(norm <= 100 * PETSC_MACHINE_EPSILON, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "KSPMatSolve() and KSPSolve() difference has nonzero norm %g with pseudo-block KSPHPDDMType %s", (double)norm, KSPHPDDMTypes[type]);
      }
    }
#endif
    PetscCall(MatDestroy(&X));
    PetscCall(MatDestroy(&B));
  }
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  if (flg) PetscCall(PCHPDDMGetSTShareSubKSP(pc, &flg));
#endif
  if (flg && PetscDefined(USE_LOG)) {
    PetscCall(PetscOptionsHasName(NULL, NULL, "-pc_hpddm_harmonic_overlap", &flg));
    if (!flg) {
      PetscLogEvent      event;
      PetscEventPerfInfo info1, info2;
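      /* with shared sub KSPs, there must be fewer symbolic than numerical subdomain
         factorizations, which is verified below through the event counts */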

      PetscCall(PetscLogEventRegister("MatLUFactorSym", PC_CLASSID, &event));
      PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1));
      PetscCall(PetscLogEventRegister("MatLUFactorNum", PC_CLASSID, &event));
      PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2));
      if (!info1.count && !info2.count) {
        PetscCall(PetscLogEventRegister("MatCholFctrSym", PC_CLASSID, &event));
        PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info1));
        PetscCall(PetscLogEventRegister("MatCholFctrNum", PC_CLASSID, &event));
        PetscCall(PetscLogEventGetPerfInfo(PETSC_DETERMINE, event, &info2));
        PetscCheck(info2.count > info1.count, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cholesky numerical factorization (%d) not called more times than Cholesky symbolic factorization (%d), broken -pc_hpddm_levels_1_st_share_sub_ksp", info2.count, info1.count);
      } else PetscCheck(info2.count > info1.count, PETSC_COMM_SELF, PETSC_ERR_PLIB, "LU numerical factorization (%d) not called more times than LU symbolic factorization (%d), broken -pc_hpddm_levels_1_st_share_sub_ksp", info2.count, info1.count);
    }
  }
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  if (N == 1) {
    flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-successive_solves", &flg, NULL));
    if (flg) {
      KSPConvergedReason reason[2];
      PetscInt           iterations[3];
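      /* the two solves below must converge for the same reason as the first solve and
         with an iteration count within +/- 3 of it, as checked after each call */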

      PetscCall(KSPGetConvergedReason(ksp, reason));
      PetscCall(KSPGetTotalIterations(ksp, iterations));
      PetscCall(PetscOptionsClearValue(NULL, "-ksp_converged_reason"));
      PetscCall(KSPSetFromOptions(ksp));
      flg = PETSC_FALSE;
      PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_hpddm_block_splitting", &flg, NULL));
      if (!flg) {
        PetscCall(MatCreate(PETSC_COMM_SELF, &aux));
        PetscCall(ISCreate(PETSC_COMM_SELF, &is));
        PetscCall(PetscSNPrintf(name, sizeof(name), "%s/is_%d_%d.dat", dir, rank, size));
        PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
        PetscCall(ISLoad(is, viewer));
        PetscCall(ISSetBlockSize(is, 2));
        PetscCall(PetscViewerDestroy(&viewer));
        PetscCall(PetscSNPrintf(name, sizeof(name), "%s/Neumann_%d_%d.dat", dir, rank, size));
        PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
        PetscCall(MatLoad(aux, viewer));
        PetscCall(PetscViewerDestroy(&viewer));
        PetscCall(MatSetBlockSizesFromMats(aux, A, A));
        PetscCall(MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE));
        PetscCall(MatConvert(aux, type, MAT_INPLACE_MATRIX, &aux));
      }
      PetscCall(MatCreateVecs(A, NULL, &b));
      PetscCall(PetscObjectStateIncrease((PetscObject)A)); /* pretend A changed so that the next KSPSolve() sets the PC up again */
      if (!flg) PetscCall(PCHPDDMSetAuxiliaryMat(pc, NULL, aux, NULL, NULL));
      PetscCall(VecSet(b, 1.0));
      PetscCall(KSPSolve(ksp, b, b));
      PetscCall(KSPGetConvergedReason(ksp, reason + 1));
      PetscCall(KSPGetTotalIterations(ksp, iterations + 1));
      iterations[1] -= iterations[0];
      PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[1]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[1]);
      PetscCall(PetscObjectStateIncrease((PetscObject)A));
      if (!flg) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
      PetscCall(PCSetFromOptions(pc));
      PetscCall(VecSet(b, 1.0));
      PetscCall(KSPSolve(ksp, b, b));
      PetscCall(KSPGetConvergedReason(ksp, reason + 1));
      PetscCall(KSPGetTotalIterations(ksp, iterations + 2));
      iterations[2] -= iterations[0] + iterations[1];
      PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[2]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[2]);
      PetscCall(VecDestroy(&b));
      PetscCall(ISDestroy(&is));
      PetscCall(MatDestroy(&aux));
    }
  }
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", &flg, NULL));
  if (flg) {
    PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg));
    if (flg) {
      /* view the PC twice into files in a temporary directory created on rank 0 */
      PetscCall(PetscStrncpy(dir, "XXXXXX", sizeof(dir)));
      if (rank == 0) PetscCall(PetscMkdtemp(dir));
      PetscCallMPI(MPI_Bcast(dir, 6, MPI_CHAR, 0, PETSC_COMM_WORLD));
      for (PetscInt i = 0; i < 2; ++i) {
        PetscCall(PetscSNPrintf(name, sizeof(name), "%s/%s", dir, (i == 0 ? "A" : "A.dat")));
        PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, name, &viewer));
        PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL));
        PetscCall(PCView(pc, viewer));
        PetscCall(PetscViewerPopFormat(viewer));
        PetscCall(PetscViewerDestroy(&viewer));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
      if (rank == 0) PetscCall(PetscRMTree(dir));
    }
  }
#endif
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}
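
/* For reference, a minimal sketch of the PCHPDDM setup exercised above, with the
   file loading, testing, and cleanup stripped out (assumes a PETSc build with HPDDM
   and that Mat A, Mat aux, IS is, Vec b, and Vec x already exist):

     KSP ksp;
     PC  pc;
     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCHPDDM));
     PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));
*/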

/*TEST

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_type {{bjacobi hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: define_subdomains
      nsize: 4
      args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_hpddm_define_subdomains -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
        args: -pc_type {{asm hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -viewer
      test:
        args: -pc_type hpddm -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_coarse_sub_pc_type lu -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_coarse_correction none

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
        suffix: geneo
        args: -pc_hpddm_coarse_p {{1 2}shared output} -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev {{5 15}separate output} -mat_type {{aij baij sbaij}shared output}
      test:
        suffix: geneo_block_splitting
        output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-15.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[6-9]/Linear solve converged due to CONVERGED_RTOL iterations 11/g"
        args: -pc_hpddm_coarse_p 2 -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_block_splitting -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_gen_non_hermitian -mat_type {{aij baij}shared output} -successive_solves
      test:
        suffix: geneo_share
        output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
        args: -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_share_sub_ksp -reset {{false true}shared output}
      test:
        suffix: harmonic_overlap_1_define_false
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_pc_type lu -pc_hpddm_define_subdomains false -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_pc_asm_overlap 2 -mat_type baij
      test:
        suffix: harmonic_overlap_1
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_eps_pc_type lu -mat_type baij
      test:
        suffix: harmonic_overlap_1_share_petsc
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -pc_hpddm_levels_1_eps_pc_type lu -mat_type baij
      test:
        requires: mumps
        suffix: harmonic_overlap_1_share_mumps
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps
      test:
        requires: mumps
        suffix: harmonic_overlap_1_share_mumps_not_set_explicitly
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1[0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type baij
      test:
        requires: mkl_pardiso
        suffix: harmonic_overlap_1_share_mkl_pardiso
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations [12][0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type shell -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mkl_pardiso
      test:
        requires: mkl_pardiso !mumps
        suffix: harmonic_overlap_1_share_mkl_pardiso_no_set_explicitly
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations [12][0-3]/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 1 -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_eps_relative_threshold 1e+1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_mat_type shell
      test:
        suffix: harmonic_overlap_2_relative_threshold
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 9/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 2 -pc_hpddm_levels_1_svd_nsv 15 -pc_hpddm_levels_1_svd_relative_threshold 1e-1 -pc_hpddm_levels_1_st_share_sub_ksp -mat_type sbaij
      test:
        suffix: harmonic_overlap_2
        output_file: output/ex76_geneo_share.out
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 9/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_harmonic_overlap 2 -pc_hpddm_levels_1_svd_nsv 12 -pc_hpddm_levels_1_st_share_sub_ksp -mat_type sbaij

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -ksp_max_it 150 -pc_type hpddm -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_coarse_p 1 -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains
      test:
        suffix: geneo_share_cholesky
        output_file: output/ex76_geneo_share.out
        # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
        args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -mat_type {{aij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output} -successive_solves
      test:
        suffix: geneo_share_cholesky_matstructure
        output_file: output/ex76_geneo_share.out
        # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
        filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 14/Linear solve converged due to CONVERGED_RTOL iterations 15/g"
        args: -pc_hpddm_levels_1_sub_pc_type cholesky -mat_type {{baij sbaij}shared output} -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure same -set_rhs {{false true}shared output}
      test:
        requires: mumps
        suffix: geneo_share_lu
        output_file: output/ex76_geneo_share.out
        # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
        args: -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_pc_type lu -mat_type baij -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output}
      test:
        requires: mumps
        suffix: geneo_share_lu_matstructure
        output_file: output/ex76_geneo_share.out
        # extra -pc_factor_mat_solver_type mumps needed to avoid failures with PETSc LU
        args: -pc_hpddm_levels_1_sub_pc_type lu -mat_type aij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_st_matstructure {{same different}shared output} -pc_hpddm_levels_1_st_pc_type lu -pc_hpddm_levels_1_st_pc_factor_mat_solver_type mumps -successive_solves -pc_hpddm_levels_1_eps_target 1e-5
      test:
        suffix: geneo_share_not_asm
        output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
        # extra -pc_hpddm_levels_1_eps_gen_non_hermitian needed to avoid failures with PETSc Cholesky
        args: -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_has_neumann -pc_hpddm_levels_1_st_share_sub_ksp true -pc_hpddm_levels_1_pc_type gasm -successive_solves

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: fgmres_geneo_20_p_2
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_pc_type redundant -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -pc_hpddm_log_separate {{false true}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type {{baij sbaij}shared output} -pc_hpddm_levels_2_eps_nev {{5 20}shared output} -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_type gmres -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
      test:
        suffix: fgmres_geneo_20_p_2_geneo
        args: -mat_type {{aij sbaij}shared output}
      test:
        suffix: fgmres_geneo_20_p_2_geneo_algebraic
        args: -pc_hpddm_levels_2_st_pc_type mat
   # PCHPDDM + KSPHPDDM test to exercise multilevel + multiple RHS in one go
   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: fgmres_geneo_20_p_2_geneo_rhs
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      # for -pc_hpddm_coarse_correction additive
      filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 37/Linear solve converged due to CONVERGED_RTOL iterations 25/g"
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type baij -pc_hpddm_levels_2_eps_nev 5 -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_max_it 10 -pc_hpddm_levels_2_ksp_type hpddm -pc_hpddm_levels_2_ksp_hpddm_type gmres -ksp_type hpddm -ksp_hpddm_variant flexible -pc_hpddm_coarse_mat_type baij -mat_type aij -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output}

   testset:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES) mumps defined(PETSC_HAVE_OPENMP_SUPPORT)
      filter: grep -E -e "Linear solve" -e "      executing" | sed -e "s/MPI =      1/MPI =      2/g" -e "s/OMP =      1/OMP =      2/g"
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 15 -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_coarse_p {{1 2}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_coarse_pc_factor_mat_solver_type mumps -pc_hpddm_coarse_mat_mumps_icntl_4 2 -pc_hpddm_coarse_mat_mumps_use_omp_threads {{1 2}shared output}
      test:
        suffix: geneo_mumps_use_omp_threads_1
        output_file: output/ex76_geneo_mumps_use_omp_threads.out
        args: -pc_hpddm_coarse_mat_type {{baij sbaij}shared output}
      test:
        suffix: geneo_mumps_use_omp_threads_2
        output_file: output/ex76_geneo_mumps_use_omp_threads.out
        args: -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_1_eps_threshold 0.4 -pc_hpddm_coarse_pc_type cholesky -pc_hpddm_coarse_mat_filter 1e-12

   testset: # converge really poorly because of a tiny -pc_hpddm_levels_1_eps_threshold, but needed for proper code coverage where some subdomains don't call EPSSolve()
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_threshold 0.005 -pc_hpddm_levels_1_eps_use_inertia -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_define_subdomains -pc_hpddm_has_neumann -ksp_rtol 0.9
      filter: sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 1/Linear solve converged due to CONVERGED_RTOL iterations 141/g"
      test:
        suffix: inertia_petsc
        output_file: output/ex76_1.out
        args: -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc
      test:
        suffix: inertia_mumps
        output_file: output/ex76_1.out
        requires: mumps

   test:
      requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      suffix: reuse_symbolic
      output_file: output/ex77_preonly.out
      nsize: 4
      args: -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -rhs 4 -pc_hpddm_coarse_correction {{additive deflated balanced}shared output} -ksp_pc_side {{left right}shared output} -ksp_max_it 20 -ksp_type hpddm -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -pc_hpddm_define_subdomains -ksp_error_if_not_converged

TEST*/