Actual source code: ex87.c

  1: #include <petscksp.h>
  2: #include <petsc/private/petscimpl.h>

  4: static char help[] = "Solves a saddle-point linear system using PCHPDDM.\n\n";

  6: static PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt size);
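     /* loads one diagonal block of the saddle-point system from disk and, when is and aux are non-NULL, the subdomain index set and sequential auxiliary matrix later passed to PCHPDDMSetAuxiliaryMat() */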

  8: int main(int argc, char **args)
  9: {
 10:   Vec               b, x;            /* right-hand side and computed solution */
 11:   Mat               A[4], aux[2], S; /* blocks of the linear system, auxiliary subdomain matrices, and global nested matrix */
 12:   KSP               ksp, *subksp;    /* outer and inner (fieldsplit) linear solver contexts */
 13:   PC                pc;
 14:   IS                is[2];
 15:   PetscMPIInt       size;
 16:   PetscInt          m, M, n, N, id = 0;
 17:   PetscViewer       viewer;
 18:   const char *const system[] = {"elasticity", "stokes", "diffusion", "lagrange"};
 19:   /* "elasticity":
 20:    *    2D linear elasticity with rubber-like and steel-like material coefficients, i.e., Poisson's ratio \in {0.4999, 0.35} and Young's modulus \in {0.01 GPa, 200.0 GPa}
 21:    *      discretized by order 2 (resp. 0) Lagrange finite elements in displacements (resp. pressure) on a triangle mesh
 22:    * "stokes":
 23:    *    2D lid-driven cavity with constant viscosity
 24:    *      discretized by order 2 (resp. 1) Lagrange finite elements, i.e., lowest-order Taylor--Hood finite elements, in velocities (resp. pressure) on a triangle mesh
 25:    *      if the option -empty_A11 is not set (or is set to false), a pressure field with zero mean value is computed
 26:    * "diffusion":
 27:    *    2D primal-dual nonsymmetric diffusion equation
 28:    *      discretized by order 2 (resp. 1) Lagrange finite elements in primal (resp. dual) unknowns on a triangle mesh
 29:    * "lagrange":
 30:    *    2D linear elasticity with essential boundary conditions imposed through a Lagrange multiplier
 31:    */
 32:   char      dir[PETSC_MAX_PATH_LEN], prefix[PETSC_MAX_PATH_LEN];
 33:   PCType    type;
 34:   PetscBool flg[4] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};

 36:   PetscFunctionBeginUser;
 37:   PetscCall(PetscInitialize(&argc, &args, NULL, help));
 38:   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
 39:   PetscCall(PetscOptionsGetEList(NULL, NULL, "-system", system, PETSC_STATIC_ARRAY_LENGTH(system), &id, NULL));
 40:   if (id == 1) PetscCall(PetscOptionsGetBool(NULL, NULL, "-empty_A11", flg, NULL));
 41:   if (id != 3) PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires 4 processes");
 42:   else PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires 2 processes");
 43:   for (PetscInt i = 0; i < 2; ++i) {
 44:     PetscCall(MatCreate(PETSC_COMM_WORLD, A + (i ? 3 : 0)));
 45:     if (id < 2 || (id == 3 && i == 0)) {
 46:       PetscCall(ISCreate(PETSC_COMM_SELF, is + i));
 47:       PetscCall(MatCreate(PETSC_COMM_SELF, aux + i));
 48:     } else {
 49:       is[i]  = NULL;
 50:       aux[i] = NULL;
 51:     }
 52:   }
 53:   PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
 54:   PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
 55:   /* loading matrices and auxiliary data for the diagonal blocks */
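       /* data files are prefixed A (elasticity), B (stokes), C (diffusion), or D (lagrange), matching the order of the -system list */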
 56:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s", dir, id == 3 ? "D" : (id == 2 ? "C" : (id == 1 ? "B" : "A"))));
 57:   PetscCall(MatAndISLoad(prefix, "00", A[0], is[0], aux[0], size));
 58:   PetscCall(MatAndISLoad(prefix, "11", A[3], is[1], aux[1], size));
 59:   /* loading the off-diagonal (1,0) block with a row/column layout consistent with the diagonal blocks */
 60:   PetscCall(MatCreate(PETSC_COMM_WORLD, A + 2));
 61:   PetscCall(MatGetLocalSize(A[0], &n, NULL));
 62:   PetscCall(MatGetSize(A[0], &N, NULL));
 63:   PetscCall(MatGetLocalSize(A[3], &m, NULL));
 64:   PetscCall(MatGetSize(A[3], &M, NULL));
 65:   PetscCall(MatSetSizes(A[2], m, n, M, N));
 66:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s10.dat", dir, id == 3 ? "D" : (id == 2 ? "C" : (id == 1 ? "B" : "A"))));
 67:   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
 68:   PetscCall(MatLoad(A[2], viewer));
 69:   PetscCall(PetscViewerDestroy(&viewer));
 70:   if (id != 2) {
 71:     /* transposing the off-diagonal block */
 72:     PetscCall(PetscOptionsGetBool(NULL, NULL, "-transpose", flg + 1, NULL));
 73:     PetscCall(PetscOptionsGetBool(NULL, NULL, "-permute", flg + 2, NULL));
 74:     PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit", flg + 3, NULL));
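         /* -transpose derives the second off-diagonal block with MatTranspose() instead of MatHermitianTranspose(),
            -permute first (Hermitian-)transposes the loaded (1,0) block explicitly into the (0,1) slot and then derives the (1,0) block from it (instead of deriving the (0,1) block directly),
            -explicit assembles the derived block instead of wrapping it with MatCreateTranspose() or MatCreateHermitianTranspose() */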
 75:     if (flg[1]) {
 76:       if (flg[2]) {
 77:         PetscCall(MatTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
 78:         PetscCall(MatDestroy(A + 2));
 79:       }
 80:       if (!flg[3]) PetscCall(MatCreateTranspose(A[2 - flg[2]], A + 1 + flg[2]));
 81:       else PetscCall(MatTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
 82:     } else {
 83:       if (flg[2]) {
 84:         PetscCall(MatHermitianTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
 85:         PetscCall(MatDestroy(A + 2));
 86:       }
 87:       if (!flg[3]) PetscCall(MatCreateHermitianTranspose(A[2 - flg[2]], A + 1 + flg[2]));
 88:       else PetscCall(MatHermitianTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
 89:     }
 90:   } else {
 91:     PetscCall(MatCreate(PETSC_COMM_WORLD, A + 1));
 92:     PetscCall(MatSetSizes(A[1], n, m, N, M));
 93:     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/C01.dat", dir));
 94:     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
 95:     PetscCall(MatLoad(A[1], viewer));
 96:     PetscCall(PetscViewerDestroy(&viewer));
 97:   }
 98:   if (flg[0]) PetscCall(MatDestroy(A + 3));
 99:   else {
100:     PetscCall(PetscOptionsGetBool(NULL, NULL, "-diagonal_A11", flg, NULL));
101:     if (flg[0]) {
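           /* -diagonal_A11: replace the loaded (1,1) block with a constant diagonal of value PETSC_SMALL (a MATCONSTANTDIAGONAL), i.e., a numerically nearly-zero block, presumably to exercise a code path different from an empty (NULL) block */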
102:       PetscCall(MatDestroy(A + 3));
103:       PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, m, m, M, M, PETSC_SMALL, A + 3));
104:     }
105:   }
106:   flg[1] = PETSC_FALSE;
107:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-all_transpose", flg + 1, NULL));
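       /* with -all_transpose and -permute, the (0,1) block becomes a Hermitian-transpose wrapper (MatCreateHermitianTranspose()) of its own explicit transpose, which is mathematically equivalent in real arithmetic but presumably exercises the composed transpose types */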
108:   if (flg[1] && flg[2]) {
109:     PetscCall(MatTranspose(A[1], MAT_INITIAL_MATRIX, &S));
110:     PetscCall(MatDestroy(A + 1));
111:     PetscCall(MatCreateHermitianTranspose(S, A + 1));
112:     PetscCall(MatDestroy(&S));
113:   }
114:   /* global coefficient matrix */
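       /* A[] is interpreted in row-major order by MatCreateNest(): A[0] is the (0,0) block, A[1] the (0,1) block, A[2] the (1,0) block, and A[3] the (1,1) block; a NULL entry stands for a zero block */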
115:   PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, A, &S));
116:   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
117:   PetscCall(KSPSetOperators(ksp, S, S));
118:   PetscCall(KSPGetPC(ksp, &pc));
119:   /* outer preconditioner */
120:   PetscCall(PCSetType(pc, PCFIELDSPLIT));
121:   PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
122:   PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
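       /* with PC_FIELDSPLIT_SCHUR_PRE_SELF, the preconditioner of the Schur complement S = A11 - A10 A00^{-1} A01 is built from the (virtual) Schur complement itself, which PCHPDDM (when available) then handles below */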
123:   PetscCall(PCSetFromOptions(pc));
124:   PetscCall(PCGetType(pc, &type));
125:   PetscCall(PetscStrcmp(type, PCFIELDSPLIT, flg + 1));
126:   if (flg[1]) {
127:     PetscCall(PCSetUp(pc));
128:     PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
129:     PetscCall(KSPGetPC(subksp[0], &pc));
130:     /* inner preconditioner associated with the top-left block */
131: #if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
132:     PetscCall(PCSetType(pc, PCHPDDM));
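         /* is[0] is the index set of the local overlapping subdomain and aux[0] the associated sequential auxiliary (e.g., Neumann) matrix from which PCHPDDM builds its coarse space */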
133:     PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
134: #endif
135:     PetscCall(PCSetFromOptions(pc));
136:     PetscCall(KSPGetPC(subksp[1], &pc));
137:     /* inner preconditioner associated with the Schur complement; it will be set internally to PCKSP (or PCASM if the Schur complement is centralized on a single process) */
138: #if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
139:     PetscCall(PCSetType(pc, PCHPDDM));
140:     if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
141: #endif
142:     PetscCall(PCSetFromOptions(pc));
143:     PetscCall(PetscFree(subksp));
144:   } else PetscCall(MatSetBlockSize(A[0], 2));
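       /* the MatSetBlockSize() call above sets the natural block size of the (0,0) block (2 displacement unknowns per node in 2D elasticity) when the outer PC is not PCFIELDSPLIT, e.g., for the direct-solver test, likely so that the factorization can exploit it */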
145:   PetscCall(KSPSetFromOptions(ksp));
146:   PetscCall(MatCreateVecs(S, &b, &x));
147:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/rhs_%s.dat", dir, id == 3 ? "D" : (id == 2 ? "C" : (id == 1 ? "B" : "A"))));
148:   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
149:   PetscCall(VecLoad(b, viewer));
150:   PetscCall(PetscViewerDestroy(&viewer));
151:   PetscCall(KSPSolve(ksp, b, x));
152:   flg[1] = PETSC_FALSE;
153:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", flg + 1, NULL));
154:   if (flg[1]) PetscCall(PCView(pc, PETSC_VIEWER_STDOUT_WORLD));
155:   flg[1] = PETSC_FALSE;
156:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-successive_solves", flg + 1, NULL));
157:   if (flg[1]) {
158:     KSPConvergedReason reason[2];
159:     PetscInt           iterations[2];
160:     PetscCall(KSPGetConvergedReason(ksp, reason));
161:     PetscCall(KSPGetTotalIterations(ksp, iterations));
162:     PetscCall(KSPMonitorCancel(ksp));
163:     PetscCall(PetscOptionsClearValue(NULL, "-ksp_monitor"));
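         /* bumping the PetscObjectState of the operator makes the otherwise unchanged matrix look modified, so the PCSetUp() below rebuilds the PCFIELDSPLIT submatrices and the auxiliary data has to be reattached */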
164:     PetscCall(PetscObjectStateIncrease((PetscObject)S));
165:     PetscCall(KSPGetPC(ksp, &pc));
166:     PetscCall(PCSetUp(pc)); /* update PCFIELDSPLIT submatrices */
167:     PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
168:     PetscCall(KSPGetPC(subksp[0], &pc));
169: #if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
170:     PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
171: #endif
172:     PetscCall(PCSetFromOptions(pc));
173:     PetscCall(KSPGetPC(subksp[1], &pc));
174: #if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
175:     PetscCall(PCSetType(pc, PCHPDDM)); /* may have been set internally to PCKSP (or PCASM if the Schur complement is centralized on a single process), so the proper PCType needs to be enforced again */
176:     if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
177: #endif
178:     PetscCall(PCSetFromOptions(pc));
179:     PetscCall(PetscFree(subksp));
180:     PetscCall(KSPSolve(ksp, b, x));
181:     PetscCall(KSPGetConvergedReason(ksp, reason + 1));
182:     PetscCall(KSPGetTotalIterations(ksp, iterations + 1));
183:     iterations[1] -= iterations[0];
184:     PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[1]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[1]);
185:   }
186:   PetscCall(VecDestroy(&x));
187:   PetscCall(VecDestroy(&b));
188:   PetscCall(KSPDestroy(&ksp));
189:   PetscCall(MatDestroy(&S));
190:   PetscCall(MatDestroy(A + 1));
191:   PetscCall(MatDestroy(A + 2));
192:   for (PetscInt i = 0; i < 2; ++i) {
193:     PetscCall(MatDestroy(A + (i ? 3 : 0)));
194:     PetscCall(MatDestroy(aux + i));
195:     PetscCall(ISDestroy(is + i));
196:   }
197:   PetscCall(PetscFinalize());
198:   return 0;
199: }

201: PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt size)
202: {
203:   Mat             tmp[3];
204:   IS              sizes;
205:   const PetscInt *idx;
206:   PetscInt        m;
207:   PetscLayout     map;
208:   PetscViewer     viewer;
209:   char            name[PETSC_MAX_PATH_LEN];

211:   PetscFunctionBeginUser;
212:   PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_sizes_%d.dat", prefix, identifier, size));
213:   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
214:   PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
215:   PetscCall(ISLoad(sizes, viewer));
216:   PetscCall(ISSetBlockSize(sizes, is && aux ? 5 : 4)); /* not mandatory, but useful for checking that the loaded sizes are consistent */
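       /* each process reads, from the sizes IS, the local rows, local columns, global rows, and global columns of its block (idx[0] through idx[3]) plus, when the auxiliary data is requested, the local size of the auxiliary matrix and index set (idx[4]) */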
217:   PetscCall(ISGetIndices(sizes, &idx));
218:   PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]));
219:   if (is && aux) {
220:     PetscCall(MatCreate(PETSC_COMM_WORLD, tmp));
221:     PetscCall(MatSetSizes(tmp[0], idx[4], idx[4], PETSC_DETERMINE, PETSC_DETERMINE));
222:     PetscCall(MatSetUp(tmp[0]));
223:   }
224:   PetscCall(ISRestoreIndices(sizes, &idx));
225:   PetscCall(ISDestroy(&sizes));
226:   PetscCall(PetscViewerDestroy(&viewer));
227:   PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s.dat", prefix, identifier));
228:   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
229:   PetscCall(MatLoad(A, viewer));
230:   PetscCall(PetscViewerDestroy(&viewer));
231:   if (is && aux) {
232:     PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
233:     PetscCall(MatGetLayouts(tmp[0], &map, NULL));
234:     PetscCall(ISSetLayout(sizes, map));
235:     PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_is_%d.dat", prefix, identifier, size));
236:     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
237:     PetscCall(ISLoad(sizes, viewer));
238:     PetscCall(ISGetLocalSize(sizes, &m));
239:     PetscCall(ISGetIndices(sizes, &idx));
240:     PetscCall(ISSetType(is, ISGENERAL));
241:     PetscCall(ISGeneralSetIndices(is, m, idx, PETSC_COPY_VALUES));
242:     PetscCall(ISRestoreIndices(sizes, &idx));
243:     PetscCall(ISDestroy(&sizes));
244:     PetscCall(PetscViewerDestroy(&viewer));
245:     PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_aux_%d.dat", prefix, identifier, size));
246:     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
247:     PetscCall(MatLoad(tmp[0], viewer));
248:     PetscCall(PetscViewerDestroy(&viewer));
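         /* the auxiliary matrix is stored in the file as a distributed matrix that is (presumably) block diagonal, one block per process; the local diagonal block is duplicated into a standalone sequential Mat and swapped in place into the caller's aux argument via MatHeaderReplace() */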
249:     PetscCall(MatGetDiagonalBlock(tmp[0], tmp + 1));
250:     PetscCall(MatDuplicate(tmp[1], MAT_COPY_VALUES, tmp + 2));
251:     PetscCall(MatHeaderReplace(aux, tmp + 2));
252:     PetscCall(MatDestroy(tmp));
253:   }
254:   PetscFunctionReturn(PETSC_SUCCESS);
255: }

257: /*TEST

259:    testset:
260:       requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
261:       nsize: 4
262:       args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_has_neumann -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2
263:       test:
264:         requires: mumps
265:         suffix: 1
266:         args: -viewer -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1
267:         filter: grep -v -e "action of " -e "                            " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e "                rows="
268:       test:
269:         requires: mumps
270:         suffix: 2
271:         output_file: output/ex87_1_system-stokes.out
272:         args: -viewer -system stokes -empty_A11 -transpose {{false true}shared output} -permute {{false true}shared output} -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1 -explicit {{false true}shared output}
273:         filter: grep -v -e "action of " -e "                            " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e "                rows=" | sed -e "s/      right preconditioning/      left preconditioning/g" -e "s/      using UNPRECONDITIONED/      using PRECONDITIONED/g"
274:       test:
275:         suffix: 1_petsc
276:         args: -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.3 -permute
277:       test:
278:         suffix: 2_petsc
279:         output_file: output/ex87_1_petsc_system-stokes.out
280:         args: -system stokes -empty_A11 -transpose -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.3 -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_shift_type inblocks -successive_solves
281:         filter: sed -e "s/type: transpose/type: hermitiantranspose/g"
282:       test:
283:         suffix: threshold
284:         requires: !defined(PETSC_HAVE_MKL_SPARSE_SP2M_FEATURE)
285:         output_file: output/ex87_1_petsc_system-elasticity.out
286:         args: -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.2 -fieldsplit_1_pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -successive_solves
287:    testset:
288:       requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
289:       nsize: 4
290:       args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2 -system stokes -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.3
291:       test:
292:         suffix: diagonal
293:         output_file: output/ex87_1_petsc_system-stokes.out
294:         args: -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_0_pc_hpddm_has_neumann -diagonal_A11 {{false true}shared output}
295:       test:
296:         suffix: harmonic_overlap_2
297:         output_file: output/ex87_1_petsc_system-stokes.out
298:         args: -fieldsplit_0_pc_hpddm_harmonic_overlap 2 -fieldsplit_0_pc_hpddm_levels_1_svd_nsv 20 -diagonal_A11 -permute {{false true}shared output} -all_transpose

300:    test:
301:       requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) !hpddm !memkind
302:       nsize: 4
303:       suffix: selfp
304:       output_file: output/empty.out
305:       filter: grep -v "CONVERGED_RTOL iterations"
306:       args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system stokes -ksp_rtol 1e-4 -ksp_converged_reason -ksp_max_it 30 -pc_type fieldsplit -pc_fieldsplit_type schur -fieldsplit_ksp_type preonly -pc_fieldsplit_schur_precondition selfp -fieldsplit_pc_type bjacobi -fieldsplit_sub_pc_type lu -transpose {{false true}shared output} -fieldsplit_1_mat_schur_complement_ainv_type lump

308:    test:
309:       requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
310:       nsize: 4
311:       suffix: nonsymmetric_least_squares
312:       output_file: output/empty.out
313:       filter: grep -v "CONVERGED_RTOL iterations"
314:       args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system diffusion -ksp_rtol 1e-4 -ksp_converged_reason -ksp_max_it 20 -pc_type fieldsplit -pc_fieldsplit_type schur -fieldsplit_ksp_type preonly -fieldsplit_0_pc_type jacobi -prefix_push fieldsplit_1_ -pc_hpddm_schur_precondition least_squares -pc_hpddm_define_subdomains -prefix_push pc_hpddm_levels_1_ -sub_pc_type lu -sub_pc_factor_shift_type nonzero -eps_nev 5 -st_share_sub_ksp -prefix_pop -prefix_pop

316:    test:
317:       requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
318:       nsize: 2
319:       suffix: lagrange
320:       output_file: output/empty.out
321:       filter: grep -v "CONVERGED_RTOL iterations"
322:       args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_0_pc_hpddm_has_neumann -fieldsplit_0_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_0_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_0_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_0_pc_hpddm_coarse_pc_type redundant -fieldsplit_0_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_0_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -system lagrange -transpose {{false true}shared output} -successive_solves

324:    test:
325:       requires: datafilespath mumps double !complex !defined(PETSC_USE_64BIT_INDICES)
326:       nsize: 4
327:       suffix: mumps
328:       output_file: output/empty.out
329:       args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_type preonly -system elasticity -pc_type cholesky -mat_mumps_icntl_15 1

331: TEST*/