Actual source code: ex87.c

#include <petscksp.h>
#include <petsc/private/petscimpl.h> /* needed for PetscObjectStateIncrease() */

static char help[] = "Solves a saddle-point linear system using PCHPDDM.\n\n";

static PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt rank, PetscMPIInt size);

int main(int argc, char **args)
{
  Vec               b, x;            /* right-hand side and computed solution */
  Mat               A[4], aux[2], S; /* blocks of the linear system, auxiliary matrices, and global MatNest */
  KSP               ksp, *subksp;    /* outer and inner (fieldsplit) linear solver contexts */
  PC                pc;
  IS                is[2];
  PetscMPIInt       rank, size;
  PetscInt          m, M, n, N, id = 0;
  PetscViewer       viewer;
  const char *const system[] = {"elasticity", "stokes"};
  /* "elasticity":
   *    2D linear elasticity with rubber-like and steel-like material coefficients, i.e., Poisson's ratio \in {0.4999, 0.35} and Young's modulus \in {0.01 GPa, 200.0 GPa}
   *      discretized by order 2 (resp. 0) Lagrange finite elements in displacements (resp. pressure) on a triangle mesh
   * "stokes":
   *    2D lid-driven cavity with constant viscosity
   *      discretized by order 2 (resp. 1) Lagrange finite elements, i.e., lowest-order Taylor--Hood finite elements, in velocities (resp. pressure) on a triangle mesh
   *      if the option -empty_A11 is not set (or set to false), a pressure with zero mean value is computed
   */
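  /* a typical invocation (assuming the binary data files are reachable through -load_dir,
   * cf. the TEST block at the bottom of this file), e.g.:
   *   mpirun -n 4 ./ex87 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system stokes -empty_A11
   */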
  char      dir[PETSC_MAX_PATH_LEN], prefix[PETSC_MAX_PATH_LEN];
  PetscBool flg[4] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};
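  /* flg[0] tracks -empty_A11, flg[1] -transpose, flg[2] -permute, and flg[3] -explicit;
   * flg[1] is reused further down for -viewer and -successive_solves */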

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_USER, "This example requires 4 processes");
  PetscCall(PetscOptionsGetEList(NULL, NULL, "-system", system, PETSC_STATIC_ARRAY_LENGTH(system), &id, NULL));
  if (id == 1) PetscCall(PetscOptionsGetBool(NULL, NULL, "-empty_A11", flg, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatCreate(PETSC_COMM_WORLD, A + (i ? 3 : 0)));
    PetscCall(ISCreate(PETSC_COMM_SELF, is + i));
    PetscCall(MatCreate(PETSC_COMM_SELF, aux + i));
  }
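  /* A[0] and A[3] are the distributed diagonal blocks; is[i] and aux[i] hold the sequential
   * per-process index sets and auxiliary matrices (e.g., local Neumann matrices, cf. the
   * -fieldsplit_pc_hpddm_has_neumann option in the tests) passed to PCHPDDMSetAuxiliaryMat() */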
  PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
  /* loading matrices and auxiliary data for the diagonal blocks */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s", dir, id == 1 ? "B" : "A"));
  PetscCall(MatAndISLoad(prefix, "00", A[0], is[0], aux[0], rank, size));
  PetscCall(MatAndISLoad(prefix, "11", A[3], is[1], aux[1], rank, size));
  /* loading the off-diagonal block with a coherent row/column layout */
  PetscCall(MatCreate(PETSC_COMM_WORLD, A + 2));
  PetscCall(MatGetLocalSize(A[0], &n, NULL));
  PetscCall(MatGetSize(A[0], &N, NULL));
  PetscCall(MatGetLocalSize(A[3], &m, NULL));
  PetscCall(MatGetSize(A[3], &M, NULL));
  PetscCall(MatSetSizes(A[2], m, n, M, N));
  PetscCall(MatSetUp(A[2]));
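  /* A[2], the bottom-left block, inherits the row layout of A[3] and the column layout of
   * A[0], so the subsequent MatLoad() distributes it consistently with the diagonal blocks */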
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s10.dat", dir, id == 1 ? "B" : "A"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A[2], viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  /* transposing the off-diagonal block, either implicitly (matrix-free) or explicitly */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-transpose", flg + 1, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-permute", flg + 2, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit", flg + 3, NULL));
  if (flg[1]) {
    if (flg[2]) {
      PetscCall(MatTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
      PetscCall(MatDestroy(A + 2));
    }
    if (!flg[3]) PetscCall(MatCreateTranspose(A[2 - flg[2]], A + 1 + flg[2]));
    else PetscCall(MatTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
  } else {
    if (flg[2]) {
      PetscCall(MatHermitianTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
      PetscCall(MatDestroy(A + 2));
    }
    if (!flg[3]) PetscCall(MatCreateHermitianTranspose(A[2 - flg[2]], A + 1 + flg[2]));
    else PetscCall(MatHermitianTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
  }
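  /* without -permute, the loaded block stays in A[2] (bottom left) and A[1] (top right) is its
   * (Hermitian) transpose, matrix-free unless -explicit is set; with -permute, the explicitly
   * assembled transpose is stored in A[1] and A[2] is rebuilt from it instead */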
  if (flg[0]) PetscCall(MatDestroy(A + 3));
  /* global coefficient matrix S = [A[0], A[1]; A[2], A[3]], with an empty (1,1) block when -empty_A11 is set */
  PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, A, &S));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, S, S));
  PetscCall(KSPGetPC(ksp, &pc));
  /* outer preconditioner */
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
  PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
  PetscCall(PCSetUp(pc));
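  /* PCSetUp() must be called before PCFieldSplitGetSubKSP() so that the sub-KSP contexts exist */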
  PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
  PetscCall(KSPGetPC(subksp[0], &pc));
  /* inner preconditioner associated with the top-left block */
  PetscCall(PCSetType(pc, PCHPDDM));
  PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
  PetscCall(PCSetFromOptions(pc));
  PetscCall(KSPGetPC(subksp[1], &pc));
  /* inner preconditioner associated with the Schur complement, which will be set internally to a PCKSP */
  PetscCall(PCSetType(pc, PCHPDDM));
  if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
  PetscCall(PCSetFromOptions(pc));
  PetscCall(PetscFree(subksp));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(MatCreateVecs(S, &b, &x));
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/rhs_%s.dat", dir, id == 1 ? "B" : "A"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
  PetscCall(VecLoad(b, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(KSPSolve(ksp, b, x));
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", flg + 1, NULL));
  if (flg[1]) PetscCall(PCView(pc, PETSC_VIEWER_STDOUT_WORLD));
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-successive_solves", flg + 1, NULL));
  if (flg[1]) {
    KSPConvergedReason reason[2];
    PetscInt           iterations[2];
    PetscCall(KSPGetConvergedReason(ksp, reason));
    PetscCall(KSPGetTotalIterations(ksp, iterations));
    PetscCall(KSPMonitorCancel(ksp));
    PetscCall(PetscOptionsClearValue(NULL, "-ksp_monitor"));
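    /* bumping the state of the operator makes the subsequent KSPSetUp() behave as if the matrix
     * had changed, so the preconditioner is rebuilt from scratch and the auxiliary data must be
     * supplied to both inner PCHPDDM instances again */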
    PetscCall(PetscObjectStateIncrease((PetscObject)S));
    PetscCall(KSPSetUp(ksp));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
    PetscCall(KSPGetPC(subksp[0], &pc));
    PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
    PetscCall(PCSetFromOptions(pc));
    PetscCall(KSPGetPC(subksp[1], &pc));
    if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
    PetscCall(PCSetFromOptions(pc));
    PetscCall(PetscFree(subksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPGetConvergedReason(ksp, reason + 1));
    PetscCall(KSPGetTotalIterations(ksp, iterations + 1));
    iterations[1] -= iterations[0];
    PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[1]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[1]);
  }
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&S));
  PetscCall(MatDestroy(A + 1));
  PetscCall(MatDestroy(A + 2));
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatDestroy(A + (i ? 3 : 0)));
    PetscCall(MatDestroy(aux + i));
    PetscCall(ISDestroy(is + i));
  }
  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt rank, PetscMPIInt size)
{
  IS              sizes;
  const PetscInt *idx;
  PetscViewer     viewer;
  char            name[PETSC_MAX_PATH_LEN];

  PetscFunctionBeginUser;
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_sizes_%d_%d.dat", prefix, identifier, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(ISCreate(PETSC_COMM_SELF, &sizes));
  PetscCall(ISLoad(sizes, viewer));
  PetscCall(ISGetIndices(sizes, &idx));
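  /* the sizes file stores the dimensions of the block in MatSetSizes() order:
   * idx[0] = local rows, idx[1] = local columns, idx[2] = global rows, idx[3] = global columns */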
  PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]));
  PetscCall(MatSetUp(A));
  PetscCall(ISRestoreIndices(sizes, &idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s.dat", prefix, identifier));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_is_%d_%d.dat", prefix, identifier, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(ISLoad(is, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_aux_%d_%d.dat", prefix, identifier, rank, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(aux, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      requires: hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)

   testset:
      requires: datafilespath
      nsize: 4
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_has_neumann -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2
      test:
        requires: mumps
        suffix: 1
        args: -viewer -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1
        filter: grep -v -e "action of " -e "                            " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e "                rows="
      test:
        requires: mumps
        suffix: 2
        output_file: output/ex87_1_system-stokes.out
        args: -viewer -system stokes -empty_A11 -transpose {{false true}shared output} -permute {{false true}shared output} -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1 -explicit {{false true}shared output}
        filter: grep -v -e "action of " -e "                            " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e "                rows=" | sed -e "s/      right preconditioning/      left preconditioning/g" -e "s/      using UNPRECONDITIONED/      using PRECONDITIONED/g"
      test:
        suffix: 1_petsc
        args: -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3 -permute
      test:
        suffix: 2_petsc
        output_file: output/ex87_1_petsc_system-stokes.out
        args: -system stokes -empty_A11 -transpose -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.3 -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_shift_type inblocks -successive_solves
        filter: sed -e "s/type: transpose/type: hermitiantranspose/g"
      test:
        suffix: threshold
        output_file: output/ex87_1_petsc_system-elasticity.out
        args: -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_eps_threshold 0.2 -fieldsplit_1_pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -successive_solves

TEST*/