Actual source code: ex29.c


/*
Added at the request of Marc Garbey.

Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

   -div \rho grad u = f,  0 < x,y < 1,

with forcing function

   f = e^{-x^2/\nu} e^{-y^2/\nu},

with Dirichlet boundary conditions

   u = f(x,y) for x = 0, x = 1, y = 0, y = 1,

or pure Neumann boundary conditions.

This uses multigrid to solve the linear system.
*/
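
/*
   A rough sketch of the discretization used below (assuming \rho is constant across a stencil;
   the code evaluates \rho once per matrix row via ComputeRho()): centered differences on the
   uniform grid x_i = i*Hx, y_j = j*Hy with Hx = 1/(mx-1), Hy = 1/(my-1), scaled by the cell
   area Hx*Hy, give for an interior node (i,j)

      \rho*(Hy/Hx)*(2*u_{i,j} - u_{i-1,j} - u_{i+1,j}) + \rho*(Hx/Hy)*(2*u_{i,j} - u_{i,j-1} - u_{i,j+1}) = f(x_i,y_j)*Hx*Hy,

   which matches the 5-point stencil assembled in ComputeMatrix() and the Hx*Hy scaling of the
   right-hand side in ComputeRHS().
*/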

static char help[] = "Solves 2D inhomogeneous Laplacian using multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscksp.h>

extern PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
extern PetscErrorCode ComputeRHS(KSP, Vec, void *);

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;

typedef struct {
  PetscReal rho;
  PetscReal nu;
  BCType    bcType;
} UserContext;
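
/*
   main() relies on the KSP/DM interface: the DMDA is attached with KSPSetDM(), while the matrix
   and right-hand side are supplied through the KSPSetComputeOperators() and KSPSetComputeRHS()
   callbacks registered below. With options such as -pc_type mg and -da_refine <n>, PCMG can then
   build a grid hierarchy from the DMDA and invoke the same callbacks on every level.
*/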

int main(int argc, char **argv)
{
  KSP         ksp;
  DM          da;
  UserContext user;
  const char *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt    bc;
  Vec         b, x;
  PetscBool   testsolver = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 3, 3, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0, 1, 0, 1, 0, 0));
  PetscCall(DMDASetFieldName(da, 0, "Pressure"));

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Options for the inhomogeneous Poisson equation", "DMqq");
  user.rho = 1.0;
  PetscCall(PetscOptionsReal("-rho", "The conductivity", "ex29.c", user.rho, &user.rho, NULL));
  user.nu = 0.1;
  PetscCall(PetscOptionsReal("-nu", "The width of the Gaussian source", "ex29.c", user.nu, &user.nu, NULL));
  bc = (PetscInt)DIRICHLET;
  PetscCall(PetscOptionsEList("-bc_type", "Type of boundary condition", "ex29.c", bcTypes, 2, bcTypes[0], &bc, NULL));
  user.bcType = (BCType)bc;
  PetscCall(PetscOptionsBool("-testsolver", "Run solver multiple times, useful for performance studies of solver", "ex29.c", testsolver, &testsolver, NULL));
  PetscOptionsEnd();

  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, &user));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, &user));
  PetscCall(KSPSetDM(ksp, da));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSolve(ksp, NULL, NULL));
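
  /*
     Optional performance study (-testsolver): the same system is re-solved n = 20 times inside a
     separate log stage so that only KSPSolve() is timed. KSPSetDMActive(ksp, PETSC_FALSE) keeps
     KSP from recomputing the operator and right-hand side from the DM on those repeated solves.
  */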

  if (testsolver) {
    PetscCall(KSPGetSolution(ksp, &x));
    PetscCall(KSPGetRhs(ksp, &b));
    PetscCall(KSPSetDMActive(ksp, PETSC_FALSE));
    PetscCall(KSPSolve(ksp, b, x));
    {
#if defined(PETSC_USE_LOG)
      PetscLogStage stage;
#endif
      PetscInt i, n = 20;

      PetscCall(PetscLogStageRegister("Solve only", &stage));
      PetscCall(PetscLogStagePush(stage));
      for (i = 0; i < n; i++) PetscCall(KSPSolve(ksp, b, x));
      PetscCall(PetscLogStagePop());
    }
  }

  PetscCall(DMDestroy(&da));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(PetscFinalize());
  return 0;
}
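
/*
   ComputeRHS - Fills b over the locally owned part of the DMDA with the Gaussian forcing term,
   b_{i,j} = e^{-(i*Hx)^2/\nu} e^{-(j*Hy)^2/\nu} * Hx*Hy. For Neumann boundary conditions the
   component of b along the constant null space is then removed so the singular system stays consistent.
*/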

PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
{
  UserContext  *user = (UserContext *)ctx;
  PetscInt      i, j, mx, my, xm, ym, xs, ys;
  PetscScalar   Hx, Hy;
  PetscScalar **array;
  DM            da;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, &my, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx = 1.0 / (PetscReal)(mx - 1);
  Hy = 1.0 / (PetscReal)(my - 1);
  PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0));
  PetscCall(DMDAVecGetArray(da, b, &array));
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) array[j][i] = PetscExpScalar(-((PetscReal)i * Hx) * ((PetscReal)i * Hx) / user->nu) * PetscExpScalar(-((PetscReal)j * Hy) * ((PetscReal)j * Hy) / user->nu) * Hx * Hy;
  }
  PetscCall(DMDAVecRestoreArray(da, b, &array));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));

  /* Force the right-hand side to be consistent for the singular matrix.
     Note that this is really a hack; normally the model would provide a consistent right-hand side. */
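  /*
     With pure Neumann conditions every matrix row sums to zero, so the constant vector spans the
     null space and A*x = b is solvable only if b is orthogonal to it. MatNullSpaceRemove() below
     subtracts that component; an equivalent hand-written sketch (not used here) would be

       PetscScalar sum;
       PetscInt    N;
       PetscCall(VecGetSize(b, &N));
       PetscCall(VecSum(b, &sum));
       PetscCall(VecShift(b, -sum / (PetscReal)N));

     i.e. subtract the mean of b from every entry.
  */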
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatNullSpaceRemove(nullspace, b));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
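
/*
   ComputeRho - Piecewise-constant coefficient: *rho = centerRho inside the central third of the
   domain (mx/3 < i < 2*mx/3 and my/3 < j < 2*my/3) and *rho = 1 elsewhere.
*/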

PetscErrorCode ComputeRho(PetscInt i, PetscInt j, PetscInt mx, PetscInt my, PetscReal centerRho, PetscReal *rho)
{
  PetscFunctionBeginUser;
  if ((i > mx / 3.0) && (i < 2.0 * mx / 3.0) && (j > my / 3.0) && (j < 2.0 * my / 3.0)) {
    *rho = centerRho;
  } else {
    *rho = 1.0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
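
/*
   ComputeMatrix - Assembles the 5-point stencil for -div(\rho grad u), scaled by Hx*Hy: interior rows
   couple to the j-1/j+1 neighbors with -\rho*Hx/Hy, to the i-1/i+1 neighbors with -\rho*Hy/Hx, and carry
   2*\rho*(Hx/Hy + Hy/Hx) on the diagonal. Dirichlet boundary rows keep only the diagonal entry; Neumann
   boundary rows drop the out-of-domain neighbors and set the diagonal to the sum of the remaining
   couplings, so every row sums to zero and the operator has the constant null space attached below.
*/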

PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  UserContext *user = (UserContext *)ctx;
  PetscReal    centerRho;
  PetscInt     i, j, mx, my, xm, ym, xs, ys;
  PetscScalar  v[5];
  PetscReal    Hx, Hy, HydHx, HxdHy, rho;
  MatStencil   row, col[5];
  DM           da;
  PetscBool    check_matis = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  centerRho = user->rho;
  PetscCall(DMDAGetInfo(da, 0, &mx, &my, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx    = 1.0 / (PetscReal)(mx - 1);
  Hy    = 1.0 / (PetscReal)(my - 1);
  HxdHy = Hx / Hy;
  HydHx = Hy / Hx;
  PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0));
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      row.i = i;
      row.j = j;
      PetscCall(ComputeRho(i, j, mx, my, centerRho, &rho));
      if (i == 0 || j == 0 || i == mx - 1 || j == my - 1) {
        if (user->bcType == DIRICHLET) {
          v[0] = 2.0 * rho * (HxdHy + HydHx);
          PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
        } else if (user->bcType == NEUMANN) {
          PetscInt numx = 0, numy = 0, num = 0;
          if (j != 0) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j - 1;
            numy++;
            num++;
          }
          if (i != 0) {
            v[num]     = -rho * HydHx;
            col[num].i = i - 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (i != mx - 1) {
            v[num]     = -rho * HydHx;
            col[num].i = i + 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (j != my - 1) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j + 1;
            numy++;
            num++;
          }
          v[num]     = numx * rho * HydHx + numy * rho * HxdHy;
          col[num].i = i;
          col[num].j = j;
          num++;
          PetscCall(MatSetValuesStencil(jac, 1, &row, num, col, v, INSERT_VALUES));
        }
      } else {
        v[0]     = -rho * HxdHy;
        col[0].i = i;
        col[0].j = j - 1;
        v[1]     = -rho * HydHx;
        col[1].i = i - 1;
        col[1].j = j;
        v[2]     = 2.0 * rho * (HxdHy + HydHx);
        col[2].i = i;
        col[2].j = j;
        v[3]     = -rho * HydHx;
        col[3].i = i + 1;
        col[3].j = j;
        v[4]     = -rho * HxdHy;
        col[4].i = i;
        col[4].j = j + 1;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatViewFromOptions(jac, NULL, "-view_mat"));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_matis", &check_matis, NULL));
  if (check_matis) {
    void (*f)(void);
    Mat       J2;
    MatType   jtype;
    PetscReal nrm;

    PetscCall(MatGetType(jac, &jtype));
    PetscCall(MatConvert(jac, MATIS, MAT_INITIAL_MATRIX, &J2));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv"));
    PetscCall(MatConvert(J2, jtype, MAT_INPLACE_MATRIX, &J2));
    PetscCall(MatGetOperation(jac, MATOP_VIEW, &f));
    PetscCall(MatSetOperation(J2, MATOP_VIEW, f));
    PetscCall(MatSetDM(J2, da));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_assembled"));
    PetscCall(MatAXPY(J2, -1., jac, DIFFERENT_NONZERO_PATTERN));
    PetscCall(MatNorm(J2, NORM_FROBENIUS, &nrm));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error MATIS %g\n", (double)nrm));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_err"));
    PetscCall(MatDestroy(&J2));
  }
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatSetNullSpace(J, nullspace));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -pc_type mg -pc_mg_type full -ksp_type fgmres -ksp_monitor_short -da_refine 8 -ksp_rtol 1.e-3

   test:
      suffix: 2
      args: -bc_type neumann -pc_type mg -pc_mg_type full -ksp_type fgmres -ksp_monitor_short -da_refine 8 -mg_coarse_pc_factor_shift_type nonzero
      requires: !single

   test:
      suffix: telescope
      nsize: 4
      args: -ksp_monitor_short -da_grid_x 257 -da_grid_y 257 -pc_type mg -pc_mg_galerkin pmat -pc_mg_levels 4 -ksp_type richardson -mg_levels_ksp_type chebyshev -mg_levels_pc_type jacobi -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_ignore_kspcomputeoperators -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_galerkin pmat -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_levels_ksp_type chebyshev -mg_coarse_telescope_mg_levels_pc_type jacobi -mg_coarse_pc_telescope_reduction_factor 4

   test:
      suffix: 3
      args: -ksp_view -da_refine 2 -pc_type mg -pc_mg_distinct_smoothup -mg_levels_up_pc_type jacobi

   test:
      suffix: 4
      args: -ksp_view -da_refine 2 -pc_type mg -pc_mg_distinct_smoothup -mg_levels_up_ksp_max_it 3 -mg_levels_ksp_max_it 4

   testset:
     suffix: aniso
     args: -da_grid_x 10 -da_grid_y 2 -da_refine 2 -pc_type mg -ksp_monitor_short -mg_levels_ksp_max_it 6 -mg_levels_pc_type jacobi
     test:
       suffix: first
       args: -mg_levels_ksp_chebyshev_kind first
     test:
       suffix: fourth
       args: -mg_levels_ksp_chebyshev_kind fourth
     test:
       suffix: opt_fourth
       args: -mg_levels_ksp_chebyshev_kind opt_fourth

   test:
      suffix: 5
      nsize: 2
      requires: hypre !complex
      args: -pc_type mg -da_refine 2 -ksp_monitor -matptap_via hypre -pc_mg_galerkin both

   test:
      suffix: 6
      args: -pc_type svd -pc_svd_monitor ::all

TEST*/