Actual source code: ex45.c
/*
   Laplacian in 3D. Modeled by the partial differential equation

   - Laplacian u = 1,  0 < x,y,z < 1,

   with boundary conditions

   u = 1 for x = 0, x = 1, y = 0, y = 1, z = 0, z = 1.

   This uses multigrid to solve the linear system.

   See src/snes/tutorials/ex50.c

   Can also be run with -pc_type exotic -ksp_type fgmres

*/
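/*
   Summary of the discretization assembled by ComputeMatrix() and ComputeRHS() below:
   a standard 7-point finite-difference stencil on the mx x my x mz node grid with
   spacings Hx, Hy, Hz, with every row scaled by the cell volume Hx*Hy*Hz. For an
   interior node (i,j,k):

     2*(HxHy/Hz + HxHz/Hy + HyHz/Hx) u(i,j,k)
       - HyHz/Hx (u(i-1,j,k) + u(i+1,j,k))
       - HxHz/Hy (u(i,j-1,k) + u(i,j+1,k))
       - HxHy/Hz (u(i,j,k-1) + u(i,j,k+1)) = Hx*Hy*Hz

   Boundary nodes keep only the diagonal entry, so the Dirichlet value u = 1 is
   enforced by the matching right-hand-side entry.
*/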
static char help[] = "Solves 3D Laplacian using multigrid.\n\n";

#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>
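/* Custom log events: "Set values" times the matrix and right-hand-side assembly in
   the callbacks below; "User time" times everything from solver setup in main()
   through the final KSPDestroy(). */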
PetscLogEvent setvalues, usertime;

extern PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
extern PetscErrorCode ComputeRHS(KSP, Vec, void *);
extern PetscErrorCode ComputeInitialGuess(KSP, Vec, void *);
int main(int argc, char **argv)
{
  KSP       ksp;
  PetscReal norm;
  DM        da;
  Vec       x, b, r;
  Mat       A;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscLogEventRegister("Set values", MAT_CLASSID, &setvalues));
  PetscCall(PetscLogEventRegister("User time", MAT_CLASSID, &usertime));
  PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  PetscCall(PetscLogEventBegin(usertime, NULL, NULL, NULL, NULL));
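
  /* Create the solver and a DMDA describing the (default 7x7x7) structured grid.
     The KSP uses this DM plus the callbacks registered below to build the
     right-hand side, initial guess, and operators on every multigrid level, so the
     local reference to the DM can be dropped once it is attached to the KSP. */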
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 7, 7, 7, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, 0, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(KSPSetDM(ksp, da));
  PetscCall(KSPSetComputeInitialGuess(ksp, ComputeInitialGuess, NULL));
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, NULL));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, NULL));
  PetscCall(DMDestroy(&da));

  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, NULL, NULL));
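
  /* Recover the solution, right-hand side, and operator held by the KSP and compute
     the true residual norm ||A x - b|| as an independent check of the solve. */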
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &b));
  PetscCall(VecDuplicate(b, &r));
  PetscCall(KSPGetOperators(ksp, &A, NULL));

  PetscCall(MatMult(A, x, r));
  PetscCall(VecAXPY(r, -1.0, b));
  PetscCall(VecNorm(r, NORM_2, &norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));

  PetscCall(VecDestroy(&r));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(PetscLogEventEnd(usertime, NULL, NULL, NULL, NULL));
  PetscCall(PetscFinalize());
  return 0;
}
PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
{
  PetscInt       i, j, k, mx, my, mz, xm, ym, zm, xs, ys, zs;
  DM             dm;
  PetscScalar    Hx, Hy, Hz, HxHydHz, HyHzdHx, HxHzdHy;
  PetscScalar ***barray;

  PetscFunctionBeginUser;
  PetscCall(PetscLogEventBegin(setvalues, NULL, NULL, NULL, NULL));
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMDAGetInfo(dm, 0, &mx, &my, &mz, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx      = 1.0 / (PetscReal)(mx - 1);
  Hy      = 1.0 / (PetscReal)(my - 1);
  Hz      = 1.0 / (PetscReal)(mz - 1);
  HxHydHz = Hx * Hy / Hz;
  HxHzdHy = Hx * Hz / Hy;
  HyHzdHx = Hy * Hz / Hx;
  PetscCall(DMDAGetCorners(dm, &xs, &ys, &zs, &xm, &ym, &zm));
  PetscCall(DMDAVecGetArray(dm, b, &barray));
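
  /* Boundary rows of the operator are pure diagonal entries of size
     2*(HxHy/Hz + HxHz/Hy + HyHz/Hx); giving the right-hand side the same value
     enforces u = 1 there. Interior rows get the source term 1 scaled by the cell
     volume Hx*Hy*Hz, matching the scaling used in ComputeMatrix(). */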
  for (k = zs; k < zs + zm; k++) {
    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        if (i == 0 || j == 0 || k == 0 || i == mx - 1 || j == my - 1 || k == mz - 1) {
          barray[k][j][i] = 2.0 * (HxHydHz + HxHzdHy + HyHzdHx);
        } else {
          barray[k][j][i] = Hx * Hy * Hz;
        }
      }
    }
  }
  PetscCall(DMDAVecRestoreArray(dm, b, &barray));
  PetscCall(PetscLogEventEnd(setvalues, NULL, NULL, NULL, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode ComputeInitialGuess(KSP ksp, Vec b, void *ctx)
{
  PetscFunctionBeginUser;
  PetscCall(VecSet(b, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode ComputeMatrix(KSP ksp, Mat jac, Mat B, void *ctx)
{
  DM          da;
  PetscInt    i, j, k, mx, my, mz, xm, ym, zm, xs, ys, zs;
  PetscScalar v[7], Hx, Hy, Hz, HxHydHz, HyHzdHx, HxHzdHy;
  MatStencil  row, col[7];

  PetscFunctionBeginUser;
  PetscCall(PetscLogEventBegin(setvalues, NULL, NULL, NULL, NULL));
  PetscCall(KSPGetDM(ksp, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, &my, &mz, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx      = 1.0 / (PetscReal)(mx - 1);
  Hy      = 1.0 / (PetscReal)(my - 1);
  Hz      = 1.0 / (PetscReal)(mz - 1);
  HxHydHz = Hx * Hy / Hz;
  HxHzdHy = Hx * Hz / Hy;
  HyHzdHx = Hy * Hz / Hx;
  PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
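
  /* Assemble the 7-point stencil one row at a time in logical (i,j,k) grid indices;
     MatSetValuesStencil() maps these to global matrix locations. Boundary nodes get
     a single diagonal entry so the Dirichlet condition is enforced directly in the
     matrix. */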
  for (k = zs; k < zs + zm; k++) {
    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        row.i = i;
        row.j = j;
        row.k = k;
        if (i == 0 || j == 0 || k == 0 || i == mx - 1 || j == my - 1 || k == mz - 1) {
          v[0] = 2.0 * (HxHydHz + HxHzdHy + HyHzdHx);
          PetscCall(MatSetValuesStencil(B, 1, &row, 1, &row, v, INSERT_VALUES));
        } else {
          v[0]     = -HxHydHz;
          col[0].i = i;
          col[0].j = j;
          col[0].k = k - 1;
          v[1]     = -HxHzdHy;
          col[1].i = i;
          col[1].j = j - 1;
          col[1].k = k;
          v[2]     = -HyHzdHx;
          col[2].i = i - 1;
          col[2].j = j;
          col[2].k = k;
          v[3]     = 2.0 * (HxHydHz + HxHzdHy + HyHzdHx);
          col[3].i = row.i;
          col[3].j = row.j;
          col[3].k = row.k;
          v[4]     = -HyHzdHx;
          col[4].i = i + 1;
          col[4].j = j;
          col[4].k = k;
          v[5]     = -HxHzdHy;
          col[5].i = i;
          col[5].j = j + 1;
          col[5].k = k;
          v[6]     = -HxHydHz;
          col[6].i = i;
          col[6].j = j;
          col[6].k = k + 1;
          PetscCall(MatSetValuesStencil(B, 1, &row, 7, col, v, INSERT_VALUES));
        }
      }
    }
  }
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogEventEnd(setvalues, NULL, NULL, NULL, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*TEST

   test:
      nsize: 4
      args: -pc_type exotic -ksp_monitor_short -ksp_type fgmres -mg_levels_ksp_type gmres -mg_levels_ksp_max_it 1 -mg_levels_pc_type bjacobi
      output_file: output/ex45_1.out

   test:
      suffix: 2
      nsize: 4
      args: -ksp_monitor_short -da_grid_x 21 -da_grid_y 21 -da_grid_z 21 -pc_type mg -pc_mg_levels 3 -mg_levels_ksp_type richardson -mg_levels_ksp_max_it 1 -mg_levels_pc_type bjacobi

   test:
      suffix: telescope
      nsize: 4
      args: -ksp_type fgmres -ksp_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_levels 2 -da_grid_x 65 -da_grid_y 65 -da_grid_z 65 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_ignore_kspcomputeoperators -mg_coarse_pc_telescope_reduction_factor 4 -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_galerkin pmat -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_levels_ksp_type richardson -mg_coarse_telescope_mg_levels_pc_type jacobi -mg_levels_ksp_type richardson -mg_coarse_telescope_mg_levels_ksp_type richardson -ksp_rtol 1.0e-4

   test:
      suffix: telescope_2
      nsize: 4
      args: -ksp_type fgmres -ksp_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_levels 2 -da_grid_x 65 -da_grid_y 65 -da_grid_z 65 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_reduction_factor 2 -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_galerkin pmat -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_levels_ksp_type richardson -mg_coarse_telescope_mg_levels_pc_type jacobi -mg_levels_ksp_type richardson -mg_coarse_telescope_mg_levels_ksp_type richardson -ksp_rtol 1.0e-4

TEST*/