/* Actual source code: ex34.c */
1: /*
2: Laplacian in 3D. Modeled by the partial differential equation
4: div grad u = f, 0 < x,y,z < 1,
6: with pure Neumann boundary conditions
8: du/dn = 0 for x = 0, x = 1, y = 0, y = 1, z = 0, z = 1.
10: The functions are cell-centered
12: This uses multigrid to solve the linear system
14: Contributed by Jianming Yang <jianming-yang@uiowa.edu>
15: */
/* One-line usage summary printed by -help. */
static char help[] = "Solves 3D Laplacian using multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscksp.h>

/* Callbacks registered with the KSP below: they (re)build the system matrix
   and right-hand side on each level when multigrid is used. */
extern PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
extern PetscErrorCode ComputeRHS(KSP, Vec, void *);
26: int main(int argc, char **argv)
27: {
28: KSP ksp;
29: DM da;
30: PetscReal norm;
31: PetscInt i, j, k, mx, my, mz, xm, ym, zm, xs, ys, zs, d, dof;
32: PetscScalar Hx, Hy, Hz;
33: PetscScalar ****array;
34: Vec x, b, r;
35: Mat J;
37: PetscFunctionBeginUser;
38: PetscCall(PetscInitialize(&argc, &argv, NULL, help));
39: dof = 1;
40: PetscCall(PetscOptionsGetInt(NULL, NULL, "-da_dof", &dof, NULL));
41: PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
42: PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 12, 12, 12, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, dof, 1, 0, 0, 0, &da));
43: PetscCall(DMSetFromOptions(da));
44: PetscCall(DMSetUp(da));
45: PetscCall(DMDASetInterpolationType(da, DMDA_Q0));
47: PetscCall(KSPSetDM(ksp, da));
49: PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, NULL));
50: PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, NULL));
51: PetscCall(KSPSetFromOptions(ksp));
52: PetscCall(KSPSolve(ksp, NULL, NULL));
53: PetscCall(KSPGetSolution(ksp, &x));
54: PetscCall(KSPGetRhs(ksp, &b));
55: PetscCall(KSPGetOperators(ksp, NULL, &J));
56: PetscCall(VecDuplicate(b, &r));
58: PetscCall(MatMult(J, x, r));
59: PetscCall(VecAXPY(r, -1.0, b));
60: PetscCall(VecNorm(r, NORM_2, &norm));
61: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));
63: PetscCall(DMDAGetInfo(da, 0, &mx, &my, &mz, 0, 0, 0, 0, 0, 0, 0, 0, 0));
64: Hx = 1.0 / (PetscReal)mx;
65: Hy = 1.0 / (PetscReal)my;
66: Hz = 1.0 / (PetscReal)mz;
67: PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
68: PetscCall(DMDAVecGetArrayDOF(da, x, &array));
70: for (k = zs; k < zs + zm; k++) {
71: for (j = ys; j < ys + ym; j++) {
72: for (i = xs; i < xs + xm; i++) {
73: for (d = 0; d < dof; d++) array[k][j][i][d] -= PetscCosScalar(2 * PETSC_PI * (((PetscReal)i + 0.5) * Hx)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)j + 0.5) * Hy)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)k + 0.5) * Hz));
74: }
75: }
76: }
77: PetscCall(DMDAVecRestoreArrayDOF(da, x, &array));
78: PetscCall(VecAssemblyBegin(x));
79: PetscCall(VecAssemblyEnd(x));
81: PetscCall(VecNorm(x, NORM_INFINITY, &norm));
82: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
83: PetscCall(VecNorm(x, NORM_1, &norm));
84: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)(norm / ((PetscReal)mx * (PetscReal)my * (PetscReal)mz))));
85: PetscCall(VecNorm(x, NORM_2, &norm));
86: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)(norm / ((PetscReal)mx * (PetscReal)my * (PetscReal)mz))));
88: PetscCall(VecDestroy(&r));
89: PetscCall(KSPDestroy(&ksp));
90: PetscCall(DMDestroy(&da));
91: PetscCall(PetscFinalize());
92: return 0;
93: }
95: PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
96: {
97: PetscInt d, dof, i, j, k, mx, my, mz, xm, ym, zm, xs, ys, zs;
98: PetscScalar Hx, Hy, Hz;
99: PetscScalar ****array;
100: DM da;
101: MatNullSpace nullspace;
103: PetscFunctionBeginUser;
104: PetscCall(KSPGetDM(ksp, &da));
105: PetscCall(DMDAGetInfo(da, 0, &mx, &my, &mz, 0, 0, 0, &dof, 0, 0, 0, 0, 0));
106: Hx = 1.0 / (PetscReal)mx;
107: Hy = 1.0 / (PetscReal)my;
108: Hz = 1.0 / (PetscReal)mz;
109: PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
110: PetscCall(DMDAVecGetArrayDOFWrite(da, b, &array));
111: for (k = zs; k < zs + zm; k++) {
112: for (j = ys; j < ys + ym; j++) {
113: for (i = xs; i < xs + xm; i++) {
114: for (d = 0; d < dof; d++) {
115: array[k][j][i][d] = 12 * PETSC_PI * PETSC_PI * PetscCosScalar(2 * PETSC_PI * (((PetscReal)i + 0.5) * Hx)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)j + 0.5) * Hy)) * PetscCosScalar(2 * PETSC_PI * (((PetscReal)k + 0.5) * Hz)) * Hx * Hy * Hz;
116: }
117: }
118: }
119: }
120: PetscCall(DMDAVecRestoreArrayDOFWrite(da, b, &array));
121: PetscCall(VecAssemblyBegin(b));
122: PetscCall(VecAssemblyEnd(b));
124: /* force right-hand side to be consistent for singular matrix */
125: /* note this is really a hack, normally the model would provide you with a consistent right handside */
127: PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
128: PetscCall(MatNullSpaceRemove(nullspace, b));
129: PetscCall(MatNullSpaceDestroy(&nullspace));
130: PetscFunctionReturn(PETSC_SUCCESS);
131: }
/*
  ComputeMatrix - Assembles the cell-centered 7-point finite-volume Laplacian
  on the unit cube into jac, one identical uncoupled stencil per component.

  Off-diagonal entries are -(face area)/(spacing), e.g. -Hy*Hz/Hx in x.
  At physical boundaries the flux through the missing face is simply dropped
  (homogeneous Neumann), and the diagonal counts only the retained neighbors,
  so every row sums to zero; the resulting matrix is singular with the
  constant vector in its null space, which is attached to J at the end.

  Debug options: -dump_mat prints an assembled AIJ copy in MATLAB format;
  -check_matis converts to/from MATIS and reports the round-trip error.
*/
PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  PetscInt dof, i, j, k, d, mx, my, mz, xm, ym, zm, xs, ys, zs, num, numi, numj, numk;
  PetscScalar v[7], Hx, Hy, Hz, HyHzdHx, HxHzdHy, HxHydHz;
  MatStencil row, col[7];
  DM da;
  MatNullSpace nullspace;
  PetscBool dump_mat = PETSC_FALSE, check_matis = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  PetscCall(DMDAGetInfo(da, 0, &mx, &my, &mz, 0, 0, 0, &dof, 0, 0, 0, 0, 0));
  /* Cell sizes on the unit cube. */
  Hx = 1.0 / (PetscReal)mx;
  Hy = 1.0 / (PetscReal)my;
  Hz = 1.0 / (PetscReal)mz;
  /* Finite-volume flux coefficients: face area divided by cell spacing. */
  HyHzdHx = Hy * Hz / Hx;
  HxHzdHy = Hx * Hz / Hy;
  HxHydHz = Hx * Hy / Hz;
  PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
  for (k = zs; k < zs + zm; k++) {
    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        /* Components are uncoupled: each d gets its own copy of the stencil. */
        for (d = 0; d < dof; d++) {
          row.i = i;
          row.j = j;
          row.k = k;
          row.c = d;
          if (i == 0 || j == 0 || k == 0 || i == mx - 1 || j == my - 1 || k == mz - 1) {
            /* Boundary cell: insert only the neighbors that exist.  num counts
               entries written into v/col; numi/numj/numk count retained
               neighbors per direction so the diagonal can balance them. */
            num = 0;
            numi = 0;
            numj = 0;
            numk = 0;
            if (k != 0) {
              v[num] = -HxHydHz; /* coupling to cell below (k-1) */
              col[num].i = i;
              col[num].j = j;
              col[num].k = k - 1;
              col[num].c = d;
              num++;
              numk++;
            }
            if (j != 0) {
              v[num] = -HxHzdHy; /* coupling to south neighbor (j-1) */
              col[num].i = i;
              col[num].j = j - 1;
              col[num].k = k;
              col[num].c = d;
              num++;
              numj++;
            }
            if (i != 0) {
              v[num] = -HyHzdHx; /* coupling to west neighbor (i-1) */
              col[num].i = i - 1;
              col[num].j = j;
              col[num].k = k;
              col[num].c = d;
              num++;
              numi++;
            }
            if (i != mx - 1) {
              v[num] = -HyHzdHx; /* coupling to east neighbor (i+1) */
              col[num].i = i + 1;
              col[num].j = j;
              col[num].k = k;
              col[num].c = d;
              num++;
              numi++;
            }
            if (j != my - 1) {
              v[num] = -HxHzdHy; /* coupling to north neighbor (j+1) */
              col[num].i = i;
              col[num].j = j + 1;
              col[num].k = k;
              col[num].c = d;
              num++;
              numj++;
            }
            if (k != mz - 1) {
              v[num] = -HxHydHz; /* coupling to cell above (k+1) */
              col[num].i = i;
              col[num].j = j;
              col[num].k = k + 1;
              col[num].c = d;
              num++;
              numk++;
            }
            /* Diagonal exactly balances the off-diagonals inserted above,
               making the row sum zero (zero-flux boundary). */
            v[num] = (PetscReal)numk * HxHydHz + (PetscReal)numj * HxHzdHy + (PetscReal)numi * HyHzdHx;
            col[num].i = i;
            col[num].j = j;
            col[num].k = k;
            col[num].c = d;
            num++;
            PetscCall(MatSetValuesStencil(jac, 1, &row, num, col, v, INSERT_VALUES));
          } else {
            /* Interior cell: full 7-point stencil. */
            v[0] = -HxHydHz;
            col[0].i = i;
            col[0].j = j;
            col[0].k = k - 1;
            col[0].c = d;
            v[1] = -HxHzdHy;
            col[1].i = i;
            col[1].j = j - 1;
            col[1].k = k;
            col[1].c = d;
            v[2] = -HyHzdHx;
            col[2].i = i - 1;
            col[2].j = j;
            col[2].k = k;
            col[2].c = d;
            /* Diagonal = sum of the six neighbor couplings. */
            v[3] = 2.0 * (HyHzdHx + HxHzdHy + HxHydHz);
            col[3].i = i;
            col[3].j = j;
            col[3].k = k;
            col[3].c = d;
            v[4] = -HyHzdHx;
            col[4].i = i + 1;
            col[4].j = j;
            col[4].k = k;
            col[4].c = d;
            v[5] = -HxHzdHy;
            col[5].i = i;
            col[5].j = j + 1;
            col[5].k = k;
            col[5].c = d;
            v[6] = -HxHydHz;
            col[6].i = i;
            col[6].j = j;
            col[6].k = k + 1;
            col[6].c = d;
            PetscCall(MatSetValuesStencil(jac, 1, &row, 7, col, v, INSERT_VALUES));
          }
        }
      }
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  /* Optional debug dump of the operator as an explicit AIJ matrix. */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-dump_mat", &dump_mat, NULL));
  if (dump_mat) {
    Mat JJ;

    PetscCall(MatComputeOperator(jac, MATAIJ, &JJ));
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_MATLAB));
    PetscCall(MatView(JJ, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(MatDestroy(&JJ));
  }
  PetscCall(MatViewFromOptions(jac, NULL, "-view_mat"));
  /* Optional consistency check: convert to MATIS and back, then measure the
     Frobenius norm of the difference (should be ~0). */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_matis", &check_matis, NULL));
  if (check_matis) {
    void (*f)(void);
    Mat J2;
    MatType jtype;
    PetscReal nrm;

    PetscCall(MatGetType(jac, &jtype));
    PetscCall(MatConvert(jac, MATIS, MAT_INITIAL_MATRIX, &J2));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv"));
    PetscCall(MatConvert(J2, jtype, MAT_INPLACE_MATRIX, &J2));
    /* Reuse jac's viewer so -view_conv_assembled output is comparable. */
    PetscCall(MatGetOperation(jac, MATOP_VIEW, &f));
    PetscCall(MatSetOperation(J2, MATOP_VIEW, f));
    PetscCall(MatSetDM(J2, da));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_assembled"));
    PetscCall(MatAXPY(J2, -1., jac, DIFFERENT_NONZERO_PATTERN));
    PetscCall(MatNorm(J2, NORM_FROBENIUS, &nrm));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error MATIS %g\n", (double)nrm));
    PetscCall(MatViewFromOptions(J2, NULL, "-view_conv_err"));
    PetscCall(MatDestroy(&J2));
  }
  /* The constant vector spans the null space of the pure Neumann operator;
     attach it so the KSP can project it out of the solve. */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
  PetscCall(MatSetNullSpace(J, nullspace));
  PetscCall(MatNullSpaceDestroy(&nullspace));
  PetscFunctionReturn(PETSC_SUCCESS);
}
307: /*TEST
309: build:
310: requires: !complex !single
312: test:
313: args: -pc_type mg -pc_mg_type full -ksp_type fgmres -ksp_monitor_short -pc_mg_levels 3 -mg_coarse_pc_factor_shift_type nonzero -ksp_view
315: test:
316: suffix: 2
317: nsize: 2
318: args: -ksp_monitor_short -da_grid_x 50 -da_grid_y 50 -pc_type ksp -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_ksp_rtol 1e-1 -ksp_ksp_monitor -ksp_type pipefgmres -ksp_gmres_restart 5
320: test:
321: suffix: hyprestruct
322: nsize: 3
323: requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
324: args: -ksp_type gmres -pc_type pfmg -dm_mat_type hyprestruct -ksp_monitor -da_refine 3
326: TEST*/