Actual source code: ex50.c
/* DMDA/KSP solving a system of linear equations.
   Poisson equation in 2D:

   div(grad p) = f,  0 < x,y < 1
   with
     forcing function f = -cos(m*pi*x)*cos(n*pi*y),
     Neumann boundary conditions
       dp/dx = 0 for x = 0, x = 1.
       dp/dy = 0 for y = 0, y = 1.

   Contributed by Michael Boghosian <boghmic@iit.edu>, 2008,
     based on petsc/src/ksp/ksp/tutorials/ex29.c and ex32.c

   Compare to ex66.c

   Example of usage:
     ./ex50 -da_grid_x 3 -da_grid_y 3 -pc_type mg -da_refine 3 -ksp_monitor -ksp_view -dm_view draw -draw_pause -1
     ./ex50 -da_grid_x 100 -da_grid_y 100 -pc_type mg -pc_mg_levels 1 -mg_levels_0_pc_type ilu -mg_levels_0_pc_factor_levels 1 -ksp_monitor -ksp_view
     ./ex50 -da_grid_x 100 -da_grid_y 100 -pc_type mg -pc_mg_levels 1 -mg_levels_0_pc_type lu -mg_levels_0_pc_factor_shift_type NONZERO -ksp_monitor
     mpiexec -n 4 ./ex50 -da_grid_x 3 -da_grid_y 3 -pc_type mg -da_refine 10 -ksp_monitor -ksp_view -log_view
*/
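/* For integer frequencies m and n (uu and tt in the user context below), one can check that
   p(x,y) = cos(m*pi*x)*cos(n*pi*y) / ((m*m + n*n)*pi*pi) satisfies both the PDE and the Neumann
   conditions, so the continuous problem determines p only up to an additive constant; that
   constant null space is why the code below removes the null space from the right-hand side and
   attaches it to the operator. */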
static char help[] = "Solves 2D Poisson equation using multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscksp.h>
#include <petscsys.h>
#include <petscvec.h>

extern PetscErrorCode ComputeJacobian(KSP, Mat, Mat, void *);
extern PetscErrorCode ComputeRHS(KSP, Vec, void *);
typedef struct {
  PetscScalar uu, tt; /* frequencies m and n of the forcing function */
} UserContext;
int main(int argc, char **argv)
{
  KSP         ksp;
  DM          da;
  UserContext user;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 11, 11, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(KSPSetDM(ksp, (DM)da));
  PetscCall(DMSetApplicationContext(da, &user));

  user.uu = 1.0;
  user.tt = 1.0;

  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, &user));
  PetscCall(KSPSetComputeOperators(ksp, ComputeJacobian, &user));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp, NULL, NULL));
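  /* Optional, illustrative addition (not needed for the solve): when KSPSolve() is called with
     NULL vectors, the KSP uses vectors created from the attached DM, and the computed solution
     can be retrieved with KSPGetSolution(). The option name -solution_view is purely
     illustrative. */
  {
    Vec x;
    PetscCall(KSPGetSolution(ksp, &x));
    PetscCall(VecViewFromOptions(x, NULL, "-solution_view"));
  }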

  PetscCall(DMDestroy(&da));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(PetscFinalize());
  return 0;
}
PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
{
  UserContext  *user = (UserContext *)ctx;
  PetscInt      i, j, M, N, xm, ym, xs, ys;
  PetscScalar   Hx, Hy, pi, uu, tt;
  PetscScalar **array;
  DM            da;
  MatNullSpace  nullspace;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  PetscCall(DMDAGetInfo(da, 0, &M, &N, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  uu = user->uu;
  tt = user->tt;
  pi = 4 * atan(1.0);
  Hx = 1.0 / (PetscReal)M;
  Hy = 1.0 / (PetscReal)N;

  PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0)); /* Fine grid */
  PetscCall(DMDAVecGetArray(da, b, &array));
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) array[j][i] = -PetscCosScalar(uu * pi * ((PetscReal)i + 0.5) * Hx) * PetscCosScalar(tt * pi * ((PetscReal)j + 0.5) * Hy) * Hx * Hy;
  }
  PetscCall(DMDAVecRestoreArray(da, b, &array));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));

  /* Force the right-hand side to be consistent for the singular matrix.
     Note that this is really a hack; normally the model would provide a consistent right-hand side. */
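  /* With pure Neumann boundary conditions the operator has a one-dimensional null space of
     constant vectors, so the linear system is solvable only if the right-hand side is orthogonal
     to that null space (i.e. sums to zero); MatNullSpaceRemove() subtracts off that component. */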
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
  PetscCall(MatNullSpaceRemove(nullspace, b));
  PetscCall(MatNullSpaceDestroy(&nullspace));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode ComputeJacobian(KSP ksp, Mat J, Mat jac, void *ctx)
{
  PetscInt     i, j, M, N, xm, ym, xs, ys, num, numi, numj;
  PetscScalar  v[5], Hx, Hy, HydHx, HxdHy;
  MatStencil   row, col[5];
  DM           da;
  MatNullSpace nullspace;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &da));
  PetscCall(DMDAGetInfo(da, 0, &M, &N, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  Hx    = 1.0 / (PetscReal)M;
  Hy    = 1.0 / (PetscReal)N;
  HxdHy = Hx / Hy;
  HydHx = Hy / Hx;
  PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0));
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      row.i = i;
      row.j = j;

      if (i == 0 || j == 0 || i == M - 1 || j == N - 1) {
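        /* Boundary cell: drop the stencil entries for neighbors outside the domain and scale the
           diagonal by the number of neighbors that remain, which enforces the homogeneous Neumann
           condition in this cell-centered discretization. */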
        num  = 0;
        numi = 0;
        numj = 0;
        if (j != 0) {
          v[num]     = -HxdHy;
          col[num].i = i;
          col[num].j = j - 1;
          num++;
          numj++;
        }
        if (i != 0) {
          v[num]     = -HydHx;
          col[num].i = i - 1;
          col[num].j = j;
          num++;
          numi++;
        }
        if (i != M - 1) {
          v[num]     = -HydHx;
          col[num].i = i + 1;
          col[num].j = j;
          num++;
          numi++;
        }
        if (j != N - 1) {
          v[num]     = -HxdHy;
          col[num].i = i;
          col[num].j = j + 1;
          num++;
          numj++;
        }
        v[num]     = (PetscReal)numj * HxdHy + (PetscReal)numi * HydHx;
        col[num].i = i;
        col[num].j = j;
        num++;
        PetscCall(MatSetValuesStencil(jac, 1, &row, num, col, v, INSERT_VALUES));
      } else {
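        /* Interior cell: standard 5-point stencil for the Laplacian, scaled by the cell sizes. */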
        v[0]     = -HxdHy;
        col[0].i = i;
        col[0].j = j - 1;
        v[1]     = -HydHx;
        col[1].i = i - 1;
        col[1].j = j;
        v[2]     = 2.0 * (HxdHy + HydHx);
        col[2].i = i;
        col[2].j = j;
        v[3]     = -HydHx;
        col[3].i = i + 1;
        col[3].j = j;
        v[4]     = -HxdHy;
        col[4].i = i;
        col[4].j = j + 1;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
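
  /* The assembled operator is singular (constants lie in its null space because of the pure
     Neumann boundary conditions); attaching the null space to the matrix lets KSP remove that
     component during the solve. */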
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
  PetscCall(MatSetNullSpace(J, nullspace));
  PetscCall(MatNullSpaceDestroy(&nullspace));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*TEST

   build:
      requires: !complex !single

   test:
      args: -pc_type mg -pc_mg_type full -ksp_type cg -ksp_monitor_short -da_refine 3 -mg_coarse_pc_type svd -ksp_view

   test:
      suffix: 2
      nsize: 4
      args: -pc_type mg -pc_mg_type full -ksp_type cg -ksp_monitor_short -da_refine 3 -mg_coarse_pc_type redundant -mg_coarse_redundant_pc_type svd -ksp_view

   test:
      suffix: 3
      nsize: 2
      args: -pc_type mg -pc_mg_type full -ksp_monitor_short -da_refine 5 -mg_coarse_ksp_type cg -mg_coarse_ksp_converged_reason -mg_coarse_ksp_rtol 1e-2 -mg_coarse_ksp_max_it 5 -mg_coarse_pc_type none -pc_mg_levels 2 -ksp_type pipefgmres -ksp_pipefgmres_shift 1.5

   test:
      suffix: tut_1
      nsize: 1
      args: -da_grid_x 4 -da_grid_y 4 -mat_view

   test:
      suffix: tut_2
      requires: superlu_dist parmetis
      nsize: 4
      args: -da_grid_x 120 -da_grid_y 120 -pc_type lu -pc_factor_mat_solver_type superlu_dist -ksp_monitor -ksp_view

   test:
      suffix: tut_3
      nsize: 4
      args: -da_grid_x 1025 -da_grid_y 1025 -pc_type mg -pc_mg_levels 9 -ksp_monitor

TEST*/