Actual source code: ex66.c

```
  1: /*   DMDA/KSP solving a system of linear equations.
2:      Poisson equation in 2D:

4:      div(grad p) = f,  0 < x,y < 1
5:      with
6:        forcing function f = -cos(m*pi*x)*cos(n*pi*y),
7:        Periodic boundary conditions
8:          p(x=0) = p(x=1)
9:        Neumann boundary conditions
10:          dp/dy = 0 for y = 0, y = 1.

12:        This example uses the DM_BOUNDARY_MIRROR to implement the Neumann boundary conditions, see the manual pages for DMBoundaryType

14:        Compare to ex50.c
15: */

17: static char help[] = "Solves 2D Poisson equation,\n\
18:                       using mirrored boundaries to implement Neumann boundary conditions,\n\
19:                       using multigrid.\n\n";

21: #include <petscdm.h>
22: #include <petscdmda.h>
23: #include <petscksp.h>
24: #include <petscsys.h>
25: #include <petscvec.h>

27: extern PetscErrorCode ComputeJacobian(KSP, Mat, Mat, void *);
28: extern PetscErrorCode ComputeRHS(KSP, Vec, void *);

30: typedef struct {
31:   PetscScalar uu, tt; /* frequencies of the forcing f = -cos(uu*pi*x)*cos(tt*pi*y): uu acts in x, tt in y (the m and n of the header comment) */
32: } UserContext;

34: int main(int argc, char **argv)
35: {
36:   KSP         ksp;
37:   DM          da;
38:   UserContext user;

40:   PetscFunctionBeginUser;
41:   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
42:   PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
43:   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_MIRROR, DMDA_STENCIL_STAR, 11, 11, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
44:   PetscCall(DMSetFromOptions(da));
45:   PetscCall(DMSetUp(da));
46:   PetscCall(KSPSetDM(ksp, (DM)da));
47:   PetscCall(DMSetApplicationContext(da, &user));

49:   user.uu = 1.0;
50:   user.tt = 1.0;

52:   PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, &user));
53:   PetscCall(KSPSetComputeOperators(ksp, ComputeJacobian, &user));
54:   PetscCall(KSPSetFromOptions(ksp));
55:   PetscCall(KSPSolve(ksp, NULL, NULL));

57:   PetscCall(DMDestroy(&da));
58:   PetscCall(KSPDestroy(&ksp));
59:   PetscCall(PetscFinalize());
60:   return 0;
61: }

63: PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
64: {
65:   UserContext  *user = (UserContext *)ctx;
66:   PetscInt      i, j, M, N, xm, ym, xs, ys;
67:   PetscScalar   Hx, Hy, pi, uu, tt;
68:   PetscScalar **array;
69:   DM            da;
70:   MatNullSpace  nullspace;

72:   PetscFunctionBeginUser;
73:   PetscCall(KSPGetDM(ksp, &da));
74:   PetscCall(DMDAGetInfo(da, 0, &M, &N, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
75:   uu = user->uu;
76:   tt = user->tt;
77:   pi = 4 * PetscAtanReal(1.0);
78:   Hx = 1.0 / (PetscReal)(M);
79:   Hy = 1.0 / (PetscReal)(N);

81:   PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0)); /* Fine grid */
82:   PetscCall(DMDAVecGetArray(da, b, &array));
83:   for (j = ys; j < ys + ym; j++) {
84:     for (i = xs; i < xs + xm; i++) array[j][i] = -PetscCosScalar(uu * pi * ((PetscReal)i + 0.5) * Hx) * PetscCosScalar(tt * pi * ((PetscReal)j + 0.5) * Hy) * Hx * Hy;
85:   }
86:   PetscCall(DMDAVecRestoreArray(da, b, &array));
87:   PetscCall(VecAssemblyBegin(b));
88:   PetscCall(VecAssemblyEnd(b));

90:   /* force right-hand side to be consistent for singular matrix */
91:   /* note this is really a hack, normally the model would provide you with a consistent right handside */
92:   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
93:   PetscCall(MatNullSpaceRemove(nullspace, b));
94:   PetscCall(MatNullSpaceDestroy(&nullspace));
95:   PetscFunctionReturn(PETSC_SUCCESS);
96: }

98: PetscErrorCode ComputeJacobian(KSP ksp, Mat J, Mat jac, void *ctx)
99: {
100:   PetscInt     i, j, M, N, xm, ym, xs, ys;
101:   PetscScalar  v[5], Hx, Hy, HydHx, HxdHy;
102:   MatStencil   row, col[5];
103:   DM           da;
104:   MatNullSpace nullspace;

106:   PetscFunctionBeginUser;
107:   PetscCall(KSPGetDM(ksp, &da));
108:   PetscCall(DMDAGetInfo(da, 0, &M, &N, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
109:   Hx    = 1.0 / (PetscReal)(M);
110:   Hy    = 1.0 / (PetscReal)(N);
111:   HxdHy = Hx / Hy;
112:   HydHx = Hy / Hx;
113:   PetscCall(DMDAGetCorners(da, &xs, &ys, 0, &xm, &ym, 0));
114:   for (j = ys; j < ys + ym; j++) {
115:     for (i = xs; i < xs + xm; i++) {
116:       row.i    = i;
117:       row.j    = j;
118:       v[0]     = -HxdHy;
119:       col[0].i = i;
120:       col[0].j = j - 1;
121:       v[1]     = -HydHx;
122:       col[1].i = i - 1;
123:       col[1].j = j;
124:       v[2]     = 2.0 * (HxdHy + HydHx);
125:       col[2].i = i;
126:       col[2].j = j;
127:       v[3]     = -HydHx;
128:       col[3].i = i + 1;
129:       col[3].j = j;
130:       v[4]     = -HxdHy;
131:       col[4].i = i;
132:       col[4].j = j + 1;
133:       PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, ADD_VALUES));
134:     }
135:   }
136:   PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
137:   PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));

139:   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
140:   PetscCall(MatSetNullSpace(J, nullspace));
141:   PetscCall(MatNullSpaceDestroy(&nullspace));
142:   PetscFunctionReturn(PETSC_SUCCESS);
143: }

145: /*TEST

147:    build:
148:       requires: !complex !single

150:    test:
151:       args: -pc_type mg -pc_mg_type full -ksp_monitor_short -da_refine 3 -mg_coarse_pc_type svd -ksp_view

153:    test:
154:       suffix: 2
155:       nsize: 4
156:       args: -pc_type mg -pc_mg_type full -ksp_monitor_short -da_refine 3 -mg_coarse_pc_type redundant -mg_coarse_redundant_pc_type svd -ksp_view

158: TEST*/
```