ex35.c

static const char help[] = "-Laplacian u = b as a nonlinear problem.\n\n";

/*
    The linear and nonlinear versions of each solver below should give almost identical results on this problem

    Richardson
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type nrichardson -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_rtol 1.e-12 -ksp_monitor -ksp_type richardson -pc_type none -ksp_richardson_self_scale -info

    GMRES
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ngmres

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    CG
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ncg -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type cg -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    Multigrid
      Linear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor
          -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor -ksp_rtol 1.e-12 -ksp_monitor_true_residual

        n levels:
          -da_refine n

      Nonlinear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -snes_type fas -fas_levels_snes_monitor

        n levels:
          -da_refine n -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly
*/
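
/*
   As a concrete illustration, a multilevel nonlinear multigrid (FAS) run
   combining the options above could be launched as shown next.  The
   executable name ex35 and the use of mpiexec are assumptions about the
   build environment, not part of this source:

     mpiexec -n 2 ./ex35 -da_refine 2 -snes_rtol 1.e-12 -snes_monitor -snes_type fas -fas_levels_snes_monitor -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly
*/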

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>

/*
   User-defined routines
*/
extern PetscErrorCode FormMatrix(DM, Mat);
extern PetscErrorCode MyComputeFunction(SNES, Vec, Vec, void *);
extern PetscErrorCode MyComputeJacobian(SNES, Vec, Mat, Mat, void *);
extern PetscErrorCode NonlinearGS(SNES, Vec);

int main(int argc, char **argv)
{
  SNES      snes;  /* nonlinear solver */
  SNES      psnes; /* nonlinear Gauss-Seidel approximate solver */
  Vec       x, b;  /* solution and right-hand-side vectors */
  PetscInt  its;   /* iterations for convergence */
  DM        da;
  PetscBool use_ngs_as_npc = PETSC_FALSE; /* use the nonlinear Gauss-Seidel sweep as a nonlinear preconditioner */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_ngs_as_npc", &use_ngs_as_npc, NULL));

  if (use_ngs_as_npc) {
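    /* Wrap the user-defined NonlinearGS sweep (defined below) in a SNESSHELL
       and use it as a nonlinear preconditioner for the outer SNES */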
    PetscCall(SNESGetNPC(snes, &psnes));
    PetscCall(SNESSetType(psnes, SNESSHELL));
    PetscCall(SNESShellSetSolve(psnes, NonlinearGS));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
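  /* Attach the DMDA to the SNES; the solver uses it to create compatible
     work vectors and matrix data structures */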
  PetscCall(SNESSetDM(snes, da));
  if (use_ngs_as_npc) PetscCall(SNESShellSetContext(psnes, da));
  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create global vectors from the DMDA for the solution x and the
     right-hand side b
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(DMCreateGlobalVector(da, &b));
  PetscCall(VecSet(b, 1.0));

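  /* Passing NULL for the residual vector and the Jacobian matrices lets
     SNES create them automatically from the DM attached above */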
  PetscCall(SNESSetFunction(snes, NULL, MyComputeFunction, NULL));
  PetscCall(SNESSetJacobian(snes, NULL, NULL, MyComputeJacobian, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
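  /* b (set to 1 above) is passed as the right-hand side, so SNES solves
     F(x) = b, where F applies the discrete Laplacian (see MyComputeFunction) */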
  PetscCall(SNESSolve(snes, b, x));
  PetscCall(SNESGetIterationNumber(snes, &its));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */
PetscErrorCode MyComputeFunction(SNES snes, Vec x, Vec F, void *ctx)
{
  Mat J;
  DM  dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes, &dm));
  PetscCall(DMGetApplicationContext(dm, &J));
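  /* Build the constant operator once and cache it in the DM's application
     context so that subsequent function evaluations reuse the same matrix */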
  if (!J) {
    PetscCall(DMSetMatType(dm, MATAIJ));
    PetscCall(DMCreateMatrix(dm, &J));
    PetscCall(MatSetDM(J, NULL));
    PetscCall(FormMatrix(dm, J));
    PetscCall(DMSetApplicationContext(dm, J));
    PetscCall(DMSetApplicationContextDestroy(dm, (PetscErrorCode(*)(void **))MatDestroy));
  }
  PetscCall(MatMult(J, x, F));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MyComputeJacobian(SNES snes, Vec x, Mat J, Mat Jp, void *ctx)
{
  DM dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes, &dm));
  PetscCall(FormMatrix(dm, Jp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode FormMatrix(DM da, Mat jac)
{
  PetscInt      i, j, nrows = 0;
  MatStencil    col[5], row, *rows;
  PetscScalar   v[5], hx, hy, hxdhy, hydhx;
  DMDALocalInfo info;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetLocalInfo(da, &info));
  hx    = 1.0 / (PetscReal)(info.mx - 1);
  hy    = 1.0 / (PetscReal)(info.my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;
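  /* hx and hy are the uniform mesh spacings; the discrete equations are
     scaled by hx*hy, which turns the usual 1/hx^2 and 1/hy^2 Laplacian
     weights into the hydhx and hxdhy stencil entries used below */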

  PetscCall(PetscMalloc1(info.ym * info.xm, &rows));
  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
  */
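  /*
     Interior rows receive the standard 5-point stencil for the Laplacian
     scaled by hx*hy: 2*(hydhx + hxdhy) on the diagonal, -hydhx for the
     east/west neighbors, and -hxdhy for the north/south neighbors.
  */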
  for (j = info.ys; j < info.ys + info.ym; j++) {
    for (i = info.xs; i < info.xs + info.xm; i++) {
      row.j = j;
      row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info.mx - 1 || j == info.my - 1) {
        v[0] = 2.0 * (hydhx + hxdhy);
        PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
        rows[nrows].i   = i;
        rows[nrows++].j = j;
      } else {
        /* interior grid points */
        v[0]     = -hxdhy;
        col[0].j = j - 1;
        col[0].i = i;
        v[1]     = -hydhx;
        col[1].j = j;
        col[1].i = i - 1;
        v[2]     = 2.0 * (hydhx + hxdhy);
        col[2].j = row.j;
        col[2].i = row.i;
        v[3]     = -hydhx;
        col[3].j = j;
        col[3].i = i + 1;
        v[4]     = -hxdhy;
        col[4].j = j + 1;
        col[4].i = i;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
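  /*
     Impose the Dirichlet boundary conditions symmetrically: zero out the
     boundary rows and columns collected above and put 2.0*(hydhx + hxdhy)
     back on the diagonal, which keeps the operator symmetric.
  */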
  PetscCall(MatZeroRowsColumnsStencil(jac, nrows, rows, 2.0 * (hydhx + hxdhy), NULL, NULL));
  PetscCall(PetscFree(rows));
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
/*
      Applies sweeps of nonlinear Gauss-Seidel on each process; the number
      of sweeps is the maximum iteration count set on the SNES
*/
PetscErrorCode NonlinearGS(SNES snes, Vec X)
{
  PetscInt      i, j, Mx, My, xs, ys, xm, ym, its, l;
  PetscReal     hx, hy, hxdhy, hydhx;
  PetscScalar **x, F, J, u, uxx, uyy;
  DM            da;
  Vec           localX;

  PetscFunctionBeginUser;
  PetscCall(SNESGetTolerances(snes, NULL, NULL, NULL, &its, NULL));
  PetscCall(SNESShellGetContext(snes, &da));

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx    = 1.0 / (PetscReal)(Mx - 1);
  hy    = 1.0 / (PetscReal)(My - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;

  PetscCall(DMGetLocalVector(da, &localX));

  for (l = 0; l < its; l++) {
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
    /*
     Get a pointer to vector data.
     - DMDAVecGetArray() returns an array indexed by global grid
       coordinates for the local portion of the DMDA vector.
     - You MUST call DMDAVecRestoreArray() when you no longer need
       access to the array.
     */
    PetscCall(DMDAVecGetArray(da, localX, &x));

    /*
     Get local grid boundaries (for 2-dimensional DMDA):
     xs, ys   - starting grid indices (no ghost points)
     xm, ym   - widths of local grid (no ghost points)
     */
    PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

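    /*
       One Gauss-Seidel sweep: at each interior point take a single Newton
       step on the pointwise residual F(u) = uxx + uyy, whose diagonal
       Jacobian entry is J = 2*(hydhx + hxdhy), i.e. u <- u - F/J.
       Boundary points are reset to the zero Dirichlet value.
    */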
    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          u   = x[j][i];
          uxx = (2.0 * u - x[j][i - 1] - x[j][i + 1]) * hydhx;
          uyy = (2.0 * u - x[j - 1][i] - x[j + 1][i]) * hxdhy;
          F   = uxx + uyy;
          J   = 2.0 * (hydhx + hxdhy);
          u   = u - F / J;

          x[j][i] = u;
        }
      }
    }

    /*
     Restore vector
     */
    PetscCall(DMDAVecRestoreArray(da, localX, &x));
    PetscCall(DMLocalToGlobalBegin(da, localX, INSERT_VALUES, X));
    PetscCall(DMLocalToGlobalEnd(da, localX, INSERT_VALUES, X));
  }
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -snes_monitor_short -snes_type nrichardson
      requires: !single

   test:
      suffix: 2
      args: -snes_monitor_short -ksp_monitor_short -ksp_type richardson -pc_type none -ksp_richardson_self_scale
      requires: !single

   test:
      suffix: 3
      args: -snes_monitor_short -snes_type ngmres

   test:
      suffix: 4
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none

   test:
      suffix: 5
      args: -snes_monitor_short -snes_type ncg

   test:
      suffix: 6
      args: -snes_monitor_short -ksp_type cg -ksp_monitor_short -pc_type none

   test:
      suffix: 7
      args: -da_refine 2 -snes_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor_short -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor_short
      requires: !single

   test:
      suffix: 8
      args: -da_refine 2 -snes_monitor_short -snes_type fas -fas_levels_snes_monitor_short -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_rtol 1.e-5

   test:
      suffix: 9
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc

   test:
      suffix: 10
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc -snes_trdc_use_cauchy false

TEST*/