Actual source code: ex5.c
1: static char help[] = "Bratu nonlinear PDE in 2d.\n\
2: We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
3: domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
4: The command line options include:\n\
5: -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
6: problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
7: -m_par/n_par <parameter>, where <parameter> is an integer exponent;\n \
8:       MMS3 is evaluated with m = 2^m_par and n = 2^n_par";
10: /* ------------------------------------------------------------------------
12: Solid Fuel Ignition (SFI) problem. This problem is modeled by
13: the partial differential equation
15: -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
17: with boundary conditions
19: u = 0 for x = 0, x = 1, y = 0, y = 1.
21: A finite difference approximation with the usual 5-point stencil
22: is used to discretize the boundary value problem to obtain a nonlinear
23: system of equations.
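For reference, with hx = 1/(mx-1) and hy = 1/(my-1), the discrete residual
evaluated below at an interior node (i,j) is

    F_ij = (2 u_ij - u_(i-1)j - u_(i+1)j) hy/hx
         + (2 u_ij - u_i(j-1) - u_i(j+1)) hx/hy
         - hx hy (lambda exp(u_ij) + f_ij),

where f_ij is the optional MMS forcing term.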
25: This example shows how geometric multigrid can be run transparently with a nonlinear solver as long
26: as SNESSetDM() is called. Example usage:
28: ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
29: -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
31: or, to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
32: multigrid levels; it is determined automatically from the number of refinements performed):
34: ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
35: -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
37: ------------------------------------------------------------------------- */
39: /*
40: Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
41: Include "petscsnes.h" so that we can use SNES solvers.
42: */
43: #include <petscdm.h>
44: #include <petscdmda.h>
45: #include <petscsnes.h>
46: #include <petscmatlab.h>
47: #include <petsc/private/snesimpl.h>
49: /*
50: User-defined application context - contains data needed by the
51: application-provided call-back routines, FormJacobianLocal() and
52: FormFunctionLocal().
53: */
54: typedef struct AppCtx AppCtx;
55: struct AppCtx {
56: PetscReal param; /* test problem parameter */
57: PetscInt m, n; /* MMS3 parameters */
58: PetscErrorCode (*mms_solution)(AppCtx *, const DMDACoor2d *, PetscScalar *);
59: PetscErrorCode (*mms_forcing)(AppCtx *, const DMDACoor2d *, PetscScalar *);
60: };
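/*
   For example, main() below fills this context in roughly as follows
   (for the default -mms 1):

     AppCtx user;
     user.param        = 6.0;            (overridden by -par)
     user.mms_solution = MMSSolution1;   (selected by -mms)
     user.mms_forcing  = MMSForcing1;
*/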
62: /* ------------------------------------------------------------------- */
63: /*
64: FormInitialGuess - Forms initial approximation.
66: Input Parameters:
67: da - The DM
68: user - user-defined application context
70: Output Parameter:
71: X - vector
72: */
73: static PetscErrorCode FormInitialGuess(DM da, AppCtx *user, Vec X)
74: {
75: PetscInt i, j, Mx, My, xs, ys, xm, ym;
76: PetscReal lambda, temp1, temp, hx, hy;
77: PetscScalar **x;
79: PetscFunctionBeginUser;
80: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
82: lambda = user->param;
83: hx = 1.0 / (PetscReal)(Mx - 1);
84: hy = 1.0 / (PetscReal)(My - 1);
85: temp1 = lambda / (lambda + 1.0);
87: /*
88: Get a pointer to vector data.
89: - For default PETSc vectors, VecGetArray() returns a pointer to
90: the data array. Otherwise, the routine is implementation dependent.
91: - You MUST call VecRestoreArray() when you no longer need access to
92: the array.
93: */
94: PetscCall(DMDAVecGetArray(da, X, &x));
96: /*
97: Get local grid boundaries (for 2-dimensional DMDA):
98: xs, ys - starting grid indices (no ghost points)
99: xm, ym - widths of local grid (no ghost points)
101: */
102: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
104: /*
105: Compute initial guess over the locally owned part of the grid
106: */
107: for (j = ys; j < ys + ym; j++) {
108: temp = (PetscReal)(PetscMin(j, My - j - 1)) * hy;
109: for (i = xs; i < xs + xm; i++) {
110: if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
111: /* boundary conditions are all zero Dirichlet */
112: x[j][i] = 0.0;
113: } else {
114: x[j][i] = temp1 * PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i, Mx - i - 1)) * hx, temp));
115: }
116: }
117: }
119: /*
120: Restore vector
121: */
122: PetscCall(DMDAVecRestoreArray(da, X, &x));
123: PetscFunctionReturn(PETSC_SUCCESS);
124: }
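/*
   In the interior this produces x_ij = (lambda/(lambda+1)) * sqrt(d_ij),
   where d_ij is the distance from node (i,j) to the nearest boundary; the
   guess is positive inside and matches the zero Dirichlet data on the
   boundary.
*/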
126: /*
127: FormExactSolution - Forms the MMS (method of manufactured solutions) exact solution
129: Input Parameters:
130: da - The DM
131: user - user-defined application context
133: Output Parameter:
134: U - vector holding the exact MMS solution
135: */
136: static PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
137: {
138: DM coordDA;
139: Vec coordinates;
140: DMDACoor2d **coords;
141: PetscScalar **u;
142: PetscInt xs, ys, xm, ym, i, j;
144: PetscFunctionBeginUser;
145: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
146: PetscCall(DMGetCoordinateDM(da, &coordDA));
147: PetscCall(DMGetCoordinates(da, &coordinates));
148: PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
149: PetscCall(DMDAVecGetArray(da, U, &u));
150: for (j = ys; j < ys + ym; ++j) {
151: for (i = xs; i < xs + xm; ++i) PetscCall(user->mms_solution(user, &coords[j][i], &u[j][i]));
152: }
153: PetscCall(DMDAVecRestoreArray(da, U, &u));
154: PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
155: PetscFunctionReturn(PETSC_SUCCESS);
156: }
158: static PetscErrorCode ZeroBCSolution(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
159: {
160: u[0] = 0.;
161: return PETSC_SUCCESS;
162: }
164: /* The functions below evaluate the MMS solution u(x,y) and associated forcing
166: f(x,y) = -u_xx - u_yy - lambda exp(u)
168: such that u(x,y) is an exact solution with f(x,y) as the right-hand side forcing term.
169: */
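/*
   As a sanity check for MMSSolution1/MMSForcing1: with u = x(1-x)y(1-y),
   u_xx = -2y(1-y) and u_yy = -2x(1-x), so

     f = -u_xx - u_yy - lambda exp(u)
       = 2y(1-y) + 2x(1-x) - lambda exp(x(1-x)y(1-y)),

   which is exactly what MMSForcing1() below computes.
*/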
170: static PetscErrorCode MMSSolution1(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
171: {
172: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
174: PetscFunctionBeginUser;
175: u[0] = x * (1 - x) * y * (1 - y);
176: PetscCall(PetscLogFlops(5));
177: PetscFunctionReturn(PETSC_SUCCESS);
178: }
179: static PetscErrorCode MMSForcing1(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
180: {
181: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
183: PetscFunctionBeginUser;
184: f[0] = 2 * x * (1 - x) + 2 * y * (1 - y) - user->param * PetscExpReal(x * (1 - x) * y * (1 - y));
185: PetscFunctionReturn(PETSC_SUCCESS);
186: }
188: static PetscErrorCode MMSSolution2(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
189: {
190: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
192: PetscFunctionBeginUser;
193: u[0] = PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y);
194: PetscCall(PetscLogFlops(5));
195: PetscFunctionReturn(PETSC_SUCCESS);
196: }
197: static PetscErrorCode MMSForcing2(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
198: {
199: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
201: PetscFunctionBeginUser;
202: f[0] = 2 * PetscSqr(PETSC_PI) * PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y) - user->param * PetscExpReal(PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y));
203: PetscFunctionReturn(PETSC_SUCCESS);
204: }
206: static PetscErrorCode MMSSolution3(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
207: {
208: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
210: PetscFunctionBeginUser;
211: u[0] = PetscSinReal(user->m * PETSC_PI * x * (1 - y)) * PetscSinReal(user->n * PETSC_PI * y * (1 - x));
212: PetscCall(PetscLogFlops(5));
213: PetscFunctionReturn(PETSC_SUCCESS);
214: }
215: static PetscErrorCode MMSForcing3(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
216: {
217: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
218: PetscReal m = user->m, n = user->n, lambda = user->param;
220: PetscFunctionBeginUser;
221: f[0] = (-(PetscExpReal(PetscSinReal(m * PETSC_PI * x * (1 - y)) * PetscSinReal(n * PETSC_PI * (1 - x) * y)) * lambda) + PetscSqr(PETSC_PI) * (-2 * m * n * ((-1 + x) * x + (-1 + y) * y) * PetscCosReal(m * PETSC_PI * x * (-1 + y)) * PetscCosReal(n * PETSC_PI * (-1 + x) * y) + (PetscSqr(m) * (PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n) * (PetscSqr(-1 + x) + PetscSqr(y))) * PetscSinReal(m * PETSC_PI * x * (-1 + y)) * PetscSinReal(n * PETSC_PI * (-1 + x) * y)));
222: PetscFunctionReturn(PETSC_SUCCESS);
223: }
225: static PetscErrorCode MMSSolution4(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
226: {
227: const PetscReal Lx = 1., Ly = 1.;
228: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
230: PetscFunctionBeginUser;
231: u[0] = (PetscPowReal(x, 4) - PetscSqr(Lx) * PetscSqr(x)) * (PetscPowReal(y, 4) - PetscSqr(Ly) * PetscSqr(y));
232: PetscCall(PetscLogFlops(9));
233: PetscFunctionReturn(PETSC_SUCCESS);
234: }
235: static PetscErrorCode MMSForcing4(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
236: {
237: const PetscReal Lx = 1., Ly = 1.;
238: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
240: PetscFunctionBeginUser;
241: f[0] = (2 * PetscSqr(x) * (PetscSqr(x) - PetscSqr(Lx)) * (PetscSqr(Ly) - 6 * PetscSqr(y)) + 2 * PetscSqr(y) * (PetscSqr(Lx) - 6 * PetscSqr(x)) * (PetscSqr(y) - PetscSqr(Ly)) - user->param * PetscExpReal((PetscPowReal(x, 4) - PetscSqr(Lx) * PetscSqr(x)) * (PetscPowReal(y, 4) - PetscSqr(Ly) * PetscSqr(y))));
242: PetscFunctionReturn(PETSC_SUCCESS);
243: }
245: /* ------------------------------------------------------------------- */
246: /*
247: FormFunctionLocal - Evaluates the nonlinear function F(x) on the local process patch
249: */
250: static PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, PetscScalar **x, PetscScalar **f, AppCtx *user)
251: {
252: PetscInt i, j;
253: PetscReal lambda, hx, hy, hxdhy, hydhx;
254: PetscScalar u, ue, uw, un, us, uxx, uyy, mms_solution, mms_forcing;
255: DMDACoor2d c;
257: PetscFunctionBeginUser;
258: lambda = user->param;
259: hx = 1.0 / (PetscReal)(info->mx - 1);
260: hy = 1.0 / (PetscReal)(info->my - 1);
261: hxdhy = hx / hy;
262: hydhx = hy / hx;
263: /*
264: Compute function over the locally owned part of the grid
265: */
266: for (j = info->ys; j < info->ys + info->ym; j++) {
267: for (i = info->xs; i < info->xs + info->xm; i++) {
268: if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
269: c.x = i * hx;
270: c.y = j * hy;
271: PetscCall(user->mms_solution(user, &c, &mms_solution));
272: f[j][i] = 2.0 * (hydhx + hxdhy) * (x[j][i] - mms_solution);
273: } else {
274: u = x[j][i];
275: uw = x[j][i - 1];
276: ue = x[j][i + 1];
277: un = x[j - 1][i];
278: us = x[j + 1][i];
280: /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
281: if (i - 1 == 0) {
282: c.x = (i - 1) * hx;
283: c.y = j * hy;
284: PetscCall(user->mms_solution(user, &c, &uw));
285: }
286: if (i + 1 == info->mx - 1) {
287: c.x = (i + 1) * hx;
288: c.y = j * hy;
289: PetscCall(user->mms_solution(user, &c, &ue));
290: }
291: if (j - 1 == 0) {
292: c.x = i * hx;
293: c.y = (j - 1) * hy;
294: PetscCall(user->mms_solution(user, &c, &un));
295: }
296: if (j + 1 == info->my - 1) {
297: c.x = i * hx;
298: c.y = (j + 1) * hy;
299: PetscCall(user->mms_solution(user, &c, &us));
300: }
302: uxx = (2.0 * u - uw - ue) * hydhx;
303: uyy = (2.0 * u - un - us) * hxdhy;
304: mms_forcing = 0;
305: c.x = i * hx;
306: c.y = j * hy;
307: if (user->mms_forcing) PetscCall(user->mms_forcing(user, &c, &mms_forcing));
308: f[j][i] = uxx + uyy - hx * hy * (lambda * PetscExpScalar(u) + mms_forcing);
309: }
310: }
311: }
312: PetscCall(PetscLogFlops(11.0 * info->ym * info->xm));
313: PetscFunctionReturn(PETSC_SUCCESS);
314: }
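/*
   FormFunctionLocal() works only on ghosted local arrays; the scatters are
   managed by SNES once the routine is registered in main() with

     PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (DMDASNESFunctionFn *)FormFunctionLocal, &user));
*/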
316: /* FormObjectiveLocal - Evaluates the objective function on the local process patch */
317: static PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info, PetscScalar **x, PetscReal *obj, AppCtx *user)
318: {
319: PetscInt i, j;
320: PetscReal lambda, hx, hy, hxdhy, hydhx, sc, lobj = 0;
321: PetscScalar u, ue, uw, un, us, uxux, uyuy;
322: MPI_Comm comm;
324: PetscFunctionBeginUser;
325: *obj = 0;
326: PetscCall(PetscObjectGetComm((PetscObject)info->da, &comm));
327: lambda = user->param;
328: hx = 1.0 / (PetscReal)(info->mx - 1);
329: hy = 1.0 / (PetscReal)(info->my - 1);
330: sc = hx * hy * lambda;
331: hxdhy = hx / hy;
332: hydhx = hy / hx;
333: /*
334: Compute function over the locally owned part of the grid
335: */
336: for (j = info->ys; j < info->ys + info->ym; j++) {
337: for (i = info->xs; i < info->xs + info->xm; i++) {
338: if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
339: lobj += PetscRealPart((hydhx + hxdhy) * x[j][i] * x[j][i]);
340: } else {
341: u = x[j][i];
342: uw = x[j][i - 1];
343: ue = x[j][i + 1];
344: un = x[j - 1][i];
345: us = x[j + 1][i];
347: if (i - 1 == 0) uw = 0.;
348: if (i + 1 == info->mx - 1) ue = 0.;
349: if (j - 1 == 0) un = 0.;
350: if (j + 1 == info->my - 1) us = 0.;
352: /* F[u] = \int_\Omega 1/2 u*(-\nabla^2 u) - lambda*exp(u) dx, which equals \int_\Omega 1/2 |\nabla u|^2 - lambda*exp(u) dx under the zero Dirichlet boundary conditions */
354: uxux = u * (2. * u - ue - uw) * hydhx;
355: uyuy = u * (2. * u - un - us) * hxdhy;
357: lobj += PetscRealPart(0.5 * (uxux + uyuy) - sc * PetscExpScalar(u));
358: }
359: }
360: }
361: PetscCall(PetscLogFlops(12.0 * info->ym * info->xm));
362: *obj = lobj;
363: PetscFunctionReturn(PETSC_SUCCESS);
364: }
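/*
   This objective is optional: main() attaches it only when -obj is given,
   via DMDASNESSetObjectiveLocal(), after which SNES line searches can use
   it in place of the default 1/2 ||F||^2 merit function.
*/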
366: /*
367: FormJacobianLocal - Evaluates the Jacobian matrix on the local process patch
368: */
369: static PetscErrorCode FormJacobianLocal(DMDALocalInfo *info, PetscScalar **x, Mat jac, Mat jacpre, AppCtx *user)
370: {
371: PetscInt i, j, k;
372: MatStencil col[5], row;
373: PetscScalar lambda, v[5], hx, hy, hxdhy, hydhx, sc;
374: DM coordDA;
375: Vec coordinates;
376: DMDACoor2d **coords;
378: PetscFunctionBeginUser;
379: lambda = user->param;
380: /* Extract coordinates */
381: PetscCall(DMGetCoordinateDM(info->da, &coordDA));
382: PetscCall(DMGetCoordinates(info->da, &coordinates));
383: PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
384: hx = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs + 1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
385: hy = info->ym > 1 ? PetscRealPart(coords[info->ys + 1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
386: PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
387: hxdhy = hx / hy;
388: hydhx = hy / hx;
389: sc = hx * hy * lambda;
391: /*
392: Compute entries for the locally owned part of the Jacobian.
393: - Currently, all PETSc parallel matrix formats are partitioned by
394: contiguous chunks of rows across the processors.
395: - Each processor needs to insert only elements that it owns
396: locally (but any non-local elements will be sent to the
397: appropriate processor during matrix assembly).
398: - Here, we set all entries for a particular row at once.
399: - We can set matrix entries using either MatSetValuesLocal()
400: or MatSetValues().
401: */
402: for (j = info->ys; j < info->ys + info->ym; j++) {
403: for (i = info->xs; i < info->xs + info->xm; i++) {
404: row.j = j;
405: row.i = i;
406: /* boundary points */
407: if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
408: v[0] = 2.0 * (hydhx + hxdhy);
409: PetscCall(MatSetValuesStencil(jacpre, 1, &row, 1, &row, v, INSERT_VALUES));
410: } else {
411: k = 0;
412: /* interior grid points */
413: if (j - 1 != 0) {
414: v[k] = -hxdhy;
415: col[k].j = j - 1;
416: col[k].i = i;
417: k++;
418: }
419: if (i - 1 != 0) {
420: v[k] = -hydhx;
421: col[k].j = j;
422: col[k].i = i - 1;
423: k++;
424: }
426: v[k] = 2.0 * (hydhx + hxdhy) - sc * PetscExpScalar(x[j][i]);
427: col[k].j = row.j;
428: col[k].i = row.i;
429: k++;
431: if (i + 1 != info->mx - 1) {
432: v[k] = -hydhx;
433: col[k].j = j;
434: col[k].i = i + 1;
435: k++;
436: }
437: if (j + 1 != info->my - 1) {
438: v[k] = -hxdhy;
439: col[k].j = j + 1;
440: col[k].i = i;
441: k++;
442: }
443: PetscCall(MatSetValuesStencil(jacpre, 1, &row, k, col, v, INSERT_VALUES));
444: }
445: }
446: }
448: /*
449: Assemble matrix, using the 2-step process:
450: MatAssemblyBegin(), MatAssemblyEnd().
451: */
452: PetscCall(MatAssemblyBegin(jacpre, MAT_FINAL_ASSEMBLY));
453: PetscCall(MatAssemblyEnd(jacpre, MAT_FINAL_ASSEMBLY));
454: /*
455: Tell the matrix we will never add a new nonzero location to the
456: matrix. If we do, it will generate an error.
457: */
458: PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
459: PetscFunctionReturn(PETSC_SUCCESS);
460: }
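/*
   A hand-coded Jacobian like this one can be compared against a finite
   difference Jacobian at runtime, e.g.

     ./ex5 -snes_test_jacobian -snes_test_jacobian_view

   or bypassed entirely with the -fd option handled in main() below.
*/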
462: static PetscErrorCode FormFunctionMatlab(SNES snes, Vec X, Vec F, void *ptr)
463: {
464: #if PetscDefined(HAVE_MATLAB)
465: AppCtx *user = (AppCtx *)ptr;
466: PetscInt Mx, My;
467: PetscReal lambda, hx, hy;
468: Vec localX, localF;
469: MPI_Comm comm;
470: DM da;
472: PetscFunctionBeginUser;
473: PetscCall(SNESGetDM(snes, &da));
474: PetscCall(DMGetLocalVector(da, &localX));
475: PetscCall(DMGetLocalVector(da, &localF));
476: PetscCall(PetscObjectSetName((PetscObject)localX, "localX"));
477: PetscCall(PetscObjectSetName((PetscObject)localF, "localF"));
478: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
480: lambda = user->param;
481: hx = 1.0 / (PetscReal)(Mx - 1);
482: hy = 1.0 / (PetscReal)(My - 1);
484: PetscCall(PetscObjectGetComm((PetscObject)snes, &comm));
485: /*
486: Scatter ghost points to local vector, using the 2-step process
487: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
488: By placing code between these two statements, computations can be
489: done while messages are in transit.
490: */
491: PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
492: PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
493: PetscCall(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm), (PetscObject)localX));
494: PetscCall(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm), "localF=ex5m(localX,%18.16e,%18.16e,%18.16e)", (double)hx, (double)hy, (double)lambda));
495: PetscCall(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm), (PetscObject)localF));
497: /*
498: Insert values into global vector
499: */
500: PetscCall(DMLocalToGlobalBegin(da, localF, INSERT_VALUES, F));
501: PetscCall(DMLocalToGlobalEnd(da, localF, INSERT_VALUES, F));
502: PetscCall(DMRestoreLocalVector(da, &localX));
503: PetscCall(DMRestoreLocalVector(da, &localF));
504: PetscFunctionReturn(PETSC_SUCCESS);
505: #else
506: return PETSC_SUCCESS; /* Never called */
507: #endif
508: }
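/*
   This residual evaluation runs the MATLAB function ex5m() through the
   MATLAB engine; it is selected at runtime with -matlab_function (see
   main() below) and requires a PETSc build with MATLAB support.
*/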
510: /* ------------------------------------------------------------------- */
511: /*
512: Applies sweeps of nonlinear Gauss-Seidel on each process
514: */
515: static PetscErrorCode NonlinearGS(SNES snes, Vec X, Vec B, void *ctx)
516: {
517: PetscInt i, j, k, Mx, My, xs, ys, xm, ym, its, tot_its, sweeps, l;
518: PetscReal lambda, hx, hy, hxdhy, hydhx, sc;
519: PetscScalar **x, **b, bij, F, F0 = 0, J, u, un, us, ue, eu, uw, uxx, uyy, y;
520: PetscReal atol, rtol, stol;
521: DM da;
522: AppCtx *user;
523: Vec localX, localB;
525: PetscFunctionBeginUser;
526: tot_its = 0;
527: PetscCall(SNESNGSGetSweeps(snes, &sweeps));
528: PetscCall(SNESNGSGetTolerances(snes, &atol, &rtol, &stol, &its));
529: PetscCall(SNESGetDM(snes, &da));
530: PetscCall(DMGetApplicationContext(da, &user));
532: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
534: lambda = user->param;
535: hx = 1.0 / (PetscReal)(Mx - 1);
536: hy = 1.0 / (PetscReal)(My - 1);
537: sc = hx * hy * lambda;
538: hxdhy = hx / hy;
539: hydhx = hy / hx;
541: PetscCall(DMGetLocalVector(da, &localX));
542: if (B) PetscCall(DMGetLocalVector(da, &localB));
543: for (l = 0; l < sweeps; l++) {
544: PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
545: PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
546: if (B) {
547: PetscCall(DMGlobalToLocalBegin(da, B, INSERT_VALUES, localB));
548: PetscCall(DMGlobalToLocalEnd(da, B, INSERT_VALUES, localB));
549: }
550: /*
551: Get a pointer to vector data.
552: - For default PETSc vectors, VecGetArray() returns a pointer to
553: the data array. Otherwise, the routine is implementation dependent.
554: - You MUST call VecRestoreArray() when you no longer need access to
555: the array.
556: */
557: PetscCall(DMDAVecGetArray(da, localX, &x));
558: if (B) PetscCall(DMDAVecGetArray(da, localB, &b));
559: /*
560: Get local grid boundaries (for 2-dimensional DMDA):
561: xs, ys - starting grid indices (no ghost points)
562: xm, ym - widths of local grid (no ghost points)
563: */
564: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
566: for (j = ys; j < ys + ym; j++) {
567: for (i = xs; i < xs + xm; i++) {
568: if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
569: /* boundary conditions are all zero Dirichlet */
570: x[j][i] = 0.0;
571: } else {
572: if (B) bij = b[j][i];
573: else bij = 0.;
575: u = x[j][i];
576: un = x[j - 1][i];
577: us = x[j + 1][i];
578: uw = x[j][i - 1];
579: ue = x[j][i + 1];
581: for (k = 0; k < its; k++) {
582: eu = PetscExpScalar(u);
583: uxx = (2.0 * u - ue - uw) * hydhx;
584: uyy = (2.0 * u - un - us) * hxdhy;
585: F = uxx + uyy - sc * eu - bij;
586: if (k == 0) F0 = F;
587: J = 2.0 * (hydhx + hxdhy) - sc * eu;
588: y = F / J;
589: u -= y;
590: tot_its++;
592: if (atol > PetscAbsReal(PetscRealPart(F)) || rtol * PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) || stol * PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) break;
593: }
594: x[j][i] = u;
595: }
596: }
597: }
598: /*
599: Restore vector
600: */
601: PetscCall(DMDAVecRestoreArray(da, localX, &x));
602: PetscCall(DMLocalToGlobalBegin(da, localX, INSERT_VALUES, X));
603: PetscCall(DMLocalToGlobalEnd(da, localX, INSERT_VALUES, X));
604: }
605: PetscCall(PetscLogFlops(tot_its * (21.0)));
606: PetscCall(DMRestoreLocalVector(da, &localX));
607: if (B) {
608: PetscCall(DMDAVecRestoreArray(da, localB, &b));
609: PetscCall(DMRestoreLocalVector(da, &localB));
610: }
611: PetscFunctionReturn(PETSC_SUCCESS);
612: }
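/*
   NonlinearGS() is registered with SNESSetNGS() in main(), so it can serve
   either as the solver itself (-snes_type ngs) or as a nonlinear
   preconditioner, e.g. -snes_type ngmres -npc_snes_type ngs as exercised in
   the tests below.
*/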
614: int main(int argc, char **argv)
615: {
616: SNES snes; /* nonlinear solver */
617: Vec x; /* solution vector */
618: AppCtx user; /* user-defined work context */
619: PetscInt its; /* iterations for convergence */
620: PetscReal bratu_lambda_max = 6.81;
621: PetscReal bratu_lambda_min = 0.;
622: PetscInt MMS = 1;
623: PetscBool flg = PETSC_FALSE, setMMS;
624: DM da;
625: Vec r = NULL;
626: KSP ksp;
627: PetscInt lits, slits;
629: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
630: Initialize program
631: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
633: PetscFunctionBeginUser;
634: PetscCall(PetscInitialize(&argc, &argv, NULL, help));
636: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
637: Initialize problem parameters
638: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
639: user.param = 6.0;
640: PetscCall(PetscOptionsGetReal(NULL, NULL, "-par", &user.param, NULL));
641: PetscCheck(user.param <= bratu_lambda_max && user.param >= bratu_lambda_min, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Lambda, %g, is out of range, [%g, %g]", (double)user.param, (double)bratu_lambda_min, (double)bratu_lambda_max);
642: PetscCall(PetscOptionsGetInt(NULL, NULL, "-mms", &MMS, &setMMS));
643: if (MMS == 3) {
644: PetscInt mPar = 2, nPar = 1;
645: PetscCall(PetscOptionsGetInt(NULL, NULL, "-m_par", &mPar, NULL));
646: PetscCall(PetscOptionsGetInt(NULL, NULL, "-n_par", &nPar, NULL));
647: user.m = PetscPowInt(2, mPar);
648: user.n = PetscPowInt(2, nPar);
649: }
651: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
652: Create nonlinear solver context
653: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
654: PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
655: PetscCall(SNESSetCountersReset(snes, PETSC_FALSE));
656: PetscCall(SNESSetNGS(snes, NonlinearGS, NULL));
658: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
659: Create distributed array (DMDA) to manage parallel grid and vectors
660: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
661: PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
662: PetscCall(DMSetFromOptions(da));
663: PetscCall(DMSetUp(da));
664: PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
665: PetscCall(DMSetApplicationContext(da, &user));
666: PetscCall(SNESSetDM(snes, da));
667: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
668: Extract global vectors from DMDA; then duplicate for remaining
669: vectors that are the same types
670: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
671: PetscCall(DMCreateGlobalVector(da, &x));
673: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
674: Set local function evaluation routine
675: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
676: switch (MMS) {
677: case 0:
678: user.mms_solution = ZeroBCSolution;
679: user.mms_forcing = NULL;
680: break;
681: case 1:
682: user.mms_solution = MMSSolution1;
683: user.mms_forcing = MMSForcing1;
684: break;
685: case 2:
686: user.mms_solution = MMSSolution2;
687: user.mms_forcing = MMSForcing2;
688: break;
689: case 3:
690: user.mms_solution = MMSSolution3;
691: user.mms_forcing = MMSForcing3;
692: break;
693: case 4:
694: user.mms_solution = MMSSolution4;
695: user.mms_forcing = MMSForcing4;
696: break;
697: default:
698: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Unknown MMS type %" PetscInt_FMT, MMS);
699: }
700: PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (DMDASNESFunctionFn *)FormFunctionLocal, &user));
701: PetscCall(PetscOptionsGetBool(NULL, NULL, "-fd", &flg, NULL));
702: if (!flg) PetscCall(DMDASNESSetJacobianLocal(da, (DMDASNESJacobianFn *)FormJacobianLocal, &user));
704: PetscCall(PetscOptionsGetBool(NULL, NULL, "-obj", &flg, NULL));
705: if (flg) PetscCall(DMDASNESSetObjectiveLocal(da, (DMDASNESObjectiveFn *)FormObjectiveLocal, &user));
707: if (PetscDefined(HAVE_MATLAB)) {
708: PetscBool matlab_function = PETSC_FALSE;
709: PetscCall(PetscOptionsGetBool(NULL, NULL, "-matlab_function", &matlab_function, NULL));
710: if (matlab_function) {
711: PetscCall(VecDuplicate(x, &r));
712: PetscCall(SNESSetFunction(snes, r, FormFunctionMatlab, &user));
713: }
714: }
716: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
717: Customize nonlinear solver; set runtime options
718: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
719: PetscCall(SNESSetFromOptions(snes));
721: PetscCall(FormInitialGuess(da, &user, x));
723: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
724: Solve nonlinear system
725: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
726: PetscCall(SNESSolve(snes, NULL, x));
727: PetscCall(SNESGetIterationNumber(snes, &its));
729: PetscCall(SNESGetLinearSolveIterations(snes, &slits));
730: PetscCall(SNESGetKSP(snes, &ksp));
731: PetscCall(KSPGetTotalIterations(ksp, &lits));
732: PetscCheck(lits == slits, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Number of total linear iterations reported by SNES %" PetscInt_FMT " does not match that reported by KSP %" PetscInt_FMT, slits, lits);
736: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
737: If using MMS, check the l_2 error
738: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
739: if (setMMS) {
740: Vec e;
741: PetscReal errorl2, errorinf;
742: PetscInt N;
744: PetscCall(VecDuplicate(x, &e));
745: PetscCall(PetscObjectViewFromOptions((PetscObject)x, NULL, "-sol_view"));
746: PetscCall(FormExactSolution(da, &user, e));
747: PetscCall(PetscObjectViewFromOptions((PetscObject)e, NULL, "-exact_view"));
748: PetscCall(VecAXPY(e, -1.0, x));
749: PetscCall(PetscObjectViewFromOptions((PetscObject)e, NULL, "-error_view"));
750: PetscCall(VecNorm(e, NORM_2, &errorl2));
751: PetscCall(VecNorm(e, NORM_INFINITY, &errorinf));
752: PetscCall(VecGetSize(e, &N));
753: PetscCall(PetscPrintf(PETSC_COMM_WORLD, "N: %" PetscInt_FMT " error L2 %g inf %g\n", N, (double)(errorl2 / PetscSqrtReal((PetscReal)N)), (double)errorinf));
754: PetscCall(VecDestroy(&e));
755: PetscCall(PetscLogEventSetDof(SNES_Solve, 0, N));
756: PetscCall(PetscLogEventSetError(SNES_Solve, 0, errorl2 / PetscSqrtReal(N)));
757: }
759: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
760: Free work space. All PETSc objects should be destroyed when they
761: are no longer needed.
762: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
763: PetscCall(VecDestroy(&r));
764: PetscCall(VecDestroy(&x));
765: PetscCall(SNESDestroy(&snes));
766: PetscCall(DMDestroy(&da));
767: PetscCall(PetscFinalize());
768: return 0;
769: }
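/*
   A minimal build-and-run sketch, assuming a configured PETSc tree with
   PETSC_DIR and PETSC_ARCH set:

     make ex5
     ./ex5 -snes_monitor -da_refine 3

   mirroring the usage examples in the header comment above.
*/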
771: /*TEST
773: test:
774: suffix: asm_0
775: requires: !single
776: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
778: test:
779: suffix: msm_0
780: requires: !single
781: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
783: test:
784: suffix: asm_1
785: requires: !single
786: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
788: test:
789: suffix: msm_1
790: requires: !single
791: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
793: test:
794: suffix: asm_2
795: requires: !single
796: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
798: test:
799: suffix: msm_2
800: requires: !single
801: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
803: test:
804: suffix: asm_3
805: requires: !single
806: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
808: test:
809: suffix: msm_3
810: requires: !single
811: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
813: test:
814: suffix: asm_4
815: requires: !single
816: nsize: 2
817: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
819: test:
820: suffix: msm_4
821: requires: !single
822: nsize: 2
823: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
825: test:
826: suffix: asm_5
827: requires: !single
828: nsize: 2
829: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
831: test:
832: suffix: msm_5
833: requires: !single
834: nsize: 2
835: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
837: test:
838: args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
839: requires: !single
841: test:
842: suffix: 2
843: args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol 0
845: test:
846: suffix: 3
847: nsize: 2
848: args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol 0 -snes_rtol 1.e-2
849: filter: grep -v "otal number of function evaluations"
851: test:
852: suffix: 4
853: nsize: 2
854: args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol 0 -ksp_atol 0
856: test:
857: suffix: 5_anderson
858: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
860: test:
861: suffix: 5_aspin
862: nsize: 4
863: args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view -npc_sub_pc_type lu -npc_sub_ksp_type preonly
865: test:
866: suffix: 5_broyden
867: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
869: test:
870: suffix: 5_fas
871: args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
872: requires: !single
874: test:
875: suffix: 5_fas_additive
876: args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
878: test:
879: suffix: 5_fas_monitor
880: args: -da_refine 1 -snes_type fas -snes_fas_monitor
881: requires: !single
883: test:
884: suffix: 5_ls
885: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
887: test:
888: suffix: 5_ls_sell_sor
889: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
890: output_file: output/ex5_5_ls.out
892: test:
893: suffix: 5_nasm
894: nsize: 4
895: args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
897: test:
898: suffix: 5_ncg
899: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
901: test:
902: suffix: 5_newton_asm_dmda
903: nsize: 4
904: args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
905: requires: !single
907: test:
908: suffix: 5_newton_gasm_dmda
909: nsize: 4
910: args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
911: requires: !single
913: test:
914: suffix: 5_ngmres
915: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
917: test:
918: suffix: 5_ngmres_fas
919: args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
921: test:
922: suffix: 5_ngmres_ngs
923: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
925: test:
926: suffix: 5_ngmres_nrichardson
927: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
928: output_file: output/ex5_5_ngmres_richardson.out
930: test:
931: suffix: 5_nrichardson
932: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
934: test:
935: suffix: 5_qn
936: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
938: test:
939: suffix: 6
940: nsize: 4
941: args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
943: test:
944: requires: complex !single
945: suffix: complex
946: args: -snes_mf_operator -mat_mffd_complex -snes_monitor
948: test:
949: requires: !single
950: suffix: 7_ksp_view_pre
951: args: -pc_type gamg -ksp_view_pre
953: test:
954: requires: !single
955: suffix: hem_view_detailed
956: args: -pc_type gamg -ksp_view ::ascii_info_detail -pc_gamg_mat_coarsen_type hem
958: test:
959: requires: !single
960: suffix: mis_view_detailed
961: args: -pc_type gamg -ksp_view ::ascii_info_detail -pc_gamg_mat_coarsen_type mis
963: TEST*/