! ex5f90.F90
1: !
2: ! Description: Solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DMDAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
7: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
8: !
10: !
11: ! --------------------------------------------------------------------------
12: !
13: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
14: ! the partial differential equation
15: !
16: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
17: !
18: ! with boundary conditions
19: !
20: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
21: !
22: ! A finite difference approximation with the usual 5-point stencil
23: ! is used to discretize the boundary value problem to obtain a nonlinear
24: ! system of equations.
25: !
26: ! The uniprocessor version of this code is snes/tutorials/ex4f.F
27: !
28: ! --------------------------------------------------------------------------
29: ! The following define must be used before including any PETSc include files
30: ! into a module or interface. This is because they can't handle declarations
31: ! in them
32: !
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
module ex5module
use petscsnes
use petscdmda
implicit none
! Application context passed to the function/Jacobian evaluation routines.
! Index ranges are 1-based (shifted up by one in the main program from the
! 0-based values returned by DMDAGetCorners/DMDAGetGhostCorners).
type AppCtx
! Locally owned (xs:xe) and ghosted (gxs:gxe) x-index ranges and their widths
PetscInt xs, xe, xm, gxs, gxe, gxm
! Same for the y direction
PetscInt ys, ye, ym, gys, gye, gym
! Global grid dimensions
PetscInt mx, my
! MPI rank of this process
PetscMPIInt rank
! Bratu nonlinearity parameter (expected range 0 < lambda < 6.81)
PetscReal lambda
end type AppCtx
contains
48: ! ---------------------------------------------------------------------
49: !
50: ! FormFunction - Evaluates nonlinear function, F(x).
51: !
52: ! Input Parameters:
53: ! snes - the SNES context
54: ! X - input vector
55: ! dummy - optional user-defined context, as set by SNESSetFunction()
56: ! (not used here)
57: !
58: ! Output Parameter:
59: ! F - function vector
60: !
61: ! Notes:
62: ! This routine serves as a wrapper for the lower-level routine
63: ! "FormFunctionLocal", where the actual computations are
64: ! done using the standard Fortran style of treating the local
65: ! vector data as a multidimensional array over the local mesh.
66: ! This routine merely handles ghost point scatters and accesses
67: ! the local vector data via VecGetArray() and VecRestoreArray().
68: !
subroutine FormFunction(snes, X, F, ctx, ierr)
! Evaluates the nonlinear function F(x). Wrapper that scatters ghost
! values into a local vector and hands flat array pointers to the
! lower-level routine FormFunctionLocal(), which does the actual
! stencil computation over the local mesh.
! Input/output variables:
SNES snes
Vec X, F
PetscErrorCode, intent(out) :: ierr
type(AppCtx) ctx
DM da
! Declarations for use with local arrays:
PetscScalar, pointer :: lx_v(:), lf_v(:)
Vec localX
! Scatter ghost points to local vector, using the 2-step process
! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
! By placing code between these two statements, computations can
! be done while messages are in transition.
PetscCall(SNESGetDM(snes, da, ierr))
PetscCall(DMGetLocalVector(da, localX, ierr))
PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))
! Get a pointer to vector data.
! - For default PETSc vectors, VecGetArray() returns a pointer to
! the data array. Otherwise, the routine is implementation dependent.
! - You MUST call VecRestoreArray() when you no longer need access to
! the array.
! - Note that the Fortran pointer interface to VecGetArray() differs
! from the C interface.
PetscCall(VecGetArray(localX, lx_v, ierr))
PetscCall(VecGetArray(F, lf_v, ierr))
! Compute function over the locally owned part of the grid
PetscCall(FormFunctionLocal(lx_v, lf_v, ctx, ierr))
! Restore vectors
PetscCall(VecRestoreArray(localX, lx_v, ierr))
PetscCall(VecRestoreArray(F, lf_v, ierr))
! Return the local work vector and log the per-point stencil flop count
PetscCall(DMRestoreLocalVector(da, localX, ierr))
PetscCall(PetscLogFlops(11.0d0*ctx%ym*ctx%xm, ierr))
! PetscCallA(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
! PetscCallA(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
end subroutine formfunction
116: ! ---------------------------------------------------------------------
117: !
118: ! FormInitialGuess - Forms initial approximation.
119: !
120: ! Input Parameters:
121: ! X - vector
122: !
123: ! Output Parameter:
124: ! X - vector
125: !
126: ! Notes:
127: ! This routine serves as a wrapper for the lower-level routine
128: ! "InitialGuessLocal", where the actual computations are
129: ! done using the standard Fortran style of treating the local
130: ! vector data as a multidimensional array over the local mesh.
131: ! This routine merely handles ghost point scatters and accesses
132: ! the local vector data via VecGetArray() and VecRestoreArray().
133: !
subroutine FormInitialGuess(snes, X, ierr)
! Forms the initial approximation for the nonlinear solve. Wrapper that
! retrieves the application context from the SNES and hands the raw array
! of the global vector X to InitialGuessLocal(), which fills it using the
! standard Fortran style of treating the local vector data as a
! multidimensional array over the local mesh.
! Input/output variables:
SNES snes
type(AppCtx), pointer :: ctx
Vec X
PetscErrorCode, intent(out) :: ierr
DM da
! Declarations for use with local arrays:
PetscScalar, pointer :: lx_v(:)

! Use PetscCall() (not PetscCallA()) inside subroutines so errors
! propagate to the caller through ierr instead of aborting; PetscCallA()
! is intended for the main program only.
PetscCall(SNESGetDM(snes, da, ierr))
PetscCall(SNESGetApplicationContext(snes, ctx, ierr))
! Get a pointer to vector data.
! - For default PETSc vectors, VecGetArray() returns a pointer to
! the data array. Otherwise, the routine is implementation dependent.
! - You MUST call VecRestoreArray() when you no longer need access to
! the array.
! - Note that the Fortran pointer interface to VecGetArray() differs
! from the C interface.
PetscCall(VecGetArray(X, lx_v, ierr))

! Compute initial guess over the locally owned part of the grid
PetscCall(InitialGuessLocal(ctx, lx_v, ierr))

! Restore vector; values are inserted into the global vector X in place
PetscCall(VecRestoreArray(X, lx_v, ierr))
end subroutine FormInitialGuess
166: ! ---------------------------------------------------------------------
167: !
168: ! InitialGuessLocal - Computes initial approximation, called by
169: ! the higher level routine FormInitialGuess().
170: !
171: ! Input Parameter:
172: ! x - local vector data
173: !
174: ! Output Parameters:
175: ! x - local vector data
176: ! ierr - error code
177: !
178: ! Notes:
179: ! This routine uses standard Fortran-style computations over a 2-dim array.
180: !
subroutine InitialGuessLocal(ctx, x, ierr)
! Computes the initial approximation over the locally owned part of the
! grid, called by the higher level routine FormInitialGuess(): zero on
! the physical boundary, and a lambda-scaled distance-to-boundary
! profile in the interior. Standard Fortran computations over a 2-dim array.
! Input/output variables:
type(AppCtx) ctx
PetscScalar x(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
PetscErrorCode, intent(out) :: ierr
! Local variables:
PetscInt i, j
PetscReal dx, dy, lam_ratio, dist_y
logical on_boundary

! Mesh spacings and the lambda/(lambda+1) scale factor
dx = 1._PETSC_REAL_KIND/(ctx%mx - 1)
dy = 1._PETSC_REAL_KIND/(ctx%my - 1)
lam_ratio = ctx%lambda/(ctx%lambda + 1._PETSC_REAL_KIND)

do j = ctx%ys, ctx%ye
   ! Distance from row j to the nearer horizontal boundary
   dist_y = min(j - 1, ctx%my - j)*dy
   do i = ctx%xs, ctx%xe
      on_boundary = (i == 1) .or. (i == ctx%mx) .or. (j == 1) .or. (j == ctx%my)
      if (on_boundary) then
         x(i, j) = 0.0
      else
         x(i, j) = lam_ratio*sqrt(min(dx*min(i - 1, ctx%mx - i), dist_y))
      end if
   end do
end do
ierr = 0
end subroutine InitialGuessLocal
207: ! ---------------------------------------------------------------------
208: !
209: ! FormFunctionLocal - Computes nonlinear function, called by
210: ! the higher level routine FormFunction().
211: !
212: ! Input Parameter:
213: ! x - local vector data
214: !
215: ! Output Parameters:
216: ! f - local vector data, f(x)
217: ! ierr - error code
218: !
219: ! Notes:
220: ! This routine uses standard Fortran-style computations over a 2-dim array.
221: !
subroutine FormFunctionLocal(x, f, ctx, ierr)
! Computes the Bratu residual f = -Laplacian(x) - lambda*exp(x) on the
! locally owned part of the grid using the usual 5-point finite-difference
! stencil; called by the higher level routine FormFunction(). Boundary
! rows reduce to the Dirichlet identity f = x. Standard Fortran
! computations over a 2-dim array; x includes ghost points.
! Input/output variables:
type(AppCtx), intent(in) :: ctx
PetscScalar x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
PetscScalar f(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
PetscErrorCode, intent(out) :: ierr
! Local variables:
PetscScalar, parameter :: two = 2.0, one = 1.0
PetscScalar dx, dy, ratio_xy, ratio_yx, lam_scale
PetscScalar center, d2x, d2y
PetscInt i, j

! Mesh spacings and stencil scale factors
dx = one/(ctx%mx - 1)
dy = one/(ctx%my - 1)
lam_scale = dx*dy*ctx%lambda
ratio_xy = dx/dy
ratio_yx = dy/dx

! Compute function over the locally owned part of the grid
do j = ctx%ys, ctx%ye
   do i = ctx%xs, ctx%xe
      if (i == 1 .or. i == ctx%mx .or. j == 1 .or. j == ctx%my) then
         ! Dirichlet boundary point: residual is the value itself
         f(i, j) = x(i, j)
      else
         center = x(i, j)
         d2x = ratio_yx*(two*center - x(i - 1, j) - x(i + 1, j))
         d2y = ratio_xy*(two*center - x(i, j - 1) - x(i, j + 1))
         f(i, j) = d2x + d2y - lam_scale*exp(center)
      end if
   end do
end do
ierr = 0
end subroutine FormFunctionLocal
257: ! ---------------------------------------------------------------------
258: !
259: ! FormJacobian - Evaluates Jacobian matrix.
260: !
261: ! Input Parameters:
262: ! snes - the SNES context
263: ! x - input vector
264: ! dummy - optional user-defined context, as set by SNESSetJacobian()
265: ! (not used here)
266: !
267: ! Output Parameters:
268: ! jac - Jacobian matrix
269: ! jac_prec - optionally different matrix used to construct the preconditioner (not used here)
270: !
271: ! Notes:
272: ! This routine serves as a wrapper for the lower-level routine
273: ! "FormJacobianLocal", where the actual computations are
274: ! done using the standard Fortran style of treating the local
275: ! vector data as a multidimensional array over the local mesh.
276: ! This routine merely accesses the local vector data via
277: ! VecGetArray() and VecRestoreArray().
278: !
279: ! Notes:
280: ! Due to grid point reordering with DMDAs, we must always work
281: ! with the local grid points, and then transform them to the new
282: ! global numbering with the "ltog" mapping
283: ! We cannot work directly with the global numbers for the original
284: ! uniprocessor grid!
285: !
286: ! Two methods are available for imposing this transformation
287: ! when setting matrix entries:
288: ! (A) MatSetValuesLocal(), using the local ordering (including
289: ! ghost points!)
290: ! - Set matrix entries using the local ordering
291: ! by calling MatSetValuesLocal()
292: ! (B) MatSetValues(), using the global ordering
294: ! - Set matrix entries using the global ordering by calling
295: ! MatSetValues()
296: ! Option (A) seems cleaner/easier in many cases, and is the procedure
297: ! used in this example.
298: !
subroutine FormJacobian(snes, X, jac, jac_prec, ctx, ierr)
! Evaluates the Jacobian matrix. Wrapper that scatters ghost values and
! hands the local array to FormJacobianLocal(), which sets the matrix
! entries, then assembles the matrices. See the leading comment block
! for the discussion of local vs global orderings.
! Input/output variables:
SNES snes
Vec X
Mat jac, jac_prec
type(AppCtx) ctx
PetscErrorCode, intent(out) :: ierr
DM da
! Declarations for use with local arrays:
PetscScalar, pointer :: lx_v(:)
Vec localX

! Use PetscCall() (not PetscCallA()) inside subroutines so errors
! propagate to the caller through ierr instead of aborting.
!
! Scatter ghost points to local vector, using the 2-step process
! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
! Computations can be done while messages are in transition,
! by placing code between these two statements.
PetscCall(SNESGetDM(snes, da, ierr))
PetscCall(DMGetLocalVector(da, localX, ierr))
PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))

! Get a pointer to vector data
PetscCall(VecGetArray(localX, lx_v, ierr))

! Compute entries for the locally owned part of the Jacobian preconditioner.
PetscCall(FormJacobianLocal(lx_v, jac_prec, ctx, ierr))

! Assemble matrix, using the 2-step process:
! MatAssemblyBegin(), MatAssemblyEnd()
! Computations can be done while messages are in transition,
! by placing code between these two statements.
PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY, ierr))
if (jac /= jac_prec) then
   PetscCall(MatAssemblyBegin(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
end if
PetscCall(VecRestoreArray(localX, lx_v, ierr))
PetscCall(DMRestoreLocalVector(da, localX, ierr))
PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY, ierr))
if (jac /= jac_prec) then
   PetscCall(MatAssemblyEnd(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
end if

! Tell the matrix we will never add a new nonzero location to the
! matrix. If we do it will generate an error.
PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE, ierr))
end subroutine FormJacobian
350: ! ---------------------------------------------------------------------
351: !
352: ! FormJacobianLocal - Computes Jacobian matrix used to compute the preconditioner,
353: ! called by the higher level routine FormJacobian().
354: !
355: ! Input Parameters:
356: ! x - local vector data
357: !
358: ! Output Parameters:
359: ! jac_prec - Jacobian matrix used to compute the preconditioner
360: ! ierr - error code
361: !
362: ! Notes:
363: ! This routine uses standard Fortran-style computations over a 2-dim array.
364: !
365: ! Notes:
366: ! Due to grid point reordering with DMDAs, we must always work
367: ! with the local grid points, and then transform them to the new
368: ! global numbering with the "ltog" mapping
369: ! We cannot work directly with the global numbers for the original
370: ! uniprocessor grid!
371: !
372: ! Two methods are available for imposing this transformation
373: ! when setting matrix entries:
374: ! (A) MatSetValuesLocal(), using the local ordering (including
375: ! ghost points!)
376: ! - Set matrix entries using the local ordering
377: ! by calling MatSetValuesLocal()
378: ! (B) MatSetValues(), using the global ordering
379: ! - Then apply this map explicitly yourself
380: ! - Set matrix entries using the global ordering by calling
381: ! MatSetValues()
382: ! Option (A) seems cleaner/easier in many cases, and is the procedure
383: ! used in this example.
384: !
subroutine FormJacobianLocal(x, jac_prec, ctx, ierr)
! Computes the Jacobian matrix used to build the preconditioner, called
! by the higher level routine FormJacobian(). Entries are inserted with
! MatSetValuesLocal() using the local (ghosted) ordering; see the leading
! comment block for the local-vs-global ordering discussion.
! Input/output variables:
type(AppCtx) ctx
PetscScalar x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
Mat jac_prec
PetscErrorCode, intent(out) :: ierr
! Local variables:
PetscInt row, col(5), i, j
PetscScalar, parameter :: two = 2.0, one = 1.0
PetscScalar hx, hy, hxdhy, hydhx, sc, v(5)

! Initialize so ierr is defined even when this process owns no points
ierr = 0

! Set parameters
hx = one/(ctx%mx - 1)
hy = one/(ctx%my - 1)
sc = hx*hy
hxdhy = hx/hy
hydhx = hy/hx

! Compute entries for the locally owned part of the Jacobian.
! - Each processor inserts only elements that it owns locally (any
!   non-local elements are sent to the appropriate processor during
!   matrix assembly).
! - All entries for a particular row are set at once.
! - MatSetValuesLocal() uses 0-based row and column numbers in Fortran
!   as well as in C.
! Use PetscCall() (not PetscCallA()) inside subroutines so errors
! propagate to the caller through ierr instead of aborting.
do j = ctx%ys, ctx%ye
   ! 0-based local (ghosted) row index of the point just before (xs, j)
   row = (j - ctx%gys)*ctx%gxm + ctx%xs - ctx%gxs - 1
   do i = ctx%xs, ctx%xe
      row = row + 1
      ! boundary points: identity row
      if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
         col(1) = row
         v(1) = one
         PetscCall(MatSetValuesLocal(jac_prec, 1_PETSC_INT_KIND, [row], 1_PETSC_INT_KIND, col, v, INSERT_VALUES, ierr))
      ! interior grid points: 5-point stencil
      else
         v(1) = -hxdhy
         v(2) = -hydhx
         v(3) = two*(hydhx + hxdhy) - sc*ctx%lambda*exp(x(i, j))
         v(4) = -hydhx
         v(5) = -hxdhy
         col(1) = row - ctx%gxm
         col(2) = row - 1
         col(3) = row
         col(4) = row + 1
         col(5) = row + ctx%gxm
         PetscCall(MatSetValuesLocal(jac_prec, 1_PETSC_INT_KIND, [row], 5_PETSC_INT_KIND, col, v, INSERT_VALUES, ierr))
      end if
   end do
end do
end subroutine FormJacobianLocal
444: end module ex5module
program main
use ex5module
implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
! Variables:
!   snes        - nonlinear solver
!   x, r        - solution, residual vectors
!   J           - Jacobian matrix
!   its         - iterations for convergence
!   matrix_free - true when -snes_mf (matrix-free) was requested
!
SNES snes
Vec x, r
Mat J
PetscErrorCode ierr
PetscInt its
PetscBool flg, matrix_free
PetscReal, parameter :: lambda_min = 0.0, lambda_max = 6.81
type(AppCtx) ctx
DM da

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(PetscInitialize(ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, ctx%rank, ierr))

! Initialize problem parameters (Bratu parameter, overridable with -par)
ctx%lambda = 6.0
PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-par', ctx%lambda, flg, ierr))
PetscCheckA(ctx%lambda < lambda_max .and. ctx%lambda > lambda_min, PETSC_COMM_SELF, PETSC_ERR_USER, 'Lambda provided with -par is out of range')

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(SNESCreate(PETSC_COMM_WORLD, snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Create distributed array (DMDA) to manage parallel grid and vectors.
! This really needs only the star-type stencil, but we use the box
! stencil temporarily.
PetscCallA(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, 4_PETSC_INT_KIND, 4_PETSC_INT_KIND, PETSC_DECIDE, PETSC_DECIDE, 1_PETSC_INT_KIND, 1_PETSC_INT_KIND, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, da, ierr))
PetscCallA(DMSetFromOptions(da, ierr))
PetscCallA(DMSetUp(da, ierr))

PetscCallA(DMDAGetInfo(da, PETSC_NULL_INTEGER, ctx%mx, ctx%my, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMDASTENCILTYPE, ierr))

!
! Visualize the distribution of the array across the processors
!
! PetscCallA(DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr))

! Extract global and local vectors from DMDA; then duplicate for remaining
! vectors that are the same types
PetscCallA(DMCreateGlobalVector(da, x, ierr))
PetscCallA(VecDuplicate(x, r, ierr))

! Get local grid boundaries (for 2-dimensional DMDA)
PetscCallA(DMDAGetCorners(da, ctx%xs, ctx%ys, PETSC_NULL_INTEGER, ctx%xm, ctx%ym, PETSC_NULL_INTEGER, ierr))
PetscCallA(DMDAGetGhostCorners(da, ctx%gxs, ctx%gys, PETSC_NULL_INTEGER, ctx%gxm, ctx%gym, PETSC_NULL_INTEGER, ierr))

! Here we shift the starting indices up by one so that we can easily
! use the Fortran convention of 1-based indices (rather than 0-based).
ctx%xs = ctx%xs + 1
ctx%ys = ctx%ys + 1
ctx%gxs = ctx%gxs + 1
ctx%gys = ctx%gys + 1

ctx%ye = ctx%ys + ctx%ym - 1
ctx%xe = ctx%xs + ctx%xm - 1
ctx%gye = ctx%gys + ctx%gym - 1
ctx%gxe = ctx%gxs + ctx%gxm - 1

PetscCallA(SNESSetApplicationContext(snes, ctx, ierr))

! Set function evaluation routine and vector
PetscCallA(SNESSetFunction(snes, r, FormFunction, ctx, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Set Jacobian matrix data structure and default Jacobian evaluation
! routine. User can override with:
!   -snes_fd : default finite differencing approximation of Jacobian
!   -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!              (unless user explicitly sets preconditioner)
!   -snes_mf_operator : form matrix used to construct the preconditioner
!              as set by the user, but use matrix-free approx for
!              Jacobian-vector products within Newton-Krylov method
!
! Note: For the parallel case, vectors and matrices MUST be partitioned
! accordingly. When using distributed arrays (DMDAs) to create vectors,
! the DMDAs determine the problem partitioning, so the generic
! MatCreate() routine is NOT sufficient; DMCreateMatrix() is used instead.
PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-snes_mf', matrix_free, ierr))
if (.not. matrix_free) then
   PetscCallA(DMSetMatType(da, MATAIJ, ierr))
   PetscCallA(DMCreateMatrix(da, J, ierr))
   PetscCallA(SNESSetJacobian(snes, J, J, FormJacobian, ctx, ierr))
end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Customize nonlinear solver; set runtime options
! (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(SNESSetDM(snes, da, ierr))
PetscCallA(SNESSetFromOptions(snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Evaluate initial guess; then solve nonlinear system.
! Note: the user should initialize the vector x with the initial guess
! prior to calling SNESSolve(); for a zero initial guess use VecSet().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(FormInitialGuess(snes, x, ierr))
PetscCallA(SNESSolve(snes, PETSC_NULL_VEC, x, ierr))
PetscCallA(SNESGetIterationNumber(snes, its, ierr))
if (ctx%rank == 0) then
   write (6, '(a,i5)') 'Number of SNES iterations = ', its
end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Free work space. All PETSc objects should be destroyed when they
! are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Use an if/end if block: the PetscCallA() macro expands to more than one
! statement, so a single-line if would guard only the first of them.
if (.not. matrix_free) then
   PetscCallA(MatDestroy(J, ierr))
end if
PetscCallA(VecDestroy(x, ierr))
PetscCallA(VecDestroy(r, ierr))
PetscCallA(SNESDestroy(snes, ierr))
PetscCallA(DMDestroy(da, ierr))

PetscCallA(PetscFinalize(ierr))
end program main
600: !
601: !/*TEST
602: !
603: ! test:
604: ! nsize: 4
605: ! args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
606: ! requires: !single
607: !
608: ! test:
609: ! suffix: 2
610: ! nsize: 4
611: ! args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
612: ! requires: !single
613: !
614: ! test:
615: ! suffix: 3
616: ! nsize: 3
617: ! args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
618: ! requires: !single
619: !
620: ! test:
621: ! suffix: 4
622: ! nsize: 3
623: ! args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
624: ! requires: !single
625: !
626: ! test:
627: ! suffix: 5
628: ! requires: !single
629: !
630: !TEST*/