Actual source code: ex5f90t.F90
1: !
2: ! Description: Solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DMDAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
7: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
8: !
9: !
10: ! --------------------------------------------------------------------------
11: !
12: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
13: ! the partial differential equation
14: !
15: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
16: !
17: ! with boundary conditions
18: !
19: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
20: !
21: ! A finite difference approximation with the usual 5-point stencil
22: ! is used to discretize the boundary value problem to obtain a nonlinear
23: ! system of equations.
24: !
25: ! The uniprocessor version of this code is snes/tutorials/ex4f.F
26: !
27: ! --------------------------------------------------------------------------
! The following preprocessor #include lines must appear before the module
! definition below: PETSc include files cannot be placed inside a module or
! interface block, because those constructs cannot accept the declarations
! the include files would introduce.
31: !
32: #include <petsc/finclude/petscdmda.h>
33: #include <petsc/finclude/petscsnes.h>
module ex5f90tmodule
use petscsnes
use petscdmda
implicit none
! Application context shared by the SNES call-back routines (residual,
! Jacobian, and initial-guess evaluation).  It is attached to the SNES
! object in the main program via SNESSetApplicationContext().
type AppCtx
type(tDM) da                         ! distributed array managing the parallel grid
PetscInt xs, xe, xm, gxs, gxe, gxm   ! owned (xs:xe, width xm) and ghosted (gxs:gxe, width gxm) x-range, 1-based
PetscInt ys, ye, ym, gys, gye, gym   ! owned (ys:ye, width ym) and ghosted (gys:gye, width gym) y-range, 1-based
PetscInt mx, my                      ! global grid dimensions
PetscMPIInt rank                     ! MPI rank of this process
PetscReal lambda                     ! Bratu nonlinearity parameter (set by -par)
end type AppCtx
! Named scalar constants used throughout the discretization
PetscScalar, parameter :: one = 1.0, two = 2.0
contains
49: ! ---------------------------------------------------------------------
50: !
51: ! FormFunction - Evaluates nonlinear function, F(x).
52: !
53: ! Input Parameters:
54: ! snes - the SNES context
55: ! X - input vector
56: ! dummy - optional user-defined context, as set by SNESSetFunction()
57: ! (not used here)
58: !
59: ! Output Parameter:
60: ! F - function vector
61: !
62: ! Notes:
63: ! This routine serves as a wrapper for the lower-level routine
64: ! "FormFunctionLocal", where the actual computations are
65: ! done using the standard Fortran style of treating the local
66: ! vector data as a multidimensional array over the local mesh.
67: ! This routine merely handles ghost point scatters and accesses
68: ! the local vector data via VecGetArray() and VecRestoreArray().
69: !
subroutine FormFunction(snesIn, X, F, ctx, ierr)
! Input/output variables:
type(tSNES) snesIn
type(tVec) X, F
PetscErrorCode ierr
type(AppCtx) ctx

! Pointers used to view the vector entries as flat Fortran arrays:
PetscScalar, pointer :: x_arr(:), f_arr(:)
type(tVec) x_local

! Scatter ghost points into a local work vector via the two-step
! DMGlobalToLocalBegin()/DMGlobalToLocalEnd() protocol; useful computation
! may be placed between the two calls to overlap the communication.
PetscCall(DMGetLocalVector(ctx%da, x_local, ierr))
PetscCall(DMGlobalToLocalBegin(ctx%da, X, INSERT_VALUES, x_local, ierr))
PetscCall(DMGlobalToLocalEnd(ctx%da, X, INSERT_VALUES, x_local, ierr))

! Obtain direct access to the vector data.  Every VecGetArray() MUST be
! paired with a VecRestoreArray() once access is no longer needed.
PetscCall(VecGetArray(x_local, x_arr, ierr))
PetscCall(VecGetArray(F, f_arr, ierr))

! Evaluate the residual over the locally owned part of the grid
PetscCall(FormFunctionLocal(x_arr, f_arr, ctx, ierr))

! Release array access, return the work vector to the DM, and log the flops
PetscCall(VecRestoreArray(x_local, x_arr, ierr))
PetscCall(VecRestoreArray(F, f_arr, ierr))
PetscCall(DMRestoreLocalVector(ctx%da, x_local, ierr))
PetscCall(PetscLogFlops(11.0d0*ctx%ym*ctx%xm, ierr))

! PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
! PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
end subroutine FormFunction
113: ! ---------------------------------------------------------------------
114: !
115: ! FormInitialGuess - Forms initial approximation.
116: !
117: ! Input Parameters:
118: ! X - vector
119: !
120: ! Output Parameter:
121: ! X - vector
122: !
123: ! Notes:
124: ! This routine serves as a wrapper for the lower-level routine
125: ! "InitialGuessLocal", where the actual computations are
126: ! done using the standard Fortran style of treating the local
127: ! vector data as a multidimensional array over the local mesh.
128: ! This routine merely handles ghost point scatters and accesses
129: ! the local vector data via VecGetArray() and VecRestoreArray().
130: !
subroutine FormInitialGuess(mysnes, X, ierr)
! FormInitialGuess - forms the initial approximation by retrieving the
! application context from the SNES object and delegating the grid-based
! computation to InitialGuessLocal().
!
! Input/output variables:
type(tSNES) mysnes
type(AppCtx), pointer:: pctx
type(tVec) X
PetscErrorCode, intent(out) :: ierr

! Pointer used to view the vector data as a flat Fortran array:
PetscScalar, pointer :: lx_v(:)

! NOTE: PetscCall() is used here instead of PetscCallA(): PetscCallA()
! aborts and is intended for main programs, whereas inside a subroutine
! with an ierr argument the error should propagate to the caller.  This
! also makes the routine consistent with FormFunction().
PetscCall(SNESGetApplicationContext(mysnes, pctx, ierr))

! Get a pointer to the vector data.  Every VecGetArray() MUST be paired
! with a VecRestoreArray() once access is no longer needed.
PetscCall(VecGetArray(X, lx_v, ierr))

! Compute the initial guess over the locally owned part of the grid
PetscCall(InitialGuessLocal(pctx, lx_v, ierr))

! Restore the vector
PetscCall(VecRestoreArray(X, lx_v, ierr))
end subroutine FormInitialGuess
157: ! ---------------------------------------------------------------------
158: !
159: ! InitialGuessLocal - Computes initial approximation, called by
160: ! the higher level routine FormInitialGuess().
161: !
162: ! Input Parameter:
163: ! x - local vector data
164: !
165: ! Output Parameters:
166: ! x - local vector data
167: ! ierr - error code
168: !
169: ! Notes:
170: ! This routine uses standard Fortran-style computations over a 2-dim array.
171: !
subroutine InitialGuessLocal(ctx, x, ierr)
! InitialGuessLocal - computes the initial approximation on the locally
! owned 2-D sub-grid using standard Fortran array computations; called by
! the higher-level routine FormInitialGuess().
type(AppCtx), intent(in) :: ctx
PetscScalar, intent(out) :: x(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
PetscErrorCode, intent(out) :: ierr
! Local variables:
PetscInt i, j
PetscScalar temp1, temp, hx, hy
! Set parameters: mesh spacings for the global mx-by-my grid on the unit square
hx = one/(PetscIntToReal(ctx%mx - 1))
hy = one/(PetscIntToReal(ctx%my - 1))
temp1 = ctx%lambda/(ctx%lambda + one)
do j = ctx%ys, ctx%ye
! scaled distance from row j to the nearer of the bottom/top boundaries
temp = PetscIntToReal(min(j - 1, ctx%my - j))*hy
do i = ctx%xs, ctx%xe
if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
! homogeneous Dirichlet boundary condition u = 0 on the domain boundary
x(i, j) = 0.0
else
! NOTE(review): PetscIntToReal is applied to min(i-1,mx-i)*hx -- already a
! real-valued product -- and to temp, which is also already real; presumably
! only the integer min() needs conversion.  Confirm against the C ex5.
x(i, j) = temp1*sqrt(min(PetscIntToReal(min(i - 1, ctx%mx - i)*hx), PetscIntToReal(temp)))
end if
end do
end do
ierr = 0
end
199: ! ---------------------------------------------------------------------
200: !
201: ! FormFunctionLocal - Computes nonlinear function, called by
202: ! the higher level routine FormFunction().
203: !
204: ! Input Parameter:
205: ! x - local vector data
206: !
207: ! Output Parameters:
208: ! f - local vector data, f(x)
209: ! ierr - error code
210: !
211: ! Notes:
212: ! This routine uses standard Fortran-style computations over a 2-dim array.
213: !
subroutine FormFunctionLocal(x, f, ctx, ierr)
! Evaluates the discrete Bratu residual f(x) over the locally owned grid
! points with the usual 5-point finite-difference stencil; called by the
! higher-level routine FormFunction().
type(AppCtx), intent(in) :: ctx
PetscScalar, intent(in) :: x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
PetscScalar, intent(out) :: f(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
PetscErrorCode, intent(out) :: ierr
PetscScalar hx, hy, hxdhy, hydhx, sc, u, uxx, uyy
PetscInt i, j

! Mesh spacings and stencil scale factors
hx = one/PetscIntToReal(ctx%mx - 1)
hy = one/PetscIntToReal(ctx%my - 1)
sc = hx*hy*ctx%lambda
hxdhy = hx/hy
hydhx = hy/hx

! Residual over the locally owned part of the grid
do j = ctx%ys, ctx%ye
  do i = ctx%xs, ctx%xe
    ! Dirichlet boundary rows: the residual is simply the solution value
    if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
      f(i, j) = x(i, j)
      cycle
    end if
    ! Interior points: scaled second differences minus the nonlinear term
    u = x(i, j)
    uxx = hydhx*(two*u - x(i - 1, j) - x(i + 1, j))
    uyy = hxdhy*(two*u - x(i, j - 1) - x(i, j + 1))
    f(i, j) = uxx + uyy - sc*exp(u)
  end do
end do
ierr = 0
end subroutine FormFunctionLocal
245: ! ---------------------------------------------------------------------
246: !
247: ! FormJacobian - Evaluates Jacobian matrix.
248: !
249: ! Input Parameters:
250: ! snes - the SNES context
251: ! x - input vector
252: ! dummy - optional ctx-defined context, as set by SNESSetJacobian()
253: ! (not used here)
254: !
255: ! Output Parameters:
256: ! jac - Jacobian matrix
257: ! jac_prec - optionally different matrix used to construct the preconditioner (not used here)
258: !
259: ! Notes:
260: ! This routine serves as a wrapper for the lower-level routine
261: ! "FormJacobianLocal", where the actual computations are
262: ! done using the standard Fortran style of treating the local
263: ! vector data as a multidimensional array over the local mesh.
264: ! This routine merely accesses the local vector data via
265: ! VecGetArray() and VecRestoreArray().
266: !
267: ! Notes:
268: ! Due to grid point reordering with DMDAs, we must always work
269: ! with the local grid points, and then transform them to the new
270: ! global numbering with the "ltog" mapping
271: ! We cannot work directly with the global numbers for the original
272: ! uniprocessor grid!
273: !
274: ! Two methods are available for imposing this transformation
275: ! when setting matrix entries:
276: ! (A) MatSetValuesLocal(), using the local ordering (including
277: ! ghost points!)
278: ! - Set matrix entries using the local ordering
279: ! by calling MatSetValuesLocal()
280: ! (B) MatSetValues(), using the global ordering
281: ! - Use DMGetLocalToGlobalMapping() then
282: ! ISLocalToGlobalMappingGetIndices() to extract the local-to-global map
283: ! - Then apply this map explicitly yourself
284: ! - Set matrix entries using the global ordering by calling
285: ! MatSetValues()
286: ! Option (A) seems cleaner/easier in many cases, and is the procedure
287: ! used in this example.
288: !
subroutine FormJacobian(mysnes, X, jac, jac_prec, ctx, ierr)
! FormJacobian - wrapper that scatters ghost values, exposes the local
! vector data, and delegates the matrix entry computation to
! FormJacobianLocal().
!
! Input/output variables:
type(tSNES) mysnes
type(tVec) X
type(tMat) jac, jac_prec
type(AppCtx) ctx
PetscErrorCode ierr

! Pointer used to view the local vector data as a flat Fortran array:
PetscScalar, pointer :: lx_v(:)
type(tVec) localX

! Scatter ghost points to the local vector with the two-step
! DMGlobalToLocalBegin()/DMGlobalToLocalEnd() process; computation may be
! placed between the two calls to overlap the communication.
! NOTE: PetscCall() is used instead of PetscCallA(): PetscCallA() aborts and
! is intended for main programs, whereas inside a subroutine with an ierr
! argument the error should propagate to the caller (as in FormFunction()).
PetscCall(DMGetLocalVector(ctx%da, localX, ierr))
PetscCall(DMGlobalToLocalBegin(ctx%da, X, INSERT_VALUES, localX, ierr))
PetscCall(DMGlobalToLocalEnd(ctx%da, X, INSERT_VALUES, localX, ierr))

! Get a pointer to the vector data
PetscCall(VecGetArray(localX, lx_v, ierr))

! Compute entries for the locally owned part of the Jacobian preconditioner.
PetscCall(FormJacobianLocal(lx_v, jac_prec, ctx, ierr))

! Assemble the matrix using the two-step MatAssemblyBegin()/MatAssemblyEnd()
! process; computation may be placed between the two calls.
PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY, ierr))
! if (jac .ne. jac_prec) then
PetscCall(MatAssemblyBegin(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
! endif
PetscCall(VecRestoreArray(localX, lx_v, ierr))
PetscCall(DMRestoreLocalVector(ctx%da, localX, ierr))
PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY, ierr))
! if (jac .ne. jac_prec) then
PetscCall(MatAssemblyEnd(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
! endif

! Tell the matrix we will never add a new nonzero location; doing so
! afterwards would generate an error.
PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE, ierr))
end subroutine FormJacobian
339: ! ---------------------------------------------------------------------
340: !
341: ! FormJacobianLocal - Computes Jacobian matrix used to compute the preconditioner,
342: ! called by the higher level routine FormJacobian().
343: !
344: ! Input Parameters:
345: ! x - local vector data
346: !
347: ! Output Parameters:
348: ! jac_prec - Jacobian matrix used to compute the preconditioner
349: ! ierr - error code
350: !
351: ! Notes:
352: ! This routine uses standard Fortran-style computations over a 2-dim array.
353: !
354: ! Notes:
355: ! Due to grid point reordering with DMDAs, we must always work
356: ! with the local grid points, and then transform them to the new
357: ! global numbering with the "ltog" mapping
358: ! We cannot work directly with the global numbers for the original
359: ! uniprocessor grid!
360: !
361: ! Two methods are available for imposing this transformation
362: ! when setting matrix entries:
363: ! (A) MatSetValuesLocal(), using the local ordering (including
364: ! ghost points!)
365: ! - Set matrix entries using the local ordering
366: ! by calling MatSetValuesLocal()
367: ! (B) MatSetValues(), using the global ordering
368: ! - Set matrix entries using the global ordering by calling
369: ! MatSetValues()
370: ! Option (A) seems cleaner/easier in many cases, and is the procedure
371: ! used in this example.
372: !
subroutine FormJacobianLocal(x, jac_prec, ctx, ierr)
! FormJacobianLocal - fills the Jacobian matrix used to build the
! preconditioner over the locally owned grid points with the 5-point
! stencil.  Entries are set through the local (ghosted) ordering via
! MatSetValuesLocal(); called by the higher-level routine FormJacobian().
!
! Input/output variables:
type(AppCtx), intent(in) :: ctx
PetscScalar, intent(in) :: x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
type(tMat) jac_prec
PetscErrorCode, intent(out) :: ierr

! Local variables:
PetscInt row, col(5), i, j
PetscScalar hx, hy, hxdhy, hydhx, sc, v(5)

! Set parameters: mesh spacings and stencil scale factors
hx = one/PetscIntToReal(ctx%mx - 1)
hy = one/PetscIntToReal(ctx%my - 1)
sc = hx*hy
hxdhy = hx/hy
hydhx = hy/hx

! Each process inserts only the rows it owns; any non-local columns are
! communicated during matrix assembly.  Row and column indices are 0-based
! in the local (ghosted) ordering, as required by MatSetValuesLocal().
! NOTE: PetscCall() is used instead of PetscCallA() so that errors propagate
! to the caller through ierr rather than aborting the whole program.
do j = ctx%ys, ctx%ye
  ! 0-based local row index of the point just before (ctx%xs, j)
  row = (j - ctx%gys)*ctx%gxm + ctx%xs - ctx%gxs - 1
  do i = ctx%xs, ctx%xe
    row = row + 1
    if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
      ! boundary points: identity row enforcing the Dirichlet condition
      col(1) = row
      v(1) = one
      PetscCall(MatSetValuesLocal(jac_prec, 1_PETSC_INT_KIND, [row], 1_PETSC_INT_KIND, col, v, INSERT_VALUES, ierr))
    else
      ! interior grid points: 5-point stencil (south, west, center, east, north)
      v(1) = -hxdhy
      v(2) = -hydhx
      v(3) = two*(hydhx + hxdhy) - sc*ctx%lambda*exp(x(i, j))
      v(4) = -hydhx
      v(5) = -hxdhy
      col(1) = row - ctx%gxm
      col(2) = row - 1
      col(3) = row
      col(4) = row + 1
      col(5) = row + ctx%gxm
      PetscCall(MatSetValuesLocal(jac_prec, 1_PETSC_INT_KIND, [row], 5_PETSC_INT_KIND, col, v, INSERT_VALUES, ierr))
    end if
  end do
end do
ierr = 0
end subroutine FormJacobianLocal
431: end module
program main
! Solves the 2-D Bratu (solid fuel ignition) problem with SNES on a
! DMDA-managed distributed grid; see the header of this file for details.
use petscdmda
use petscsnes
use ex5f90tmodule
implicit none
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
! Variables:
! mysnes      - nonlinear solver
! x, r        - solution, residual vectors
! J           - Jacobian matrix
! its         - iterations for convergence
! matrix_free - true selects the matrix-free version (-snes_mf)
!
type(tSNES) mysnes
type(tVec) x, r
type(tMat) J
PetscErrorCode ierr
PetscInt its
PetscBool flg, matrix_free
PetscReal, parameter :: lambda_min = 0.0, lambda_max = 6.81
type(AppCtx) ctx
type(tPetscOptions) :: options

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(PetscInitialize(ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, ctx%rank, ierr))

! Initialize problem parameters
options%v = 0
ctx%lambda = 6.0
PetscCallA(PetscOptionsGetReal(options, PETSC_NULL_CHARACTER, '-par', ctx%lambda, flg, ierr))
! The documented admissible range (see file header) is 0 <= par <= 6.81,
! i.e. inclusive, so use <= and >= rather than strict inequalities.
PetscCheckA(ctx%lambda <= lambda_max .and. ctx%lambda >= lambda_min, PETSC_COMM_SELF, PETSC_ERR_USER, 'Lambda provided with -par is out of range')

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(SNESCreate(PETSC_COMM_WORLD, mysnes, ierr))

! Create distributed array (DMDA) to manage the parallel grid and vectors.
! This really needs only the star-type stencil, but we use the box stencil
! temporarily.
PetscCallA(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, 4_PETSC_INT_KIND, 4_PETSC_INT_KIND, PETSC_DECIDE, PETSC_DECIDE, 1_PETSC_INT_KIND, 1_PETSC_INT_KIND, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, ctx%da, ierr))
PetscCallA(DMSetFromOptions(ctx%da, ierr))
PetscCallA(DMSetUp(ctx%da, ierr))
PetscCallA(DMDAGetInfo(ctx%da, PETSC_NULL_INTEGER, ctx%mx, ctx%my, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMDASTENCILTYPE, ierr))

! Visualize the distribution of the array across the processors:
! PetscCallA(DMView(ctx%da,PETSC_VIEWER_DRAW_WORLD,ierr))

! Extract a global vector from the DMDA; then duplicate for remaining
! vectors that are the same types
PetscCallA(DMCreateGlobalVector(ctx%da, x, ierr))
PetscCallA(VecDuplicate(x, r, ierr))

! Get local and ghosted grid boundaries (for the 2-dimensional DMDA)
PetscCallA(DMDAGetCorners(ctx%da, ctx%xs, ctx%ys, PETSC_NULL_INTEGER, ctx%xm, ctx%ym, PETSC_NULL_INTEGER, ierr))
PetscCallA(DMDAGetGhostCorners(ctx%da, ctx%gxs, ctx%gys, PETSC_NULL_INTEGER, ctx%gxm, ctx%gym, PETSC_NULL_INTEGER, ierr))

! Shift the starting indices up by one so that the Fortran convention of
! 1-based indices (rather than 0-based indices) can be used throughout
ctx%xs = ctx%xs + 1
ctx%ys = ctx%ys + 1
ctx%gxs = ctx%gxs + 1
ctx%gys = ctx%gys + 1

ctx%ye = ctx%ys + ctx%ym - 1
ctx%xe = ctx%xs + ctx%xm - 1
ctx%gye = ctx%gys + ctx%gym - 1
ctx%gxe = ctx%gxs + ctx%gxm - 1

PetscCallA(SNESSetApplicationContext(mysnes, ctx, ierr))

! Set function evaluation routine and vector
PetscCallA(SNESSetFunction(mysnes, r, FormFunction, ctx, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Set Jacobian matrix data structure and default Jacobian evaluation
! routine.  The user can override with:
!   -snes_fd          : finite differencing approximation of Jacobian
!   -snes_mf          : matrix-free Newton-Krylov method with no preconditioning
!   -snes_mf_operator : form the preconditioning matrix as set by the user,
!                       but use matrix-free Jacobian-vector products
!
! Note: the DMDA determines the problem partitioning, so DMCreateMatrix()
! (not the generic MatCreate()) must be used so that the matrix layout is
! compatible with the vector distribution.
PetscCallA(PetscOptionsHasName(options, PETSC_NULL_CHARACTER, '-snes_mf', matrix_free, ierr))
if (.not. matrix_free) then
PetscCallA(DMSetMatType(ctx%da, MATAIJ, ierr))
PetscCallA(DMCreateMatrix(ctx%da, J, ierr))
PetscCallA(SNESSetJacobian(mysnes, J, J, FormJacobian, ctx, ierr))
end if

! Customize nonlinear solver; set runtime options
! (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
PetscCallA(SNESSetFromOptions(mysnes, ierr))

! Evaluate initial guess; then solve the nonlinear system.  The user must
! initialize x before calling SNESSolve() (use VecSet() for a zero guess).
PetscCallA(FormInitialGuess(mysnes, x, ierr))
PetscCallA(SNESSolve(mysnes, PETSC_NULL_VEC, x, ierr))
PetscCallA(SNESGetIterationNumber(mysnes, its, ierr))
if (ctx%rank == 0) then
write (6, 100) its
end if
100 format('Number of SNES iterations = ', i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Free work space.  All PETSc objects should be destroyed when they
! are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if (.not. matrix_free) PetscCallA(MatDestroy(J, ierr))
PetscCallA(VecDestroy(x, ierr))
PetscCallA(VecDestroy(r, ierr))
PetscCallA(SNESDestroy(mysnes, ierr))
PetscCallA(DMDestroy(ctx%da, ierr))

PetscCallA(PetscFinalize(ierr))
end
586: !/*TEST
587: !
588: ! test:
589: ! nsize: 4
590: ! args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
591: !
592: !TEST*/