Actual source code: ex5f90t.F90
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!          u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
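!
!  For reference, the scaled residual evaluated at interior grid points in
!  FormFunctionLocal() below is (with hx = 1/(mx-1), hy = 1/(my-1)):
!
!     f(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!            + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!            - hx*hy*lambda*exp(u(i,j))
!
!  while at boundary points the residual is simply f(i,j) = u(i,j).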
!
!  The uniprocessor version of this code is snes/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following #include statements must appear before the module below,
!  because modules and interfaces cannot contain the declarations made in
!  the PETSc include files.
!
#include <petsc/finclude/petscdmda.h>
#include <petsc/finclude/petscsnes.h>
module ex5f90tmodule
  use petscsnes
  use petscdmda
  implicit none
  type AppCtx
    type(tDM) da
    PetscInt xs, xe, xm, gxs, gxe, gxm
    PetscInt ys, ye, ym, gys, gye, gym
    PetscInt mx, my
    PetscMPIInt rank
    PetscReal lambda
  end type AppCtx

contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X    - input vector
!  ctx  - user-defined application context, as set by SNESSetFunction()
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArray() and VecRestoreArray().
!
subroutine FormFunction(snesIn, X, F, ctx, ierr)
  ! Input/output variables:
  type(tSNES) snesIn
  type(tVec) X, F
  PetscErrorCode ierr
  type(AppCtx) ctx

  ! Declarations for use with local arrays:
  PetscScalar, pointer :: lx_v(:), lf_v(:)
  type(tVec) localX

  ! Scatter ghost points to local vector, using the 2-step process
  !    DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  ! By placing code between these two statements, computations can
  ! be done while messages are in transition.
  PetscCall(DMGetLocalVector(ctx%da, localX, ierr))
  PetscCall(DMGlobalToLocalBegin(ctx%da, X, INSERT_VALUES, localX, ierr))
  PetscCall(DMGlobalToLocalEnd(ctx%da, X, INSERT_VALUES, localX, ierr))
  ! Get a pointer to vector data.
  !    - VecGetArray() returns a pointer to the data array.
  !    - You MUST call VecRestoreArray() when you no longer need access to
  !      the array.
  PetscCall(VecGetArray(localX, lx_v, ierr))
  PetscCall(VecGetArray(F, lf_v, ierr))
  ! Compute function over the locally owned part of the grid
  PetscCall(FormFunctionLocal(lx_v, lf_v, ctx, ierr))

  ! Restore vectors
  PetscCall(VecRestoreArray(localX, lx_v, ierr))
  PetscCall(VecRestoreArray(F, lf_v, ierr))

  ! Insert values into global vector

  PetscCall(DMRestoreLocalVector(ctx%da, localX, ierr))
  PetscCall(PetscLogFlops(11.0d0*ctx%ym*ctx%xm, ierr))

  ! PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
  ! PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
end subroutine formfunction
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  X - vector
!
!  Output Parameter:
!  X - vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArray() and VecRestoreArray().
!
subroutine FormInitialGuess(mysnes, X, ierr)
  ! Input/output variables:
  type(tSNES) mysnes
  type(AppCtx), pointer :: pctx
  type(tVec) X
  PetscErrorCode ierr

  ! Declarations for use with local arrays:
  PetscScalar, pointer :: lx_v(:)

  ierr = 0
  PetscCallA(SNESGetApplicationContext(mysnes, pctx, ierr))

  ! Get a pointer to vector data.
  !    - VecGetArray() returns a pointer to the data array.
  !    - You MUST call VecRestoreArray() when you no longer need access to
  !      the array.
  PetscCallA(VecGetArray(X, lx_v, ierr))

  ! Compute initial guess over the locally owned part of the grid
  PetscCallA(InitialGuessLocal(pctx, lx_v, ierr))

  ! Restore vector
  PetscCallA(VecRestoreArray(X, lx_v, ierr))

  ! Insert values into global vector
end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x    - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
subroutine InitialGuessLocal(ctx, x, ierr)
  ! Input/output variables:
  type(AppCtx) ctx
  PetscScalar x(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
  PetscErrorCode ierr

  ! Local variables:
  PetscInt i, j
  PetscScalar temp1, temp, hx, hy
  PetscScalar one

  ! Set parameters

  ierr = 0
  one = 1.0
  hx = one/(PetscIntToReal(ctx%mx - 1))
  hy = one/(PetscIntToReal(ctx%my - 1))
  temp1 = ctx%lambda/(ctx%lambda + one)

  do j = ctx%ys, ctx%ye
    temp = PetscIntToReal(min(j - 1, ctx%my - j))*hy
    do i = ctx%xs, ctx%xe
      if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
        x(i, j) = 0.0
      else
        x(i, j) = temp1*sqrt(min(PetscIntToReal(min(i - 1, ctx%mx - i)*hx), PetscIntToReal(temp)))
      end if
    end do
  end do

end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f    - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
subroutine FormFunctionLocal(x, f, ctx, ierr)
  ! Input/output variables:
  type(AppCtx) ctx
  PetscScalar x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
  PetscScalar f(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
  PetscErrorCode ierr

  ! Local variables:
  PetscScalar two, one, hx, hy, hxdhy, hydhx, sc
  PetscScalar u, uxx, uyy
  PetscInt i, j

  one = 1.0
  two = 2.0
  hx = one/PetscIntToReal(ctx%mx - 1)
  hy = one/PetscIntToReal(ctx%my - 1)
  sc = hx*hy*ctx%lambda
  hxdhy = hx/hy
  hydhx = hy/hx

  ! Compute function over the locally owned part of the grid

  do j = ctx%ys, ctx%ye
    do i = ctx%xs, ctx%xe
      if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
        f(i, j) = x(i, j)
      else
        u = x(i, j)
        uxx = hydhx*(two*u - x(i - 1, j) - x(i + 1, j))
        uyy = hxdhy*(two*u - x(i, j - 1) - x(i, j + 1))
        f(i, j) = uxx + uyy - sc*exp(u)
      end if
    end do
  end do
  ierr = 0
end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes - the SNES context
!  x    - input vector
!  ctx  - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different matrix used to construct the preconditioner (not used here)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArray() and VecRestoreArray().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DMGetLocalToGlobalMapping() then
!          ISLocalToGlobalMappingGetIndices() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example; a commented sketch of option (B) appears before
!  FormJacobianLocal() below.
!
subroutine FormJacobian(mysnes, X, jac, jac_prec, ctx, ierr)
  ! Input/output variables:
  type(tSNES) mysnes
  type(tVec) X
  type(tMat) jac, jac_prec
  type(AppCtx) ctx
  PetscErrorCode ierr

  ! Declarations for use with local arrays:
  PetscScalar, pointer :: lx_v(:)
  type(tVec) localX

  ! Scatter ghost points to local vector, using the 2-step process
  !    DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
  ! Computations can be done while messages are in transition,
  ! by placing code between these two statements.

  PetscCallA(DMGetLocalVector(ctx%da, localX, ierr))
  PetscCallA(DMGlobalToLocalBegin(ctx%da, X, INSERT_VALUES, localX, ierr))
  PetscCallA(DMGlobalToLocalEnd(ctx%da, X, INSERT_VALUES, localX, ierr))

  ! Get a pointer to vector data
  PetscCallA(VecGetArray(localX, lx_v, ierr))

  ! Compute entries for the locally owned part of the matrix used to
  ! construct the preconditioner.
  PetscCallA(FormJacobianLocal(lx_v, jac_prec, ctx, ierr))

  ! Assemble matrix, using the 2-step process:
  !    MatAssemblyBegin(), MatAssemblyEnd()
  ! Computations can be done while messages are in transition,
  ! by placing code between these two statements.

  PetscCallA(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY, ierr))
  ! if (jac .ne. jac_prec) then
  PetscCallA(MatAssemblyBegin(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
  ! endif
  PetscCallA(VecRestoreArray(localX, lx_v, ierr))
  PetscCallA(DMRestoreLocalVector(ctx%da, localX, ierr))
  PetscCallA(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY, ierr))
  ! if (jac .ne. jac_prec) then
  PetscCallA(MatAssemblyEnd(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
  ! endif

  ! Tell the matrix we will never add a new nonzero location to the
  ! matrix. If we do, it will generate an error.

  PetscCallA(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE, ierr))

end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian matrix used to compute the preconditioner,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x - local vector data
!
!  Output Parameters:
!  jac_prec - Jacobian matrix used to compute the preconditioner
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example; a commented sketch of option (B) follows below.
!
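!  The fragment below is only an illustrative, commented-out sketch of
!  option (B), using the DMGetLocalToGlobalMapping()/
!  ISLocalToGlobalMappingGetIndices() approach mentioned in the notes of
!  FormJacobian() above. The names ltog, idx, grow, and gcol are hypothetical
!  and are not used elsewhere in this example; row, col, v, ione, and ifive
!  refer to the variables of FormJacobianLocal() below.
!
!      type(tISLocalToGlobalMapping) ltog
!      PetscInt, pointer :: idx(:)
!      PetscInt grow(1), gcol(5)
!      PetscCall(DMGetLocalToGlobalMapping(ctx%da, ltog, ierr))
!      PetscCall(ISLocalToGlobalMappingGetIndices(ltog, idx, ierr))
!      ! translate the 0-based local (ghosted) indices to global indices
!      grow(1) = idx(row + 1)
!      gcol(:) = idx(col(:) + 1)
!      PetscCall(MatSetValues(jac_prec, ione, grow, ifive, gcol, v, INSERT_VALUES, ierr))
!      PetscCall(ISLocalToGlobalMappingRestoreIndices(ltog, idx, ierr))
!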
subroutine FormJacobianLocal(x, jac_prec, ctx, ierr)
  ! Input/output variables:
  type(AppCtx) ctx
  PetscScalar x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
  type(tMat) jac_prec
  PetscErrorCode ierr

  ! Local variables:
  PetscInt row, col(5), i, j
  PetscInt ione, ifive
  PetscScalar two, one, hx, hy, hxdhy
  PetscScalar hydhx, sc, v(5)

  ! Set parameters
  ione = 1
  ifive = 5
  one = 1.0
  two = 2.0
  hx = one/PetscIntToReal(ctx%mx - 1)
  hy = one/PetscIntToReal(ctx%my - 1)
  sc = hx*hy
  hxdhy = hx/hy
  hydhx = hy/hx
  ! Compute entries for the locally owned part of the Jacobian.
  !  - Currently, all PETSc parallel matrix formats are partitioned by
  !    contiguous chunks of rows across the processors.
  !  - Each processor needs to insert only elements that it owns
  !    locally (but any non-local elements will be sent to the
  !    appropriate processor during matrix assembly).
  !  - Here, we set all entries for a particular row at once.
  !  - We can set matrix entries using either MatSetValuesLocal()
  !    or MatSetValues(), as discussed above.
  !  - Note that MatSetValues() uses 0-based row and column numbers
  !    in Fortran as well as in C.
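  ! The loop below builds the 0-based local (ghosted) index of grid point
  ! (i,j) incrementally; it is equivalent to
  !    row = (j - ctx%gys)*ctx%gxm + (i - ctx%gxs),
  ! which is the ordering MatSetValuesLocal() expects here.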
  do j = ctx%ys, ctx%ye
    row = (j - ctx%gys)*ctx%gxm + ctx%xs - ctx%gxs - 1
    do i = ctx%xs, ctx%xe
      row = row + 1
      ! boundary points
      if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
        col(1) = row
        v(1) = one
        PetscCallA(MatSetValuesLocal(jac_prec, ione, [row], ione, col, v, INSERT_VALUES, ierr))
      ! interior grid points
      else
        v(1) = -hxdhy
        v(2) = -hydhx
        v(3) = two*(hydhx + hxdhy) - sc*ctx%lambda*exp(x(i, j))
        v(4) = -hydhx
        v(5) = -hxdhy
        col(1) = row - ctx%gxm
        col(2) = row - 1
        col(3) = row
        col(4) = row + 1
        col(5) = row + ctx%gxm
        PetscCallA(MatSetValuesLocal(jac_prec, ione, [row], ifive, col, v, INSERT_VALUES, ierr))
      end if
    end do
  end do
end
end module

program main
  use petscdmda
  use petscsnes
  use ex5f90tmodule
  implicit none
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Variable declarations
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !
  !  Variables:
  !     mysnes      - nonlinear solver
  !     x, r        - solution, residual vectors
  !     J           - Jacobian matrix
  !     its         - iterations for convergence
  !     Nx, Ny      - number of processors in x- and y-directions
  !     matrix_free - flag indicating whether a matrix-free method is used
  !
  type(tSNES) mysnes
  type(tVec) x, r
  type(tMat) J
  PetscErrorCode ierr
  PetscInt its
  PetscBool flg, matrix_free
  PetscInt ione, nfour
  PetscReal lambda_max, lambda_min
  type(AppCtx) ctx
  type(tPetscOptions) :: options

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Initialize program
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, ctx%rank, ierr))

  ! Initialize problem parameters
  options%v = 0
  lambda_max = 6.81
  lambda_min = 0.0
  ctx%lambda = 6.0
  ione = 1
  nfour = 4
  PetscCallA(PetscOptionsGetReal(options, PETSC_NULL_CHARACTER, '-par', ctx%lambda, flg, ierr))
  PetscCheckA(ctx%lambda < lambda_max .and. ctx%lambda > lambda_min, PETSC_COMM_SELF, PETSC_ERR_USER, 'Lambda provided with -par is out of range')
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Create nonlinear solver context
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(SNESCreate(PETSC_COMM_WORLD, mysnes, ierr))

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Create vector data structures; set function evaluation routine
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  ! Create distributed array (DMDA) to manage parallel grid and vectors

  ! This really needs only the star-type stencil, but we use the box
  ! stencil temporarily.
  PetscCallA(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, nfour, nfour, PETSC_DECIDE, PETSC_DECIDE, ione, ione, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, ctx%da, ierr))
  PetscCallA(DMSetFromOptions(ctx%da, ierr))
  PetscCallA(DMSetUp(ctx%da, ierr))
  PetscCallA(DMDAGetInfo(ctx%da, PETSC_NULL_INTEGER, ctx%mx, ctx%my, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMDASTENCILTYPE, ierr))

  !
  !  Visualize the distribution of the array across the processors
  !
  ! PetscCallA(DMView(ctx%da,PETSC_VIEWER_DRAW_WORLD,ierr))

  ! Extract global and local vectors from DMDA; then duplicate for remaining
  ! vectors that are the same types
  PetscCallA(DMCreateGlobalVector(ctx%da, x, ierr))
  PetscCallA(VecDuplicate(x, r, ierr))

  ! Get local grid boundaries (for 2-dimensional DMDA)
  PetscCallA(DMDAGetCorners(ctx%da, ctx%xs, ctx%ys, PETSC_NULL_INTEGER, ctx%xm, ctx%ym, PETSC_NULL_INTEGER, ierr))
  PetscCallA(DMDAGetGhostCorners(ctx%da, ctx%gxs, ctx%gys, PETSC_NULL_INTEGER, ctx%gxm, ctx%gym, PETSC_NULL_INTEGER, ierr))
  ! Here we shift the starting indices up by one so that we can easily
  ! use the Fortran convention of 1-based indices (rather than 0-based indices).
  ctx%xs = ctx%xs + 1
  ctx%ys = ctx%ys + 1
  ctx%gxs = ctx%gxs + 1
  ctx%gys = ctx%gys + 1

  ctx%ye = ctx%ys + ctx%ym - 1
  ctx%xe = ctx%xs + ctx%xm - 1
  ctx%gye = ctx%gys + ctx%gym - 1
  ctx%gxe = ctx%gxs + ctx%gxm - 1

  PetscCallA(SNESSetApplicationContext(mysnes, ctx, ierr))

  ! Set function evaluation routine and vector
  PetscCallA(SNESSetFunction(mysnes, r, FormFunction, ctx, ierr))
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Create matrix data structure; set Jacobian evaluation routine
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  ! Set Jacobian matrix data structure and default Jacobian evaluation
  ! routine. User can override with:
  !  -snes_fd : default finite differencing approximation of Jacobian
  !  -snes_mf : matrix-free Newton-Krylov method with no preconditioning
  !             (unless user explicitly sets preconditioner)
  !  -snes_mf_operator : form matrix used to construct the preconditioner as set by the user,
  !                      but use matrix-free approx for Jacobian-vector
  !                      products within Newton-Krylov method
  !
  ! Note: For the parallel case, vectors and matrices MUST be partitioned
  !   accordingly.  When using distributed arrays (DMDAs) to create vectors,
  !   the DMDAs determine the problem partitioning.  We must explicitly
  !   specify the local matrix dimensions upon its creation for compatibility
  !   with the vector distribution.  Thus, the generic MatCreate() routine
  !   is NOT sufficient when working with distributed arrays.
  !
  ! Note: Here we only approximately preallocate storage space for the
  !   Jacobian.  See the users manual for a discussion of better techniques
  !   for preallocating matrix memory.
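  !
  ! For example, one illustrative way (not prescribed by this example's TEST
  ! block) to exercise the matrix-free operator option discussed above is:
  !
  !    mpiexec -n 4 ./ex5f90t -par 5.0 -snes_mf_operator -snes_monitor
  !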
  PetscCallA(PetscOptionsHasName(options, PETSC_NULL_CHARACTER, '-snes_mf', matrix_free, ierr))
  if (.not. matrix_free) then
    PetscCallA(DMSetMatType(ctx%da, MATAIJ, ierr))
    PetscCallA(DMCreateMatrix(ctx%da, J, ierr))
    PetscCallA(SNESSetJacobian(mysnes, J, J, FormJacobian, ctx, ierr))
  end if

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Customize nonlinear solver; set runtime options
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
  PetscCallA(SNESSetFromOptions(mysnes, ierr))

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Evaluate initial guess; then solve nonlinear system.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Note: The user should initialize the vector, x, with the initial guess
  ! for the nonlinear solver prior to calling SNESSolve().  In particular,
  ! to employ an initial guess of zero, the user should explicitly set
  ! this vector to zero by calling VecSet().

  PetscCallA(FormInitialGuess(mysnes, x, ierr))
  PetscCallA(SNESSolve(mysnes, PETSC_NULL_VEC, x, ierr))
  PetscCallA(SNESGetIterationNumber(mysnes, its, ierr))
  if (ctx%rank == 0) then
    write (6, 100) its
  end if
100 format('Number of SNES iterations = ', i5)
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  !  Free work space.  All PETSc objects should be destroyed when they
  !  are no longer needed.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  if (.not. matrix_free) PetscCallA(MatDestroy(J, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(r, ierr))
  PetscCallA(SNESDestroy(mysnes, ierr))
  PetscCallA(DMDestroy(ctx%da, ierr))

  PetscCallA(PetscFinalize(ierr))
end
!/*TEST
!
!   test:
!      nsize: 4
!      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/