Actual source code: ex5f90.F90

!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
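!  Example usage (illustrative; the executable name may differ):
!      mpiexec -n 4 ./ex5f90 -par 6.0 -snes_monitor
!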

!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
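!
!  At an interior point (i,j), after multiplying through by hx*hy, the
!  discrete equation assembled in FormFunctionLocal() below is
!
!     hy/hx*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!   + hx/hy*(2*u(i,j) - u(i,j-1) - u(i,j+1)) - hx*hy*lambda*exp(u(i,j)) = 0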
!
!  The uniprocessor version of this code is snes/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following #include statements must appear before any PETSc include
!  files are used inside a module or interface, because modules and
!  interfaces cannot contain the declarations these files introduce.
!
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
module ex5module
  use petscsnes
  use petscdmda
  implicit none
  type AppCtx
    PetscInt xs, xe, xm, gxs, gxe, gxm
    PetscInt ys, ye, ym, gys, gye, gym
    PetscInt mx, my
    PetscMPIInt rank
    PetscReal lambda
  end type AppCtx

contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X - input vector
!  ctx - user-defined application context, as set by SNESSetFunction()
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArray() and VecRestoreArray().
!
  subroutine FormFunction(snes, X, F, ctx, ierr)
    implicit none

!  Input/output variables:
    SNES snes
    Vec X, F
    PetscErrorCode ierr
    type(AppCtx) ctx
    DM da

!  Declarations for use with local arrays:
    PetscScalar, pointer :: lx_v(:), lf_v(:)
    Vec localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transition.
    PetscCall(SNESGetDM(snes, da, ierr))
    PetscCall(DMGetLocalVector(da, localX, ierr))
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version; see the users manual for details.

    PetscCall(VecGetArray(localX, lx_v, ierr))
    PetscCall(VecGetArray(F, lf_v, ierr))

!  Compute function over the locally owned part of the grid
    PetscCall(FormFunctionLocal(lx_v, lf_v, ctx, ierr))

!  Restore vectors
    PetscCall(VecRestoreArray(localX, lx_v, ierr))
    PetscCall(VecRestoreArray(F, lf_v, ierr))

    PetscCall(DMRestoreLocalVector(da, localX, ierr))
    PetscCall(PetscLogFlops(11.0d0*ctx%ym*ctx%xm, ierr))
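!  (11 flops per locally owned grid point: 4 each for uxx and uyy in
!  FormFunctionLocal() plus 3 to combine them with the nonlinear term;
!  the exp itself is not counted.)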

!      PetscCall(VecView(X, PETSC_VIEWER_STDOUT_WORLD, ierr))
!      PetscCall(VecView(F, PETSC_VIEWER_STDOUT_WORLD, ierr))
  end subroutine FormFunction

! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  snes - the SNES context
!  X - vector
!
!  Output Parameter:
!  X - vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArray() and VecRestoreArray().
!
  subroutine FormInitialGuess(snes, X, ierr)
!  Input/output variables:
    SNES snes
    type(AppCtx), pointer :: ctx
    Vec X
    PetscErrorCode ierr
    DM da

!  Declarations for use with local arrays:
    PetscScalar, pointer :: lx_v(:)

    ierr = 0
    PetscCall(SNESGetDM(snes, da, ierr))
    PetscCall(SNESGetApplicationContext(snes, ctx, ierr))
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version; see the users manual for details.

    PetscCall(VecGetArray(X, lx_v, ierr))

!  Compute initial guess over the locally owned part of the grid
    PetscCall(InitialGuessLocal(ctx, lx_v, ierr))

!  Restore vector
    PetscCall(VecRestoreArray(X, lx_v, ierr))

  end subroutine FormInitialGuess

! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameters:
!  ctx - the application context
!  x - local vector data
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
  subroutine InitialGuessLocal(ctx, x, ierr)
!  Input/output variables:
    type(AppCtx) ctx
    PetscScalar x(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
    PetscErrorCode ierr

!  Local variables:
    PetscInt i, j
    PetscReal temp1, temp, hx, hy
    PetscReal one

!  Set parameters

    ierr = 0
    one = 1.0
    hx = one/(ctx%mx - 1)
    hy = one/(ctx%my - 1)
    temp1 = ctx%lambda/(ctx%lambda + one)
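!  The guess is zero on the boundary and, at interior points,
!  lambda/(lambda + 1) times the square root of the distance to the
!  nearest boundary: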

    do j = ctx%ys, ctx%ye
      temp = min(j - 1, ctx%my - j)*hy
      do i = ctx%xs, ctx%xe
        if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
          x(i, j) = 0.0
        else
          x(i, j) = temp1*sqrt(min(hx*min(i - 1, ctx%mx - i), temp))
        end if
      end do
    end do

  end subroutine InitialGuessLocal

! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameters:
!  x - local vector data
!  ctx - the application context
!
!  Output Parameters:
!  f - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
  subroutine FormFunctionLocal(x, f, ctx, ierr)
!  Input/output variables:
    type(AppCtx) ctx
    PetscScalar x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
    PetscScalar f(ctx%xs:ctx%xe, ctx%ys:ctx%ye)
    PetscErrorCode ierr

!  Local variables:
    PetscScalar two, one, hx, hy, hxdhy, hydhx, sc
    PetscScalar u, uxx, uyy
    PetscInt i, j

!  Initialize ierr, since no call below sets it
    ierr = 0
    one = 1.0
    two = 2.0
    hx = one/(ctx%mx - 1)
    hy = one/(ctx%my - 1)
    sc = hx*hy*ctx%lambda
    hxdhy = hx/hy
    hydhx = hy/hx
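!  hxdhy, hydhx, and sc come from multiplying the standard 5-point formula
!     (2u - u_W - u_E)/hx^2 + (2u - u_S - u_N)/hy^2 - lambda*exp(u) = 0
!  through by hx*hy.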

!  Compute function over the locally owned part of the grid

    do j = ctx%ys, ctx%ye
      do i = ctx%xs, ctx%xe
        if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
          f(i, j) = x(i, j)
        else
          u = x(i, j)
          uxx = hydhx*(two*u - x(i - 1, j) - x(i + 1, j))
          uyy = hxdhy*(two*u - x(i, j - 1) - x(i, j + 1))
          f(i, j) = uxx + uyy - sc*exp(u)
        end if
      end do
    end do

  end subroutine FormFunctionLocal

! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes     - the SNES context
!  X        - input vector
!  ctx      - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different matrix used to construct the preconditioner (not used here)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArray() and VecRestoreArray().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
  subroutine FormJacobian(snes, X, jac, jac_prec, ctx, ierr)
!  Input/output variables:
    SNES snes
    Vec X
    Mat jac, jac_prec
    type(AppCtx) ctx
    PetscErrorCode ierr
    DM da

!  Declarations for use with local arrays:
    PetscScalar, pointer :: lx_v(:)
    Vec localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

    PetscCall(SNESGetDM(snes, da, ierr))
    PetscCall(DMGetLocalVector(da, localX, ierr))
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))

!  Get a pointer to vector data
    PetscCall(VecGetArray(localX, lx_v, ierr))

!  Compute entries for the locally owned part of the Jacobian preconditioner.
    PetscCall(FormJacobianLocal(lx_v, jac_prec, ctx, ierr))

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

    PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY, ierr))
    if (jac /= jac_prec) then
      PetscCall(MatAssemblyBegin(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
    end if
    PetscCall(VecRestoreArray(localX, lx_v, ierr))
    PetscCall(DMRestoreLocalVector(da, localX, ierr))
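!  Note that the two restore calls above sit between MatAssemblyBegin() and
!  MatAssemblyEnd(), an example of overlapping local work with the
!  communication needed for assembly.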
    PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY, ierr))
    if (jac /= jac_prec) then
      PetscCall(MatAssemblyEnd(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
    end if

!  Tell the matrix we will never add a new nonzero location to the
!  matrix. If we do, it will generate an error.

    PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE, ierr))

  end subroutine FormJacobian

! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes the Jacobian matrix used to construct the
!  preconditioner, called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x        - local vector data
!  ctx      - the application context
!
!  Output Parameters:
!  jac_prec - Jacobian matrix used to construct the preconditioner
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Apply the local-to-global map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
  subroutine FormJacobianLocal(x, jac_prec, ctx, ierr)
!  Input/output variables:
    type(AppCtx) ctx
    PetscScalar x(ctx%gxs:ctx%gxe, ctx%gys:ctx%gye)
    Mat jac_prec
    PetscErrorCode ierr

!  Local variables:
    PetscInt row, col(5), i, j
    PetscInt ione, ifive
    PetscScalar two, one, hx, hy, hxdhy
    PetscScalar hydhx, sc, v(5)

!  Set parameters
    ione = 1
    ifive = 5
    one = 1.0
    two = 2.0
    hx = one/(ctx%mx - 1)
    hy = one/(ctx%my - 1)
    sc = hx*hy
    hxdhy = hx/hy
    hydhx = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
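!   - The local (ghosted) row index used below numbers the ghosted patch
!     in row-major order: the first locally owned point of grid row j is
!     at offset (j - gys)*gxm + (xs - gxs), and row is then incremented
!     as i advances.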

    do j = ctx%ys, ctx%ye
      row = (j - ctx%gys)*ctx%gxm + ctx%xs - ctx%gxs - 1
      do i = ctx%xs, ctx%xe
        row = row + 1
!           boundary points
        if (i == 1 .or. j == 1 .or. i == ctx%mx .or. j == ctx%my) then
          col(1) = row
          v(1) = one
          PetscCall(MatSetValuesLocal(jac_prec, ione, [row], ione, col, v, INSERT_VALUES, ierr))
!           interior grid points
        else
          v(1) = -hxdhy
          v(2) = -hydhx
          v(3) = two*(hydhx + hxdhy) - sc*ctx%lambda*exp(x(i, j))
          v(4) = -hydhx
          v(5) = -hxdhy
          col(1) = row - ctx%gxm
          col(2) = row - 1
          col(3) = row
          col(4) = row + 1
          col(5) = row + ctx%gxm
          PetscCall(MatSetValuesLocal(jac_prec, ione, [row], ifive, col, v, INSERT_VALUES, ierr))
        end if
      end do
    end do

  end subroutine FormJacobianLocal

end module ex5module

program main
  use ex5module
  implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     matrix_free - flag indicating use of the matrix-free version
!
  SNES snes
  Vec x, r
  Mat J
  PetscErrorCode ierr
  PetscInt its
  PetscBool flg, matrix_free
  PetscInt ione, nfour
  PetscReal lambda_max, lambda_min
  type(AppCtx) ctx
  DM da

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, ctx%rank, ierr))

!  Initialize problem parameters
  lambda_max = 6.81
  lambda_min = 0.0
  ctx%lambda = 6.0
  ione = 1
  nfour = 4
  PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-par', ctx%lambda, flg, ierr))
  PetscCheckA(ctx%lambda < lambda_max .and. ctx%lambda > lambda_min, PETSC_COMM_SELF, PETSC_ERR_USER, 'Lambda provided with -par is out of range')

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(SNESCreate(PETSC_COMM_WORLD, snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors

!  This really needs only the star-type stencil, but we use the box
!  stencil temporarily.
  PetscCallA(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, nfour, nfour, PETSC_DECIDE, PETSC_DECIDE, ione, ione, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, da, ierr))
  PetscCallA(DMSetFromOptions(da, ierr))
  PetscCallA(DMSetUp(da, ierr))
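!  The initial grid is nfour x nfour; since DMSetFromOptions() is called,
!  the grid size can be overridden at runtime, e.g. -da_grid_x 10 -da_grid_y 10.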

  PetscCallA(DMDAGetInfo(da, PETSC_NULL_INTEGER, ctx%mx, ctx%my, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMDASTENCILTYPE, ierr))

!
!  Visualize the distribution of the array across the processors
!
!     PetscCallA(DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr))

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same type
  PetscCallA(DMCreateGlobalVector(da, x, ierr))
  PetscCallA(VecDuplicate(x, r, ierr))

!  Get local grid boundaries (for 2-dimensional DMDA)
  PetscCallA(DMDAGetCorners(da, ctx%xs, ctx%ys, PETSC_NULL_INTEGER, ctx%xm, ctx%ym, PETSC_NULL_INTEGER, ierr))
  PetscCallA(DMDAGetGhostCorners(da, ctx%gxs, ctx%gys, PETSC_NULL_INTEGER, ctx%gxm, ctx%gym, PETSC_NULL_INTEGER, ierr))

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based).
  ctx%xs = ctx%xs + 1
  ctx%ys = ctx%ys + 1
  ctx%gxs = ctx%gxs + 1
  ctx%gys = ctx%gys + 1

  ctx%ye = ctx%ys + ctx%ym - 1
  ctx%xe = ctx%xs + ctx%xm - 1
  ctx%gye = ctx%gys + ctx%gym - 1
  ctx%gxe = ctx%gxs + ctx%gxm - 1
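!  For example, on one process with the default 4x4 grid there are no ghost
!  neighbors, so after the shift xs = gxs = 1 and xe = gxe = mx = 4.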

  PetscCallA(SNESSetApplicationContext(snes, ctx, ierr))

!  Set function evaluation routine and vector
  PetscCallA(SNESSetFunction(snes, r, FormFunction, ctx, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form matrix used to construct the preconditioner as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!  Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-snes_mf', matrix_free, ierr))
  if (.not. matrix_free) then
    PetscCallA(DMSetMatType(da, MATAIJ, ierr))
    PetscCallA(DMCreateMatrix(da, J, ierr))
    PetscCallA(SNESSetJacobian(snes, J, J, FormJacobian, ctx, ierr))
  end if
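!  When -snes_mf is given, no matrix is created here; SNESSetFromOptions()
!  below sets up the matrix-free Jacobian operator instead.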

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
  PetscCallA(SNESSetDM(snes, da, ierr))
  PetscCallA(SNESSetFromOptions(snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().

  PetscCallA(FormInitialGuess(snes, x, ierr))
  PetscCallA(SNESSolve(snes, PETSC_NULL_VEC, x, ierr))
  PetscCallA(SNESGetIterationNumber(snes, its, ierr))
  if (ctx%rank == 0) then
    write (6, 100) its
  end if
100 format('Number of SNES iterations = ', i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  if (.not. matrix_free) PetscCallA(MatDestroy(J, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(r, ierr))
  PetscCallA(SNESDestroy(snes, ierr))
  PetscCallA(DMDestroy(da, ierr))

  PetscCallA(PetscFinalize(ierr))
end program main
!
!/*TEST
!
!   test:
!      nsize: 4
!      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 2
!      nsize: 4
!      args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 3
!      nsize: 3
!      args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 4
!      nsize: 3
!      args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 5
!      requires: !single
!
!TEST*/