Actual source code: ex5f90.F90

!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the
!       SFI problem:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!

!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
!  The uniprocessor version of this code is snes/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The PETSc include files below must appear before the module statement:
!  they cannot be placed inside a module or interface, which cannot handle
!  the declarations those files contain.
!
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
module ex5f90module
  use petscsnes
  use petscdmda
  implicit none
  type userctx
    PetscInt xs, xe, xm, gxs, gxe, gxm
    PetscInt ys, ye, ym, gys, gye, gym
    PetscInt mx, my
    PetscMPIInt rank
    PetscReal lambda
  end type userctx

  interface
    subroutine SNESSetApplicationContext(snes, ctx, ierr)
      use petscsnes
      import userctx
      implicit none
      SNES snes
      type(userctx) ctx
      PetscErrorCode ierr
    end subroutine
    subroutine SNESGetApplicationContext(snes, ctx, ierr)
      use petscsnes
      import userctx
      implicit none
      SNES snes
      type(userctx), pointer :: ctx
      PetscErrorCode ierr
    end subroutine
  end interface
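
!  Note: these explicit interfaces are needed because the application
!  context is a user-defined type that the generic PETSc Fortran
!  interfaces cannot know about.  A typical retrieval, as done in
!  FormInitialGuess() below, looks like:
!
!      type(userctx), pointer :: puser
!      PetscCall(SNESGetApplicationContext(snes, puser, ierr))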

contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X - input vector
!  user - user-defined application context, as set by SNESSetFunction()
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArray() and VecRestoreArray().
!
  subroutine FormFunction(snes, X, F, user, ierr)
    implicit none

!  Input/output variables:
    SNES snes
    Vec X, F
    PetscErrorCode ierr
    type(userctx) user
    DM da

!  Declarations for use with local arrays:
    PetscScalar, pointer :: lx_v(:), lf_v(:)
    Vec localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transit.
    PetscCall(SNESGetDM(snes, da, ierr))
    PetscCall(DMGetLocalVector(da, localX, ierr))
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
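!  (Purely local computation that needs no ghost values could be placed
!   here, between the Begin and End calls, to overlap with communication.)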
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.

    PetscCall(VecGetArray(localX, lx_v, ierr))
    PetscCall(VecGetArray(F, lf_v, ierr))

!  Compute function over the locally owned part of the grid
    PetscCall(FormFunctionLocal(lx_v, lf_v, user, ierr))

!  Restore vectors
    PetscCall(VecRestoreArray(localX, lx_v, ierr))
    PetscCall(VecRestoreArray(F, lf_v, ierr))

    PetscCall(DMRestoreLocalVector(da, localX, ierr))
    PetscCall(PetscLogFlops(11.0d0*user%ym*user%xm, ierr))
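!  (Approximately 11 floating point operations are logged per locally
!   owned grid point, matching the interior-point work performed in
!   FormFunctionLocal() below.)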

!      PetscCall(VecView(X, PETSC_VIEWER_STDOUT_WORLD, ierr))
!      PetscCall(VecView(F, PETSC_VIEWER_STDOUT_WORLD, ierr))
  end subroutine FormFunction

! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameter:
!  snes - the SNES context
!
!  Output Parameter:
!  X - vector holding the initial guess
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArray() and VecRestoreArray().
!
  subroutine FormInitialGuess(snes, X, ierr)
!  Input/output variables:
    SNES snes
    type(userctx), pointer :: puser
    Vec X
    PetscErrorCode ierr
    DM da

!  Declarations for use with local arrays:
    PetscScalar, pointer :: lx_v(:)

    ierr = 0
    PetscCall(SNESGetDM(snes, da, ierr))
    PetscCall(SNESGetApplicationContext(snes, puser, ierr))
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.

    PetscCall(VecGetArray(X, lx_v, ierr))

!  Compute initial guess over the locally owned part of the grid
    PetscCall(InitialGuessLocal(puser, lx_v, ierr))

!  Restore vector
    PetscCall(VecRestoreArray(X, lx_v, ierr))

  end

! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  user - the user-defined application context
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
  subroutine InitialGuessLocal(user, x, ierr)
!  Input/output variables:
    type(userctx) user
    PetscScalar x(user%xs:user%xe, user%ys:user%ye)
    PetscErrorCode ierr

!  Local variables:
    PetscInt i, j
    PetscReal temp1, temp, hx, hy
    PetscReal one

!  Set parameters
    ierr = 0
    one = 1.0
    hx = one/(user%mx - 1)
    hy = one/(user%my - 1)
    temp1 = user%lambda/(user%lambda + one)
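
!  (The guess below vanishes on the boundary and grows like the square
!   root of the distance to the nearest boundary, scaled by
!   lambda/(lambda + 1).)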

    do j = user%ys, user%ye
      temp = min(j - 1, user%my - j)*hy
      do i = user%xs, user%xe
        if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
          x(i, j) = 0.0
        else
          x(i, j) = temp1*sqrt(min(hx*min(i - 1, user%mx - i), temp))
        end if
      end do
    end do

  end

! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
  subroutine FormFunctionLocal(x, f, user, ierr)
!  Input/output variables:
    type(userctx) user
    PetscScalar x(user%gxs:user%gxe, user%gys:user%gye)
    PetscScalar f(user%xs:user%xe, user%ys:user%ye)
    PetscErrorCode ierr

!  Local variables:
    PetscScalar two, one, hx, hy, hxdhy, hydhx, sc
    PetscScalar u, uxx, uyy
    PetscInt i, j

    one = 1.0
    two = 2.0
    hx = one/(user%mx - 1)
    hy = one/(user%my - 1)
    sc = hx*hy*user%lambda
    hxdhy = hx/hy
    hydhx = hy/hx
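
!  (Each interior point below receives the scaled 5-point stencil
!      f(i,j) = hydhx*(2*u - x(i-1,j) - x(i+1,j))
!             + hxdhy*(2*u - x(i,j-1) - x(i,j+1)) - sc*exp(u),
!   while boundary points simply enforce the Dirichlet condition.)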

!  Compute function over the locally owned part of the grid

    do j = user%ys, user%ye
      do i = user%xs, user%xe
        if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
          f(i, j) = x(i, j)
        else
          u = x(i, j)
          uxx = hydhx*(two*u - x(i - 1, j) - x(i + 1, j))
          uyy = hxdhy*(two*u - x(i, j - 1) - x(i, j + 1))
          f(i, j) = uxx + uyy - sc*exp(u)
        end if
      end do
    end do

  end

! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes     - the SNES context
!  x        - input vector
!  user     - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different matrix used to construct the preconditioner (not used here)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArray() and VecRestoreArray().
!
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
328: !
  subroutine FormJacobian(snes, X, jac, jac_prec, user, ierr)
!  Input/output variables:
    SNES snes
    Vec X
    Mat jac, jac_prec
    type(userctx) user
    PetscErrorCode ierr
    DM da

!  Declarations for use with local arrays:
    PetscScalar, pointer :: lx_v(:)
    Vec localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.

    PetscCall(SNESGetDM(snes, da, ierr))
    PetscCall(DMGetLocalVector(da, localX, ierr))
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))

!  Get a pointer to vector data
    PetscCall(VecGetArray(localX, lx_v, ierr))

!  Compute entries for the locally owned part of the matrix used to
!  construct the preconditioner.
    PetscCall(FormJacobianLocal(lx_v, jac_prec, user, ierr))

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.

    PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY, ierr))
    if (jac /= jac_prec) then
      PetscCall(MatAssemblyBegin(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
    end if
    PetscCall(VecRestoreArray(localX, lx_v, ierr))
    PetscCall(DMRestoreLocalVector(da, localX, ierr))
    PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY, ierr))
    if (jac /= jac_prec) then
      PetscCall(MatAssemblyEnd(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
    end if
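
!  (In this example the main program passes the same matrix for jac and
!   jac_prec to SNESSetJacobian(), so the jac /= jac_prec branches above
!   are not exercised by default.)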

!  Tell the matrix that we will never add a new nonzero location;
!  doing so would generate an error.

    PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE, ierr))

  end

! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes the Jacobian matrix used to construct the
!  preconditioner, called by the higher level routine FormJacobian().
!
!  Input Parameter:
!  x        - local vector data
!
!  Output Parameters:
!  jac_prec - Jacobian matrix used to construct the preconditioner
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
  subroutine FormJacobianLocal(x, jac_prec, user, ierr)
!  Input/output variables:
    type(userctx) user
    PetscScalar x(user%gxs:user%gxe, user%gys:user%gye)
    Mat jac_prec
    PetscErrorCode ierr

!  Local variables:
    PetscInt row, col(5), i, j
    PetscInt ione, ifive
    PetscScalar two, one, hx, hy, hxdhy
    PetscScalar hydhx, sc, v(5)

!  Set parameters
    ione = 1
    ifive = 5
    one = 1.0
    two = 2.0
    hx = one/(user%mx - 1)
    hy = one/(user%my - 1)
    sc = hx*hy
    hxdhy = hx/hy
    hydhx = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

    do j = user%ys, user%ye
      row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
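!     (row is the 0-based local (ghosted) index of grid point (i,j);
!      after the increment below it equals (j - gys)*gxm + (i - gxs).)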
      do i = user%xs, user%xe
        row = row + 1
!       boundary points
        if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
          col(1) = row
          v(1) = one
          PetscCall(MatSetValuesLocal(jac_prec, ione, [row], ione, col, v, INSERT_VALUES, ierr))
!       interior grid points
        else
          v(1) = -hxdhy
          v(2) = -hydhx
          v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i, j))
          v(4) = -hydhx
          v(5) = -hxdhy
          col(1) = row - user%gxm
          col(2) = row - 1
          col(3) = row
          col(4) = row + 1
          col(5) = row + user%gxm
          PetscCall(MatSetValuesLocal(jac_prec, ione, [row], ifive, col, v, INSERT_VALUES, ierr))
        end if
      end do
    end do

  end

end module ex5f90module

program main
  use ex5f90module
  implicit none
!

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     matrix_free - flag: true indicates the matrix-free version
!
  SNES snes
  Vec x, r
  Mat J
  PetscErrorCode ierr
  PetscInt its
  PetscBool flg, matrix_free
  PetscInt ione, nfour
  PetscReal lambda_max, lambda_min
  type(userctx) user
  DM da

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, user%rank, ierr))

!  Initialize problem parameters
  lambda_max = 6.81
  lambda_min = 0.0
  user%lambda = 6.0
  ione = 1
  nfour = 4
  PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-par', user%lambda, flg, ierr))
  PetscCheckA(user%lambda < lambda_max .and. user%lambda > lambda_min, PETSC_COMM_SELF, PETSC_ERR_USER, 'Lambda provided with -par is out of range')
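
!  (For example, the executable might be run as
!      mpiexec -n 4 ./ex5f90 -par 5.0 -snes_monitor
!   to solve with lambda = 5.0 on 4 processes.)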

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(SNESCreate(PETSC_COMM_WORLD, snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors

!  This really needs only the star-type stencil, but we use the box
!  stencil temporarily.
  PetscCallA(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, nfour, nfour, PETSC_DECIDE, PETSC_DECIDE, ione, ione, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, da, ierr))
  PetscCallA(DMSetFromOptions(da, ierr))
  PetscCallA(DMSetUp(da, ierr))
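
!  (Because of DMSetFromOptions(), the default 4 x 4 grid can be
!   overridden at runtime, e.g. with -da_grid_x 16 -da_grid_y 16.)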

  PetscCallA(DMDAGetInfo(da, PETSC_NULL_INTEGER, user%mx, user%my, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMDASTENCILTYPE, ierr))

!
!  Visualize the distribution of the array across the processors
!
!     PetscCallA(DMView(da, PETSC_VIEWER_DRAW_WORLD, ierr))

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are of the same type
  PetscCallA(DMCreateGlobalVector(da, x, ierr))
  PetscCallA(VecDuplicate(x, r, ierr))

!  Get local grid boundaries (for 2-dimensional DMDA)
  PetscCallA(DMDAGetCorners(da, user%xs, user%ys, PETSC_NULL_INTEGER, user%xm, user%ym, PETSC_NULL_INTEGER, ierr))
  PetscCallA(DMDAGetGhostCorners(da, user%gxs, user%gys, PETSC_NULL_INTEGER, user%gxm, user%gym, PETSC_NULL_INTEGER, ierr))
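
!  (The ghost ranges returned by DMDAGetGhostCorners() contain the owned
!   ranges plus the stencil width wherever a neighboring process exists.)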

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based indices).
  user%xs = user%xs + 1
  user%ys = user%ys + 1
  user%gxs = user%gxs + 1
  user%gys = user%gys + 1

  user%ye = user%ys + user%ym - 1
  user%xe = user%xs + user%xm - 1
  user%gye = user%gys + user%gym - 1
  user%gxe = user%gxs + user%gxm - 1
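
!  (For example, on a single process with the default 4 x 4 grid this
!   yields xs = ys = 1 and xe = ye = 4, with the ghost ranges equal to
!   the owned ranges since there are no neighboring processes.)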

  PetscCallA(SNESSetApplicationContext(snes, user, ierr))

!  Set function evaluation routine and vector
  PetscCallA(SNESSetFunction(snes, r, FormFunction, user, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form matrix used to construct the preconditioner as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!  Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-snes_mf', matrix_free, ierr))
  if (.not. matrix_free) then
    PetscCallA(DMSetMatType(da, MATAIJ, ierr))
    PetscCallA(DMCreateMatrix(da, J, ierr))
    PetscCallA(SNESSetJacobian(snes, J, J, FormJacobian, user, ierr))
  end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
  PetscCallA(SNESSetDM(snes, da, ierr))
  PetscCallA(SNESSetFromOptions(snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().

  PetscCallA(FormInitialGuess(snes, x, ierr))
  PetscCallA(SNESSolve(snes, PETSC_NULL_VEC, x, ierr))
  PetscCallA(SNESGetIterationNumber(snes, its, ierr))
  if (user%rank == 0) then
    write (6, 100) its
  end if
100 format('Number of SNES iterations = ', i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  if (.not. matrix_free) PetscCallA(MatDestroy(J, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(r, ierr))
  PetscCallA(SNESDestroy(snes, ierr))
  PetscCallA(DMDestroy(da, ierr))

  PetscCallA(PetscFinalize(ierr))
end
!
!/*TEST
!
!   test:
!      nsize: 4
!      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 2
!      nsize: 4
!      args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 3
!      nsize: 3
!      args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 4
!      nsize: 3
!      args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 5
!      requires: !single
!
!TEST*/