Actual source code: ex5f90.F90
1: !
2: ! Description: Solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DMDAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
7: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
8: !
10: !
11: ! --------------------------------------------------------------------------
12: !
13: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
14: ! the partial differential equation
15: !
16: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
17: !
18: ! with boundary conditions
19: !
20: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
21: !
22: ! A finite difference approximation with the usual 5-point stencil
23: ! is used to discretize the boundary value problem to obtain a nonlinear
24: ! system of equations.
25: !
26: ! The uniprocessor version of this code is snes/tutorials/ex4f.F
27: !
28: ! --------------------------------------------------------------------------
29: ! The following define must be used before including any PETSc include files
30: ! into a module or interface. This is because they can't handle declarations
31: ! in them
32: !
module ex5f90module
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
  use petscsnes
  use petscdmda

  ! Application context shared between the main program and the SNES
  ! callbacks: local (owned) and ghosted grid index ranges, the global
  ! grid size, this process's MPI rank, and the Bratu parameter lambda.
  type userctx
    PetscInt xs, xe, xm, gxs, gxe, gxm   ! owned / ghosted x index range and width
    PetscInt ys, ye, ym, gys, gye, gym   ! owned / ghosted y index range and width
    PetscInt mx, my                      ! global grid dimensions
    PetscMPIInt rank                     ! MPI rank of this process
    PetscReal lambda                     ! Bratu nonlinearity parameter
  end type userctx

contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates the nonlinear function F(x); registered
!  with SNESSetFunction().
!
!  Input Parameters:
!  snes - the SNES context
!  X    - input vector (global, unghosted)
!  user - user-defined application context
!
!  Output Parameters:
!  F    - function vector
!  ierr - error code (0 on success)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArray() and VecRestoreArray().
!
  subroutine FormFunction(snes, X, F, user, ierr)
    implicit none

    ! Input/output variables:
    SNES snes
    Vec X, F
    PetscErrorCode ierr
    type(userctx) user
    DM da

    ! Declarations for use with local arrays:
    PetscScalar, pointer :: lx_v(:), lf_v(:)
    Vec localX

    ! Scatter ghost points to the local vector, using the 2-step process
    ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
    ! By placing code between these two statements, computations can
    ! be done while messages are in transition.
    PetscCall(SNESGetDM(snes, da, ierr))
    PetscCall(DMGetLocalVector(da, localX, ierr))
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))

    ! Get pointers to the vector data.
    ! - For default PETSc vectors, VecGetArray() returns a pointer to
    !   the data array; for other vector types the routine is
    !   implementation dependent.
    ! - You MUST call VecRestoreArray() when you no longer need access
    !   to the array.
    PetscCall(VecGetArray(localX, lx_v, ierr))
    PetscCall(VecGetArray(F, lf_v, ierr))

    ! Compute the function over the locally owned part of the grid
    PetscCall(FormFunctionLocal(lx_v, lf_v, user, ierr))

    ! Restore vectors
    PetscCall(VecRestoreArray(localX, lx_v, ierr))
    PetscCall(VecRestoreArray(F, lf_v, ierr))

    PetscCall(DMRestoreLocalVector(da, localX, ierr))
    ! ~11 flops per interior grid point (see FormFunctionLocal)
    PetscCall(PetscLogFlops(11.0d0*user%ym*user%xm, ierr))

    ! Uncomment to inspect the input and residual vectors:
    ! PetscCallA(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
    ! PetscCallA(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
  end subroutine formfunction
end module ex5f90module
! Explicit interfaces for SNESSetApplicationContext()/
! SNESGetApplicationContext() so the user-defined type (userctx) can be
! passed through PETSc's opaque context argument with compile-time
! argument checking.
module ex5f90moduleinterfaces
  use ex5f90module

  Interface SNESSetApplicationContext
    Subroutine SNESSetApplicationContext(snes, ctx, ierr)
      use ex5f90module
      SNES snes
      type(userctx) ctx
      PetscErrorCode ierr
    End Subroutine
  End Interface SNESSetApplicationContext

  Interface SNESGetApplicationContext
    Subroutine SNESGetApplicationContext(snes, ctx, ierr)
      use ex5f90module
      SNES snes
      type(userctx), pointer :: ctx   ! returned as a pointer to the stored context
      PetscErrorCode ierr
    End Subroutine
  End Interface SNESGetApplicationContext
end module ex5f90moduleinterfaces
program main
  use ex5f90module
  use ex5f90moduleinterfaces
  implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y- directions
!     matrix_free - flag - 1 indicates matrix-free version
!
  SNES snes
  Vec x, r
  Mat J
  PetscErrorCode ierr
  PetscInt its
  PetscBool flg, matrix_free
  PetscInt ione, nfour
  PetscReal lambda_max, lambda_min
  type(userctx) user
  DM da

  ! Note: Any user-defined Fortran routines (such as FormJacobian)
  ! MUST be declared as external.
  external FormInitialGuess, FormJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, user%rank, ierr))

  ! Initialize problem parameters; -par overrides the default lambda
  ! and must satisfy lambda_min < lambda < lambda_max.
  lambda_max = 6.81
  lambda_min = 0.0
  user%lambda = 6.0
  ione = 1
  nfour = 4
  PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-par', user%lambda, flg, ierr))
  PetscCheckA(user%lambda < lambda_max .and. user%lambda > lambda_min, PETSC_COMM_SELF, PETSC_ERR_USER, 'Lambda provided with -par is out of range')

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(SNESCreate(PETSC_COMM_WORLD, snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  ! Create distributed array (DMDA) to manage parallel grid and vectors.
  ! This really needs only the star-type stencil, but we use the box
  ! stencil temporarily.
  PetscCallA(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, nfour, nfour, PETSC_DECIDE, PETSC_DECIDE, ione, ione, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, da, ierr))
  PetscCallA(DMSetFromOptions(da, ierr))
  PetscCallA(DMSetUp(da, ierr))

  ! Query the global grid size actually chosen (may differ from nfour
  ! after -da_grid_x/-da_grid_y command line options).
  PetscCallA(DMDAGetInfo(da, PETSC_NULL_INTEGER, user%mx, user%my, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMDASTENCILTYPE, ierr))

  !
  ! Visualize the distribution of the array across the processors
  !
  ! PetscCallA(DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr))

  ! Extract global and local vectors from DMDA; then duplicate for remaining
  ! vectors that are the same types
  PetscCallA(DMCreateGlobalVector(da, x, ierr))
  PetscCallA(VecDuplicate(x, r, ierr))

  ! Get local grid boundaries (for 2-dimensional DMDA)
  PetscCallA(DMDAGetCorners(da, user%xs, user%ys, PETSC_NULL_INTEGER, user%xm, user%ym, PETSC_NULL_INTEGER, ierr))
  PetscCallA(DMDAGetGhostCorners(da, user%gxs, user%gys, PETSC_NULL_INTEGER, user%gxm, user%gym, PETSC_NULL_INTEGER, ierr))

  ! Here we shift the starting indices up by one so that we can easily
  ! use the Fortran convention of 1-based indices (rather than 0-based).
  user%xs = user%xs + 1
  user%ys = user%ys + 1
  user%gxs = user%gxs + 1
  user%gys = user%gys + 1

  ! Derive the (inclusive) end indices of the owned and ghosted ranges
  user%ye = user%ys + user%ym - 1
  user%xe = user%xs + user%xm - 1
  user%gye = user%gys + user%gym - 1
  user%gxe = user%gxs + user%gxm - 1

  PetscCallA(SNESSetApplicationContext(snes, user, ierr))

  ! Set function evaluation routine and vector
  PetscCallA(SNESSetFunction(snes, r, FormFunction, user, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form matrix used to construct the preconditioner
!                as set by the user, but use matrix-free approx for
!                Jacobian-vector products within Newton-Krylov method
!
!  Note: For the parallel case, vectors and matrices MUST be partitioned
!  accordingly.  When using distributed arrays (DMDAs) to create vectors,
!  the DMDAs determine the problem partitioning.  Thus, the generic
!  MatCreate() routine is NOT sufficient when working with distributed
!  arrays; DMCreateMatrix() below produces a correctly partitioned and
!  preallocated matrix.
  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-snes_mf', matrix_free, ierr))
  if (.not. matrix_free) then
    PetscCallA(DMSetMatType(da, MATAIJ, ierr))
    PetscCallA(DMCreateMatrix(da, J, ierr))
    PetscCallA(SNESSetJacobian(snes, J, J, FormJacobian, user, ierr))
  end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
  PetscCallA(SNESSetDM(snes, da, ierr))
  PetscCallA(SNESSetFromOptions(snes, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Note: The user should initialize the vector, x, with the initial guess
  ! for the nonlinear solver prior to calling SNESSolve().  In particular,
  ! to employ an initial guess of zero, the user should explicitly set
  ! this vector to zero by calling VecSet().
  PetscCallA(FormInitialGuess(snes, x, ierr))
  PetscCallA(SNESSolve(snes, PETSC_NULL_VEC, x, ierr))
  PetscCallA(SNESGetIterationNumber(snes, its, ierr))
  ! Only rank 0 reports, to avoid duplicated output in parallel runs
  if (user%rank == 0) then
    write (6, 100) its
  end if
100 format('Number of SNES iterations = ', i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  if (.not. matrix_free) PetscCallA(MatDestroy(J, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(r, ierr))
  PetscCallA(SNESDestroy(snes, ierr))
  PetscCallA(DMDestroy(da, ierr))
  PetscCallA(PetscFinalize(ierr))
end
306: ! ---------------------------------------------------------------------
307: !
308: ! FormInitialGuess - Forms initial approximation.
309: !
310: ! Input Parameters:
311: ! X - vector
312: !
313: ! Output Parameter:
314: ! X - vector
315: !
316: ! Notes:
317: ! This routine serves as a wrapper for the lower-level routine
318: ! "InitialGuessLocal", where the actual computations are
319: ! done using the standard Fortran style of treating the local
320: ! vector data as a multidimensional array over the local mesh.
321: ! This routine merely handles ghost point scatters and accesses
322: ! the local vector data via VecGetArray() and VecRestoreArray().
323: !
subroutine FormInitialGuess(snes, X, ierr)
  use ex5f90module
  use ex5f90moduleinterfaces
  implicit none

  ! Input/output variables:
  SNES snes                       ! solver; supplies the DM and the user context
  type(userctx), pointer:: puser  ! retrieved application context
  Vec X                           ! vector to fill with the initial guess
  PetscErrorCode ierr             ! error code (0 on success)
  DM da

  ! Declarations for use with local arrays:
  PetscScalar, pointer :: lx_v(:)

  ierr = 0
  PetscCallA(SNESGetDM(snes, da, ierr))
  PetscCallA(SNESGetApplicationContext(snes, puser, ierr))
  ! Get a pointer to the vector data.
  ! - For default PETSc vectors, VecGetArray() returns a pointer to
  !   the data array; for other vector types the routine is
  !   implementation dependent.
  ! - You MUST call VecRestoreArray() when you no longer need access
  !   to the array.
  PetscCallA(VecGetArray(X, lx_v, ierr))

  ! Compute the initial guess over the locally owned part of the grid
  PetscCallA(InitialGuessLocal(puser, lx_v, ierr))

  ! Restore vector
  PetscCallA(VecRestoreArray(X, lx_v, ierr))
end
361: ! ---------------------------------------------------------------------
362: !
363: ! InitialGuessLocal - Computes initial approximation, called by
364: ! the higher level routine FormInitialGuess().
365: !
366: ! Input Parameter:
367: ! x - local vector data
368: !
369: ! Output Parameters:
370: ! x - local vector data
371: ! ierr - error code
372: !
373: ! Notes:
374: ! This routine uses standard Fortran-style computations over a 2-dim array.
375: !
! InitialGuessLocal - Computes the initial approximation over the locally
! owned part of the grid; called by the higher-level FormInitialGuess().
!
! Input Parameters:
!   user - application context (grid ranges, global size, lambda)
!
! Output Parameters:
!   x    - local solution array (owned part only), filled in place
!   ierr - error code (always 0; no failure modes here)
!
! The guess is zero on the Dirichlet boundary and a lambda-scaled
! distance-to-boundary profile in the interior.
subroutine InitialGuessLocal(user, x, ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  type(userctx) user
  PetscScalar x(user%xs:user%xe, user%ys:user%ye)
  PetscErrorCode ierr

  ! Local variables:
  PetscInt i, j
  PetscReal temp1, temp, hx, hy
  PetscReal one

  ! Mesh spacings and the lambda-dependent amplitude factor
  ierr = 0
  one = 1.0
  hx = one/(user%mx - 1)
  hy = one/(user%my - 1)
  temp1 = user%lambda/(user%lambda + one)

  do j = user%ys, user%ye
    ! distance (in y) from the nearest horizontal boundary
    temp = min(j - 1, user%my - j)*hy
    do i = user%xs, user%xe
      if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
        ! boundary points: u = 0
        x(i, j) = 0.0
      else
        ! interior: scaled sqrt of the distance to the nearest boundary
        x(i, j) = temp1*sqrt(min(hx*min(i - 1, user%mx - i), temp))
      end if
    end do
  end do
end subroutine InitialGuessLocal
411: ! ---------------------------------------------------------------------
412: !
413: ! FormFunctionLocal - Computes nonlinear function, called by
414: ! the higher level routine FormFunction().
415: !
416: ! Input Parameter:
417: ! x - local vector data
418: !
419: ! Output Parameters:
420: ! f - local vector data, f(x)
421: ! ierr - error code
422: !
423: ! Notes:
424: ! This routine uses standard Fortran-style computations over a 2-dim array.
425: !
! FormFunctionLocal - Computes the nonlinear residual F(x) over the
! locally owned part of the grid; called by the higher-level
! FormFunction().
!
! Input Parameters:
!   x    - ghosted local solution array
!   user - application context (grid ranges, global size, lambda)
!
! Output Parameters:
!   f    - residual array over the owned part of the grid
!   ierr - error code (always 0; no failure modes here)
!
! Discretization: standard 5-point finite-difference Laplacian with the
! Bratu source term -lambda*exp(u); boundary rows enforce u = 0.
subroutine FormFunctionLocal(x, f, user, ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  type(userctx) user
  PetscScalar x(user%gxs:user%gxe, user%gys:user%gye)
  PetscScalar f(user%xs:user%xe, user%ys:user%ye)
  PetscErrorCode ierr

  ! Local variables:
  PetscScalar two, one, hx, hy, hxdhy, hydhx, sc
  PetscScalar u, uxx, uyy
  PetscInt i, j

  ierr = 0
  one = 1.0
  two = 2.0
  hx = one/(user%mx - 1)
  hy = one/(user%my - 1)
  sc = hx*hy*user%lambda   ! scaled nonlinearity coefficient
  hxdhy = hx/hy
  hydhx = hy/hx

  ! Compute the function over the locally owned part of the grid
  do j = user%ys, user%ye
    do i = user%xs, user%xe
      if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
        ! Dirichlet boundary: residual enforces u = 0
        f(i, j) = x(i, j)
      else
        ! Interior: 5-point Laplacian minus the Bratu source term
        u = x(i, j)
        uxx = hydhx*(two*u - x(i - 1, j) - x(i + 1, j))
        uyy = hxdhy*(two*u - x(i, j - 1) - x(i, j + 1))
        f(i, j) = uxx + uyy - sc*exp(u)
      end if
    end do
  end do
end subroutine FormFunctionLocal
467: ! ---------------------------------------------------------------------
468: !
469: ! FormJacobian - Evaluates Jacobian matrix.
470: !
471: ! Input Parameters:
472: ! snes - the SNES context
473: ! x - input vector
474: ! dummy - optional user-defined context, as set by SNESSetJacobian()
475: ! (not used here)
476: !
477: ! Output Parameters:
478: ! jac - Jacobian matrix
479: ! jac_prec - optionally different matrix used to construct the preconditioner (not used here)
480: !
481: ! Notes:
482: ! This routine serves as a wrapper for the lower-level routine
483: ! "FormJacobianLocal", where the actual computations are
484: ! done using the standard Fortran style of treating the local
485: ! vector data as a multidimensional array over the local mesh.
486: ! This routine merely accesses the local vector data via
487: ! VecGetArray() and VecRestoreArray().
488: !
489: ! Notes:
490: ! Due to grid point reordering with DMDAs, we must always work
491: ! with the local grid points, and then transform them to the new
492: ! global numbering with the "ltog" mapping
493: ! We cannot work directly with the global numbers for the original
494: ! uniprocessor grid!
495: !
496: ! Two methods are available for imposing this transformation
497: ! when setting matrix entries:
498: ! (A) MatSetValuesLocal(), using the local ordering (including
499: ! ghost points!)
500: ! - Set matrix entries using the local ordering
501: ! by calling MatSetValuesLocal()
502: ! (B) MatSetValues(), using the global ordering
504: ! - Set matrix entries using the global ordering by calling
505: ! MatSetValues()
506: ! Option (A) seems cleaner/easier in many cases, and is the procedure
507: ! used in this example.
508: !
subroutine FormJacobian(snes, X, jac, jac_prec, user, ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  SNES snes                  ! SNES context
  Vec X                      ! current solution (global, unghosted)
  Mat jac, jac_prec          ! Jacobian; matrix used to build the preconditioner
  type(userctx) user         ! application context
  PetscErrorCode ierr        ! error code (0 on success)
  DM da

  ! Declarations for use with local arrays:
  PetscScalar, pointer :: lx_v(:)
  Vec localX

  ! Scatter ghost points to the local vector, using the 2-step process
  ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  ! Computations can be done while messages are in transition,
  ! by placing code between these two statements.
  PetscCallA(SNESGetDM(snes, da, ierr))
  PetscCallA(DMGetLocalVector(da, localX, ierr))
  PetscCallA(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX, ierr))
  PetscCallA(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX, ierr))

  ! Get a pointer to the (ghosted) vector data
  PetscCallA(VecGetArray(localX, lx_v, ierr))

  ! Compute entries for the locally owned part of the Jacobian preconditioner.
  PetscCallA(FormJacobianLocal(lx_v, jac_prec, user, ierr))

  ! Assemble matrix, using the 2-step process:
  !   MatAssemblyBegin(), MatAssemblyEnd()
  ! The vector restore and local-vector return are deliberately placed
  ! between Begin and End so that local cleanup overlaps with the
  ! communication of off-process matrix entries.
  PetscCallA(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY, ierr))
  if (jac /= jac_prec) then
    PetscCallA(MatAssemblyBegin(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
  end if
  PetscCallA(VecRestoreArray(localX, lx_v, ierr))
  PetscCallA(DMRestoreLocalVector(da, localX, ierr))
  PetscCallA(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY, ierr))
  if (jac /= jac_prec) then
    PetscCallA(MatAssemblyEnd(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
  end if

  ! Tell the matrix we will never add a new nonzero location to the
  ! matrix.  If we do, it will generate an error.
  PetscCallA(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE, ierr))
end
564: ! ---------------------------------------------------------------------
565: !
566: ! FormJacobianLocal - Computes Jacobian matrix used to compute the preconditioner,
567: ! called by the higher level routine FormJacobian().
568: !
569: ! Input Parameters:
570: ! x - local vector data
571: !
572: ! Output Parameters:
573: ! jac_prec - Jacobian matrix used to compute the preconditioner
574: ! ierr - error code
575: !
576: ! Notes:
577: ! This routine uses standard Fortran-style computations over a 2-dim array.
578: !
579: ! Notes:
580: ! Due to grid point reordering with DMDAs, we must always work
581: ! with the local grid points, and then transform them to the new
582: ! global numbering with the "ltog" mapping
583: ! We cannot work directly with the global numbers for the original
584: ! uniprocessor grid!
585: !
586: ! Two methods are available for imposing this transformation
587: ! when setting matrix entries:
588: ! (A) MatSetValuesLocal(), using the local ordering (including
589: ! ghost points!)
590: ! - Set matrix entries using the local ordering
591: ! by calling MatSetValuesLocal()
592: ! (B) MatSetValues(), using the global ordering
593: ! - Then apply this map explicitly yourself
594: ! - Set matrix entries using the global ordering by calling
595: ! MatSetValues()
596: ! Option (A) seems cleaner/easier in many cases, and is the procedure
597: ! used in this example.
598: !
! FormJacobianLocal - Computes the Jacobian entries for the locally owned
! rows; called by the higher-level FormJacobian().
!
! Input Parameters:
!   x    - ghosted local solution array
!   user - application context (grid ranges, global size, lambda)
!
! Output Parameters:
!   jac_prec - matrix used to construct the preconditioner
!   ierr     - error code (0 on success)
!
! Entries are set with MatSetValuesLocal() using the DMDA's local
! (ghosted) ordering, so row/column indices are computed relative to the
! ghosted patch; PETSc maps them to the global ordering internally.
! Rows for boundary points get the identity; interior rows get the
! 5-point stencil of the linearized operator.
subroutine FormJacobianLocal(x, jac_prec, user, ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  type(userctx) user
  PetscScalar x(user%gxs:user%gxe, user%gys:user%gye)
  Mat jac_prec
  PetscErrorCode ierr

  ! Local variables:
  PetscInt row, col(5), i, j
  PetscInt ione, ifive
  PetscScalar two, one, hx, hy, hxdhy
  PetscScalar hydhx, sc, v(5)

  ! Set parameters
  ierr = 0
  ione = 1
  ifive = 5
  one = 1.0
  two = 2.0
  hx = one/(user%mx - 1)
  hy = one/(user%my - 1)
  sc = hx*hy
  hxdhy = hx/hy
  hydhx = hy/hx

  ! Each process inserts only the rows it owns; any entries coupling to
  ! off-process columns are communicated during matrix assembly.
  ! Note that MatSetValuesLocal() uses 0-based row and column numbers
  ! in Fortran as well as in C.
  do j = user%ys, user%ye
    ! 0-based local row index of the element just before (xs, j)
    row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
    do i = user%xs, user%xe
      row = row + 1
      if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
        ! boundary points: identity row enforces the Dirichlet condition
        col(1) = row
        v(1) = one
        PetscCallA(MatSetValuesLocal(jac_prec, ione, [row], ione, col, v, INSERT_VALUES, ierr))
      else
        ! interior grid points: 5-point stencil of the linearized operator
        v(1) = -hxdhy
        v(2) = -hydhx
        v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i, j))
        v(4) = -hydhx
        v(5) = -hxdhy
        col(1) = row - user%gxm   ! south neighbor
        col(2) = row - 1          ! west neighbor
        col(3) = row              ! center
        col(4) = row + 1          ! east neighbor
        col(5) = row + user%gxm   ! north neighbor
        PetscCallA(MatSetValuesLocal(jac_prec, ione, [row], ifive, col, v, INSERT_VALUES, ierr))
      end if
    end do
  end do
end subroutine FormJacobianLocal
666: !
667: !/*TEST
668: !
669: ! test:
670: ! nsize: 4
671: ! args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
672: ! requires: !single
673: !
674: ! test:
675: ! suffix: 2
676: ! nsize: 4
677: ! args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
678: ! requires: !single
679: !
680: ! test:
681: ! suffix: 3
682: ! nsize: 3
683: ! args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
684: ! requires: !single
685: !
686: ! test:
687: ! suffix: 4
688: ! nsize: 3
689: ! args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
690: ! requires: !single
691: !
692: ! test:
693: ! suffix: 5
694: ! requires: !single
695: !
696: !TEST*/