! ex5f90.F90
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
! --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
!  The uniprocessor version of this code is snes/tutorials/ex4f.F
!
! --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface. This is because they can't handle declarations
!  in them
!
module ex5f90module
use petscsnes
use petscdmda
#include <petsc/finclude/petscsnes.h>

! Application context passed between main() and the callback routines:
! local owned extents (xs:xe by ys:ye, widths xm/ym), ghosted extents
! (gxs:gxe by gys:gye, widths gxm/gym), global grid size (mx,my),
! the MPI rank, and the Bratu parameter lambda.
type userctx
  PetscInt xs,xe,xm,gxs,gxe,gxm
  PetscInt ys,ye,ym,gys,gye,gym
  PetscInt mx,my
  PetscMPIInt rank
  PetscReal lambda
end type userctx

contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates the nonlinear function F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X    - input vector
!  user - user-defined application context, as set by SNESSetFunction()
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  FormFunctionLocal, where the actual computations are done using
!  the standard Fortran style of treating the local vector data as a
!  multidimensional array over the local mesh.  This routine merely
!  handles ghost point scatters and accesses the local vector data
!  via VecGetArrayF90() and VecRestoreArrayF90().
!
subroutine FormFunction(snes,X,F,user,ierr)
  implicit none

  ! Input/output variables:
  SNES snes
  Vec X,F
  PetscErrorCode ierr
  type (userctx) user
  DM da

  ! Declarations for use with local arrays:
  PetscScalar,pointer :: lx_v(:),lf_v(:)
  Vec localX

  ! Scatter ghost points to local vector, using the 2-step process
  ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().  By placing code
  ! between these two statements, computations can be done while
  ! messages are in transition.
  PetscCall(SNESGetDM(snes,da,ierr))
  PetscCall(DMGetLocalVector(da,localX,ierr))
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr))
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr))

  ! Get a pointer to vector data.
  !  - For default PETSc vectors, VecGetArrayF90() returns a pointer to
  !    the data array. Otherwise, the routine is implementation dependent.
  !  - You MUST call VecRestoreArrayF90() when you no longer need access
  !    to the array.
  PetscCall(VecGetArrayF90(localX,lx_v,ierr))
  PetscCall(VecGetArrayF90(F,lf_v,ierr))

  ! Compute function over the locally owned part of the grid
  PetscCall(FormFunctionLocal(lx_v,lf_v,user,ierr))

  ! Restore vectors
  PetscCall(VecRestoreArrayF90(localX,lx_v,ierr))
  PetscCall(VecRestoreArrayF90(F,lf_v,ierr))

  ! Return the work vector; flop count is 11 per interior grid point
  PetscCall(DMRestoreLocalVector(da,localX,ierr))
  PetscCall(PetscLogFlops(11.0d0*user%ym*user%xm,ierr))

  ! PetscCallA(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
  ! PetscCallA(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
end subroutine FormFunction
end module ex5f90module
module ex5f90moduleinterfaces
use ex5f90module

! Explicit interfaces so that the user-defined context type (userctx)
! can be passed to/retrieved from the SNES object with full compile-time
! type checking.  Note: SNESGetApplicationContext returns a pointer to
! the context, so ctx must be declared with the pointer attribute there.
Interface SNESSetApplicationContext
  Subroutine SNESSetApplicationContext(snes,ctx,ierr)
    use ex5f90module
    SNES snes
    type(userctx) ctx
    PetscErrorCode ierr
  End Subroutine
End Interface SNESSetApplicationContext

Interface SNESGetApplicationContext
  Subroutine SNESGetApplicationContext(snes,ctx,ierr)
    use ex5f90module
    SNES snes
    type(userctx), pointer :: ctx
    PetscErrorCode ierr
  End Subroutine
End Interface SNESGetApplicationContext
end module ex5f90moduleinterfaces
program main
use ex5f90module
use ex5f90moduleinterfaces
implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     matrix_free - flag - true indicates matrix-free version
!
SNES snes
Vec x,r
Mat J
PetscErrorCode ierr
PetscInt its
PetscBool flg,matrix_free
PetscInt ione,nfour
PetscReal lambda_max,lambda_min
type (userctx) user
DM da

! Note: Any user-defined Fortran routines (such as FormJacobian)
! MUST be declared as external.
external FormInitialGuess,FormJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(PetscInitialize(ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr))

! Initialize problem parameters; -par overrides the default lambda and
! must lie strictly inside (lambda_min, lambda_max)
lambda_max = 6.81
lambda_min = 0.0
user%lambda = 6.0
ione = 1
nfour = 4
PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr))
PetscCheckA(user%lambda .lt. lambda_max .and. user%lambda .gt. lambda_min,PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range')

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(SNESCreate(PETSC_COMM_WORLD,snes,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Create distributed array (DMDA) to manage parallel grid and vectors.
! This really needs only the star-type stencil, but we use the box
! stencil temporarily.
PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,da,ierr))
PetscCallA(DMSetFromOptions(da,ierr))
PetscCallA(DMSetUp(da,ierr))
PetscCallA(DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,PETSC_NULL_ENUM,ierr))

!  Visualize the distribution of the array across the processors
! PetscCallA(DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr))

! Extract global and local vectors from DMDA; then duplicate for remaining
! vectors that are the same types
PetscCallA(DMCreateGlobalVector(da,x,ierr))
PetscCallA(VecDuplicate(x,r,ierr))

! Get local grid boundaries (for 2-dimensional DMDA)
PetscCallA(DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr))
PetscCallA(DMDAGetGhostCorners(da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr))

! Here we shift the starting indices up by one so that we can easily
! use the Fortran convention of 1-based indices (rather 0-based indices).
user%xs  = user%xs+1
user%ys  = user%ys+1
user%gxs = user%gxs+1
user%gys = user%gys+1

user%ye  = user%ys+user%ym-1
user%xe  = user%xs+user%xm-1
user%gye = user%gys+user%gym-1
user%gxe = user%gxs+user%gxm-1

PetscCallA(SNESSetApplicationContext(snes,user,ierr))

! Set function evaluation routine and vector
PetscCallA(SNESSetFunction(snes,r,FormFunction,user,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Set Jacobian matrix data structure and default Jacobian evaluation
! routine. User can override with:
!    -snes_fd : default finite differencing approximation of Jacobian
!    -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!               (unless user explicitly sets preconditioner)
!    -snes_mf_operator : form preconditioning matrix as set by the user,
!                        but use matrix-free approx for Jacobian-vector
!                        products within Newton-Krylov method
!
! Note: For the parallel case, vectors and matrices MUST be partitioned
! accordingly.  When using distributed arrays (DMDAs) to create vectors,
! the DMDAs determine the problem partitioning, so the generic
! MatCreate() routine is NOT sufficient here; DMCreateMatrix() builds a
! matrix compatible with the vector distribution (with approximate
! preallocation).
PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr))
if (.not. matrix_free) then
  PetscCallA(DMSetMatType(da,MATAIJ,ierr))
  PetscCallA(DMCreateMatrix(da,J,ierr))
  PetscCallA(SNESSetJacobian(snes,J,J,FormJacobian,user,ierr))
endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
PetscCallA(SNESSetDM(snes,da,ierr))
PetscCallA(SNESSetFromOptions(snes,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Note: The user should initialize the vector, x, with the initial guess
! for the nonlinear solver prior to calling SNESSolve().  In particular,
! to employ an initial guess of zero, the user should explicitly set
! this vector to zero by calling VecSet().
PetscCallA(FormInitialGuess(snes,x,ierr))
PetscCallA(SNESSolve(snes,PETSC_NULL_VEC,x,ierr))
PetscCallA(SNESGetIterationNumber(snes,its,ierr))
if (user%rank .eq. 0) then
  write(6,100) its
endif
100 format('Number of SNES iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if (.not. matrix_free) PetscCallA(MatDestroy(J,ierr))
PetscCallA(VecDestroy(x,ierr))
PetscCallA(VecDestroy(r,ierr))
PetscCallA(SNESDestroy(snes,ierr))
PetscCallA(DMDestroy(da,ierr))
PetscCallA(PetscFinalize(ierr))
end
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  snes - the SNES context (used to retrieve the DM and the
!         application context)
!  X    - vector to fill
!
!  Output Parameters:
!  X    - vector holding the initial guess
!  ierr - error code
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  InitialGuessLocal, where the actual computations are done using
!  the standard Fortran style of treating the local vector data as a
!  multidimensional array over the local mesh.  This routine merely
!  accesses the local vector data via VecGetArrayF90() and
!  VecRestoreArrayF90().
!
subroutine FormInitialGuess(snes,X,ierr)
  use ex5f90module
  use ex5f90moduleinterfaces
  implicit none

  ! Input/output variables:
  SNES snes
  type(userctx), pointer:: puser
  Vec X
  PetscErrorCode ierr
  DM da

  ! Declarations for use with local arrays:
  PetscScalar,pointer :: lx_v(:)

  ierr = 0
  PetscCallA(SNESGetDM(snes,da,ierr))
  PetscCallA(SNESGetApplicationContext(snes,puser,ierr))

  ! Get a pointer to vector data.
  !  - For default PETSc vectors, VecGetArrayF90() returns a pointer to
  !    the data array. Otherwise, the routine is implementation dependent.
  !  - You MUST call VecRestoreArrayF90() when you no longer need access
  !    to the array.
  PetscCallA(VecGetArrayF90(X,lx_v,ierr))

  ! Compute initial guess over the locally owned part of the grid
  PetscCallA(InitialGuessLocal(puser,lx_v,ierr))

  ! Restore vector
  PetscCallA(VecRestoreArrayF90(X,lx_v,ierr))
end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameters:
!  user - application context (grid extents, mx/my, lambda)
!  x    - local vector data
!
!  Output Parameters:
!  x    - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim
!  array indexed by the locally owned range xs:xe, ys:ye.
!
subroutine InitialGuessLocal(user,x,ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  type (userctx) user
  PetscScalar x(user%xs:user%xe,user%ys:user%ye)
  PetscErrorCode ierr

  ! Local variables:
  PetscInt  i,j
  PetscReal temp1,temp,hx,hy
  PetscReal one

  ! Set parameters
  ierr  = 0
  one   = 1.0
  hx    = one/(user%mx-1)
  hy    = one/(user%my-1)
  temp1 = user%lambda/(user%lambda + one)

  do j=user%ys,user%ye
    ! distance (in y) from the nearer horizontal boundary
    temp = min(j-1,user%my-j)*hy
    do i=user%xs,user%xe
      if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
        ! boundary points: homogeneous Dirichlet condition u = 0
        x(i,j) = 0.0
      else
        ! interior: scaled distance-to-boundary profile
        x(i,j) = temp1 * sqrt(min(hx*min(i-1,user%mx-i),temp))
      endif
    end do
  end do
end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameters:
!  x    - local (ghosted) vector data
!  user - application context (grid extents, mx/my, lambda)
!
!  Output Parameters:
!  f    - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim
!  array.  x is indexed over the ghosted range gxs:gxe, gys:gye, while
!  f covers only the locally owned range xs:xe, ys:ye.
!
subroutine FormFunctionLocal(x,f,user,ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  type (userctx) user
  PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
  PetscScalar f(user%xs:user%xe,user%ys:user%ye)
  PetscErrorCode ierr

  ! Local variables:
  PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
  PetscScalar u,uxx,uyy
  PetscInt  i,j

  one   = 1.0
  two   = 2.0
  hx    = one/(user%mx-1)
  hy    = one/(user%my-1)
  sc    = hx*hy*user%lambda
  hxdhy = hx/hy
  hydhx = hy/hx

  ! Compute function over the locally owned part of the grid
  do j=user%ys,user%ye
    do i=user%xs,user%xe
      if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
        ! boundary rows: residual enforces u = 0
        f(i,j) = x(i,j)
      else
        ! 5-point stencil: -Laplacian u - lambda*exp(u), scaled by hx*hy
        u = x(i,j)
        uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
        uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
        f(i,j) = uxx + uyy - sc*exp(u)
      endif
    end do
  end do
end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes - the SNES context
!  X    - input vector
!  user - user-defined context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  FormJacobianLocal, where the actual computations are done using
!  the standard Fortran style of treating the local vector data as a
!  multidimensional array over the local mesh.  This routine merely
!  accesses the local vector data via VecGetArrayF90() and
!  VecRestoreArrayF90().
!
!  Due to grid point reordering with DMDAs, we must always work with the
!  local grid points, and then transform them to the new global numbering
!  with the local-to-global ("ltog") mapping.  We cannot work directly
!  with the global numbers of the original uniprocessor grid!  Two
!  methods are available for this transformation when setting entries:
!    (A) MatSetValuesLocal(), using the local ordering (including ghost
!        points)
!    (B) MatSetValues(), using the global ordering (the map must then be
!        applied explicitly)
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  SNES snes
  Vec  X
  Mat  jac,jac_prec
  type(userctx) user
  PetscErrorCode ierr
  DM da

  ! Declarations for use with local arrays:
  PetscScalar,pointer :: lx_v(:)
  Vec localX

  ! Scatter ghost points to local vector, using the 2-step process
  ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().  Computations can be
  ! done while messages are in transition, by placing code between
  ! these two statements.
  PetscCallA(SNESGetDM(snes,da,ierr))
  PetscCallA(DMGetLocalVector(da,localX,ierr))
  PetscCallA(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr))
  PetscCallA(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr))

  ! Get a pointer to vector data
  PetscCallA(VecGetArrayF90(localX,lx_v,ierr))

  ! Compute entries for the locally owned part of the Jacobian preconditioner.
  PetscCallA(FormJacobianLocal(lx_v,jac_prec,user,ierr))

  ! Assemble matrix, using the 2-step process:
  ! MatAssemblyBegin(), MatAssemblyEnd().  Computations can be done
  ! while messages are in transition, by placing code between these
  ! two statements (here, the local array is restored and the work
  ! vector is returned).
  PetscCallA(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr))
  if (jac .ne. jac_prec) then
    PetscCallA(MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
  endif
  PetscCallA(VecRestoreArrayF90(localX,lx_v,ierr))
  PetscCallA(DMRestoreLocalVector(da,localX,ierr))
  PetscCallA(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr))
  if (jac .ne. jac_prec) then
    PetscCallA(MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
  endif

  ! Tell the matrix we will never add a new nonzero location to the
  ! matrix.  If we do it will generate an error.
  PetscCallA(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr))
end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x    - local (ghosted) vector data
!  user - application context (grid extents, mx/my, lambda)
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim
!  array.  Due to grid point reordering with DMDAs we set entries with
!  MatSetValuesLocal() using the local (ghosted) ordering; row/column
!  indices are 0-based, as in C.
!
!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Each processor
!  needs to insert only elements that it owns locally (but any
!  non-local elements will be sent to the appropriate processor during
!  matrix assembly).  Here, we set all entries for a particular row at
!  once.
!
subroutine FormJacobianLocal(x,jac_prec,user,ierr)
  use ex5f90module
  implicit none

  ! Input/output variables:
  type (userctx) user
  PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
  Mat jac_prec
  PetscErrorCode ierr

  ! Local variables:
  PetscInt row,col(5),i,j
  PetscInt ione,ifive
  PetscScalar two,one,hx,hy,hxdhy
  PetscScalar hydhx,sc,v(5)

  ! Set parameters
  ione  = 1
  ifive = 5
  one   = 1.0
  two   = 2.0
  hx    = one/(user%mx-1)
  hy    = one/(user%my-1)
  sc    = hx*hy
  hxdhy = hx/hy
  hydhx = hy/hx

  ! Compute entries for the locally owned part of the Jacobian
  do j=user%ys,user%ye
    ! 0-based local row index of the point just before (xs,j) in the
    ! ghosted natural ordering; incremented at the top of the i loop
    row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
    do i=user%xs,user%xe
      row = row + 1
      if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
        ! boundary points: identity row enforces the Dirichlet condition
        col(1) = row
        v(1)   = one
        PetscCallA(MatSetValuesLocal(jac_prec,ione,[row],ione,col,v,INSERT_VALUES,ierr))
      else
        ! interior grid points: 5-point stencil; the diagonal carries
        ! the derivative of the -sc*lambda*exp(u) nonlinearity
        v(1) = -hxdhy
        v(2) = -hydhx
        v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
        v(4) = -hydhx
        v(5) = -hxdhy
        col(1) = row - user%gxm
        col(2) = row - 1
        col(3) = row
        col(4) = row + 1
        col(5) = row + user%gxm
        PetscCallA(MatSetValuesLocal(jac_prec,ione,[row],ifive,col,v,INSERT_VALUES,ierr))
      endif
    end do
  end do
end
!
!/*TEST
!
!   test:
!      nsize: 4
!      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 2
!      nsize: 4
!      args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 3
!      nsize: 3
!      args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 4
!      nsize: 3
!      args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 5
!      requires: !single
!
!TEST*/