Actual source code: ex5f90t.F90
1: !
2: ! Description: Solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DMDAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
7: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
8: !
9: !
10: ! --------------------------------------------------------------------------
11: !
12: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
13: ! the partial differential equation
14: !
15: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
16: !
17: ! with boundary conditions
18: !
19: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
20: !
21: ! A finite difference approximation with the usual 5-point stencil
22: ! is used to discretize the boundary value problem to obtain a nonlinear
23: ! system of equations.
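!
! For reference, the discrete residual evaluated by FormFunctionLocal()
! below is, at an interior node (i,j) with hx = 1/(mx-1) and hy = 1/(my-1),
!
!   F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!          + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!          - hx*hy*lambda*exp(u(i,j))
!
! while F(i,j) = u(i,j) enforces the boundary condition at boundary nodes.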
24: !
25: ! The uniprocessor version of this code is snes/tutorials/ex4f.F
26: !
27: ! --------------------------------------------------------------------------
28: ! The following #include statements must appear before any module or
29: ! interface that uses PETSc, because modules and interfaces cannot handle
30: ! the declarations contained in the include files.
31: !
32: #include <petsc/finclude/petscdmda.h>
33: #include <petsc/finclude/petscsnes.h>
34: module ex5f90tmodule
35: use petscsnes
36: use petscdmda
37: implicit none
38: type userctx
39: type(tDM) da
40: PetscInt xs, xe, xm, gxs, gxe, gxm
41: PetscInt ys, ye, ym, gys, gye, gym
42: PetscInt mx, my
43: PetscMPIInt rank
44: PetscReal lambda
45: end type userctx
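! The interface below redeclares SNESSetApplicationContext() and
! SNESGetApplicationContext() so that the user-defined type(userctx) above
! can be passed directly as the application context.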
47: interface
48: subroutine SNESSetApplicationContext(snesIn, ctx, ierr)
49: use petscsnes
50: import userctx
51: type(tSNES) snesIn
52: type(userctx) ctx
53: PetscErrorCode ierr
54: end subroutine
55: subroutine SNESGetApplicationContext(snesIn, ctx, ierr)
56: use petscsnes
57: import userctx
58: type(tSNES) snesIn
59: type(userctx), pointer :: ctx
60: PetscErrorCode ierr
61: end subroutine
62: end interface
64: contains
65: ! ---------------------------------------------------------------------
66: !
67: ! FormFunction - Evaluates nonlinear function, F(x).
68: !
69: ! Input Parameters:
70: ! snes - the SNES context
71: ! X - input vector
72: ! user - user-defined application context, as set by SNESSetFunction()
73: ! (provides the DMDA and local grid information)
74: !
75: ! Output Parameter:
76: ! F - function vector
77: !
78: ! Notes:
79: ! This routine serves as a wrapper for the lower-level routine
80: ! "FormFunctionLocal", where the actual computations are
81: ! done using the standard Fortran style of treating the local
82: ! vector data as a multidimensional array over the local mesh.
83: ! This routine merely handles ghost point scatters and accesses
84: ! the local vector data via VecGetArray() and VecRestoreArray().
85: !
86: subroutine FormFunction(snesIn, X, F, user, ierr)
87: ! Input/output variables:
88: type(tSNES) snesIn
89: type(tVec) X, F
90: PetscErrorCode ierr
91: type(userctx) user
93: ! Declarations for use with local arrays:
94: PetscScalar, pointer :: lx_v(:), lf_v(:)
95: type(tVec) localX
97: ! Scatter ghost points to local vector, using the 2-step process
98: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
99: ! By placing code between these two statements, computations can
100: ! be done while messages are in transit.
101: PetscCall(DMGetLocalVector(user%da, localX, ierr))
102: PetscCall(DMGlobalToLocalBegin(user%da, X, INSERT_VALUES, localX, ierr))
103: PetscCall(DMGlobalToLocalEnd(user%da, X, INSERT_VALUES, localX, ierr))
105: ! Get a pointer to vector data.
106: ! - VecGetArray() returns a pointer to the data array.
107: ! - You MUST call VecRestoreArray() when you no longer need access to
108: ! the array.
110: PetscCall(VecGetArray(localX, lx_v, ierr))
111: PetscCall(VecGetArray(F, lf_v, ierr))
113: ! Compute function over the locally owned part of the grid
114: PetscCall(FormFunctionLocal(lx_v, lf_v, user, ierr))
116: ! Restore vectors
117: PetscCall(VecRestoreArray(localX, lx_v, ierr))
118: PetscCall(VecRestoreArray(F, lf_v, ierr))
120: ! Return the local work vector to the DM
122: PetscCall(DMRestoreLocalVector(user%da, localX, ierr))
123: PetscCall(PetscLogFlops(11.0d0*user%ym*user%xm, ierr))
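! (the count corresponds to roughly 11 floating point operations per locally
! owned grid point in FormFunctionLocal(), not counting the exponential)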
125: ! PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
126: ! PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
127: end subroutine formfunction
129: ! ---------------------------------------------------------------------
130: !
131: ! FormInitialGuess - Forms initial approximation.
132: !
133: ! Input Parameter:
134: ! mysnes - the SNES context
135: !
136: ! Output Parameter:
137: ! X - vector holding the initial approximation
138: !
139: ! Notes:
140: ! This routine serves as a wrapper for the lower-level routine
141: ! "InitialGuessLocal", where the actual computations are
142: ! done using the standard Fortran style of treating the local
143: ! vector data as a multidimensional array over the local mesh.
144: ! This routine merely accesses the local vector data via
145: ! VecGetArray() and VecRestoreArray().
146: !
147: subroutine FormInitialGuess(mysnes, X, ierr)
148: ! Input/output variables:
149: type(tSNES) mysnes
150: type(userctx), pointer:: puser
151: type(tVec) X
152: PetscErrorCode ierr
154: ! Declarations for use with local arrays:
155: PetscScalar, pointer :: lx_v(:)
157: ierr = 0
158: PetscCallA(SNESGetApplicationContext(mysnes, puser, ierr))
159: ! Get a pointer to vector data.
160: ! - VecGetArray() returns a pointer to the data array.
161: ! - You MUST call VecRestoreArray() when you no longer need access to
162: ! the array.
164: PetscCallA(VecGetArray(X, lx_v, ierr))
166: ! Compute initial guess over the locally owned part of the grid
167: PetscCallA(InitialGuessLocal(puser, lx_v, ierr))
169: ! Restore vector
170: PetscCallA(VecRestoreArray(X, lx_v, ierr))
174: end
176: ! ---------------------------------------------------------------------
177: !
178: ! InitialGuessLocal - Computes initial approximation, called by
179: ! the higher level routine FormInitialGuess().
180: !
181: ! Input Parameter:
182: ! x - local vector data
183: !
184: ! Output Parameters:
185: ! x - local vector data
186: ! ierr - error code
187: !
188: ! Notes:
189: ! This routine uses standard Fortran-style computations over a 2-dim array.
190: !
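! The guess assembled below is u0(x_i,y_j) = (lambda/(lambda+1))*sqrt(d_ij),
! where d_ij = min(min(i-1,mx-i)*hx, min(j-1,my-j)*hy) is the distance from
! grid point (i,j) to the nearest boundary; boundary points are set to zero.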
191: subroutine InitialGuessLocal(user, x, ierr)
192: ! Input/output variables:
193: type(userctx) user
194: PetscScalar x(user%xs:user%xe, user%ys:user%ye)
195: PetscErrorCode ierr
197: ! Local variables:
198: PetscInt i, j
199: PetscScalar temp1, temp, hx, hy
200: PetscScalar one
202: ! Set parameters
204: ierr = 0
205: one = 1.0
206: hx = one/(PetscIntToReal(user%mx - 1))
207: hy = one/(PetscIntToReal(user%my - 1))
208: temp1 = user%lambda/(user%lambda + one)
210: do j = user%ys, user%ye
211: temp = PetscIntToReal(min(j - 1, user%my - j))*hy
212: do i = user%xs, user%xe
213: if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
214: x(i, j) = 0.0
215: else
216: x(i, j) = temp1*sqrt(min(PetscIntToReal(min(i - 1, user%mx - i)*hx), PetscIntToReal(temp)))
217: end if
218: end do
219: end do
221: end
223: ! ---------------------------------------------------------------------
224: !
225: ! FormFunctionLocal - Computes nonlinear function, called by
226: ! the higher level routine FormFunction().
227: !
228: ! Input Parameter:
229: ! x - local vector data
230: !
231: ! Output Parameters:
232: ! f - local vector data, f(x)
233: ! ierr - error code
234: !
235: ! Notes:
236: ! This routine uses standard Fortran-style computations over a 2-dim array.
237: !
238: subroutine FormFunctionLocal(x, f, user, ierr)
239: ! Input/output variables:
240: type(userctx) user
241: PetscScalar x(user%gxs:user%gxe, user%gys:user%gye)
242: PetscScalar f(user%xs:user%xe, user%ys:user%ye)
243: PetscErrorCode ierr
245: ! Local variables:
246: PetscScalar two, one, hx, hy, hxdhy, hydhx, sc
247: PetscScalar u, uxx, uyy
248: PetscInt i, j
250: one = 1.0
251: two = 2.0
252: hx = one/PetscIntToReal(user%mx - 1)
253: hy = one/PetscIntToReal(user%my - 1)
254: sc = hx*hy*user%lambda
255: hxdhy = hx/hy
256: hydhx = hy/hx
258: ! Compute function over the locally owned part of the grid
260: do j = user%ys, user%ye
261: do i = user%xs, user%xe
262: if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
263: f(i, j) = x(i, j)
264: else
265: u = x(i, j)
266: uxx = hydhx*(two*u - x(i - 1, j) - x(i + 1, j))
267: uyy = hxdhy*(two*u - x(i, j - 1) - x(i, j + 1))
268: f(i, j) = uxx + uyy - sc*exp(u)
269: end if
270: end do
271: end do
272: ierr = 0
273: end
275: ! ---------------------------------------------------------------------
276: !
277: ! FormJacobian - Evaluates Jacobian matrix.
278: !
279: ! Input Parameters:
280: ! snes - the SNES context
281: ! x - input vector
282: ! user - user-defined application context, as set by SNESSetJacobian()
283: ! (provides the DMDA and local grid information)
284: !
285: ! Output Parameters:
286: ! jac - Jacobian matrix
287: ! jac_prec - optionally different matrix used to construct the preconditioner (not used here)
288: !
289: ! Notes:
290: ! This routine serves as a wrapper for the lower-level routine
291: ! "FormJacobianLocal", where the actual computations are
292: ! done using the standard Fortran style of treating the local
293: ! vector data as a multidimensional array over the local mesh.
294: ! This routine merely accesses the local vector data via
295: ! VecGetArray() and VecRestoreArray().
296: !
297: ! Notes:
298: ! Due to grid point reordering with DMDAs, we must always work
299: ! with the local grid points, and then transform them to the new
300: ! global numbering with the "ltog" mapping
301: ! We cannot work directly with the global numbers for the original
302: ! uniprocessor grid!
303: !
304: ! Two methods are available for imposing this transformation
305: ! when setting matrix entries:
306: ! (A) MatSetValuesLocal(), using the local ordering (including
307: ! ghost points!)
308: ! - Set matrix entries using the local ordering
309: ! by calling MatSetValuesLocal()
310: ! (B) MatSetValues(), using the global ordering
311: ! - Use DMGetLocalToGlobalMapping() then
312: ! ISLocalToGlobalMappingGetIndices() to extract the local-to-global map
313: ! - Then apply this map explicitly yourself
314: ! - Set matrix entries using the global ordering by calling
315: ! MatSetValues()
316: ! Option (A) seems cleaner/easier in many cases, and is the procedure
317: ! used in this example.
318: !
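! A minimal sketch of option (B), which is NOT used in this example (the
! variable names ltogm and idx are illustrative only):
!
!     type(tISLocalToGlobalMapping) ltogm
!     PetscInt, pointer :: idx(:)
!     PetscCallA(DMGetLocalToGlobalMapping(user%da, ltogm, ierr))
!     PetscCallA(ISLocalToGlobalMappingGetIndices(ltogm, idx, ierr))
!     ! ... translate each local row/column index through idx(), then call
!     ! MatSetValues() with the resulting global indices ...
!     PetscCallA(ISLocalToGlobalMappingRestoreIndices(ltogm, idx, ierr))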
319: subroutine FormJacobian(mysnes, X, jac, jac_prec, user, ierr)
320: ! Input/output variables:
321: type(tSNES) mysnes
322: type(tVec) X
323: type(tMat) jac, jac_prec
324: type(userctx) user
325: PetscErrorCode ierr
327: ! Declarations for use with local arrays:
328: PetscScalar, pointer :: lx_v(:)
329: type(tVec) localX
331: ! Scatter ghost points to local vector, using the 2-step process
332: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
333: ! Computations can be done while messages are in transit,
334: ! by placing code between these two statements.
336: PetscCallA(DMGetLocalVector(user%da, localX, ierr))
337: PetscCallA(DMGlobalToLocalBegin(user%da, X, INSERT_VALUES, localX, ierr))
338: PetscCallA(DMGlobalToLocalEnd(user%da, X, INSERT_VALUES, localX, ierr))
340: ! Get a pointer to vector data
341: PetscCallA(VecGetArray(localX, lx_v, ierr))
343: ! Compute entries for the locally owned part of the matrix used to construct the preconditioner.
344: PetscCallA(FormJacobianLocal(lx_v, jac_prec, user, ierr))
346: ! Assemble matrix, using the 2-step process:
347: ! MatAssemblyBegin(), MatAssemblyEnd()
348: ! Computations can be done while messages are in transit,
349: ! by placing code between these two statements.
351: PetscCallA(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY, ierr))
352: ! if (jac .ne. jac_prec) then
353: PetscCallA(MatAssemblyBegin(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
354: ! endif
355: PetscCallA(VecRestoreArray(localX, lx_v, ierr))
356: PetscCallA(DMRestoreLocalVector(user%da, localX, ierr))
357: PetscCallA(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY, ierr))
358: ! if (jac .ne. jac_prec) then
359: PetscCallA(MatAssemblyEnd(jac_prec, MAT_FINAL_ASSEMBLY, ierr))
360: ! endif
362: ! Tell the matrix that we will never add a new nonzero location to the
363: ! matrix; if we do, it will generate an error.
365: PetscCallA(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE, ierr))
367: end
369: ! ---------------------------------------------------------------------
370: !
371: ! FormJacobianLocal - Computes Jacobian matrix used to compute the preconditioner,
372: ! called by the higher level routine FormJacobian().
373: !
374: ! Input Parameters:
375: ! x - local vector data
376: !
377: ! Output Parameters:
378: ! jac_prec - Jacobian matrix used to compute the preconditioner
379: ! ierr - error code
380: !
381: ! Notes:
382: ! This routine uses standard Fortran-style computations over a 2-dim array.
383: !
384: ! Notes:
385: ! Due to grid point reordering with DMDAs, we must always work
386: ! with the local grid points, and then transform them to the new
387: ! global numbering with the "ltog" mapping
388: ! We cannot work directly with the global numbers for the original
389: ! uniprocessor grid!
390: !
391: ! Two methods are available for imposing this transformation
392: ! when setting matrix entries:
393: ! (A) MatSetValuesLocal(), using the local ordering (including
394: ! ghost points!)
395: ! - Set matrix entries using the local ordering
396: ! by calling MatSetValuesLocal()
397: ! (B) MatSetValues(), using the global ordering
398: ! - Set matrix entries using the global ordering by calling
399: ! MatSetValues()
400: ! Option (A) seems cleaner/easier in many cases, and is the procedure
401: ! used in this example.
402: !
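! For an interior node, the Jacobian row assembled below contains
!     -hx/hy                at (i,j-1) and (i,j+1)
!     -hy/hx                at (i-1,j) and (i+1,j)
!     2*(hx/hy + hy/hx) - hx*hy*lambda*exp(x(i,j))  on the diagonal,
! while each boundary row reduces to a single 1 on the diagonal.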
403: subroutine FormJacobianLocal(x, jac_prec, user, ierr)
404: ! Input/output variables:
405: type(userctx) user
406: PetscScalar x(user%gxs:user%gxe, user%gys:user%gye)
407: type(tMat) jac_prec
408: PetscErrorCode ierr
410: ! Local variables:
411: PetscInt row, col(5), i, j
412: PetscInt ione, ifive
413: PetscScalar two, one, hx, hy, hxdhy
414: PetscScalar hydhx, sc, v(5)
416: ! Set parameters
417: ione = 1
418: ifive = 5
419: one = 1.0
420: two = 2.0
421: hx = one/PetscIntToReal(user%mx - 1)
422: hy = one/PetscIntToReal(user%my - 1)
423: sc = hx*hy
424: hxdhy = hx/hy
425: hydhx = hy/hx
427: ! Compute entries for the locally owned part of the Jacobian.
428: ! - Currently, all PETSc parallel matrix formats are partitioned by
429: ! contiguous chunks of rows across the processors.
430: ! - Each processor needs to insert only elements that it owns
431: ! locally (but any non-local elements will be sent to the
432: ! appropriate processor during matrix assembly).
433: ! - Here, we set all entries for a particular row at once.
434: ! - We can set matrix entries using either
435: ! MatSetValuesLocal() or MatSetValues(), as discussed above.
436: ! - Note that MatSetValues() uses 0-based row and column numbers
437: ! in Fortran as well as in C.
439: do j = user%ys, user%ye
440: row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
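! row starts one below the 0-based index, in the local (ghosted) ordering,
! of the first owned point in grid row j; it is incremented at the top of
! the i loop so it always holds the local row index passed to
! MatSetValuesLocal() below.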
441: do i = user%xs, user%xe
442: row = row + 1
443: ! boundary points
444: if (i == 1 .or. j == 1 .or. i == user%mx .or. j == user%my) then
445: col(1) = row
446: v(1) = one
447: PetscCallA(MatSetValuesLocal(jac_prec, ione, [row], ione, col, v, INSERT_VALUES, ierr))
448: ! interior grid points
449: else
450: v(1) = -hxdhy
451: v(2) = -hydhx
452: v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i, j))
453: v(4) = -hydhx
454: v(5) = -hxdhy
455: col(1) = row - user%gxm
456: col(2) = row - 1
457: col(3) = row
458: col(4) = row + 1
459: col(5) = row + user%gxm
460: PetscCallA(MatSetValuesLocal(jac_prec, ione, [row], ifive, col, v, INSERT_VALUES, ierr))
461: end if
462: end do
463: end do
464: end
466: end module
468: program main
469: use petscdmda
470: use petscsnes
471: use ex5f90tmodule
472: implicit none
473: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
474: ! Variable declarations
475: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
476: !
477: ! Variables:
478: ! mysnes - nonlinear solver
479: ! x, r - solution, residual vectors
480: ! J - Jacobian matrix
481: ! its - iterations for convergence
482: ! Nx, Ny - number of processors in the x- and y-directions
483: ! matrix_free - flag indicating whether the matrix-free version is used
484: !
485: type(tSNES) mysnes
486: type(tVec) x, r
487: type(tMat) J
488: PetscErrorCode ierr
489: PetscInt its
490: PetscBool flg, matrix_free, set
491: PetscInt ione, nfour
492: PetscReal lambda_max, lambda_min
493: type(userctx) user
494: type(userctx), pointer:: puser
495: type(tPetscOptions) :: options
497: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
498: ! Initialize program
499: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
500: PetscCallA(PetscInitialize(ierr))
501: PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, user%rank, ierr))
503: ! Initialize problem parameters
504: options%v = 0
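! (a zero handle makes "options" act as a null options object, so the calls
! below query the default global options database)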
505: lambda_max = 6.81
506: lambda_min = 0.0
507: user%lambda = 6.0
508: ione = 1
509: nfour = 4
510: PetscCallA(PetscOptionsGetReal(options, PETSC_NULL_CHARACTER, '-par', user%lambda, flg, ierr))
511: PetscCheckA(user%lambda < lambda_max .and. user%lambda > lambda_min, PETSC_COMM_SELF, PETSC_ERR_USER, 'Lambda provided with -par is out of range')
513: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
514: ! Create nonlinear solver context
515: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
516: PetscCallA(SNESCreate(PETSC_COMM_WORLD, mysnes, ierr))
518: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
519: ! Create vector data structures; set function evaluation routine
520: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
522: ! Create distributed array (DMDA) to manage parallel grid and vectors
524: ! This really needs only the star-type stencil, but we use the box
525: ! stencil temporarily.
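! (DMDA_STENCIL_STAR could be substituted for DMDA_STENCIL_BOX in the call
! below; the two nfour arguments give the initial global grid dimensions and
! the two ione arguments give the degrees of freedom per node and the
! stencil width)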
526: PetscCallA(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, nfour, nfour, PETSC_DECIDE, PETSC_DECIDE, ione, ione, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, user%da, ierr))
527: PetscCallA(DMSetFromOptions(user%da, ierr))
528: PetscCallA(DMSetUp(user%da, ierr))
529: PetscCallA(DMDAGetInfo(user%da, PETSC_NULL_INTEGER, user%mx, user%my, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMBOUNDARYTYPE, PETSC_NULL_DMDASTENCILTYPE, ierr))
531: !
532: ! Visualize the distribution of the array across the processors
533: !
534: ! PetscCallA(DMView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr))
536: ! Extract global and local vectors from DMDA; then duplicate for remaining
537: ! vectors that are the same types
538: PetscCallA(DMCreateGlobalVector(user%da, x, ierr))
539: PetscCallA(VecDuplicate(x, r, ierr))
541: ! Get local grid boundaries (for 2-dimensional DMDA)
542: PetscCallA(DMDAGetCorners(user%da, user%xs, user%ys, PETSC_NULL_INTEGER, user%xm, user%ym, PETSC_NULL_INTEGER, ierr))
543: PetscCallA(DMDAGetGhostCorners(user%da, user%gxs, user%gys, PETSC_NULL_INTEGER, user%gxm, user%gym, PETSC_NULL_INTEGER, ierr))
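! (xs,ys) and (xm,ym) give the corner and extent of the locally owned
! subgrid, while the g-prefixed values describe the owned subgrid extended
! by the ghost points of the stencil.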
545: ! Here we shift the starting indices up by one so that we can easily
546: ! use the Fortran convention of 1-based indices (rather than 0-based indices).
547: user%xs = user%xs + 1
548: user%ys = user%ys + 1
549: user%gxs = user%gxs + 1
550: user%gys = user%gys + 1
552: user%ye = user%ys + user%ym - 1
553: user%xe = user%xs + user%xm - 1
554: user%gye = user%gys + user%gym - 1
555: user%gxe = user%gxs + user%gxm - 1
557: PetscCallA(SNESSetApplicationContext(mysnes, user, ierr))
559: ! Set function evaluation routine and vector
560: PetscCallA(SNESSetFunction(mysnes, r, FormFunction, user, ierr))
562: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
563: ! Create matrix data structure; set Jacobian evaluation routine
564: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
566: ! Set Jacobian matrix data structure and default Jacobian evaluation
567: ! routine. User can override with:
568: ! -snes_fd : default finite differencing approximation of Jacobian
569: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
570: ! (unless user explicitly sets preconditioner)
571: ! -snes_mf_operator : form matrix used to construct the preconditioner as set by the user,
572: ! but use matrix-free approx for Jacobian-vector
573: ! products within Newton-Krylov method
574: !
575: ! Note: For the parallel case, vectors and matrices MUST be partitioned
576: ! accordingly. When using distributed arrays (DMDAs) to create vectors,
577: ! the DMDAs determine the problem partitioning. We must explicitly
578: ! specify the local matrix dimensions upon its creation for compatibility
579: ! with the vector distribution. Thus, the generic MatCreate() routine
580: ! is NOT sufficient when working with distributed arrays.
581: !
582: ! Note: Here we only approximately preallocate storage space for the
583: ! Jacobian. See the users manual for a discussion of better techniques
584: ! for preallocating matrix memory.
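! For example, the matrix-free variant can be selected at run time with
! something like the test at the bottom of this file (the launcher and
! process count are illustrative):
!     mpiexec -n 4 ./ex5f90t -snes_mf -pc_type none -snes_monitor_short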
586: PetscCallA(PetscOptionsHasName(options, PETSC_NULL_CHARACTER, '-snes_mf', matrix_free, ierr))
587: if (.not. matrix_free) then
588: PetscCallA(DMSetMatType(user%da, MATAIJ, ierr))
589: PetscCallA(DMCreateMatrix(user%da, J, ierr))
590: PetscCallA(SNESSetJacobian(mysnes, J, J, FormJacobian, user, ierr))
591: end if
593: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
594: ! Customize nonlinear solver; set runtime options
595: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
596: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
597: PetscCallA(SNESSetFromOptions(mysnes, ierr))
599: ! Test Fortran90 wrapper for SNESSet/Get ApplicationContext()
600: PetscCallA(PetscOptionsGetBool(options, PETSC_NULL_CHARACTER, '-test_appctx', flg, set, ierr))
601: if (flg) then
602: PetscCallA(SNESGetApplicationContext(mysnes, puser, ierr))
603: end if
605: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
606: ! Evaluate initial guess; then solve nonlinear system.
607: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
608: ! Note: The user should initialize the vector, x, with the initial guess
609: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
610: ! to employ an initial guess of zero, the user should explicitly set
611: ! this vector to zero by calling VecSet().
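! e.g., a zero initial guess (not used here) could be set with a PetscScalar
! zero = 0.0 via
!     PetscCallA(VecSet(x, zero, ierr))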
613: PetscCallA(FormInitialGuess(mysnes, x, ierr))
614: PetscCallA(SNESSolve(mysnes, PETSC_NULL_VEC, x, ierr))
615: PetscCallA(SNESGetIterationNumber(mysnes, its, ierr))
616: if (user%rank == 0) then
617: write (6, 100) its
618: end if
619: 100 format('Number of SNES iterations = ', i5)
621: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
622: ! Free work space. All PETSc objects should be destroyed when they
623: ! are no longer needed.
624: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
625: if (.not. matrix_free) PetscCallA(MatDestroy(J, ierr))
626: PetscCallA(VecDestroy(x, ierr))
627: PetscCallA(VecDestroy(r, ierr))
628: PetscCallA(SNESDestroy(mysnes, ierr))
629: PetscCallA(DMDestroy(user%da, ierr))
631: PetscCallA(PetscFinalize(ierr))
632: end
633: !/*TEST
634: !
635: ! test:
636: ! nsize: 4
637: ! args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
638: !
639: !TEST*/