Actual source code: ex2f.F90

!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; i.e., vectors in PETSc are not simply
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
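!  For example, a minimal sketch (assuming the modern PETSc Fortran
!  interface, in which VecGetArray() returns a Fortran pointer to the
!  local array):
!
!      PetscScalar, pointer :: xx(:)
!      PetscCallA(VecGetArray(x,xx,ierr))
!      ! ... operate on local entries xx(1), ..., xx(size(xx)) ...
!      PetscCallA(VecRestoreArray(x,xx,ierr))
!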
      PetscReal      norm
      PetscInt       i,j,II,JJ,m,n,its
      PetscInt       Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt    rank,size
      PetscBool      flg
      PetscScalar    v,one,neg_one
      Vec            x,b,u
      Mat            A
      KSP            ksp
      PetscRandom    rctx
      PetscViewerAndFormat vf,vzero

!  These variables are not currently used.
!      PC          pc
!      PCType      ptype
!      PetscReal   tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr))
      m       = 3
      n       = 3
      one     = 1.0
      neg_one = -1.0
      ione    = 1
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
      PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
      PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
      PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
      PetscCallA(MatSetFromOptions(A,ierr))
      PetscCallA(MatSetUp(A,ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
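
!  For example (illustrative, assuming the default PETSC_DECIDE layout):
!  with m*n = 9 rows on 2 processes, rank 0 owns rows 0..4 (Istart = 0,
!  Iend = 5) and rank 1 owns rows 5..8 (Istart = 5, Iend = 9); Iend is
!  one past the last locally owned row.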

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!     Note: this uses the less common natural ordering that orders all
!     the unknowns for x = h first, then for x = 2h, etc.; hence you see
!     JJ = II +- n instead of JJ = II +- m as you might expect. The more
!     standard ordering would first do all variables for y = h, then
!     y = 2h, etc.
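
!     Worked example (m = n = 3, so global rows 0..8): the interior
!     unknown II = 4 corresponds to grid indices (i,j) = (1,1) and
!     couples to JJ = 4-n = 1, JJ = 4+n = 7, JJ = 4-1 = 3, and
!     JJ = 4+1 = 5, plus the diagonal entry (4,4) itself.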

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        if (i.lt.m-1) then
          JJ = II + n
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        if (j.gt.0) then
          JJ = II - 1
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        v = 4.0
        PetscCallA(MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr))
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
      PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))
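
!  For instance (a minimal sketch; the overlapped work is hypothetical
!  and not part of this example's logic):
!
!      PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
!      ! ... purely local computations that do not touch A ...
!      PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))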

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD,PETSC_NULL_CHARACTER,ione,PETSC_DECIDE,m*n,u,ierr))
      PetscCallA(VecSetFromOptions(u,ierr))
      PetscCallA(VecDuplicate(u,b,ierr))
      PetscCallA(VecDuplicate(b,x,ierr))

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution vector with all elements
!  equal to 1.0.  Alternatively, the runtime option
!  -random_exact_sol forms a solution vector with random components.

      PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr))
      if (flg) then
         PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr))
         PetscCallA(PetscRandomSetFromOptions(rctx,ierr))
         PetscCallA(VecSetRandom(u,rctx,ierr))
         PetscCallA(PetscRandomDestroy(rctx,ierr))
      else
         PetscCallA(VecSet(u,one,ierr))
      endif
      PetscCallA(MatMult(A,u,b,ierr))

!  View the exact solution vector if desired

      PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr))
      if (flg) then
         PetscCallA(VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr))
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      PetscCallA(KSPSetOperators(ksp,A,A,ierr))

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions(). All of these defaults can be
!     overridden at runtime, as indicated below.

!     We comment out this section of code since the Jacobi
!     preconditioner is not a good general default.

!      PetscCallA(KSPGetPC(ksp,pc,ierr))
!      ptype = PCJACOBI
!      PetscCallA(PCSetType(pc,ptype,ierr))
!      tol = 1.e-7
!      PetscCallA(KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr))
!  Set user-defined monitoring routine if desired

      PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_monitor',flg,ierr))
      if (flg) then
        vzero = 0
        PetscCallA(KSPMonitorSet(ksp,MyKSPMonitor,vzero,PETSC_NULL_FUNCTION,ierr))
!
!     Also use the default KSP monitor routine, showing how it may be used from Fortran
!
        PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,vf,ierr))
        PetscCallA(KSPMonitorSet(ksp,KSPMonitorResidual,vf,PetscViewerAndFormatDestroy,ierr))
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      PetscCallA(KSPSetFromOptions(ksp,ierr))
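
!  For example, one might run (hypothetical executable name; the options
!  shown are standard PETSc options):
!      mpiexec -n 2 ./ex2f -ksp_type cg -pc_type jacobi -ksp_rtol 1.e-8 -ksp_monitor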

!  Set convergence test routine if desired

      PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr))
      if (flg) then
        PetscCallA(KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr))
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      PetscCallA(KSPSolve(ksp,b,x,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
      PetscCallA(VecAXPY(x,neg_one,u,ierr))
      PetscCallA(VecNorm(x,NORM_2,norm,ierr))
      PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
           write(6,100) norm,its
        else
           write(6,110) its
        endif
      endif
  100 format('Norm of error ',e11.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12 iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      PetscCallA(KSPDestroy(ksp,ierr))
      PetscCallA(VecDestroy(u,ierr))
      PetscCallA(VecDestroy(x,ierr))
      PetscCallA(VecDestroy(b,ierr))
      PetscCallA(MatDestroy(A,ierr))

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manpage for more information.
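
!  For example (hypothetical executable name), running
!      ./ex2f -log_view
!  prints a performance and memory summary when the program exits.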

      PetscCallA(PetscFinalize(ierr))
      end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
      use petscksp
      implicit none

      KSP            ksp
      Vec            x
      PetscErrorCode ierr
      PetscInt       n,dummy
      PetscMPIInt    rank
      PetscReal      rnorm

!  Build the solution vector
      PetscCallA(KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr))

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
      if (rank .eq. 0) write(6,100) n
      PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)
      ierr = 0
      end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
!  Output Parameter:
!    flag  - convergence flag (see the note below)
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
      use petscksp
      implicit none

      KSP                ksp
      PetscErrorCode     ierr
      PetscInt           n,dummy
      KSPConvergedReason flag
      PetscReal          rnorm
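
!  A positive flag value tells KSP that the iteration has converged,
!  zero means continue iterating, and a negative value signals
!  divergence (see KSPConvergedReason).  The 0.05 threshold below is
!  arbitrary, chosen for demonstration only.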

      if (rnorm .le. 0.05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0

      end

!/*TEST
!
!   test:
!      nsize: 2
!      args: -pc_type jacobi -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/