Actual source code: ex2f.F90

!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers. I.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
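!  For illustration only (not exercised in this example), a minimal
!  sketch of accessing local vector data from Fortran with
!  VecGetArrayF90(); the pointer name xx_v is our own choice:
!
!      PetscScalar, pointer :: xx_v(:)
!      call VecGetArrayF90(x,xx_v,ierr)
!      ! ... read or modify the locally owned entries of x here ...
!      call VecRestoreArrayF90(x,xx_v,ierr)
!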
      PetscReal  norm
      PetscInt  i,j,II,JJ,m,n,its
      PetscInt  Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt     rank,size
      PetscBool   flg
      PetscScalar v,one,neg_one
      Vec         x,b,u
      Mat         A
      KSP         ksp
      PetscRandom rctx
      PetscViewerAndFormat vf,vzero

!  These variables are not currently used.
!      PC          pc
!      PCType      ptype
!      PetscReal tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      m = 3
      n = 3
      one     = 1.0
      neg_one = -1.0
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
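!  For example, the format could be selected on the command line with
!  the -mat_type option (say, -mat_type aij or -mat_type dense); these
!  are illustrative runtime options, not set anywhere in this file.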

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
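
!  As an illustration (not used below): with the default m = n = 3,
!  i.e. 9 global rows, on two processes PETSc's default contiguous
!  layout gives rank 0 rows 0..4 (Istart=0, Iend=5) and rank 1
!  rows 5..8 (Istart=5, Iend=9).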

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!     Note: this uses the less common natural ordering that orders first
!     all the unknowns for x = h then for x = 2h etc; hence you see
!     JJ = II +- n instead of JJ = II +- m as you might expect. The more
!     standard ordering would first do all variables for y = h, then
!     y = 2h etc.
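!
!     A worked instance of this ordering (for m = n = 3): the interior
!     row II = 4 has i = 1, j = 1, so the loop below inserts off-diagonal
!     entries at JJ = 1 and 7 (the II -+ n neighbors) and at JJ = 3 and 5
!     (the II -+ 1 neighbors), plus the diagonal entry at II = 4 itself.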

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit, by placing
!  code between these two statements (see the sketch after the calls
!  below).

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
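
!  A sketch of that overlap pattern (commented out; the interleaved
!  work can be any local computation that does not touch A):
!
!      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
!      ! ... independent local computation here ...
!      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)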

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
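!
!  For reference, an equivalent construction via the more general
!  VecCreate() would look like this (a sketch, commented out):
!
!      call VecCreate(PETSC_COMM_WORLD,u,ierr)
!      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
!      call VecSetFromOptions(u,ierr)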

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution vector with all elements
!  equal to 1.0; alternatively, the runtime option -random_exact_sol
!  forms a solution vector with random components.

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr)
      if (flg) then
         call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
         call PetscRandomSetFromOptions(rctx,ierr)
         call VecSetRandom(u,rctx,ierr)
         call PetscRandomDestroy(rctx,ierr)
      else
         call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr)
      if (flg) then
         call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
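
!  If a different matrix were used to build the preconditioner, it
!  would be passed as the third argument instead; for example (a
!  sketch with a hypothetical matrix P, commented out):
!
!      call KSPSetOperators(ksp,A,P,ierr)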

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions(). All of these defaults can be
!     overridden at runtime, as indicated below.

!     We comment out this section of code since the Jacobi
!     preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)

!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_monitor',flg,ierr)
      if (flg) then
        vzero = 0
        call KSPMonitorSet(ksp,MyKSPMonitor,vzero,PETSC_NULL_FUNCTION,ierr)
!
!     Also use the default KSP residual monitor, showing how it may be
!     invoked from Fortran
!
        call PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,vf,ierr)
        call KSPMonitorSet(ksp,KSPMonitorResidual,vf,PetscViewerAndFormatDestroy,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
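
!  For example, an illustrative invocation picking such options up
!  from the command line might be:
!      mpiexec -n 2 ./ex2f -ksp_type cg -pc_type jacobi -ksp_rtol 1.e-7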

!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr)
      if (flg) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
           write(6,100) norm,its
        else
           write(6,110) its
        endif
      endif
  100 format('Norm of error ',e11.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12 iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
      use petscksp
      implicit none

      KSP            ksp
      Vec            x
      PetscErrorCode ierr
      PetscInt       n,dummy
      PetscMPIInt    rank
      PetscReal      rnorm

!  Build the solution vector
      call KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr)

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)
      ierr = 0
      end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
!  Output Parameter:
!    flag  - positive if the solver has converged, zero otherwise
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
      use petscksp
      implicit none

      KSP                ksp
      PetscErrorCode     ierr
      PetscInt           n,dummy
      KSPConvergedReason flag
      PetscReal          rnorm

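!  Declare convergence once the residual norm falls below an (arbitrary,
!  for demonstration) absolute threshold of 0.05; a positive flag value
!  signals convergence, zero means continue iterating.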
      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0

      end

!/*TEST
!
!   test:
!      nsize: 2
!      args: -pc_type jacobi -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/