Actual source code: ex2f.F90
!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------

program main
#include <petsc/finclude/petscksp.h>
  use petscksp
  implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers, i.e., vectors in PETSc are not just
!  double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
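!  As a rough sketch only (not used in this example): local vector data
!  can be read or modified through a Fortran pointer.  Depending on the
!  PETSc version, the binding is named VecGetArray() or VecGetArrayF90().
!
!     PetscScalar, pointer :: xx(:)
!     PetscCallA(VecGetArray(x,xx,ierr))
!     ! ... work with the locally owned entries of xx here ...
!     PetscCallA(VecRestoreArray(x,xx,ierr))
!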
  PetscReal      norm
  PetscInt       i,j,II,JJ,m,n,its
  PetscInt       Istart,Iend,ione
  PetscErrorCode ierr
  PetscMPIInt    rank,size
  PetscBool      flg
  PetscScalar    v,one,neg_one
  Vec            x,b,u
  Mat            A
  KSP            ksp
  PetscRandom    rctx
  PetscViewerAndFormat vzero
!  PetscViewerAndFormat vf

!  These variables are not currently used.
!  PC          pc
!  PCType      ptype
!  PetscReal   tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

  external MyKSPMonitor,MyKSPConverged
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                         Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr))
  m = 3
  n = 3
  one = 1.0
  neg_one = -1.0
  ione = 1
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
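!  For example (a usage sketch, not exercised by this example's tests),
!  a run-time option such as -mat_type aij or -mat_type dense selects the
!  storage format, and -mat_view prints the assembled matrix.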
  PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
  PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
  PetscCallA(MatSetFromOptions(A,ierr))
  PetscCallA(MatSetUp(A,ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this example uses the less common natural ordering that orders all
!  the unknowns for x = h first, then those for x = 2h, etc.; hence you see
!  JJ = II +- n instead of JJ = II +- m, as you might expect.  The more
!  standard ordering would first do all variables for y = h, then y = 2h, etc.
  do 10, II=Istart,Iend-1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i.gt.0) then
      JJ = II - n
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    if (i.lt.m-1) then
      JJ = II + n
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    if (j.gt.0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    if (j.lt.n-1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    v = 4.0
    PetscCallA(MatSetValues(A,ione,[II],ione,[II],[v],INSERT_VALUES,ierr))
 10 continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.
  PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
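!  (Sketch only: any purely local computation placed here, between
!  MatAssemblyBegin() and MatAssemblyEnd(), can overlap with the
!  communication performed during assembly.)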
  PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

  PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD,PETSC_NULL_CHARACTER,ione,PETSC_DECIDE,m*n,u,ierr))
  PetscCallA(VecSetFromOptions(u,ierr))
  PetscCallA(VecDuplicate(u,b,ierr))
  PetscCallA(VecDuplicate(b,x,ierr))
!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution vector with all elements
!  equal to 1.0; alternatively, the runtime option -random_exact_sol
!  forms a solution vector with random components.

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr))
  if (flg) then
    PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr))
    PetscCallA(PetscRandomSetFromOptions(rctx,ierr))
    PetscCallA(VecSetRandom(u,rctx,ierr))
    PetscCallA(PetscRandomDestroy(rctx,ierr))
  else
    PetscCallA(VecSet(u,one,ierr))
  endif
  PetscCallA(MatMult(A,u,b,ierr))
!  View the exact solution vector if desired

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr))
  if (flg) then
    PetscCallA(VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr))
  endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

  PetscCallA(KSPSetOperators(ksp,A,A,ierr))
!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions().  All of these defaults can be
!     overridden at runtime, as indicated below.

!  We comment out this section of code since the Jacobi
!  preconditioner is not a good general default.

!  PetscCallA(KSPGetPC(ksp,pc,ierr))
!  ptype = PCJACOBI
!  PetscCallA(PCSetType(pc,ptype,ierr))
!  tol = 1.e-7
!  PetscCallA(KSPSetTolerances(ksp,tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))
!  Set user-defined monitoring routine if desired

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_monitor',flg,ierr))
  if (flg) then
    vzero = 0
    PetscCallA(KSPMonitorSet(ksp,MyKSPMonitor,vzero,PETSC_NULL_FUNCTION,ierr))
!
!   We cannot also register the default KSP monitor routine (which would show
!   how it is used from Fortran), because the Fortran compiler considers the
!   calling sequences of the two monitors to be different.
!
!   PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,vf,ierr))
!   PetscCallA(KSPMonitorSet(ksp,KSPMonitorResidual,vf,PetscViewerAndFormatDestroy,ierr))
  endif
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.
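!  For instance (a hypothetical invocation; the executable name depends on
!  how the example is built):
!      mpiexec -n 2 ./ex2f -ksp_type gmres -pc_type jacobi -ksp_monitor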
  PetscCallA(KSPSetFromOptions(ksp,ierr))

!  Set convergence test routine if desired

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr))
  if (flg) then
    PetscCallA(KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr))
  endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(KSPSolve(ksp,b,x,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
  PetscCallA(VecAXPY(x,neg_one,u,ierr))
  PetscCallA(VecNorm(x,NORM_2,norm,ierr))
  PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
  if (rank .eq. 0) then
    if (norm .gt. 1.e-12) then
      write(6,100) norm,its
    else
      write(6,110) its
    endif
  endif
100 format('Norm of error ',e11.4,' iterations ',i5)
110 format('Norm of error < 1.e-12 iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

  PetscCallA(KSPDestroy(ksp,ierr))
  PetscCallA(VecDestroy(u,ierr))
  PetscCallA(VecDestroy(x,ierr))
  PetscCallA(VecDestroy(b,ierr))
  PetscCallA(MatDestroy(A,ierr))

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manpage for more information.

  PetscCallA(PetscFinalize(ierr))
end
! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
  use petscksp
  implicit none

  KSP            ksp
  Vec            x
  PetscErrorCode ierr
  PetscInt       n,dummy
  PetscMPIInt    rank
  PetscReal      rnorm
!  Build the solution vector
  PetscCallA(KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr))

!  Write the iteration number and residual norm to stdout.
!  (Viewing the solution vector with VecView() is left commented out:
!  because Fortran IO may be flushed differently than C IO, the two
!  outputs cannot reliably be interleaved in the CI tests.)

  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  if (rank .eq. 0) write(6,100) n
!  PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
  if (rank .eq. 0) write(6,200) n,rnorm

100 format('iteration ',i5,' solution vector:')
200 format('iteration ',i5,' residual norm ',e11.4)
  ierr = 0
end
! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
!  Output Parameter:
!    flag  - converged reason (a nonzero KSPConvergedReason stops the iteration)
!
subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
  use petscksp
  implicit none

  KSP                ksp
  PetscErrorCode     ierr
  PetscInt           n,dummy
  KSPConvergedReason flag
  PetscReal          rnorm
  if (rnorm .le. .05) then
    flag = KSP_CONVERGED_RTOL_NORMAL_EQUATIONS
  else
    flag = KSP_CONVERGED_ITERATING
  endif
  ierr = 0

end

!/*TEST
!
!   test:
!      nsize: 2
!      args: -pc_type jacobi -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/