Actual source code: ex2f.F90
1: !
2: ! Description: Solves a linear system in parallel with KSP (Fortran code).
3: ! Also shows how to set a user-defined monitoring routine.
4: !
5: ! -----------------------------------------------------------------------
6: #include <petsc/finclude/petscksp.h>
module ex2fmodule
   use petscksp
   implicit none

contains

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    vf    - viewer-and-format context; forwarded to
!            KSPMonitorTrueResidual (and destroyed by the
!            PetscViewerAndFormatDestroy registered with KSPMonitorSet)
!
!  Output Parameter:
!    ierr  - error code (0 on success)
!
   subroutine MyKSPMonitor(ksp, n, rnorm, vf, ierr)
      KSP ksp
      Vec x
      PetscErrorCode ierr
      PetscInt n
      PetscViewerAndFormat vf
      PetscMPIInt rank
      PetscReal rnorm

      ! Build the current solution vector
      PetscCallA(KSPBuildSolution(ksp, PETSC_NULL_VEC, x, ierr))
      PetscCallA(KSPMonitorTrueResidual(ksp, n, rnorm, vf, ierr))

      ! Write the iteration number and residual norm to stdout.
      ! Because Fortran IO may be flushed differently than C IO, the
      ! solution vector and residual norm cannot reliably be printed
      ! together in CI, so the VecView of x stays commented out.
      PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
      if (rank == 0) write (6, 100) n
      ! PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
      if (rank == 0) write (6, 200) n, rnorm

100   format('iteration ', i5, ' solution vector:')
200   format('iteration ', i5, ' residual norm ', e11.4)
      ierr = 0
   end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp    - iterative context
!    n      - iteration number
!    rnorm  - 2-norm (preconditioned) residual value (may be estimated)
!    unused - optional user-defined convergence context (unused here)
!
!  Output Parameters:
!    flag - set to KSP_CONVERGED_RTOL_NORMAL_EQUATIONS once
!           rnorm <= 0.05, otherwise KSP_CONVERGED_ITERATING
!    ierr - error code (0 on success)
!
   subroutine MyKSPConverged(ksp, n, rnorm, flag, unused, ierr)
      KSP ksp
      PetscErrorCode ierr
      PetscInt n, unused
      KSPConvergedReason flag
      PetscReal rnorm

      ! Declare convergence as soon as the residual norm reaches 0.05
      if (rnorm <= .05) then
         flag = KSP_CONVERGED_RTOL_NORMAL_EQUATIONS
      else
         flag = KSP_CONVERGED_ITERATING
      end if
      ierr = 0
   end
end module ex2fmodule
program main
   use petscksp
   use ex2fmodule
   implicit none
!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace linear solver context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of error in solution
!     rctx    - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers. I.e., vectors in PETSc are not just
!  double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
   PetscReal norm
   PetscInt i, j, II, JJ, m, n, its
   PetscInt Istart, Iend, izero, ione, itwo, ithree, col(3)
   PetscErrorCode ierr
   PetscMPIInt rank, size
   PetscBool flg
   PetscScalar v, one, neg_one, val(3)
   Vec x, b, u, xx, bb, uu
   Mat A, AA
   KSP ksp, kksp
   PetscRandom rctx
   PetscViewerAndFormat vf, vf2
   PetscClassId classid
   PetscViewer viewer
   PetscLogEvent petscEventNo

!  These variables are not currently used.
!      PC pc
!      PCType ptype
!      PetscReal tol

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

   PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER, ierr))
   m = 3
   n = 3
   one = 1.0
   neg_one = -1.0
   izero = 0
   ione = 1
   itwo = 2
   ithree = 3

   PetscCallA(PetscLogNestedBegin(ierr))
!  NOTE(review): classid is passed to PetscLogEventRegister without being
!  initialized here — confirm whether a registered class id (or 0) is
!  expected for a user-defined event.
   PetscCallA(PetscLogEventRegister("myFirstEvent", classid, petscEventNo, ierr))

   PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
   PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
   PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
   PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

   PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
   PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*n, m*n, ierr))
   PetscCallA(MatSetFromOptions(A, ierr))
   PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

   PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global row and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this uses the less common natural ordering that orders first
!  all the unknowns for x = h then for x = 2h etc; Hence you see JJ = II +- n
!  instead of JJ = II +- m as you might expect. The more standard ordering
!  would first do all variables for y = h, then y = 2h etc.

   do II = Istart, Iend - 1
      v = -1.0
      i = II/n
      j = II - i*n
      if (i > 0) then
         JJ = II - n
         PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
      end if
      if (i < m - 1) then
         JJ = II + n
         PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
      end if
      if (j > 0) then
         JJ = II - 1
         PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
      end if
      if (j < n - 1) then
         JJ = II + 1
         PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
      end if
      v = 4.0
      PetscCallA(MatSetValues(A, ione, [II], ione, [II], [v], INSERT_VALUES, ierr))
   end do

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

   PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
   PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

   PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD, PETSC_NULL_CHARACTER, ione, PETSC_DECIDE, m*n, u, ierr))
   PetscCallA(VecSetFromOptions(u, ierr))
   PetscCallA(VecDuplicate(u, b, ierr))
   PetscCallA(VecDuplicate(b, x, ierr))

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0;  Alternatively, using the runtime option
!  -random_exact_sol forms a solution vector with random components.

   PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-random_exact_sol', flg, ierr))
   if (flg) then
      PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
      PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
      PetscCallA(VecSetRandom(u, rctx, ierr))
      PetscCallA(PetscRandomDestroy(rctx, ierr))
   else
      PetscCallA(VecSet(u, one, ierr))
   end if
   PetscCallA(MatMult(A, u, b, ierr))

!  View the exact solution vector if desired

   PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-view_exact_sol', flg, ierr))
   if (flg) then
      PetscCallA(VecView(u, PETSC_VIEWER_STDOUT_WORLD, ierr))
   end if

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

   PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix from which the preconditioner is constructed.

   PetscCallA(KSPSetOperators(ksp, A, A, ierr))

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following four statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions(). All of these defaults can be
!     overridden at runtime, as indicated below.

!  We comment out this section of code since the Jacobi
!  preconditioner is not a good general default.

!      PetscCallA(KSPGetPC(ksp,pc,ierr))
!      ptype = PCJACOBI
!      PetscCallA(PCSetType(pc,ptype,ierr))
!      tol = 1.e-7
!      PetscCallA(KSPSetTolerances(ksp,tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))

!  Set user-defined monitoring routine if desired

   PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-my_ksp_monitor', flg, ierr))
   if (flg) then
      PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, vf, ierr))
      PetscCallA(KSPMonitorSet(ksp, MyKSPMonitor, vf, PetscViewerAndFormatDestroy, ierr))
!
      PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, vf2, ierr))
      PetscCallA(KSPMonitorSet(ksp, KSPMonitorResidual, vf2, PetscViewerAndFormatDestroy, ierr))
   end if

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

   PetscCallA(KSPSetFromOptions(ksp, ierr))

!  Set convergence test routine if desired

   PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-my_ksp_convergence', flg, ierr))
   if (flg) then
      PetscCallA(KSPSetConvergenceTest(ksp, MyKSPConverged, 0, PETSC_NULL_FUNCTION, ierr))
   end if
!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

   PetscCallA(PetscLogEventBegin(petscEventNo, ierr))
   PetscCallA(KSPSolve(ksp, b, x, ierr))
   PetscCallA(PetscLogEventEnd(petscEventNo, ierr))

!  Solve small system on rank 0 only (1-D tridiagonal [-1 2 -1] system)

   if (rank == 0) then

      PetscCallA(MatCreate(PETSC_COMM_SELF, AA, ierr))
      PetscCallA(MatSetSizes(AA, PETSC_DECIDE, PETSC_DECIDE, m, m, ierr))
      PetscCallA(MatSetFromOptions(AA, ierr))

      val = [-1.0, 2.0, -1.0]
      PetscCallA(MatSetValues(AA, ione, [izero], itwo, [izero, ione], val(2:3), INSERT_VALUES, ierr))
      do i = 1, m - 2
         col = [i - 1, i, i + 1]
         PetscCallA(MatSetValues(AA, ione, [i], itwo, col, val, INSERT_VALUES, ierr))
      end do
      PetscCallA(MatSetValues(AA, ione, [m - 1], itwo, [m - 2, m - 1], val(1:2), INSERT_VALUES, ierr))
      PetscCallA(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY, ierr))
      PetscCallA(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY, ierr))

      PetscCallA(VecCreate(PETSC_COMM_SELF, xx, ierr))
      PetscCallA(VecSetSizes(xx, PETSC_DECIDE, m, ierr))
      PetscCallA(VecSetFromOptions(xx, ierr))
      PetscCallA(VecDuplicate(xx, bb, ierr))
      PetscCallA(VecDuplicate(xx, uu, ierr))
      PetscCallA(VecSet(uu, one, ierr))
      PetscCallA(MatMult(AA, uu, bb, ierr))
      PetscCallA(KSPCreate(PETSC_COMM_SELF, kksp, ierr))
      PetscCallA(KSPSetOperators(kksp, AA, AA, ierr))
      PetscCallA(KSPSetFromOptions(kksp, ierr))
      PetscCallA(KSPSolve(kksp, bb, xx, ierr))

   end if

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
   PetscCallA(VecAXPY(x, neg_one, u, ierr))
   PetscCallA(VecNorm(x, NORM_2, norm, ierr))
   PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
   if (rank == 0) then
      if (norm > 1.e-12) then
         write (6, 100) norm, its
      else
         write (6, 110) its
      end if
   end if
100 format('Norm of error ', e11.4, ' iterations ', i5)
110 format('Norm of error < 1.e-12 iterations ', i5)

!  Write the nested log view as XML

   PetscCallA(PetscViewerASCIIOpen(PETSC_COMM_WORLD, 'report_performance.xml', viewer, ierr))
   PetscCallA(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_XML, ierr))
   PetscCallA(PetscLogView(viewer, ierr))
   PetscCallA(PetscViewerDestroy(viewer, ierr))

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

   PetscCallA(KSPDestroy(ksp, ierr))
   PetscCallA(VecDestroy(u, ierr))
   PetscCallA(VecDestroy(x, ierr))
   PetscCallA(VecDestroy(b, ierr))
   PetscCallA(MatDestroy(A, ierr))

   if (rank == 0) then
      PetscCallA(KSPDestroy(kksp, ierr))
      PetscCallA(VecDestroy(uu, ierr))
      PetscCallA(VecDestroy(xx, ierr))
      PetscCallA(VecDestroy(bb, ierr))
      PetscCallA(MatDestroy(AA, ierr))
   end if

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See PetscFinalize()
!      manpage for more information.

   PetscCallA(PetscFinalize(ierr))
end
400: !/*TEST
401: !
402: ! test:
403: ! nsize: 2
404: ! filter: sort -b
405: ! args: -pc_type jacobi -ksp_gmres_cgs_refinement_type refine_always
406: !
407: ! test:
408: ! suffix: 2
409: ! nsize: 2
410: ! filter: sort -b
411: ! args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
412: ! test:
413: ! suffix: 3
414: ! nsize: 2
415: !
416: !
417: !TEST*/