Actual source code: ex57f.F90
!
!  Description: Modified from ex2f.F and ex52.c to illustrate how to use the external package MUMPS.
!  Solves a linear system in parallel with KSP (Fortran code).
!  Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscksp.h>
module ex57fmodule
  use petscksp
  implicit none

contains
! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
  subroutine MyKSPMonitor(ksp, n, rnorm, dummy, ierr)

    KSP ksp
    Vec x
    PetscErrorCode ierr
    PetscInt n, dummy
    PetscMPIInt rank
    PetscReal rnorm

!  Build the solution vector
    PetscCallA(KSPBuildSolution(ksp, PETSC_NULL_VEC, x, ierr))

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

    PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
    if (rank == 0) write (6, 100) n
    PetscCallA(VecView(x, PETSC_VIEWER_STDOUT_WORLD, ierr))
    if (rank == 0) write (6, 200) n, rnorm

100 format('iteration ', i5, ' solution vector:')
200 format('iteration ', i5, ' residual norm ', e11.4)
    ierr = 0
  end
! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
  subroutine MyKSPConverged(ksp, n, rnorm, flag, dummy, ierr)

    KSP ksp
    PetscErrorCode ierr
    PetscInt n, dummy
    KSPConvergedReason flag
    PetscReal rnorm
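
!  Declare convergence as soon as the residual norm drops below 0.05;
!  returning KSP_CONVERGED_ITERATING tells KSP to keep iterating.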
    if (rnorm <= .05) then
      flag = KSP_CONVERGED_RTOL
    else
      flag = KSP_CONVERGED_ITERATING
    end if
    ierr = 0

  end
end module ex57fmodule
program main
  use petscksp
  use ex57fmodule
  implicit none

!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Variable declarations
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers, i.e., vectors in PETSc are not just
!  double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
#ifdef PETSC_HAVE_MUMPS
  PetscInt icntl, ival
  Mat F
#endif
  PC pc
  PetscReal norm, zero
  PetscInt i, j, II, JJ, m, n, its
  PetscInt Istart, Iend, ione
  PetscErrorCode ierr
  PetscMPIInt rank, size
  PetscBool flg
  PetscScalar v, one, neg_one
  Vec x, b, u
  Mat A
  KSP ksp
  PetscRandom rctx
  character*80 ksptype

!  These variables are not currently used.
!     PC               pc
!     PCType           ptype
!     double precision tol
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Beginning of program
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  m = 3
  n = 3
  one = 1.0
  neg_one = -1.0
  ione = 1
  zero = 0.0
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*n, m*n, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this example uses the less common natural ordering that orders first
!  all the unknowns for x = h, then for x = 2h, etc.; hence you see JJ = II +- n
!  instead of JJ = II +- m as you might expect. The more standard ordering
!  would first do all variables for y = h, then y = 2h, etc.
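
!  For example, with the default m = n = 3, the interior row II = 4 has
!  i = 1 and j = 1, so the loop below inserts -1.0 at columns 1 and 7
!  (the II -+ n neighbors), -1.0 at columns 3 and 5 (the II -+ 1 neighbors),
!  and 4.0 on the diagonal.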
  do II = Istart, Iend - 1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
    end if
    if (i < m - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
    end if
    if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
    end if
    if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
    end if
    v = 4.0
    PetscCallA(MatSetValues(A, ione, [II], ione, [II], [v], INSERT_VALUES, ierr))
  end do
  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))
!  Check if A is symmetric
  if (size == 1) then
    PetscCallA(MatIsSymmetric(A, zero, flg, ierr))
    if (flg .eqv. PETSC_FALSE) then
      write (6, 120)
    end if
  end if

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
  PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD, PETSC_NULL_CHARACTER, ione, PETSC_DECIDE, m*n, u, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))
!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0.  Alternatively, using the runtime option
!  -random_exact_sol forms a solution vector with random components.

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-random_exact_sol', flg, ierr))
  if (flg) then
    PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
    PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
    PetscCallA(VecSetRandom(u, rctx, ierr))
    PetscCallA(PetscRandomDestroy(rctx, ierr))
  else
    PetscCallA(VecSet(u, one, ierr))
  end if
  PetscCallA(MatMult(A, u, b, ierr))
!  View the exact solution vector if desired

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-view_exact_sol', flg, ierr))
  if (flg) then
    PetscCallA(VecView(u, PETSC_VIEWER_STDOUT_WORLD, ierr))
  end if

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix from which the preconditioner is constructed.

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))

  PetscCallA(KSPSetType(ksp, KSPPREONLY, ierr))
  PetscCallA(KSPGetType(ksp, ksptype, ierr))
  PetscCheckA(ksptype == KSPPREONLY, PETSC_COMM_WORLD, PETSC_ERR_PLIB, 'Error')
  PetscCallA(KSPGetPC(ksp, pc, ierr))
  PetscCallA(PCSetType(pc, PCCHOLESKY, ierr))
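
!  With KSPPREONLY the Krylov method applies the preconditioner exactly once,
!  so the Cholesky factorization (computed by MUMPS when PETSc is configured
!  with it, see below) acts as a parallel direct solver.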
#ifdef PETSC_HAVE_MUMPS
  PetscCallA(PCFactorSetMatSolverType(pc, MATSOLVERMUMPS, ierr))
  PetscCallA(PCFactorSetUpMatSolverType(pc, ierr))
  PetscCallA(PCFactorGetMatrix(pc, F, ierr))
  PetscCallA(KSPSetFromOptions(ksp, ierr))
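
!  ICNTL(7) chooses the MUMPS sequential matrix ordering; the value 2
!  corresponds to the AMF ordering (see the MUMPS users' guide for the
!  full list of choices).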
  icntl = 7; ival = 2
  PetscCallA(MatMumpsSetIcntl(F, icntl, ival, ierr))
#endif
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

  PetscCallA(KSPSetFromOptions(ksp, ierr))

!  Set convergence test routine if desired

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-my_ksp_convergence', flg, ierr))
  if (flg) then
    PetscCallA(KSPSetConvergenceTest(ksp, MyKSPConverged, 0, PETSC_NULL_FUNCTION, ierr))
  end if
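
!  The monitor MyKSPMonitor defined in ex57fmodule is not attached in this
!  example; a minimal sketch of how it could be registered (following the
!  same pattern as the convergence test above) would be:
!
!      PetscCallA(KSPMonitorSet(ksp, MyKSPMonitor, 0, PETSC_NULL_FUNCTION, ierr))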
!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(KSPSolve(ksp, b, x, ierr))

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
  PetscCallA(VecAXPY(x, neg_one, u, ierr))
  PetscCallA(VecNorm(x, NORM_2, norm, ierr))
  PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
  if (rank == 0) then
    write (6, 100) norm, its
  end if
100 format('Norm of error ', e11.4, ' iterations ', i5)
120 format('Matrix A is non-symmetric ')
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manpage for more information.

  PetscCallA(PetscFinalize(ierr))
end
!/*TEST
!
!   test:
!
!TEST*/
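
!  An example command line (illustrative; the executable name and process
!  count depend on how the example is built and run on your system):
!
!      mpiexec -n 2 ./ex57f -m 10 -n 10 -ksp_monitor -ksp_view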