Actual source code: ex57f.F90
!
!  Description: Modified from ex2f.F and ex52.c to illustrate how to use the external package MUMPS.
!  Solves a linear system in parallel with KSP (Fortran code).
!  Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------
      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none
!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Variable declarations
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver (Krylov subspace method) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers.  I.e., vectors in PETSc are not just
!       double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
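!  As a rough sketch only (not used in this example), the Fortran access
!  to local vector data typically looks like the lines below; the pointer
!  name xx_v is a placeholder chosen here for illustration:
!
!       PetscScalar, pointer :: xx_v(:)
!       PetscCallA(VecGetArrayF90(x,xx_v,ierr))
!       ! ... read or modify the local entries xx_v(:) ...
!       PetscCallA(VecRestoreArrayF90(x,xx_v,ierr))
!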
#ifdef PETSC_HAVE_MUMPS
      PetscInt icntl,ival
      Mat F
#endif
      PC pc
      PetscReal norm,zero
      PetscInt i,j,II,JJ,m,n,its
      PetscInt Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt rank,size
      PetscBool flg
      PetscScalar v,one,neg_one
      Vec x,b,u
      Mat A
      KSP ksp
      PetscRandom rctx
!  These variables are not currently used.
!     PCType           ptype
!     double precision tol
!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Beginning of program
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      PetscCallA(PetscInitialize(ierr))
      m = 3
      n = 3
      one = 1.0
      neg_one = -1.0
      ione = 1
      zero = 0.0
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
      PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
      PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))
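!  The grid dimensions m and n can be changed on the command line; for
!  example (assuming the executable is named ex57f):
!       mpiexec -n 2 ./ex57f -m 10 -n 10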
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!       Compute the matrix and right-hand-side vector that define
!       the linear system, Ax = b.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
      PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
      PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
      PetscCallA(MatSetFromOptions(A,ierr))
      PetscCallA(MatSetUp(A,ierr))
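!  Because MatSetFromOptions() is called above, the storage format can be
!  selected at run time with the standard PETSc option -mat_type <type>
!  (for instance, the default sparse type aij).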
!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors. Determine which
!  rows of the matrix are locally owned.

      PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
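!  Istart is the global index of the first row stored on this process and
!  Iend is one past the last, so this process owns rows Istart..Iend-1.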
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this uses the less common natural ordering that orders first
!  all the unknowns for x = h then for x = 2h etc; hence you see JJ = II +- n
!  instead of JJ = II +- m as you might expect. The more standard ordering
!  would first do all variables for y = h, then y = 2h etc.
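!  For an interior grid point II the loop below therefore produces the row
!       A(II,II-n) = -1,  A(II,II-1) = -1,  A(II,II) = 4,
!       A(II,II+1) = -1,  A(II,II+n) = -1
!  with the off-diagonal entries dropped at the domain boundary.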
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        if (i.lt.m-1) then
          JJ = II + n
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        if (j.gt.0) then
          JJ = II - 1
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          PetscCallA(MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr))
        endif
        v = 4.0
        PetscCallA(MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr))
 10   continue
      PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
      PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))
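!  (If desired, the assembled matrix can be printed at run time with the
!  standard PETSc option -mat_view.)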
!  Check if A is symmetric
      if (size == 1) then
        PetscCallA(MatIsSymmetric(A,zero,flg,ierr))
        if (flg .eqv. PETSC_FALSE) then
          write(6,120)
        endif
      endif
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime. We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate()
!     (see the sketch after the calls below).
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly. PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
      PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD,PETSC_NULL_CHARACTER,ione,PETSC_DECIDE,m*n,u,ierr))
      PetscCallA(VecSetFromOptions(u,ierr))
      PetscCallA(VecDuplicate(u,b,ierr))
      PetscCallA(VecDuplicate(b,x,ierr))
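!  A roughly equivalent alternative (not used here) is the more general
!  three-step creation mentioned above:
!       PetscCallA(VecCreate(PETSC_COMM_WORLD,u,ierr))
!       PetscCallA(VecSetSizes(u,PETSC_DECIDE,m*n,ierr))
!       PetscCallA(VecSetFromOptions(u,ierr))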
!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0; alternatively, using the runtime option
!  -random_exact_sol forms a solution vector with random components.
      PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr))
      if (flg) then
        PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr))
        PetscCallA(PetscRandomSetFromOptions(rctx,ierr))
        PetscCallA(VecSetRandom(u,rctx,ierr))
        PetscCallA(PetscRandomDestroy(rctx,ierr))
      else
        PetscCallA(VecSet(u,one,ierr))
      endif
      PetscCallA(MatMult(A,u,b,ierr))
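!  Since b is computed as A*u, the vector u is the exact solution of the
!  system solved below, which is what makes the error check at the end
!  meaningful.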
!  View the exact solution vector if desired

      PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr))
      if (flg) then
        PetscCallA(VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr))
      endif
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      PetscCallA(KSPSetOperators(ksp,A,A,ierr))
      PetscCallA(KSPSetType(ksp,KSPPREONLY,ierr))
      PetscCallA(KSPGetPC(ksp,pc,ierr))
      PetscCallA(PCSetType(pc,PCCHOLESKY,ierr))
#ifdef PETSC_HAVE_MUMPS
      PetscCallA(PCFactorSetMatSolverType(pc,MATSOLVERMUMPS,ierr))
      PetscCallA(PCFactorSetUpMatSolverType(pc,ierr))
      PetscCallA(PCFactorGetMatrix(pc,F,ierr))
      PetscCallA(KSPSetFromOptions(ksp,ierr))
      icntl = 7; ival = 2
      PetscCallA(MatMumpsSetIcntl(F,icntl,ival,ierr))
#endif
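!  KSPPREONLY applies the preconditioner exactly once, so with PCCHOLESKY
!  the system is solved by a direct Cholesky factorization. When MUMPS is
!  available, the call above sets MUMPS ICNTL(7)=2, which (per the MUMPS
!  documentation) selects the AMF ordering; the same control is also
!  reachable at run time through the option -mat_mumps_icntl_7 <value>.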
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      PetscCallA(KSPSetFromOptions(ksp,ierr))
!  Set convergence test routine if desired

      PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr))
      if (flg) then
        PetscCallA(KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr))
      endif
!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      PetscCallA(KSPSolve(ksp,b,x,ierr))
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
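!  VecAXPY() below overwrites x with x - u, so the norm computed next is
!  the 2-norm of the error in the computed solution.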
      PetscCallA(VecAXPY(x,neg_one,u,ierr))
      PetscCallA(VecNorm(x,NORM_2,norm,ierr))
      PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
      if (rank .eq. 0) then
        write(6,100) norm,its
      endif
 100  format('Norm of error ',e11.4,' iterations ',i5)
 120  format('Matrix A is non-symmetric ')
!  Free work space. All PETSc objects should be destroyed when they
!  are no longer needed.

      PetscCallA(KSPDestroy(ksp,ierr))
      PetscCallA(VecDestroy(u,ierr))
      PetscCallA(VecDestroy(x,ierr))
      PetscCallA(VecDestroy(b,ierr))
      PetscCallA(MatDestroy(A,ierr))
!  Always call PetscFinalize() before exiting a program. This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view). See the PetscFinalize()
!      manpage for more information.

      PetscCallA(PetscFinalize(ierr))
      end
! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
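!  (A monitor like this one would typically be attached before KSPSolve()
!  with a call along the lines of
!       PetscCallA(KSPMonitorSet(ksp,MyKSPMonitor,0,PETSC_NULL_FUNCTION,ierr))
!  as in related examples such as ex2f.F; this example only provides the
!  routine itself.)
!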
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
      use petscksp
      implicit none

      KSP ksp
      Vec x
      PetscErrorCode ierr
      PetscInt n,dummy
      PetscMPIInt rank
      PetscReal rnorm

!  Build the solution vector
      PetscCallA(KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr))

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
      if (rank .eq. 0) write(6,100) n
      PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)
      ierr = 0
      end
! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
!  Output Parameter:
!    flag  - convergence indicator returned to KSP
!
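!  By PETSc convention a convergence-test routine sets the reason to a
!  positive value to declare convergence, a negative value to declare
!  divergence, and zero to continue iterating; this simple test only
!  signals convergence.
!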
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
      use petscksp
      implicit none

      KSP ksp
      PetscErrorCode ierr
      PetscInt n,dummy
      KSPConvergedReason flag
      PetscReal rnorm

      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0

      end
!/*TEST
!
!   test:
!
!TEST*/