Actual source code: ex57f.F90
!
!  Description: Modified from ex2f.F and ex52.c to illustrate how to use the external package MUMPS.
!  Solves a linear system in parallel with KSP (Fortran code).
!  Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------
  program main
#include <petsc/finclude/petscksp.h>
  use petscksp
  implicit none

!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Variable declarations
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers.  I.e., vectors in PETSc are not just
!  double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
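!  For illustration only (not used in this example), the local entries of a
!  vector x could be accessed from Fortran roughly as follows -- a sketch,
!  assuming the VecGetArrayF90()/VecRestoreArrayF90() interface:
!
!      PetscScalar, pointer :: xx(:)
!      PetscCallA(VecGetArrayF90(x,xx,ierr))
!      ! ... read or modify the local entries xx(:) ...
!      PetscCallA(VecRestoreArrayF90(x,xx,ierr))
!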
#ifdef PETSC_HAVE_MUMPS
  PetscInt icntl,ival
  Mat F
#endif
  PC pc
  PetscReal norm,zero
  PetscInt i,j,II,JJ,m,n,its
  PetscInt Istart,Iend,ione
  PetscErrorCode ierr
  PetscMPIInt rank,size
  PetscBool flg
  PetscScalar v,one,neg_one
  Vec x,b,u
  Mat A
  KSP ksp
  PetscRandom rctx
  character*80 ksptype

!  These variables are not currently used.
!      PCType      ptype
!      double precision tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

  external MyKSPMonitor,MyKSPConverged
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Beginning of program
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  m       = 3
  n       = 3
  one     = 1.0
  neg_one = -1.0
  ione    = 1
  zero    = 0.0
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Compute the matrix and right-hand-side vector that define
!         the linear system, Ax = b.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

  PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
  PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
  PetscCallA(MatSetFromOptions(A,ierr))
  PetscCallA(MatSetUp(A,ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
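
!  For example (illustrative only): with m = n = 3 (a 9x9 system) on two
!  MPI ranks, PETSc would typically give rows 0..4 to rank 0 and rows 5..8
!  to rank 1, so each rank loops only over its own [Istart,Iend) below.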

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global row and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this uses the less common natural ordering that orders first
!  all the unknowns for x = h then for x = 2h etc; hence you see JJ = II +- n
!  instead of JJ = II +- m as you might expect.  The more standard ordering
!  would first do all variables for y = h, then y = 2h etc.
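
!  Concretely, for m = n = 3 the nine unknowns are numbered
!
!     i = 0:  0 1 2
!     i = 1:  3 4 5        (i = II/n, j = II - i*n)
!     i = 2:  6 7 8
!
!  so the stencil neighbours of unknown II in the i-direction are II-n and
!  II+n, and in the j-direction II-1 and II+1, exactly as set in the loop
!  below.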

  do 10, II=Istart,Iend-1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i.gt.0) then
      JJ = II - n
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    if (i.lt.m-1) then
      JJ = II + n
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    if (j.gt.0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    if (j.lt.n-1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
    endif
    v = 4.0
    PetscCallA(MatSetValues(A,ione,[II],ione,[II],[v],INSERT_VALUES,ierr))
10  continue

  PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
  PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))

!  Check if A is symmetric
  if (size == 1) then
    PetscCallA(MatIsSymmetric(A,zero,flg,ierr))
    if (flg .eqv. PETSC_FALSE) then
      write(6,120)
    endif
  endif

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

  PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD,PETSC_NULL_CHARACTER,ione,PETSC_DECIDE,m*n,u,ierr))
  PetscCallA(VecSetFromOptions(u,ierr))
  PetscCallA(VecDuplicate(u,b,ierr))
  PetscCallA(VecDuplicate(b,x,ierr))

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution vector with all elements
!  equal to 1.0.  Alternatively, the runtime option -random_exact_sol
!  forms a solution vector with random components.

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr))
  if (flg) then
    PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr))
    PetscCallA(PetscRandomSetFromOptions(rctx,ierr))
    PetscCallA(VecSetRandom(u,rctx,ierr))
    PetscCallA(PetscRandomDestroy(rctx,ierr))
  else
    PetscCallA(VecSet(u,one,ierr))
  endif
  PetscCallA(MatMult(A,u,b,ierr))

!  View the exact solution vector if desired

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr))
  if (flg) then
    PetscCallA(VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr))
  endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

  PetscCallA(KSPSetOperators(ksp,A,A,ierr))

  PetscCallA(KSPSetType(ksp,KSPPREONLY,ierr))
  PetscCallA(KSPGetType(ksp,ksptype,ierr))
  PetscCheckA(ksptype == KSPPREONLY,PETSC_COMM_WORLD,PETSC_ERR_PLIB,'Error')
  PetscCallA(KSPGetPC(ksp,pc,ierr))
  PetscCallA(PCSetType(pc,PCCHOLESKY,ierr))
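
!  Note: PCCHOLESKY assumes a symmetric matrix (which is presumably why the
!  symmetry check above is done); for a non-symmetric system the analogous
!  direct-solver preconditioner would be PCLU.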
#ifdef PETSC_HAVE_MUMPS
  PetscCallA(PCFactorSetMatSolverType(pc,MATSOLVERMUMPS,ierr))
  PetscCallA(PCFactorSetUpMatSolverType(pc,ierr))
  PetscCallA(PCFactorGetMatrix(pc,F,ierr))
  PetscCallA(KSPSetFromOptions(ksp,ierr))
  icntl = 7; ival = 2
  PetscCallA(MatMumpsSetIcntl(F,icntl,ival,ierr))
#endif
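
!  Note: MUMPS ICNTL(7) selects the ordering used during analysis; the value
!  2 set above corresponds to the AMF ordering.  The same control can also
!  be set at runtime with the option -mat_mumps_icntl_7 2 instead of calling
!  MatMumpsSetIcntl() directly.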

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

  PetscCallA(KSPSetFromOptions(ksp,ierr))
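
!  For example, one might run (illustrative command line; the executable
!  name is assumed):
!      mpiexec -n 4 ./ex57f -m 10 -n 10 -ksp_monitor -ksp_rtol 1.e-8
!  or override the solver chosen above with -ksp_type gmres -pc_type jacobi.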

!  Set convergence test routine if desired

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr))
  if (flg) then
    PetscCallA(KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr))
  endif
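
!  Note: MyKSPMonitor is declared external above but is not attached in this
!  example.  It could be registered with, e.g. (a sketch, not part of the
!  original code):
!      PetscCallA(KSPMonitorSet(ksp,MyKSPMonitor,0,PETSC_NULL_FUNCTION,ierr))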
!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(KSPSolve(ksp,b,x,ierr))

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
  PetscCallA(VecAXPY(x,neg_one,u,ierr))
  PetscCallA(VecNorm(x,NORM_2,norm,ierr))
  PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
  if (rank .eq. 0) then
    write(6,100) norm,its
  endif
100 format('Norm of error ',e11.4,' iterations ',i5)
120 format('Matrix A is non-symmetric ')

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

  PetscCallA(KSPDestroy(ksp,ierr))
  PetscCallA(VecDestroy(u,ierr))
  PetscCallA(VecDestroy(x,ierr))
  PetscCallA(VecDestroy(b,ierr))
  PetscCallA(MatDestroy(A,ierr))

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manpage for more information.

  PetscCallA(PetscFinalize(ierr))
  end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
  subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
  use petscksp
  implicit none

  KSP ksp
  Vec x
  PetscErrorCode ierr
  PetscInt n,dummy
  PetscMPIInt rank
  PetscReal rnorm

!  Build the solution vector
  PetscCallA(KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr))

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  if (rank .eq. 0) write(6,100) n
  PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
  if (rank .eq. 0) write(6,200) n,rnorm

100 format('iteration ',i5,' solution vector:')
200 format('iteration ',i5,' residual norm ',e11.4)
  ierr = 0
  end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
  subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
  use petscksp
  implicit none

  KSP ksp
  PetscErrorCode ierr
  PetscInt n,dummy
  KSPConvergedReason flag
  PetscReal rnorm
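
!  Returning a positive value in flag tells KSP to stop and declare
!  convergence, 0 means keep iterating, and a negative value would signal
!  divergence.  This simple test just compares the residual norm with 0.05.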
  if (rnorm .le. .05) then
    flag = 1
  else
    flag = 0
  endif
  ierr = 0

  end

!/*TEST
!
!   test:
!
!TEST*/