Actual source code: ex15f.F90
!
!  Solves a linear system in parallel with KSP.  Also demonstrates the
!  use of a user-provided preconditioner.  Input parameters include:
!     -user_defined_pc : Activate a user-defined preconditioner
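!
!  For example, a run enabling the shell preconditioner might look like
!  the following (the executable name and process count are illustrative):
!     mpiexec -n 2 ./ex15f -user_defined_pc -ksp_monitor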
!
! -------------------------------------------------------------------------
!
!  Module containing the diagonal work vector (diag) needed by the shell
!  preconditioner
!
module ex15fmodule
#include <petsc/finclude/petscksp.h>
use petscksp
Vec diag
end module

program main
use ex15fmodule
implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of solution error

Vec x,b,u
Mat A
PC pc
KSP ksp
PetscScalar v,one,neg_one
PetscReal norm,tol
PetscErrorCode ierr
PetscInt i,j,II,JJ,Istart
PetscInt Iend,m,n,i1,its,five
PetscMPIInt rank
PetscBool user_defined_pc,flg

!  Note: Any user-defined Fortran routines MUST be declared as external.

external SampleShellPCSetUp, SampleShellPCApply
external SampleShellPCDestroy

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

PetscCallA(PetscInitialize(ierr))
one = 1.0
neg_one = -1.0
i1 = 1
m = 8
n = 7
five = 5
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
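!
!  For example, the matrix type could then be chosen on the command
!  line with the standard PETSc option:
!     -mat_type mpiaij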

PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
PetscCallA(MatSetType(A,MATAIJ,ierr))
PetscCallA(MatSetFromOptions(A,ierr))
PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr))
PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
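!  For example, with the default m=8, n=7 (56 global rows) on two MPI
!  ranks, rank 0 owns rows 0..27 and rank 1 owns rows 28..55.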

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
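!
!  For example, with n=7 the global row II=9 corresponds to grid point
!  (i,j) = (1,2); its off-diagonal stencil entries land in columns
!  II-n=2, II-1=8, II+1=10, and II+n=16.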

do 10, II=Istart,Iend-1
  v = -1.0
  i = II/n
  j = II - i*n
  if (i.gt.0) then
    JJ = II - n
    PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr))
  endif
  if (i.lt.m-1) then
    JJ = II + n
    PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr))
  endif
  if (j.gt.0) then
    JJ = II - 1
    PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr))
  endif
  if (j.lt.n-1) then
    JJ = II + 1
    PetscCallA(MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr))
  endif
  v = 4.0
  PetscCallA(MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr))
10 continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
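!  (Independent local work that does not touch A could be performed
!  here, overlapping computation with the assembly communication.)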
PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD,PETSC_NULL_CHARACTER,i1,PETSC_DECIDE,m*n,u,ierr))
PetscCallA(VecDuplicate(u,b,ierr))
PetscCallA(VecDuplicate(b,x,ierr))
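!  For reference, the more general VecCreate() sequence mentioned above
!  would look like this (a sketch of the standard calls, not used here):
!     PetscCallA(VecCreate(PETSC_COMM_WORLD,u,ierr))
!     PetscCallA(VecSetSizes(u,PETSC_DECIDE,m*n,ierr))
!     PetscCallA(VecSetFromOptions(u,ierr))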

!  Set exact solution; then compute right-hand-side vector.

PetscCallA(VecSet(u,one,ierr))
PetscCallA(MatMult(A,u,b,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

PetscCallA(KSPSetOperators(ksp,A,A,ierr))
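!  If a separate matrix were used to build the preconditioner, it would
!  be passed as the third argument instead (a sketch; B is hypothetical):
!     PetscCallA(KSPSetOperators(ksp,A,B,ierr))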

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

PetscCallA(KSPGetPC(ksp,pc,ierr))
tol = 1.e-7
PetscCallA(KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr))

!
!  Set a user-defined shell preconditioner if desired
!
PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-user_defined_pc',user_defined_pc,ierr))

if (user_defined_pc) then

  ! (Required) Indicate to PETSc that we are using a shell preconditioner
  PetscCallA(PCSetType(pc,PCSHELL,ierr))

  ! (Required) Set the user-defined routine for applying the preconditioner
  PetscCallA(PCShellSetApply(pc,SampleShellPCApply,ierr))

  ! (Optional) Do any setup required for the preconditioner
  PetscCallA(PCShellSetSetUp(pc,SampleShellPCSetUp,ierr))

  ! (Optional) Free any objects we created for the preconditioner
  PetscCallA(PCShellSetDestroy(pc,SampleShellPCDestroy,ierr))

else
  PetscCallA(PCSetType(pc,PCJACOBI,ierr))
endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

PetscCallA(KSPSetFromOptions(ksp,ierr))
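!  For example, the options below would switch the solver to BiCGStab
!  with block Jacobi preconditioning and print the residual history:
!     -ksp_type bcgs -pc_type bjacobi -ksp_monitor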

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

PetscCallA(KSPSolve(ksp,b,x,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error: compute x - u and its 2-norm

PetscCallA(VecAXPY(x,neg_one,u,ierr))
PetscCallA(VecNorm(x,NORM_2,norm,ierr))
PetscCallA(KSPGetIterationNumber(ksp,its,ierr))

if (rank .eq. 0) then
  if (norm .gt. 1.e-12) then
    write(6,100) norm,its
  else
    write(6,110) its
  endif
endif
100 format('Norm of error ',1pe11.4,' iterations ',i5)
110 format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

PetscCallA(KSPDestroy(ksp,ierr))
PetscCallA(VecDestroy(u,ierr))
PetscCallA(VecDestroy(x,ierr))
PetscCallA(VecDestroy(b,ierr))
PetscCallA(MatDestroy(A,ierr))

!  Always call PetscFinalize() before exiting a program.

PetscCallA(PetscFinalize(ierr))
end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the Jacobi
!   method.  Thus, here we create a work vector for storing the reciprocal
!   of the diagonal of the preconditioning matrix; this vector is then
!   used within the routine SampleShellPCApply().
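!
!   Mathematically, the resulting apply stage computes y = D^{-1} x with
!   D = diag(A), i.e. y_i = x_i / A_ii for each row i.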
!
subroutine SampleShellPCSetUp(pc,ierr)
use ex15fmodule
implicit none

PC pc
Mat pmat
PetscErrorCode ierr

! Extract the preconditioning matrix, then store the reciprocal of its
! diagonal in the module work vector diag
PetscCall(PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr))
PetscCall(MatCreateVecs(pmat,diag,PETSC_NULL_VEC,ierr))
PetscCall(MatGetDiagonal(pmat,diag,ierr))
PetscCall(VecReciprocal(diag,ierr))

end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x - input vector
!
!   Output Parameters:
!   y - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
subroutine SampleShellPCApply(pc,x,y,ierr)
use ex15fmodule
implicit none

PC pc
Vec x,y
PetscErrorCode ierr

! Apply y = diag(A)^{-1} x via pointwise multiplication by the stored
! reciprocal diagonal
PetscCall(VecPointwiseMult(y,x,diag,ierr))

end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCDestroy - This routine destroys (frees the memory of) any
!   objects we made for the preconditioner
!
!   Input Parameters:
!   pc - for this example we use the actual PC as our shell context
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
subroutine SampleShellPCDestroy(pc,ierr)
use ex15fmodule
implicit none

PC pc
PetscErrorCode ierr

!  Normally we would recommend storing all the work data (like diag) in
!  the context set with PCShellSetContext()
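!  A sketch of that approach (PCShellSetContext()/PCShellGetContext()
!  are the relevant PETSc calls; the ctx variable and its type are
!  hypothetical, and the exact Fortran binding depends on the context
!  type used):
!     in the setup routine:   PetscCall(PCShellSetContext(pc,ctx,ierr))
!     in apply/destroy:       PetscCall(PCShellGetContext(pc,ctx,ierr))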

PetscCall(VecDestroy(diag,ierr))

end

!
!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_view -user_defined_pc -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/