Actual source code: ex7f.F90
! Block Jacobi preconditioner for solving a linear system in parallel with KSP.
! This code illustrates how to set the individual block sizes and how to use
! different linear solvers on the individual blocks.

! This example focuses on ways to customize the block Jacobi preconditioner.
! See ex1.c and ex2.c for more detailed comments on the basic usage of KSP
! (including working with matrices and vectors).

! Recall: The block Jacobi method is equivalent to the ASM preconditioner with zero overlap.
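! (As a rough runtime-option sketch of that equivalence: -pc_type asm -pc_asm_overlap 0
!  should behave essentially like -pc_type bjacobi.)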
  program main
#include <petsc/finclude/petscksp.h>
  use petscksp

  implicit none
  Vec            :: x,b,u      ! approx solution, RHS, exact solution
  Mat            :: A          ! linear system matrix
  KSP            :: ksp        ! KSP context
  PC             :: myPc       ! PC context
  PC             :: subpc      ! PC context for subdomain
  PetscReal      :: norm       ! norm of solution error
  PetscReal,parameter :: tol = 1.e-6
  PetscErrorCode :: ierr
  PetscInt       :: i,j,Ii,JJ,n
  PetscInt       :: m
  PetscMPIInt    :: rank,size
  PetscInt       :: its,nlocal,first,Istart,Iend
  PetscScalar    :: v
  PetscScalar, parameter :: &
      myNone = -1.0, &
      sone   = 1.0
  PetscBool      :: isbjacobi,flg
  KSP,pointer    :: subksp(:) => null()
  PetscInt       :: blks(4)
  character(len=PETSC_MAX_PATH_LEN) :: outputString
  PetscInt,parameter :: one = 1, five = 5
  PetscCallA(PetscInitialize(ierr))
  m = 4
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))
  n = m+2
  blks(1) = n
  blks(2) = n
  blks(3) = n
  blks(4) = n
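  ! blks(:), as set above, holds the number of rows of each preconditioner block;
  ! it is passed to PCBJacobiSetTotalBlocks() further below.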
  !-------------------------------------------------------------------
  ! Compute the matrix and right-hand-side vector that define
  ! the linear system, Ax = b.
  !-------------------------------------------------------------------

  ! Create and assemble parallel matrix

  PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
  PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
  PetscCallA(MatSetFromOptions(A,ierr))
  PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,five,PETSC_NULL_INTEGER_ARRAY,ierr))
  PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER_ARRAY,ierr))
  PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
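  ! The loop below assembles the standard 5-point finite-difference stencil on an
  ! m x n grid: each row gets -1 for its existing north/south/east/west neighbours
  ! and 4 on the diagonal, which is why 5 nonzeros per row are preallocated above.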
  do Ii=Istart,Iend-1
    v = -1.0; i = Ii/n; j = Ii - i*n
    if (i>0) then
      JJ = Ii - n
      PetscCallA(MatSetValues(A,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
    endif

    if (i<m-1) then
      JJ = Ii + n
      PetscCallA(MatSetValues(A,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
    endif

    if (j>0) then
      JJ = Ii - 1
      PetscCallA(MatSetValues(A,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
    endif

    if (j<n-1) then
      JJ = Ii + 1
      PetscCallA(MatSetValues(A,one,[Ii],one,[JJ],[v],ADD_VALUES,ierr))
    endif

    v = 4.0
    PetscCallA(MatSetValues(A,one,[Ii],one,[Ii],[v],ADD_VALUES,ierr))
  enddo

  PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
  PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))
  ! Create parallel vectors

  PetscCallA(VecCreate(PETSC_COMM_WORLD,u,ierr))
  PetscCallA(VecSetSizes(u,PETSC_DECIDE,m*n,ierr))
  PetscCallA(VecSetFromOptions(u,ierr))
  PetscCallA(VecDuplicate(u,b,ierr))
  PetscCallA(VecDuplicate(b,x,ierr))
  ! Set exact solution; then compute right-hand-side vector.

  PetscCallA(VecSet(u,sone,ierr))
  PetscCallA(MatMult(A,u,b,ierr))
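  ! Since u = 1 everywhere and b = A*u, the exact solution of Ax = b is the vector
  ! of ones; the error check at the end of the program relies on this.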
  ! Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

  ! Set operators. Here the matrix that defines the linear system
  ! also serves as the preconditioning matrix.

  PetscCallA(KSPSetOperators(ksp,A,A,ierr))

  ! Set default preconditioner for this program to be block Jacobi.
  ! This choice can be overridden at runtime with the option
  !    -pc_type <type>

  PetscCallA(KSPGetPC(ksp,myPc,ierr))
  PetscCallA(PCSetType(myPc,PCBJACOBI,ierr))
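  ! Note: the customization code below first verifies, via PetscObjectTypeCompare(),
  ! that the PC really is of type PCBJACOBI before calling block-specific routines on it.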
  !-------------------------------------------------------------------
  ! Define the problem decomposition
  !-------------------------------------------------------------------

  ! Call PCBJacobiSetTotalBlocks() to set individually the size of
  ! each block in the preconditioner. This could also be done with
  ! the runtime option -pc_bjacobi_blocks <blocks>.
  ! Also, see the command PCBJacobiSetLocalBlocks() to set the
  ! local blocks.

  ! Note: The default decomposition is 1 block per processor.

  PetscCallA(PCBJacobiSetTotalBlocks(myPc,m,blks,ierr))
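  ! With the default m = 4, the call above asks for 4 blocks of n rows each; a rough
  ! runtime-option analogue would be -pc_bjacobi_blocks 4 (equal-sized blocks), while
  ! prescribing unequal block sizes requires an explicit call as done here.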
  !-------------------------------------------------------------------
  ! Set the linear solvers for the subblocks
  !-------------------------------------------------------------------

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Basic method, should be sufficient for the needs of most users.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! By default, the block Jacobi method uses the same solver on each
  ! block of the problem. To set the same solver options on all blocks,
  ! prepend the prefix -sub_ to the usual PC and KSP options, e.g.,
  !    -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
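  ! For example, an illustrative run (binary name assumed) applying ILU with GMRES
  ! on every block might look like:
  !    mpiexec -n 2 ./ex7f -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4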
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Advanced method, setting different solvers for various blocks.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  ! Note that each block's KSP context is completely independent of
  ! the others, and the full range of uniprocessor KSP options is
  ! available for each block. The following section of code is intended
  ! to be a simple illustration of setting different linear solvers for
  ! the individual blocks. These choices are obviously not recommended
  ! for solving this particular problem.
  PetscCallA(PetscObjectTypeCompare(myPc,PCBJACOBI,isbjacobi,ierr))

  if (isbjacobi) then

    ! Call KSPSetUp() to set the block Jacobi data structures (including
    ! creation of an internal KSP context for each block).
    ! Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().

    PetscCallA(KSPSetUp(ksp,ierr))

    ! Extract the array of KSP contexts for the local blocks
    PetscCallA(PCBJacobiGetSubKSP(myPc,nlocal,first,PETSC_NULL_KSP_POINTER,ierr))
    PetscCallA(PCBJacobiGetSubKSP(myPc,nlocal,first,subksp,ierr))
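    ! The first call above, with PETSC_NULL_KSP_POINTER, only retrieves nlocal (the
    ! number of blocks on this process) and first (the global index of the first local
    ! block); the second call additionally points subksp(:) at the per-block KSP contexts.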
    ! Loop over the local blocks, setting various KSP options for each block

    do i=0,nlocal-1

      PetscCallA(KSPGetPC(subksp(i+1),subpc,ierr))

      if (rank>0) then

        if (mod(i,2)==1) then
          PetscCallA(PCSetType(subpc,PCILU,ierr))
        else
          PetscCallA(PCSetType(subpc,PCNONE,ierr))
          PetscCallA(KSPSetType(subksp(i+1),KSPBCGS,ierr))
          PetscCallA(KSPSetTolerances(subksp(i+1),tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))
        endif

      else
        PetscCallA(PCSetType(subpc,PCJACOBI,ierr))
        PetscCallA(KSPSetType(subksp(i+1),KSPGMRES,ierr))
        PetscCallA(KSPSetTolerances(subksp(i+1),tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))
      endif

    end do

  endif
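  ! With this setup, rank 0 solves its blocks with GMRES preconditioned by point
  ! Jacobi, while the remaining ranks use ILU (with the default block KSP) on blocks
  ! with odd local index i and unpreconditioned BiCGStab on those with even i.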
  !-------------------------------------------------------------------
  ! Solve the linear system
  !-------------------------------------------------------------------

  ! Set runtime options

  PetscCallA(KSPSetFromOptions(ksp,ierr))

  ! Solve the linear system

  PetscCallA(KSPSolve(ksp,b,x,ierr))
  !-------------------------------------------------------------------
  ! Check solution and clean up
  !-------------------------------------------------------------------

  !-------------------------------------------------------------------
  ! Check the error
  !-------------------------------------------------------------------
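  ! Compute x <- x - u and its 2-norm; since u is the exact solution, this is the
  ! norm of the error in the computed solution.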
  !PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))

  PetscCallA(VecAXPY(x,myNone,u,ierr))

  !PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))

  PetscCallA(VecNorm(x,NORM_2,norm,ierr))
  PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
  write(outputString,*) 'Norm of error',real(norm),'Iterations',its,'\n' ! PetscScalar might be of complex type
  PetscCallA(PetscPrintf(PETSC_COMM_WORLD,outputString,ierr))
  ! Free work space. All PETSc objects should be destroyed when they
  ! are no longer needed.

  PetscCallA(KSPDestroy(ksp,ierr))
  PetscCallA(VecDestroy(u,ierr))
  PetscCallA(VecDestroy(b,ierr))
  PetscCallA(MatDestroy(A,ierr))
  PetscCallA(VecDestroy(x,ierr))
  PetscCallA(PetscFinalize(ierr))

  end program main
!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -ksp_view ::ascii_info_detail
!
!TEST*/