Actual source code: ex7f.F90
! Block Jacobi preconditioner for solving a linear system in parallel with KSP
! The code indicates the procedures for setting the particular block sizes and
! for using different linear solvers on the individual blocks

! This example focuses on ways to customize the block Jacobi preconditioner.
! See ex1.c and ex2.c for more detailed comments on the basic usage of KSP
! (including working with matrices and vectors)

! Recall: The block Jacobi method is equivalent to the ASM preconditioner with zero overlap.
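
! Example invocation (illustrative only; the options actually tested appear in
! the TEST block at the end of this file):
!    mpiexec -n 2 ./ex7f -m 4 -ksp_monitor_short -sub_pc_type ilu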
program main
#include <petsc/finclude/petscksp.h>
  use petscksp

  implicit none

  Vec                               :: x,b,u       ! approx solution, RHS, exact solution
  Mat                               :: A           ! linear system matrix
  KSP                               :: ksp         ! KSP context
  PC                                :: myPc        ! PC context
  PC                                :: subpc       ! PC context for subdomain
  PetscReal                         :: norm        ! norm of solution error
  PetscReal,parameter               :: tol = 1.e-6
  PetscErrorCode                    :: ierr
  PetscInt                          :: i,j,Ii,JJ,n
  PetscInt                          :: m
  PetscMPIInt                       :: myRank,mySize
  PetscInt                          :: its,nlocal,first,Istart,Iend
  PetscScalar                       :: v
  PetscScalar,parameter             :: myNone = -1.0, sone = 1.0
  PetscBool                         :: isbjacobi,flg
  KSP,allocatable,dimension(:)      :: subksp      ! array of local KSP contexts on this processor
  PetscInt,allocatable,dimension(:) :: blks
  character(len=PETSC_MAX_PATH_LEN) :: outputString
  PetscInt,parameter                :: one = 1, five = 5

  PetscCallA(PetscInitialize(ierr))

  m = 4
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,myRank,ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,mySize,ierr))
  n = m+2

  !-------------------------------------------------------------------
  ! Compute the matrix and right-hand-side vector that define
  ! the linear system, Ax = b.
  !-------------------------------------------------------------------

  ! Create and assemble parallel matrix

  PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
  PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
  PetscCallA(MatSetFromOptions(A,ierr))
  PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr))
  PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr))
  PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
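
  ! The loop below assembles the standard 5-point finite-difference stencil on
  ! an m x n grid: row Ii corresponds to grid point (i,j) with Ii = i*n + j,
  ! the neighbours (i-1,j), (i+1,j), (i,j-1), (i,j+1) receive the value -1,
  ! and the diagonal receives 4.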

  do Ii=Istart,Iend-1
    v = -1.0; i = Ii/n; j = Ii - i*n

    if (i>0) then
      JJ = Ii - n
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    if (i<m-1) then
      JJ = Ii + n
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    if (j>0) then
      JJ = Ii - 1
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    if (j<n-1) then
      JJ = Ii + 1
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    v = 4.0
    PetscCallA(MatSetValues(A,one,Ii,one,Ii,v,ADD_VALUES,ierr))
  enddo

  PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
  PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))

  ! Create parallel vectors

  PetscCallA(VecCreate(PETSC_COMM_WORLD,u,ierr))
  PetscCallA(VecSetSizes(u,PETSC_DECIDE,m*n,ierr))
  PetscCallA(VecSetFromOptions(u,ierr))
  PetscCallA(VecDuplicate(u,b,ierr))
  PetscCallA(VecDuplicate(b,x,ierr))

  ! Set exact solution; then compute right-hand-side vector.

  PetscCallA(VecSet(u,sone,ierr))
  PetscCallA(MatMult(A,u,b,ierr))

  ! Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

  ! Set operators. Here the matrix that defines the linear system
  ! also serves as the preconditioning matrix.

  PetscCallA(KSPSetOperators(ksp,A,A,ierr))

  ! Set default preconditioner for this program to be block Jacobi.
  ! This choice can be overridden at runtime with the option
  !    -pc_type <type>
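  ! (For example, -pc_type asm selects the additive Schwarz method, which,
  ! with zero overlap, is equivalent to block Jacobi, as noted above.)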

  PetscCallA(KSPGetPC(ksp,myPc,ierr))
  PetscCallA(PCSetType(myPc,PCBJACOBI,ierr))

  !-------------------------------------------------------------------
  ! Define the problem decomposition
  !-------------------------------------------------------------------

  ! Call PCBJacobiSetTotalBlocks() to set individually the size of
  ! each block in the preconditioner. This could also be done with
  ! the runtime option
  !    -pc_bjacobi_blocks <blocks>
  ! Also, see the command PCBJacobiSetLocalBlocks() to set the
  ! local blocks.

  ! Note: The default decomposition is 1 block per processor.
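  ! Here we request m blocks, each of size n, so the block sizes sum to the
  ! global problem size m*n. With the default m = 4 (hence n = m+2 = 6) this
  ! gives 4 blocks of 6 rows each, regardless of the number of MPI processes.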

  allocate(blks(m),source = n)
  PetscCallA(PCBJacobiSetTotalBlocks(myPc,m,blks,ierr))
  deallocate(blks)

  !-------------------------------------------------------------------
  ! Set the linear solvers for the subblocks
  !-------------------------------------------------------------------

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Basic method, should be sufficient for the needs of most users.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! By default, the block Jacobi method uses the same solver on each
  ! block of the problem. To set the same solver options on all blocks,
  ! use the prefix -sub before the usual PC and KSP options, e.g.,
  !    -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
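  ! For instance (illustrative), running with
  !    -sub_pc_type jacobi -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  ! selects Jacobi-preconditioned GMRES with a relative tolerance of 1.e-4
  ! on every block.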

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Advanced method, setting different solvers for various blocks.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Note that each block's KSP context is completely independent of
  ! the others, and the full range of uniprocessor KSP options is
  ! available for each block. The following section of code is intended
  ! to be a simple illustration of setting different linear solvers for
  ! the individual blocks. These choices are obviously not recommended
  ! for solving this particular problem.

  PetscCallA(PetscObjectTypeCompare(myPc,PCBJACOBI,isbjacobi,ierr))

  if (isbjacobi) then

    ! Call KSPSetUp() to set the block Jacobi data structures (including
    ! creation of an internal KSP context for each block).
    ! Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().

    PetscCallA(KSPSetUp(ksp,ierr))

    ! Extract the array of KSP contexts for the local blocks
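    ! PCBJacobiGetSubKSP() is called twice: the first call (with PETSC_NULL_KSP)
    ! only retrieves nlocal and first, so that subksp can be allocated with the
    ! right size; the second call then fills in the local KSP contexts.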
    PetscCallA(PCBJacobiGetSubKSP(myPc,nlocal,first,PETSC_NULL_KSP,ierr))
    allocate(subksp(nlocal))
    PetscCallA(PCBJacobiGetSubKSP(myPc,nlocal,first,subksp,ierr))

    ! Loop over the local blocks, setting various KSP options for each block

    do i=0,nlocal-1

      PetscCallA(KSPGetPC(subksp(i+1),subpc,ierr))
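
      ! Illustrative mix of solvers: rank 0 uses Jacobi-preconditioned GMRES on
      ! all of its blocks, while the remaining ranks use ILU (with the default
      ! KSP) on odd-numbered local blocks and unpreconditioned BiCGS on
      ! even-numbered ones.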
      if (myRank>0) then
        if (mod(i,2)==1) then
          PetscCallA(PCSetType(subpc,PCILU,ierr))
        else
          PetscCallA(PCSetType(subpc,PCNONE,ierr))
          PetscCallA(KSPSetType(subksp(i+1),KSPBCGS,ierr))
          PetscCallA(KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr))
        endif
      else
        PetscCallA(PCSetType(subpc,PCJACOBI,ierr))
        PetscCallA(KSPSetType(subksp(i+1),KSPGMRES,ierr))
        PetscCallA(KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr))
      endif

    end do

  endif

  !-------------------------------------------------------------------
  ! Solve the linear system
  !-------------------------------------------------------------------

  ! Set runtime options

  PetscCallA(KSPSetFromOptions(ksp,ierr))

  ! Solve the linear system

  PetscCallA(KSPSolve(ksp,b,x,ierr))

  !-------------------------------------------------------------------
  ! Check solution and clean up
  !-------------------------------------------------------------------

  ! Check the error

  !PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
  PetscCallA(VecAXPY(x,myNone,u,ierr))
  !PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))

  PetscCallA(VecNorm(x,NORM_2,norm,ierr))
  PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
  write(outputString,*) 'Norm of error',real(norm),'Iterations',its,'\n'   ! norm is PetscReal; real() converts it to default real kind for the write
  PetscCallA(PetscPrintf(PETSC_COMM_WORLD,outputString,ierr))

  ! Free work space. All PETSc objects should be destroyed when they
  ! are no longer needed.

  if (allocated(subksp)) deallocate(subksp)   ! subksp is allocated only when the PC is block Jacobi
  PetscCallA(KSPDestroy(ksp,ierr))
  PetscCallA(VecDestroy(u,ierr))
  PetscCallA(VecDestroy(b,ierr))
  PetscCallA(MatDestroy(A,ierr))
  PetscCallA(VecDestroy(x,ierr))
  PetscCallA(PetscFinalize(ierr))

end program main

!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -ksp_view ::ascii_info_detail
!
!TEST*/