Actual source code: ex7f.F90
! Block Jacobi preconditioner for solving a linear system in parallel with KSP.
! The code shows how to set the individual block sizes and how to use
! different linear solvers on the individual blocks.

! This example focuses on ways to customize the block Jacobi preconditioner.
! See ex1.c and ex2.c for more detailed comments on the basic usage of KSP
! (including working with matrices and vectors).

! Recall: The block Jacobi method is equivalent to the ASM preconditioner with zero overlap.
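! As a point of reference only (nothing below relies on it), that zero-overlap
! additive Schwarz preconditioner could be requested at runtime with, e.g.,
!    -pc_type asm -pc_asm_overlap 0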
program main
#include <petsc/finclude/petscksp.h>
  use petscksp

  implicit none
  Vec :: x,b,u        ! approx solution, RHS, exact solution
  Mat :: A            ! linear system matrix
  KSP :: ksp          ! KSP context
  PC  :: myPc         ! PC context
  PC  :: subpc        ! PC context for subdomain
  PetscReal :: norm   ! norm of solution error
  PetscReal,parameter :: tol = 1.e-6
  PetscErrorCode :: ierr
  PetscInt :: i,j,Ii,JJ,n
  PetscInt :: m
  PetscMPIInt :: rank,size
  PetscInt :: its,nlocal,first,Istart,Iend
  PetscScalar :: v
  PetscScalar, parameter :: &
    myNone = -1.0, &
    sone   = 1.0
  PetscBool :: isbjacobi,flg
  KSP,allocatable,dimension(:) :: subksp        ! array of local KSP contexts on this processor
  PetscInt,allocatable,dimension(:) :: blks
  character(len=PETSC_MAX_PATH_LEN) :: outputString
  PetscInt,parameter :: one = 1, five = 5
  PetscCallA(PetscInitialize(ierr))
  m = 4
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))
  n = m+2
  !-------------------------------------------------------------------
  ! Compute the matrix and right-hand-side vector that define
  ! the linear system, Ax = b.
  !-------------------------------------------------------------------

  ! Create and assemble parallel matrix

  PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
  PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
  PetscCallA(MatSetFromOptions(A,ierr))
  PetscCallA(MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr))
  PetscCallA(MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr))
  PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
  ! Fill the rows this process owns with a 2D five-point stencil
  ! (value 4 on the diagonal, -1 for each grid neighbor)

  do Ii=Istart,Iend-1
    v = -1.0; i = Ii/n; j = Ii - i*n
    if (i>0) then
      JJ = Ii - n
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    if (i<m-1) then
      JJ = Ii + n
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    if (j>0) then
      JJ = Ii - 1
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    if (j<n-1) then
      JJ = Ii + 1
      PetscCallA(MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr))
    endif

    v = 4.0
    PetscCallA(MatSetValues(A,one,Ii,one,Ii,v,ADD_VALUES,ierr))
  enddo
  PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
  PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))

  ! Create parallel vectors

  PetscCallA(VecCreate(PETSC_COMM_WORLD,u,ierr))
  PetscCallA(VecSetSizes(u,PETSC_DECIDE,m*n,ierr))
  PetscCallA(VecSetFromOptions(u,ierr))
  PetscCallA(VecDuplicate(u,b,ierr))
  PetscCallA(VecDuplicate(b,x,ierr))
  ! Set exact solution; then compute right-hand-side vector.

  PetscCallA(VecSet(u,sone,ierr))
  PetscCallA(MatMult(A,u,b,ierr))
  ! Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

  ! Set operators. Here the matrix that defines the linear system
  ! also serves as the preconditioning matrix.

  PetscCallA(KSPSetOperators(ksp,A,A,ierr))

  ! Set default preconditioner for this program to be block Jacobi.
  ! This choice can be overridden at runtime with the option
  !    -pc_type <type>

  PetscCallA(KSPGetPC(ksp,myPc,ierr))
  PetscCallA(PCSetType(myPc,PCBJACOBI,ierr))
  !-------------------------------------------------------------------
  ! Define the problem decomposition
  !-------------------------------------------------------------------
  ! Call PCBJacobiSetTotalBlocks() to set the size of each block in the
  ! preconditioner individually. This could also be done with the runtime
  ! option
  !    -pc_bjacobi_blocks <blocks>
  ! Also, see the command PCBJacobiSetLocalBlocks() to set the
  ! local blocks.

  ! Note: The default decomposition is 1 block per processor.
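  !
  ! For example, with the default m = 4 the PCBJacobiSetTotalBlocks() call
  ! below is roughly equivalent to running with
  !    -pc_bjacobi_blocks 4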
  allocate(blks(m),source = n)
  PetscCallA(PCBJacobiSetTotalBlocks(myPc,m,blks,ierr))
  deallocate(blks)
  !-------------------------------------------------------------------
  ! Set the linear solvers for the subblocks
  !-------------------------------------------------------------------

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Basic method, should be sufficient for the needs of most users.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! By default, the block Jacobi method uses the same solver on each
  ! block of the problem. To set the same solver options on all blocks,
  ! use the prefix -sub_ before the usual PC and KSP options, e.g.,
  !    -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
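  !
  ! For instance (the executable name is only illustrative), a run such as
  !    mpiexec -n 2 ./ex7f -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  ! uses ILU-preconditioned GMRES with a relative tolerance of 1.e-4 on every block.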
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Advanced method, setting different solvers for various blocks.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  ! Note that each block's KSP context is completely independent of
  ! the others, and the full range of uniprocessor KSP options is
  ! available for each block. The following section of code is intended
  ! to be a simple illustration of setting different linear solvers for
  ! the individual blocks. These choices are obviously not recommended
  ! for solving this particular problem.
  PetscCallA(PetscObjectTypeCompare(myPc,PCBJACOBI,isbjacobi,ierr))

  if (isbjacobi) then

    ! Call KSPSetUp() to set the block Jacobi data structures (including
    ! creation of an internal KSP context for each block).
    ! Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().

    PetscCallA(KSPSetUp(ksp,ierr))

    ! Extract the array of KSP contexts for the local blocks
    PetscCallA(PCBJacobiGetSubKSP(myPc,nlocal,first,PETSC_NULL_KSP,ierr))
    allocate(subksp(nlocal))
    PetscCallA(PCBJacobiGetSubKSP(myPc,nlocal,first,subksp,ierr))
    ! Loop over the local blocks, setting various KSP options for each block

    do i=0,nlocal-1
      PetscCallA(KSPGetPC(subksp(i+1),subpc,ierr))

      if (rank>0) then
        if (mod(i,2)==1) then
          PetscCallA(PCSetType(subpc,PCILU,ierr))
        else
          PetscCallA(PCSetType(subpc,PCNONE,ierr))
          PetscCallA(KSPSetType(subksp(i+1),KSPBCGS,ierr))
          PetscCallA(KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr))
        endif
      else
        PetscCallA(PCSetType(subpc,PCJACOBI,ierr))
        PetscCallA(KSPSetType(subksp(i+1),KSPGMRES,ierr))
        PetscCallA(KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr))
      endif
    end do

  endif
  !-------------------------------------------------------------------
  ! Solve the linear system
  !-------------------------------------------------------------------

  ! Set runtime options

  PetscCallA(KSPSetFromOptions(ksp,ierr))

  ! Solve the linear system

  PetscCallA(KSPSolve(ksp,b,x,ierr))
  !-------------------------------------------------------------------
  ! Check solution and clean up
  !-------------------------------------------------------------------

  !-------------------------------------------------------------------
  ! Check the error
  !-------------------------------------------------------------------

  !PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))

  PetscCallA(VecAXPY(x,myNone,u,ierr))

  !PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))

  PetscCallA(VecNorm(x,NORM_2,norm,ierr))
  PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
  write(outputString,*) 'Norm of error',real(norm),'Iterations',its,'\n'   ! PetscScalar might be of complex type
  PetscCallA(PetscPrintf(PETSC_COMM_WORLD,outputString,ierr))
  ! Free work space. All PETSc objects should be destroyed when they
  ! are no longer needed.

  if (allocated(subksp)) deallocate(subksp)   ! allocated only when block Jacobi is the PC
  PetscCallA(KSPDestroy(ksp,ierr))
  PetscCallA(VecDestroy(u,ierr))
  PetscCallA(VecDestroy(b,ierr))
  PetscCallA(MatDestroy(A,ierr))
  PetscCallA(VecDestroy(x,ierr))
  PetscCallA(PetscFinalize(ierr))

end program main
!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -ksp_view ::ascii_info_detail
!
!TEST*/
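!
! For reference, each test stanza above describes an MPI run of this example;
! the first one corresponds roughly to (executable name assumed)
!    mpiexec -n 2 ./ex7f -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always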