Actual source code: ex7f.F90
! Block Jacobi preconditioner for solving a linear system in parallel with KSP.
! The code shows how to set the individual block sizes and how to use
! different linear solvers on the individual blocks.

! This example focuses on ways to customize the block Jacobi preconditioner.
! See ex1.c and ex2.c for more detailed comments on the basic usage of KSP
! (including working with matrices and vectors).

! Recall: The block Jacobi method is equivalent to the additive Schwarz
! method (ASM) with zero overlap.
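! For instance, running this example with
!   -pc_type asm -pc_asm_overlap 0
! should build the same preconditioner as the default -pc_type bjacobi
! used below (assuming the default sub-solvers on each block).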

#include <petsc/finclude/petscksp.h>
program main
  use petscksp
  implicit none
  Vec :: x, b, u        ! approx solution, RHS, exact solution
  Mat :: A              ! linear system matrix
  KSP :: ksp            ! KSP context
  PC :: myPc            ! PC context
  PC :: subpc           ! PC context for subdomain
  PetscReal :: norm     ! norm of solution error
  PetscReal, parameter :: tol = 1.e-6
  PetscErrorCode :: ierr
  PetscInt :: i, j, Ii, JJ, n
  PetscInt :: m
  PetscMPIInt :: rank, size
  PetscInt :: its, nlocal, first, Istart, Iend
  PetscScalar :: v
  PetscScalar, parameter :: &
    myNone = -1.0, &
    sone = 1.0
  PetscBool :: isbjacobi, flg
  KSP, pointer :: subksp(:) => null()
  PetscInt :: blks(4)
  character(len=PETSC_MAX_PATH_LEN) :: outputString

  PetscCallA(PetscInitialize(ierr))
  m = 4
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))
  n = m + 2
  blks = n
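
  ! Caution: blks is dimensioned 4 and is passed below to
  ! PCBJacobiSetTotalBlocks() as a description of m blocks of size n each
  ! (4*n = m*n rows only when m = 4), so changing m with -m also requires
  ! adjusting the block description.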

  !-------------------------------------------------------------------
  ! Compute the matrix and right-hand-side vector that define
  ! the linear system, Ax = b.
  !-------------------------------------------------------------------

  ! Create and assemble parallel matrix

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*n, m*n, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatMPIAIJSetPreallocation(A, 5_PETSC_INT_KIND, PETSC_NULL_INTEGER_ARRAY, 5_PETSC_INT_KIND, PETSC_NULL_INTEGER_ARRAY, ierr))
  PetscCallA(MatSeqAIJSetPreallocation(A, 5_PETSC_INT_KIND, PETSC_NULL_INTEGER_ARRAY, ierr))
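
  ! Preallocating 5 nonzeros per row matches the 5-point stencil assembled
  ! below; both the sequential and MPI AIJ variants are preallocated so the
  ! example runs on one or many processes.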
  PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
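
  ! Each process assembles only the rows it owns: row Ii corresponds to
  ! grid point (i,j) of the m x n grid, with i = Ii/n and j = Ii - i*n.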
  do Ii = Istart, Iend - 1
    v = -1.0; i = Ii/n; j = Ii - i*n
    if (i > 0) then
      JJ = Ii - n
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [Ii], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if

    if (i < m - 1) then
      JJ = Ii + n
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [Ii], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if

    if (j > 0) then
      JJ = Ii - 1
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [Ii], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if

    if (j < n - 1) then
      JJ = Ii + 1
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [Ii], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if

    v = 4.0
    PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [Ii], 1_PETSC_INT_KIND, [Ii], [v], ADD_VALUES, ierr))
  end do

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

  ! Create parallel vectors

  PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
  PetscCallA(VecSetSizes(u, PETSC_DECIDE, m*n, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))

  ! Set the exact solution; then compute the right-hand-side vector b = A*u.

  PetscCallA(VecSet(u, sone, ierr))
  PetscCallA(MatMult(A, u, b, ierr))

  ! Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

  ! Set operators. Here the matrix that defines the linear system
  ! also serves as the matrix used to construct the preconditioner.

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))

  ! Set the default preconditioner for this program to be block Jacobi.
  ! This choice can be overridden at runtime with the option
  !   -pc_type <type>

  PetscCallA(KSPGetPC(ksp, myPc, ierr))
  PetscCallA(PCSetType(myPc, PCBJACOBI, ierr))

  !-------------------------------------------------------------------
  ! Define the problem decomposition
  !-------------------------------------------------------------------

  ! Call PCBJacobiSetTotalBlocks() to set individually the size of
  ! each block in the preconditioner. This could also be done with
  ! the runtime option
  !   -pc_bjacobi_blocks <blocks>
  ! Also, see the command PCBJacobiSetLocalBlocks() to set the
  ! local blocks.

  ! Note: The default decomposition is 1 block per processor.
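
  ! For example, a similar decomposition (4 equal blocks) can be requested
  ! at runtime instead (assuming the executable is named ex7f):
  !   mpiexec -n 2 ./ex7f -pc_bjacobi_blocks 4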

  PetscCallA(PCBJacobiSetTotalBlocks(myPc, m, blks, ierr))

  !-------------------------------------------------------------------
  ! Set the linear solvers for the subblocks
  !-------------------------------------------------------------------

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Basic method: should be sufficient for the needs of most users.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! By default, the block Jacobi method uses the same solver on each
  ! block of the problem. To set the same solver options on all blocks,
  ! prepend the prefix -sub_ to the usual PC and KSP options, e.g.,
  !   -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
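
  ! For example, to use ILU with GMRES and a looser relative tolerance on
  ! every block (assuming the executable is named ex7f):
  !   mpiexec -n 2 ./ex7f -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4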

  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  ! Advanced method: setting different solvers for various blocks.
  ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  ! Note that each block's KSP context is completely independent of
  ! the others, and the full range of uniprocessor KSP options is
  ! available for each block. The following section of code is intended
  ! to be a simple illustration of setting different linear solvers for
  ! the individual blocks. These choices are obviously not recommended
  ! for solving this particular problem.

  PetscCallA(PetscObjectTypeCompare(myPc, PCBJACOBI, isbjacobi, ierr))

  if (isbjacobi) then

    ! Call KSPSetUp() to set the block Jacobi data structures (including
    ! creation of an internal KSP context for each block).
    ! Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().

    PetscCallA(KSPSetUp(ksp, ierr))

    ! Extract the array of KSP contexts for the local blocks.
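    ! The first call, with PETSC_NULL_KSP_POINTER, only queries the number
    ! of local blocks (nlocal) and the global index of the first local
    ! block (first); the second call associates the pointer subksp with
    ! the internal array of block KSP contexts.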
    PetscCallA(PCBJacobiGetSubKSP(myPc, nlocal, first, PETSC_NULL_KSP_POINTER, ierr))
    PetscCallA(PCBJacobiGetSubKSP(myPc, nlocal, first, subksp, ierr))

    ! Loop over the local blocks, setting various KSP options for each block.
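    ! In KSPSetTolerances() below only the relative tolerance is changed;
    ! PETSC_CURRENT_REAL and PETSC_CURRENT_INTEGER keep the current
    ! absolute tolerance, divergence tolerance, and maximum iteration count.
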
    do i = 0, nlocal - 1

      PetscCallA(KSPGetPC(subksp(i + 1), subpc, ierr))

      if (rank > 0) then
        if (mod(i, 2) == 1) then
          PetscCallA(PCSetType(subpc, PCILU, ierr))
        else
          PetscCallA(PCSetType(subpc, PCNONE, ierr))
          PetscCallA(KSPSetType(subksp(i + 1), KSPBCGS, ierr))
          PetscCallA(KSPSetTolerances(subksp(i + 1), tol, PETSC_CURRENT_REAL, PETSC_CURRENT_REAL, PETSC_CURRENT_INTEGER, ierr))
        end if
      else
        PetscCallA(PCSetType(subpc, PCJACOBI, ierr))
        PetscCallA(KSPSetType(subksp(i + 1), KSPGMRES, ierr))
        PetscCallA(KSPSetTolerances(subksp(i + 1), tol, PETSC_CURRENT_REAL, PETSC_CURRENT_REAL, PETSC_CURRENT_INTEGER, ierr))
      end if

    end do

  end if

  !-------------------------------------------------------------------
  ! Solve the linear system
  !-------------------------------------------------------------------

  ! Set runtime options

  PetscCallA(KSPSetFromOptions(ksp, ierr))

  ! Solve the linear system

  PetscCallA(KSPSolve(ksp, b, x, ierr))

  !-------------------------------------------------------------------
  ! Check solution and clean up
  !-------------------------------------------------------------------

  ! Check the error

  !PetscCallA(VecView(x, PETSC_VIEWER_STDOUT_WORLD, ierr))
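
  ! VecAXPY() computes x <- x + myNone*u with myNone = -1.0, overwriting
  ! the computed solution with the error x - u.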
  PetscCallA(VecAXPY(x, myNone, u, ierr))

  !PetscCallA(VecView(x, PETSC_VIEWER_STDOUT_WORLD, ierr))

  PetscCallA(VecNorm(x, NORM_2, norm, ierr))
  PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
  write (outputString, *) 'Norm of error', real(norm), 'Iterations', its, '\n' ! PetscScalar might be of complex type
  PetscCallA(PetscPrintf(PETSC_COMM_WORLD, outputString, ierr))

  ! Free work space. All PETSc objects should be destroyed when they
  ! are no longer needed.

  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(PetscFinalize(ierr))

end program main

!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -ksp_view ::ascii_info_detail
!
!TEST*/