Actual source code: ex6f.F90
!
! Description: This example demonstrates repeated linear solves as
! well as the use of different preconditioner and linear system
! matrices. This example also illustrates how to retain data
! between successive subroutine calls using a common block.
!
!
#include <petsc/finclude/petscksp.h>
program main
use petscksp
implicit none

! Variables:
!
!  A       - matrix that defines linear system
!  ksp     - KSP context
!  A2      - matrix from which the preconditioner is constructed
!  x, b, u - approx solution, RHS, exact solution vectors
!
Vec x, u, b
Mat A, A2
KSP ksp
PetscInt i, j, II, JJ, m, n
PetscInt Istart, Iend
PetscInt nsteps, one
PetscErrorCode ierr
PetscBool flg
PetscScalar v

PetscCallA(PetscInitialize(ierr))
m = 3
n = 3
nsteps = 2
one = 1
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-nsteps', nsteps, flg, ierr))

! Create parallel matrix, specifying only its global dimensions.
! When using MatCreate(), the matrix format can be specified at
! runtime. Also, the parallel partitioning of the matrix is
! determined by PETSc at runtime.

PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*n, m*n, ierr))
PetscCallA(MatSetFromOptions(A, ierr))
PetscCallA(MatSetUp(A, ierr))

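! (A sketch of the runtime configuration mentioned above: because
! MatSetFromOptions() is called, the storage format can be chosen on the
! command line with the standard -mat_type option; the executable name
! depends on how the example is built, e.g.
!    ./ex6f -mat_type aij
!    mpiexec -n 2 ./ex6f -mat_type mpiaij            )
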
! The matrix is partitioned by contiguous chunks of rows across the
! processors. Determine which rows of the matrix are locally owned.

PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))

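! (For instance, with the default m = n = 3 the matrix has 9 rows; on two
! MPI ranks PETSc's default layout would typically give rank 0 rows 0..4 and
! rank 1 rows 5..8, i.e. [Istart,Iend) = [0,5) and [5,9) respectively.)
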
! Set matrix elements.
!  - Each processor needs to insert only elements that it owns
!    locally (but any non-local elements will be sent to the
!    appropriate processor during matrix assembly).
!  - Always specify global rows and columns of matrix entries.

do II = Istart, Iend - 1
  v = -1.0
  i = II/n
  j = II - i*n
  if (i > 0) then
    JJ = II - n
    PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
  end if
  if (i < m - 1) then
    JJ = II + n
    PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
  end if
  if (j > 0) then
    JJ = II - 1
    PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
  end if
  if (j < n - 1) then
    JJ = II + 1
    PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
  end if
  v = 4.0
  PetscCallA(MatSetValues(A, one, [II], one, [II], [v], ADD_VALUES, ierr))
end do

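! (The loop above builds the standard 5-point finite-difference Laplacian on
! an m x n grid: row II = i*n + j gets -1 for each existing neighbor
! (i-1,j), (i+1,j), (i,j-1), (i,j+1) and 4 on the diagonal. For the default
! m = n = 3, the interior row II = 4 couples to columns 1, 3, 5, 7 with -1
! and to column 4 with 4.)
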
! Assemble matrix, using the 2-step process:
!   MatAssemblyBegin(), MatAssemblyEnd()
! Computations can be done while messages are in transition
! by placing code between these two statements.

PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

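! (As a sketch of the overlap described above: any purely local work, such as
! filling local vector entries, could be placed between MatAssemblyBegin()
! and MatAssemblyEnd(); this example has none, so the calls appear back to back.)
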
! Create parallel vectors.
!  - When using VecCreate(), the parallel partitioning of the vector
!    is determined by PETSc at runtime.
!  - Note: We form 1 vector from scratch and then duplicate as needed.

PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
PetscCallA(VecSetSizes(u, PETSC_DECIDE, m*n, ierr))
PetscCallA(VecSetFromOptions(u, ierr))
PetscCallA(VecDuplicate(u, b, ierr))
PetscCallA(VecDuplicate(b, x, ierr))

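! (The duplicates b and x inherit the size and parallel layout of u, so with
! the default PETSC_DECIDE layouts all three vectors conform with the row
! distribution of A.)
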
! Create linear solver context

PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

! Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

PetscCallA(KSPSetFromOptions(ksp, ierr))

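! (The solver could also be configured programmatically; a sketch using the
! standard KSP/PC calls, which would additionally require declaring "PC pc"
! and would be placed before KSPSetFromOptions() so that command-line
! options can still override it:
!    PetscCallA(KSPSetType(ksp, KSPGMRES, ierr))
!    PetscCallA(KSPGetPC(ksp, pc, ierr))
!    PetscCallA(PCSetType(pc, PCJACOBI, ierr))                              )
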
! Solve several linear systems in succession

do i = 1, nsteps
  PetscCallA(solve1(ksp, A, x, b, u, i, nsteps, A2, ierr))
end do

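! (The number of solves defaults to 2 and can be changed at run time, e.g.
! with -nsteps 4.)
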
! Free work space. All PETSc objects should be destroyed when they
! are no longer needed.

PetscCallA(VecDestroy(u, ierr))
PetscCallA(VecDestroy(x, ierr))
PetscCallA(VecDestroy(b, ierr))
PetscCallA(MatDestroy(A, ierr))
PetscCallA(KSPDestroy(ksp, ierr))

PetscCallA(PetscFinalize(ierr))
end

! -----------------------------------------------------------------------
!
subroutine solve1(ksp, A, x, b, u, count, nsteps, A2, ierr)
use petscksp
implicit none

!
! solve1 - This routine is used for repeated linear system solves.
! We update the linear system matrix each time, but retain the same
! matrix from which the preconditioner is constructed for all linear solves.
!
!  A  - linear system matrix
!  A2 - matrix from which the preconditioner is constructed
!
PetscScalar v, val
PetscInt II, Istart, Iend
PetscInt count, nsteps, one
PetscErrorCode ierr
Mat A
KSP ksp
Vec x, b, u

! A2 is passed in from the caller and retained between successive subroutine
! calls; rank and pflag are kept between calls in a common block.
Mat A2
PetscMPIInt rank
PetscBool pflag
common/my_data/rank, pflag

one = 1
! First time through: Create new matrix to define the linear system
if (count == 1) then
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  pflag = .false.
  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-mat_view', pflag, ierr))
  if (pflag) then
    if (rank == 0) write (6, 100)
    call PetscFlush(6)
  end if
  PetscCallA(MatConvert(A, MATSAME, MAT_INITIAL_MATRIX, A2, ierr))
! All other times: Set previous solution as initial guess for next solve.
else
  PetscCallA(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE, ierr))
end if

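! (KSPSetInitialGuessNonzero() tells the solver to start from the current
! contents of x, here the solution of the previous step, instead of zeroing
! the initial guess.)
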
! Alter the matrix A a bit
PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
do II = Istart, Iend - 1
  v = 2.0
  PetscCallA(MatSetValues(A, one, [II], one, [II], [v], ADD_VALUES, ierr))
end do
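! (Each call to solve1 therefore adds 2.0 to every locally owned diagonal
! entry of A, so the system matrix drifts further from A2, which keeps the
! original entries, as count increases.)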
PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
if (pflag) then
  if (rank == 0) write (6, 110)
  call PetscFlush(6)
end if
PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

! Set the exact solution; compute the right-hand-side vector
val = 1.0*real(count)
PetscCallA(VecSet(u, val, ierr))
PetscCallA(MatMult(A, u, b, ierr))

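! (Every component of the exact solution u equals the step number, so each
! step solves a different system with a known answer; convergence can be
! observed with, e.g., -ksp_monitor_short as in the test below.)
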
! Set operators, keeping the identical preconditioner for
! all linear solves. This approach is often effective when the
! linear systems do not change very much between successive steps.
PetscCallA(KSPSetReusePreconditioner(ksp, PETSC_TRUE, ierr))
PetscCallA(KSPSetOperators(ksp, A, A2, ierr))

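! (With reuse enabled, the preconditioner built from A2 on the first solve is
! kept for all later solves; only the operator A applied in the Krylov
! iterations changes. Passing PETSC_FALSE instead would rebuild the
! preconditioner whenever the operators are reset.)
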
! Solve linear system
PetscCallA(KSPSolve(ksp, b, x, ierr))

! Destroy the matrix used to construct the preconditioner on the last time through
if (count == nsteps) PetscCallA(MatDestroy(A2, ierr))

100 format('previous matrix: preconditioning')
110 format('next matrix: defines linear system')

end

!/*TEST
!
!   test:
!      args: -pc_type jacobi -mat_view -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/