Actual source code: ex6f.F90

  1: !
  2: !  Description: This example demonstrates repeated linear solves as
  3: !  well as the use of different preconditioner and linear system
  4: !  matrices.  This example also illustrates how to retain data
  5: !  between successive subroutine calls
  6: !
  7: #include <petsc/finclude/petscksp.h>
  8: module ex6fmodule
  9:   use petscksp

 11:   implicit none
 12: ! Global data to retain matrix between successive subroutine calls
 13:   PetscMPIInt rank
 14:   PetscBool pflag

 16: contains
 17:   subroutine solve1(ksp, A, x, b, u, count, nsteps, A2, ierr)

 19: !
 20: !   solve1 - This routine is used for repeated linear system solves.
 21: !   We update the linear system matrix each time, but retain the same
 22: !   matrix from which the preconditioner is constructed for all linear solves.
 23: !
 24: !      A - linear system matrix
 25: !      A2 - matrix from which the preconditioner is constructed
 26: !
 27:     PetscInt, intent(in) :: count, nsteps
 28:     PetscErrorCode, intent(out) :: ierr
 29:     Mat A
 30:     KSP ksp
 31:     Vec x, b, u
 32:     Mat A2
 33:     PetscScalar v, val
 34:     PetscInt II, Istart, Iend

 36: ! First time thorough: Create new matrix to define the linear system
 37:     if (count == 1) then
 38:       PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
 39:       pflag = .false.
 40:       PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-mat_view', pflag, ierr))
 41:       if (pflag) then
 42:         if (rank == 0) write (6, 100)
 43:         call PetscFlush(6)
 44:       end if
 45:       PetscCallA(MatConvert(A, MATSAME, MAT_INITIAL_MATRIX, A2, ierr))
 46: ! All other times: Set previous solution as initial guess for next solve.
 47:     else
 48:       PetscCallA(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE, ierr))
 49:     end if

 51: ! Alter the matrix A a bit
 52:     PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
 53:     do II = Istart, Iend - 1
 54:       v = 2.0
 55:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [II], [v], ADD_VALUES, ierr))
 56:     end do
 57:     PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
 58:     if (pflag) then
 59:       if (rank == 0) write (6, 110)
 60:       call PetscFlush(6)
 61:     end if
 62:     PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

 64: ! Set the exact solution; compute the right-hand-side vector
 65:     val = 1.0*real(count)
 66:     PetscCallA(VecSet(u, val, ierr))
 67:     PetscCallA(MatMult(A, u, b, ierr))

 69: ! Set operators, keeping the identical preconditioner for
 70: ! all linear solves.  This approach is often effective when the
 71: ! linear systems do not change very much between successive steps.
 72:     PetscCallA(KSPSetReusePreconditioner(ksp, PETSC_TRUE, ierr))
 73:     PetscCallA(KSPSetOperators(ksp, A, A2, ierr))

 75: ! Solve linear system
 76:     PetscCallA(KSPSolve(ksp, b, x, ierr))

 78: ! Destroy the matrix used to construct the preconditioner on the last time through
 79:     if (count == nsteps) PetscCallA(MatDestroy(A2, ierr))

 81: 100 format('previous matrix: preconditioning')
 82: 110 format('next matrix: defines linear system')
 83:   end subroutine
 84: end module ex6fmodule

 86: program ex6f
 87:   use petscksp
 88:   use ex6fmodule
 89:   implicit none

 91: !  Variables:
 92: !
 93: !  A       - matrix that defines linear system
 94: !  ksp    - KSP context
 95: !  ksp     - KSP context
 96: !  x, b, u - approx solution, RHS, exact solution vectors
 97: !
 98:   Vec x, u, b
 99:   Mat A, A2
100:   KSP ksp
101:   PetscInt i, j, II, JJ, Istart, Iend
102:   PetscInt m, n, nsteps
103:   PetscErrorCode ierr
104:   PetscBool flg
105:   PetscScalar v

107:   PetscCallA(PetscInitialize(ierr))
108:   m = 3
109:   PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
110:   n = 3
111:   PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
112:   nsteps = 2
113:   PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-nsteps', nsteps, flg, ierr))

115: !  Create parallel matrix, specifying only its global dimensions.
116: !  When using MatCreate(), the matrix format can be specified at
117: !  runtime. Also, the parallel partitioning of the matrix is
118: !  determined by PETSc at runtime.

120:   PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
121:   PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*n, m*n, ierr))
122:   PetscCallA(MatSetFromOptions(A, ierr))
123:   PetscCallA(MatSetUp(A, ierr))

125: !  The matrix is partitioned by contiguous chunks of rows across the
126: !  processors.  Determine which rows of the matrix are locally owned.

128:   PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))

130: !  Set matrix elements.
131: !   - Each processor needs to insert only elements that it owns
132: !     locally (but any non-local elements will be sent to the
133: !     appropriate processor during matrix assembly).
134: !   - Always specify global rows and columns of matrix entries.

136:   do II = Istart, Iend - 1
137:     v = -1.0
138:     i = II/n
139:     j = II - i*n
140:     if (i > 0) then
141:       JJ = II - n
142:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
143:     end if
144:     if (i < m - 1) then
145:       JJ = II + n
146:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
147:     end if
148:     if (j > 0) then
149:       JJ = II - 1
150:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
151:     end if
152:     if (j < n - 1) then
153:       JJ = II + 1
154:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
155:     end if
156:     v = 4.0
157:     PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [II], [v], ADD_VALUES, ierr))
158:   end do

160: !  Assemble matrix, using the 2-step process:
161: !       MatAssemblyBegin(), MatAssemblyEnd()
162: !  Computations can be done while messages are in transition
163: !  by placing code between these two statements.

165:   PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
166:   PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

168: !  Create parallel vectors.
169: !   - When using VecCreate(), the parallel partitioning of the vector
170: !     is determined by PETSc at runtime.
171: !   - Note: We form 1 vector from scratch and then duplicate as needed.

173:   PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
174:   PetscCallA(VecSetSizes(u, PETSC_DECIDE, m*n, ierr))
175:   PetscCallA(VecSetFromOptions(u, ierr))
176:   PetscCallA(VecDuplicate(u, b, ierr))
177:   PetscCallA(VecDuplicate(b, x, ierr))

179: !  Create linear solver context

181:   PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

183: !  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

185:   PetscCallA(KSPSetFromOptions(ksp, ierr))

187: !  Solve several linear systems in succession

189:   do i = 1, nsteps
190:     PetscCallA(solve1(ksp, A, x, b, u, i, nsteps, A2, ierr))
191:   end do

193: !  Free work space.  All PETSc objects should be destroyed when they
194: !  are no longer needed.

196:   PetscCallA(VecDestroy(u, ierr))
197:   PetscCallA(VecDestroy(x, ierr))
198:   PetscCallA(VecDestroy(b, ierr))
199:   PetscCallA(MatDestroy(A, ierr))
200:   PetscCallA(KSPDestroy(ksp, ierr))

202:   PetscCallA(PetscFinalize(ierr))
203: end program ex6f

205: !/*TEST
206: !
207: !   test:
208: !      args: -pc_type jacobi -mat_view -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
209: !
210: !TEST*/