Actual source code: ex6f.F90

!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!
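!  Example usage (assuming the executable is built as ex6f):
!      mpiexec -n 2 ./ex6f -m 4 -n 4 -nsteps 3 -ksp_monitor_short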

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none

!  Variables:
!
!  A       - matrix that defines linear system
!  ksp     - KSP context
!  x, b, u - approx solution, RHS, exact solution vectors
!
      Vec     x,u,b
      Mat     A,A2
      KSP     ksp
      PetscInt i,j,II,JJ,m,n
      PetscInt Istart,Iend
      PetscInt nsteps,one
      PetscErrorCode ierr
      PetscBool  flg
      PetscScalar  v

      PetscCallA(PetscInitialize(ierr))
      m      = 3
      n      = 3
      nsteps = 2
      one    = 1
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-nsteps',nsteps,flg,ierr))

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
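!  For example, -mat_type aij selects the (default) sparse AIJ format at
!  runtime; alternatively the type could be fixed in the code with
!  MatSetType() before assembly.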

      PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
      PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
      PetscCallA(MatSetFromOptions(A,ierr))
      PetscCallA(MatSetUp(A,ierr))

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
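!  (For example, with the default m = n = 3 and two MPI processes, the
!   default layout gives rows 0-4 to rank 0 and rows 5-8 to rank 1.)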

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
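!   - The loop below assembles the standard 5-point finite-difference
!     Laplacian on an m x n grid: row II = i*n + j receives 4 on the
!     diagonal and -1 for each existing north/south/east/west neighbor.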

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
        endif
        if (i.lt.m-1) then
          JJ = II + n
          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
        endif
        if (j.gt.0) then
          JJ = II - 1
          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          PetscCallA(MatSetValues(A,one,[II],one,[JJ],[v],ADD_VALUES,ierr))
        endif
        v = 4.0
        PetscCallA(MatSetValues(A,one,[II],one,[II],[v],ADD_VALUES,ierr))
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
      PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
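!   - The vector type can likewise be chosen at runtime (e.g. with
!     -vec_type <type>) since VecSetFromOptions() is called.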

      PetscCallA(VecCreate(PETSC_COMM_WORLD,u,ierr))
      PetscCallA(VecSetSizes(u,PETSC_DECIDE,m*n,ierr))
      PetscCallA(VecSetFromOptions(u,ierr))
      PetscCallA(VecDuplicate(u,b,ierr))
      PetscCallA(VecDuplicate(b,x,ierr))

!  Create linear solver context

      PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)
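!  For example, the regression test at the bottom of this file passes
!  -pc_type jacobi -ksp_monitor_short -mat_view; with no options the PETSc
!  defaults are used (GMRES with ILU or block Jacobi preconditioning,
!  depending on the number of processes).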

      PetscCallA(KSPSetFromOptions(ksp,ierr))

!  Solve several linear systems in succession

      do 100 i=1,nsteps
         PetscCallA(solve1(ksp,A,x,b,u,i,nsteps,A2,ierr))
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      PetscCallA(VecDestroy(u,ierr))
      PetscCallA(VecDestroy(x,ierr))
      PetscCallA(VecDestroy(b,ierr))
      PetscCallA(MatDestroy(A,ierr))
      PetscCallA(KSPDestroy(ksp,ierr))

      PetscCallA(PetscFinalize(ierr))
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,A2,ierr)
      use petscksp
      implicit none

!
!   solve1 - This routine is used for repeated linear system solves.
!   We update the linear system matrix each time, but retain the same
!   preconditioning matrix for all linear solves.
!
!      A - linear system matrix
!      A2 - preconditioning matrix
!
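!   (A is passed to KSPSetOperators() as the operator; A2 is the matrix
!    from which the preconditioner is built.)
!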
      PetscScalar  v,val
      PetscInt II,Istart,Iend
      PetscInt count,nsteps,one
      PetscErrorCode ierr
      Mat     A
      KSP     ksp
      Vec     x,b,u

! Use common block to retain matrix between successive subroutine calls
      Mat              A2
      PetscMPIInt      rank
      PetscBool        pflag
      common /my_data/ pflag,rank

      one = 1
! First time through: create a copy of the matrix; it is retained as the
! preconditioning matrix for all of the linear solves
      if (count .eq. 1) then
        PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
        pflag = .false.
        PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-mat_view',pflag,ierr))
        if (pflag) then
          if (rank .eq. 0) write(6,100)
          call PetscFlush(6)
        endif
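! MatConvert() with MATSAME and MAT_INITIAL_MATRIX creates A2 as a new
! matrix of the same type holding a copy of the current entries of A;
! this copy is what is later passed to KSPSetOperators() as the
! preconditioning matrix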
        PetscCallA(MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr))
! All other times: Set previous solution as initial guess for next solve.
      else
        PetscCallA(KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr))
      endif

! Alter the linear system matrix A slightly (add 2.0 to each diagonal entry)
      PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
      do 20, II=Istart,Iend-1
        v = 2.0
        PetscCallA(MatSetValues(A,one,[II],one,[II],[v],ADD_VALUES,ierr))
 20   continue
      PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
      if (pflag) then
        if (rank .eq. 0) write(6,110)
        call PetscFlush(6)
      endif
      PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))

! Set the exact solution; compute the right-hand-side vector
      val = 1.0*real(count)
      PetscCallA(VecSet(u,val,ierr))
      PetscCallA(MatMult(A,u,b,ierr))

! Set operators, keeping the same preconditioning matrix for all
! linear solves.  This approach is often effective when the linear
! systems do not change very much between successive steps.
      PetscCallA(KSPSetReusePreconditioner(ksp,PETSC_TRUE,ierr))
      PetscCallA(KSPSetOperators(ksp,A,A2,ierr))
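! (KSPSetReusePreconditioner() with PETSC_TRUE tells the KSP to keep the
!  preconditioner built from A2 on the first solve instead of recomputing
!  it each time KSPSetOperators() is called.)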

! Solve linear system
      PetscCallA(KSPSolve(ksp,b,x,ierr))

! Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) PetscCallA(MatDestroy(A2,ierr))

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end

!/*TEST
!
!   test:
!      args: -pc_type jacobi -mat_view -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/