Actual source code: ex13f90.F90

#include <petsc/finclude/petscdmda.h>
program main
!
! This example shows how DMDA is used to solve a PDE on a decomposed
! domain. The equation we solve here is not actually a PDE, but a toy
! problem: van der Pol's two-variable ODE duplicated onto a 3D grid:
! dx/dt = y
! dy/dt = mu(1-x**2)y - x
!
! So we are solving the same equation at every grid point, with no spatial
! dependencies. Still, we tell PETSc to communicate (stencil width > 0) so
! that there is communication between the different parts of the domain.
!
! The example is structured so that one can replace the RHS function and
! the forw_euler routine with a suitable RHS and a suitable time-integration
! scheme, with little or no modification to the DMDA parts. In particular,
! the "inner" parts of the RHS and of the time integration do not "know
! about" the decomposed domain. (A sketch of such an RHS follows this header.)
!
!     See:     http://dx.doi.org/10.6084/m9.figshare.1368581
!
!     Contributed by Aasmund Ervik (asmunder at pvv.org)
!
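! For orientation, the "inner" part of such an RHS has roughly this shape
! (a sketch only, with x as component 1 and y as component 2; the real
! dfdt_vdp lives in ex13f90aux.F90 and its exact argument list may differ):
!
!   do k = 1, kmax
!     do j = 1, jmax
!       do i = 1, imax
!         dfdt(1, i, j, k) = f(2, i, j, k)
!         dfdt(2, i, j, k) = mu*(1.0 - f(1, i, j, k)**2)*f(2, i, j, k) - f(1, i, j, k)
!       end do
!     end do
!   end do
!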
  use ex13f90auxmodule

  use petscdmda
  implicit none
  PetscErrorCode ierr
  PetscMPIInt rank, size
  MPI_Comm comm
  Vec Lvec, coords
  DM SolScal, CoordDM
  DMBoundaryType b_x, b_y, b_z
  PetscReal, pointer :: array(:, :, :, :)
  PetscReal :: t, xmin, xmax, ymin, ymax, zmin, zmax
  PetscReal, allocatable :: f(:, :, :, :), grid(:, :, :, :)
  PetscInt :: i, j, k, ib1, ibn, jb1, jbn, kb1, kbn, imax, jmax, kmax, itime, maxstep, nscreen
  PetscInt, parameter :: &
    stw = 1, & ! stencil width
    dof = 2, & ! number of variables in this DA
    ndim = 3   ! 3D code
  ! time stepping
  PetscReal, parameter :: tend = 100.0, dt = 1e-3
  ! global grid
  PetscReal, parameter :: xgmin = 0.0, ygmin = 0.0, zgmin = 0.0
  PetscReal, parameter :: xgmax = 1.0, ygmax = 1.0, zgmax = 1.0
  PetscInt, parameter :: igmax = 50, jgmax = 50, kgmax = 50

  ! Fire up PETSc:
  PetscCallA(PetscInitialize(ierr))

  comm = PETSC_COMM_WORLD
  PetscCallMPIA(MPI_Comm_rank(comm, rank, ierr))
  PetscCallMPIA(MPI_Comm_size(comm, size, ierr))
  if (rank == 0) then
    write (*, *) 'Hi! We are solving van der Pol using ', size, ' processes.'
    write (*, *) ' '
    write (*, *) '  t     x1         x2'
  end if

  ! Get the BCs and create the DMDA
  call get_boundary_cond(b_x, b_y, b_z)
  PetscCallA(DMDACreate3d(comm, b_x, b_y, b_z, DMDA_STENCIL_STAR, igmax, jgmax, kgmax, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, dof, stw, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, SolScal, ierr))
  PetscCallA(DMSetFromOptions(SolScal, ierr))
  PetscCallA(DMSetUp(SolScal, ierr))
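  ! (Thanks to DMSetFromOptions, run-time options such as -da_grid_x can
  ! override the grid sizes hardcoded above.)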

  ! Set global coordinates and get a local work vector
  PetscCallA(DMDASetUniformCoordinates(SolScal, xgmin, xgmax, ygmin, ygmax, zgmin, zgmax, ierr))
  PetscCallA(DMCreateLocalVector(SolScal, Lvec, ierr))

  ! Get ib1,imax,ibn etc. of the local grid.
  ! Our convention is:
  ! the first local ghost cell is ib1
  ! the first local       cell is 1
  ! the last  local       cell is imax
  ! the last  local ghost cell is ibn.
  !
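  ! (With stw = 1 this means ib1 = 0 and ibn = imax+1, and likewise in the
  ! j and k directions.)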
  ! i,j,k must be in this call, but are not used
  PetscCallA(DMDAGetCorners(SolScal, i, j, k, imax, jmax, kmax, ierr))
  ib1 = 1 - stw
  jb1 = 1 - stw
  kb1 = 1 - stw
  ibn = imax + stw
  jbn = jmax + stw
  kbn = kmax + stw
  allocate (f(dof, ib1:ibn, jb1:jbn, kb1:kbn))
  allocate (grid(ndim, ib1:ibn, jb1:jbn, kb1:kbn))

  ! Get xmin,xmax etc. for the local grid
  ! The "coords" local vector here is borrowed, so we shall not destroy it.
  PetscCallA(DMGetCoordinatesLocal(SolScal, coords, ierr))
  ! We need a separate DM for coordinate stuff since PETSc also supports
  ! unstructured grids
  PetscCallA(DMGetCoordinateDM(SolScal, CoordDM, ierr))
  ! petsc_to_local and local_to_petsc are convenience functions, see
  ! ex13f90aux.F90.
  call petsc_to_local(CoordDM, coords, array, grid, ndim, stw)
  xmin = grid(1, 1, 1, 1)
  ymin = grid(2, 1, 1, 1)
  zmin = grid(3, 1, 1, 1)
  xmax = grid(1, imax, jmax, kmax)
  ymax = grid(2, imax, jmax, kmax)
  zmax = grid(3, imax, jmax, kmax)
  call local_to_petsc(CoordDM, coords, array, grid, ndim, stw)

  ! Note that we never use xmin,xmax in this example, but the preceding way of
  ! getting the local xmin,xmax etc. from PETSc for a structured uniform grid
  ! is not documented in any other examples I could find.

  ! Set up the time-stepping
  t = 0.0
  maxstep = ceiling((tend - t)/dt)
  ! Write output every second (of simulation-time)
  nscreen = int(1.0/dt) + 1
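  ! (For dt = 1e-3 this gives nscreen = 1001, i.e. a line of output roughly
  ! once per unit of simulation time.)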

  ! Set initial condition
  PetscCallA(DMDAVecGetArray(SolScal, Lvec, array, ierr))
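  ! Note that the array returned by DMDAVecGetArray indexes the component
  ! (dof) dimension from 0, unlike our work array f, which starts at 1.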
  array(0, :, :, :) = 0.5
  array(1, :, :, :) = 0.5
  PetscCallA(DMDAVecRestoreArray(SolScal, Lvec, array, ierr))

  ! Initial set-up finished.
  ! Time loop
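  ! The maxstep computed above is overridden here so that the example (and
  ! the test harness) finishes quickly; note that with only 5 steps the
  ! periodic screen output below is never triggered.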
  maxstep = 5
  do itime = 1, maxstep

    ! Communicate such that everyone has the correct values in ghost cells
    PetscCallA(DMLocalToLocalBegin(SolScal, Lvec, INSERT_VALUES, Lvec, ierr))
    PetscCallA(DMLocalToLocalEnd(SolScal, Lvec, INSERT_VALUES, Lvec, ierr))
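    ! (DMLocalToLocalBegin/End updates the ghost cells of a local vector
    ! directly; no global vector is needed for this.)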

    ! Get the old solution from the PETSc data structures
    call petsc_to_local(SolScal, Lvec, array, f, dof, stw)

    ! Do the time step
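    ! (forw_euler, in ex13f90aux.F90, advances f in place by one forward-Euler
    ! step, f = f + dt*dfdt, with the RHS evaluated by dfdt_vdp at every
    ! grid point.)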
    call forw_euler(t, dt, ib1, ibn, jb1, jbn, kb1, kbn, imax, jmax, kmax, dof, f, dfdt_vdp)
    t = t + dt

    ! Write result to screen (if main process and it's time to)
    if (rank == 0 .and. mod(itime, nscreen) == 0) then
      write (*, 101) t, f(1, 1, 1, 1), f(2, 1, 1, 1)
    end if

    ! Put our new solution in the PETSc data structures
    call local_to_petsc(SolScal, Lvec, array, f, dof, stw)
  end do

  ! Deallocate and finalize
  ! Lvec came from DMCreateLocalVector, so destroy it with VecDestroy
  PetscCallA(VecDestroy(Lvec, ierr))
  PetscCallA(DMDestroy(SolScal, ierr))
  deallocate (f)
  deallocate (grid)
  PetscCallA(PetscFinalize(ierr))

  ! Format for writing output to screen
101 format(F5.1, 2F11.6)

end program main

!/*TEST
!
!   build:
!     requires: !complex
!     depends:  ex13f90aux.F90
!
!   test:
!     nsize: 4
!
!TEST*/