Actual source code: ex13f90.F90
#include <petsc/finclude/petscdmda.h>
program main
!
! This example shows how DMDA is used to solve a PDE on a decomposed
! domain. The equation we solve here is not actually a PDE, but a toy
! problem: van der Pol's two-variable ODE, duplicated at every point of a
! 3D grid:
! dx/dt = y
! dy/dt = mu(1-x**2)y - x
!
! So we solve the same equation at all grid points, with no spatial
! dependencies. Still, we tell PETSc to communicate (stencil width > 0) so
! that there is communication between the different parts of the domain.
!
! The example is structured so that one can replace the RHS function and
! the forw_euler routine with a suitable RHS and a suitable time-integration
! scheme, with little or no modification to the DMDA parts. In particular,
! the "inner" parts of the RHS and time-integration do not "know about" the
! decomposed domain.
!
!     See:     http://dx.doi.org/10.6084/m9.figshare.1368581
!
!     Contributed by Aasmund Ervik (asmunder at pvv.org)
!
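!
! For reference, the van der Pol right-hand side at a single grid point
! (i,j,k) amounts to (a sketch; the actual implementation is dfdt_vdp in
! ex13f90auxmodule, with mu a scalar parameter):
!   dfdt(1,i,j,k) = f(2,i,j,k)
!   dfdt(2,i,j,k) = mu*(1.0 - f(1,i,j,k)**2)*f(2,i,j,k) - f(1,i,j,k)
!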
  use ex13f90auxmodule

  use petscdmda

  implicit none

  PetscErrorCode ierr
  PetscMPIInt rank, size
  MPI_Comm comm
  Vec Lvec, coords
  DM SolScal, CoordDM
  DMBoundaryType b_x, b_y, b_z
  PetscReal, pointer :: array(:, :, :, :)
  PetscReal :: t, tend, dt, xmin, xmax, ymin, ymax, zmin, zmax, xgmin, xgmax, ygmin, ygmax, zgmin, zgmax
  PetscReal, allocatable :: f(:, :, :, :), grid(:, :, :, :)
  PetscInt :: i, j, k, igmax, jgmax, kgmax, ib1, ibn, jb1, jbn, kb1, kbn, imax, jmax, kmax, itime, maxstep, nscreen, dof, stw, ndim

  ! Fire up PETSc:
  PetscCallA(PetscInitialize(ierr))

  comm = PETSC_COMM_WORLD
  PetscCallMPIA(MPI_Comm_rank(comm, rank, ierr))
  PetscCallMPIA(MPI_Comm_size(comm, size, ierr))
  if (rank == 0) then
    write (*, *) 'Hi! We are solving van der Pol using ', size, ' processes.'
    write (*, *) ' '
    write (*, *) '  t     x1         x2'
  end if

  ! Set up the global grid
  igmax = 50
  jgmax = 50
  kgmax = 50
  xgmin = 0.0
  ygmin = 0.0
  zgmin = 0.0
  xgmax = 1.0
  ygmax = 1.0
  zgmax = 1.0
  stw = 1 ! stencil width
  dof = 2 ! number of variables in this DA
  ndim = 3 ! 3D code

  ! Get the BCs and create the DMDA
  call get_boundary_cond(b_x, b_y, b_z)
  PetscCallA(DMDACreate3d(comm, b_x, b_y, b_z, DMDA_STENCIL_STAR, igmax, jgmax, kgmax, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, dof, stw, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, SolScal, ierr))
  PetscCallA(DMSetFromOptions(SolScal, ierr))
  PetscCallA(DMSetUp(SolScal, ierr))
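
  ! Since DMSetFromOptions is called, the grid size and decomposition chosen
  ! above can be overridden at run time with the usual DMDA options, e.g.
  !   -da_grid_x 100 -da_grid_y 100 -da_grid_z 100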

  ! Set global coordinates and create a local work vector
  PetscCallA(DMDASetUniformCoordinates(SolScal, xgmin, xgmax, ygmin, ygmax, zgmin, zgmax, ierr))
  PetscCallA(DMCreateLocalVector(SolScal, Lvec, ierr))

  ! Get ib1,imax,ibn etc. of the local grid.
  ! Our convention is:
  ! the first local ghost cell is ib1
  ! the first local       cell is 1
  ! the last  local       cell is imax
  ! the last  local ghost cell is ibn.
  !
  ! i,j,k are required arguments in this call, but are not used afterwards
  PetscCallA(DMDAGetCorners(SolScal, i, j, k, imax, jmax, kmax, ierr))
  ib1 = 1 - stw
  jb1 = 1 - stw
  kb1 = 1 - stw
  ibn = imax + stw
  jbn = jmax + stw
  kbn = kmax + stw
  allocate (f(dof, ib1:ibn, jb1:jbn, kb1:kbn))
  allocate (grid(ndim, ib1:ibn, jb1:jbn, kb1:kbn))
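
  ! For example, with stw = 1 and a local block of 25 cells in each
  ! direction (imax = jmax = kmax = 25), f is allocated as
  ! f(2, 0:26, 0:26, 0:26): the interior cells 1..25 plus one layer of
  ! ghost cells on each side.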

  ! Get xmin,xmax etc. for the local grid
  ! The "coords" local vector here is borrowed, so we shall not destroy it.
  PetscCallA(DMGetCoordinatesLocal(SolScal, coords, ierr))
  ! We need a separate DM for the coordinates, since PETSc also supports
  ! unstructured grids.
  PetscCallA(DMGetCoordinateDM(SolScal, CoordDM, ierr))
  ! petsc_to_local and local_to_petsc are convenience functions, see
  ! ex13f90auxmodule.F90.
  call petsc_to_local(CoordDM, coords, array, grid, ndim, stw)
  xmin = grid(1, 1, 1, 1)
  ymin = grid(2, 1, 1, 1)
  zmin = grid(3, 1, 1, 1)
  xmax = grid(1, imax, jmax, kmax)
  ymax = grid(2, imax, jmax, kmax)
  zmax = grid(3, imax, jmax, kmax)
  call local_to_petsc(CoordDM, coords, array, grid, ndim, stw)
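
  ! Roughly, petsc_to_local wraps DMDAVecGetArray and copies the values into
  ! our 1-based local array, and local_to_petsc copies back and restores the
  ! PETSc array; see ex13f90auxmodule.F90 for the actual implementation.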

  ! Note that we never use xmin,xmax etc. in this example; the preceding is
  ! included because this way of getting the local grid extents from PETSc
  ! for a structured uniform grid is not documented in any other example I
  ! could find.

  ! Set up the time-stepping
  t = 0.0
  tend = 100.0
  dt = 1e-3
  maxstep = ceiling((tend - t)/dt)
  ! Write output approximately every second (of simulation time)
  nscreen = int(1.0/dt) + 1
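
  ! With dt = 1e-3 this gives nscreen = 1001, so output appears once every
  ! 1001 steps, i.e. roughly once per unit of simulation time.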

  ! Set initial condition
  PetscCallA(DMDAVecGetArray(SolScal, Lvec, array, ierr))
  array(0, :, :, :) = 0.5
  array(1, :, :, :) = 0.5
  PetscCallA(DMDAVecRestoreArray(SolScal, Lvec, array, ierr))
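
  ! Note: the array returned by DMDAVecGetArray is indexed from 0 in the
  ! degree-of-freedom (first) dimension, so components 0 and 1 above are the
  ! x and y variables of the van der Pol system.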

  ! Initial set-up finished.
  ! Time loop
  maxstep = 5 ! override the value computed above, so the example finishes quickly
  do itime = 1, maxstep

    ! Communicate such that everyone has the correct values in ghost cells
    PetscCallA(DMLocalToLocalBegin(SolScal, Lvec, INSERT_VALUES, Lvec, ierr))
    PetscCallA(DMLocalToLocalEnd(SolScal, Lvec, INSERT_VALUES, Lvec, ierr))
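
    ! The Begin/End split allows overlapping this communication with local
    ! computation; here we simply call them back to back.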

    ! Get the old solution from the PETSc data structures
    call petsc_to_local(SolScal, Lvec, array, f, dof, stw)

    ! Do the time step
    call forw_euler(t, dt, ib1, ibn, jb1, jbn, kb1, kbn, imax, jmax, kmax, dof, f, dfdt_vdp)
    t = t + dt
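
    ! Recall that a forward Euler step is simply f = f + dt*dfdt, with dfdt
    ! evaluated at the old solution by the RHS routine passed in (here dfdt_vdp).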

    ! Write result to screen (if main process and it's time to)
    if (rank == 0 .and. mod(itime, nscreen) == 0) then
      write (*, 101) t, f(1, 1, 1, 1), f(2, 1, 1, 1)
    end if

    ! Put our new solution in the PETSc data structures
    call local_to_petsc(SolScal, Lvec, array, f, dof, stw)
  end do

  ! Deallocate and finalize
  ! Lvec was obtained with DMCreateLocalVector, so it is destroyed with
  ! VecDestroy (DMRestoreLocalVector is only for vectors from DMGetLocalVector).
  PetscCallA(VecDestroy(Lvec, ierr))
  PetscCallA(DMDestroy(SolScal, ierr))
  deallocate (f)
  deallocate (grid)
  PetscCallA(PetscFinalize(ierr))

  ! Format for writing output to screen
101 format(F5.1, 2F11.6)

end program main

!/*TEST
!
!   build:
!     requires: !complex
!     depends:  ex13f90aux.F90
!
!   test:
!     nsize: 4
!
!TEST*/
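!
! Example usage (a sketch, assuming a standard PETSc tutorials build):
!   make ex13f90
!   mpiexec -n 4 ./ex13f90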