Actual source code: ex5.c

static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";

/*F
     This example is taken from the book Numerical Solution of Time-Dependent Advection-Diffusion-Reaction Equations by
      W. Hundsdorfer and J.G. Verwer, page 21, Pattern Formation with Reaction-Diffusion Equations:
\begin{eqnarray*}
        u_t = D_1 (u_{xx} + u_{yy}) - u v^2 + \gamma (1 - u) \\
        v_t = D_2 (v_{xx} + v_{yy}) + u v^2 - (\gamma + \kappa) v
\end{eqnarray*}
    Unlike in the book, this example uses periodic boundary conditions instead of Neumann
    (since they are easier to implement with finite differences).
F*/

/*
      Helpful runtime monitoring options:
           -ts_monitor_draw_solution
           -draw_save -draw_save_movie

      Helpful runtime linear solver options:
           -pc_type mg -pc_mg_galerkin pmat -da_refine 1 -snes_monitor -ksp_monitor -ts_view  (note that these Jacobians are so well-conditioned that multigrid may not be the best solver)

      Point your browser to localhost:8080 to monitor the simulation:
           ./ex5  -ts_view_pre saws  -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .

*/

/*

   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS numerical (ODE) integrators.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h  - vectors
     petscmat.h    - matrices                 petscis.h   - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h   - preconditioners
     petscviewer.h - viewers                  petscsnes.h - nonlinear solvers
*/
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>
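
/*
   The Field struct, the AppCtx application context, and the RHSFunction()
   and RHSJacobian() callbacks come from reaction_diffusion.h, which is not
   shown here. Based on how they are used below, the header presumably
   declares something like the following (a hypothetical sketch, not the
   actual header):

     typedef struct {
       PetscScalar u, v;        // the two species at each grid point
     } Field;

     typedef struct {
       PetscReal D1, D2;        // diffusion coefficients
       PetscReal gamma, kappa;  // reaction rates
       PetscBool aijpc;         // whether to use an AIJ preconditioner matrix
     } AppCtx;

     PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
     PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
*/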

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
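
  /* The grid covers the domain [0, 2.5] x [0, 2.5]; with periodic boundary
     conditions the spacing divides by Mx rather than Mx - 1, since x = 0 and
     x = 2.5 represent the same point */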

  hx = 2.5 / (PetscReal)(Mx);
  hy = 2.5 / (PetscReal)(My);

  /*
     Get pointers to actual vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
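  /* v starts as a smooth bump confined to the square [1, 1.5] x [1, 1.5],
     and u is chosen so that u + 2 v = 1 everywhere */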
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore access to vector
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}
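
/*
   RHSFunction() and RHSJacobian() are implemented in reaction_diffusion.c
   (see the TEST block at the bottom of this file). For orientation, the
   following is a rough sketch of what an RHS evaluation for this model
   looks like; it is a hypothetical illustration, not the actual
   implementation:

     PetscErrorCode RHSFunction(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr)
     {
       AppCtx     *appctx = (AppCtx *)ptr;
       DM          da;
       PetscInt    i, j, xs, ys, xm, ym, Mx, My;
       PetscReal   hx, hy, sx, sy;
       PetscScalar u, v, uxx, uyy, vxx, vyy;
       Field     **uarr, **farr;
       Vec         localU;

       PetscFunctionBegin;
       PetscCall(TSGetDM(ts, &da));
       PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
       hx = 2.5 / (PetscReal)(Mx);
       sx = 1.0 / (hx * hx);
       hy = 2.5 / (PetscReal)(My);
       sy = 1.0 / (hy * hy);

       // Scatter the global vector to a local vector to obtain the ghost
       // points needed by the periodic 5-point stencil
       PetscCall(DMGetLocalVector(da, &localU));
       PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
       PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
       PetscCall(DMDAVecGetArrayRead(da, localU, &uarr));
       PetscCall(DMDAVecGetArray(da, F, &farr));
       PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

       for (j = ys; j < ys + ym; j++) {
         for (i = xs; i < xs + xm; i++) {
           u   = uarr[j][i].u;
           v   = uarr[j][i].v;
           uxx = (-2.0 * u + uarr[j][i - 1].u + uarr[j][i + 1].u) * sx;
           uyy = (-2.0 * u + uarr[j - 1][i].u + uarr[j + 1][i].u) * sy;
           vxx = (-2.0 * v + uarr[j][i - 1].v + uarr[j][i + 1].v) * sx;
           vyy = (-2.0 * v + uarr[j - 1][i].v + uarr[j + 1][i].v) * sy;
           farr[j][i].u = appctx->D1 * (uxx + uyy) - u * v * v + appctx->gamma * (1.0 - u);
           farr[j][i].v = appctx->D2 * (vxx + vyy) + u * v * v - (appctx->gamma + appctx->kappa) * v;
         }
       }

       PetscCall(DMDAVecRestoreArrayRead(da, localU, &uarr));
       PetscCall(DMDAVecRestoreArray(da, F, &farr));
       PetscCall(DMRestoreLocalVector(da, &localU));
       PetscFunctionReturn(PETSC_SUCCESS);
     }
*/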

int main(int argc, char **argv)
{
  TS     ts; /* ODE integrator */
  Vec    x;  /* solution */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
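  /* 65 x 65 periodic grid, 2 degrees of freedom (u and v) per grid point,
     star stencil of width 1 for the 5-point Laplacian */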
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create global vector from DMDA; this will be used to store the solution
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSARKIMEX));
  PetscCall(TSARKIMEXSetFullyImplicit(ts, PETSC_TRUE));
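  /* With fully implicit ARKIMEX the entire right-hand side, both the stiff
     diffusion and the reaction terms, is treated implicitly rather than
     being split into explicit and implicit parts */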
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 2000.0));
  PetscCall(TSSetTimeStep(ts, .0001));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
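  /* TS_EXACTFINALTIME_STEPOVER allows the last step to step over the final
     time instead of being shortened to match it exactly */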
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     depends: reaction_diffusion.c

   test:
      args: -ts_view -ts_monitor -ts_max_time 500
      requires: double
      timeoutfactor: 3

   test:
      suffix: 2
      args: -ts_view -ts_monitor -ts_max_time 500 -ts_monitor_draw_solution
      requires: x double
      output_file: output/ex5_1.out
      timeoutfactor: 3

TEST*/