Actual source code: ex4.c

static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
     Page 18, Chemo-taxis Problems from Mathematical Biology

        rho_t = epsilon rho_xx - kappa (c_x rho)_x + mu |rho| (1 - rho) max(0, c - cstar) - beta rho
        c_t   = delta c_xx - lambda c - alpha rho c / (gamma + c)

     (the right-hand sides above are those assembled in IFunction() below)

     Further discussion on Page 134 and in 2d on Page 409
*/

/*

   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines       petscvec.h - vectors
     petscmat.h    - matrices                  petscis.h  - index sets
     petscksp.h    - Krylov subspace methods   petscpc.h  - preconditioners
     petscviewer.h - viewers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar rho, c;
} Field;
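/* The two unknowns stored at each grid point: the cell density rho and the chemoattractant concentration c */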

typedef struct {
  PetscScalar epsilon, delta, alpha, beta, gamma, kappa, lambda, mu, cstar;
  PetscBool   upwind;
} AppCtx;
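/* Model and discretization parameters; delta and upwind can be overridden at runtime with -delta and -upwind */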

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS, PetscReal, Vec, Vec, Vec, void *), InitialConditions(DM, Vec);

int main(int argc, char **argv)
{
  TS     ts; /* timestepping solver context */
  Vec    U;  /* solution vector */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  appctx.epsilon = 1.0e-3;
  appctx.delta   = 1.0;
  appctx.alpha   = 10.0;
  appctx.beta    = 4.0;
  appctx.gamma   = 1.0;
  appctx.kappa   = .75;
  appctx.lambda  = 1.0;
  appctx.mu      = 100.;
  appctx.cstar   = .2;
  appctx.upwind  = PETSC_TRUE;

  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-delta", &appctx.delta, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-upwind", &appctx.upwind, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
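  /* 8 grid points by default (add more with -da_refine), 2 degrees of freedom per point, stencil width 1 */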
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 8, 2, 1, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "rho"));
  PetscCall(DMDASetFieldName(da, 1, "c"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create the global solution vector from the DMDA
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSROSW));
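  /* TSROSW is a Rosenbrock-W method; the type can be changed at runtime with -ts_type */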
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, U));
  PetscCall(TSSetSolution(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetTimeStep(ts, .0001));
  PetscCall(TSSetMaxTime(ts, 1.0));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
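  /* STEPOVER lets the final step pass the max time instead of being shortened to land on it exactly */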
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&U));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit nonlinear function F(t,U,Udot).

   Input Parameters:
.  ts - the TS context
.  ftime - current time
.  U - input vector
.  Udot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
 */
PetscErrorCode IFunction(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr)
{
  AppCtx     *appctx = (AppCtx *)ptr;
  DM          da;
  PetscInt    i, Mx, xs, xm;
  PetscReal   hx, sx;
  PetscScalar rho, c, rhoxx, cxx, cx, rhox, kcxrhox;
  Field      *u, *f, *udot;
  Vec         localU;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
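  /* Mx is the global number of grid points in x */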

  hx = 1.0 / (PetscReal)(Mx - 1);
  sx = 1.0 / (hx * hx);
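  /* hx is the uniform grid spacing on [0,1]; sx = 1/hx^2 scales the second-difference stencils below */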

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));
  PetscCall(DMDAVecGetArrayWrite(da, F, &f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
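  /*
     At the domain endpoints the PDE residual is replaced by f = udot, i.e. udot = 0,
     which holds the boundary values fixed at their initial values; the interior loop
     range below is shrunk to skip those points.
  */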

  if (!xs) {
    f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
    f[0].c   = udot[0].c;   /* u[0].c   - 1.0; */
    xs++;
    xm--;
  }
  if (xs + xm == Mx) {
    f[Mx - 1].rho = udot[Mx - 1].rho; /* u[Mx-1].rho - 1.0; */
    f[Mx - 1].c   = udot[Mx - 1].c;   /* u[Mx-1].c   - 0.0;  */
    xm--;
  }

  /*
     Compute function over the locally owned part of the grid
  */
  for (i = xs; i < xs + xm; i++) {
    rho   = u[i].rho;
    rhoxx = (-2.0 * rho + u[i - 1].rho + u[i + 1].rho) * sx;
    c     = u[i].c;
    cxx   = (-2.0 * c + u[i - 1].c + u[i + 1].c) * sx;
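    /*
       Chemotaxis term kappa*(c_x rho)_x: expanded with centered differences when
       -upwind is false, otherwise a conservative flux form with rho taken from
       one side of each cell face.
    */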

    if (!appctx->upwind) {
      rhox    = .5 * (u[i + 1].rho - u[i - 1].rho) / hx;
      cx      = .5 * (u[i + 1].c - u[i - 1].c) / hx;
      kcxrhox = appctx->kappa * (cxx * rho + cx * rhox);
    } else {
      kcxrhox = appctx->kappa * ((u[i + 1].c - u[i].c) * u[i + 1].rho - (u[i].c - u[i - 1].c) * u[i].rho) * sx;
    }
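    /* Implicit residual f = udot - RHS(u), matching the PDE stated in the header comment */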

    f[i].rho = udot[i].rho - appctx->epsilon * rhoxx + kcxrhox - appctx->mu * PetscAbsScalar(rho) * (1.0 - rho) * PetscMax(0, PetscRealPart(c - appctx->cstar)) + appctx->beta * rho;
    f[i].c   = udot[i].c - appctx->delta * cxx + appctx->lambda * c + appctx->alpha * rho * c / (appctx->gamma + c);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));
  PetscCall(DMDAVecRestoreArrayWrite(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, xs, xm, Mx;
  Field    *u;
  PetscReal hx, x;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)(Mx - 1);

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayWrite(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
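  /* rho starts at 0 everywhere except 1 at the right endpoint; c = cos(pi x / 2) falls from 1 at x = 0 to 0 at x = 1 */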
  for (i = xs; i < xs + xm; i++) {
    x = i * hx;
    if (i < Mx - 1) u[i].rho = 0.0;
    else u[i].rho = 1.0;
    u[i].c = PetscCosReal(.5 * PETSC_PI * x);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayWrite(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
      requires: double

   test:
      suffix: 2
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time 1
      requires: x double
      output_file: output/ex4_1.out

TEST*/