/* adr_ex5adj.cxx */

/* Help text shown by the PETSc -help option */
static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

  3: /*
  4:    REQUIRES configuration of PETSc with option --download-adolc.

  6:    For documentation on ADOL-C, see
  7:      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf

  9:    REQUIRES configuration of PETSc with option --download-colpack

 11:    For documentation on ColPack, see
 12:      $PETSC_ARCH/externalpackages/git.colpack/README.md
 13: */
 14: /* ------------------------------------------------------------------------
 15:   See ../advection-diffusion-reaction/ex5 for a description of the problem
 16:   ------------------------------------------------------------------------- */

 18: /*
 19:   Runtime options:

 21:     Solver:
 22:       -forwardonly       - Run the forward simulation without adjoint.
 23:       -implicitform      - Provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used.
 24:       -aijpc             - Set the matrix used to compute the preconditioner to be aij (the Jacobian matrix can be of a different type such as ELL).

 26:     Jacobian generation:
 27:       -jacobian_by_hand  - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically.
 28:       -no_annotation     - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.)
 29:       -adolc_sparse      - Calculate Jacobian in compressed format, using ADOL-C sparse functionality.
 30:       -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option.
 31:  */
 32: /*
 33:   NOTE: If -adolc_sparse option is used, at least four processors should be used, so that no processor owns boundaries which are
 34:         identified by the periodic boundary conditions. The number of grid points in both x- and y-directions should be multiples
 35:         of 5, in order for the 5-point stencil to be cleanly parallelised.
 36: */

 38: #include <petscdmda.h>
 39: #include <petscts.h>
 40: #include "adolc-utils/drivers.cxx"
 41: #include <adolc/adolc.h>

 43: /* (Passive) field for the two variables */
 44: typedef struct {
 45:   PetscScalar u, v;
 46: } Field;

 48: /* Active field for the two variables */
 49: typedef struct {
 50:   adouble u, v;
 51: } AField;

 53: /* Application context */
 54: typedef struct {
 55:   PetscReal D1, D2, gamma, kappa;
 56:   AField  **u_a, **f_a;
 57:   PetscBool aijpc;
 58:   AdolcCtx *adctx; /* Automatic differentation support */
 59: } AppCtx;

 61: extern PetscErrorCode InitialConditions(DM da, Vec U);
 62: extern PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y);
 63: extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr);
 64: extern PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr);
 65: extern PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
 66: extern PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
 67: extern PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, PetscCtx ctx);
 68: extern PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, PetscCtx ctx);
 69: extern PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, PetscCtx ctx);
 70: extern PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, PetscCtx ctx);

/*
  Driver: sets up a 2d periodic DMDA for the two-component reaction-diffusion
  system, registers either RHS (explicit) or IFunction (implicit) residuals,
  traces the residual once with ADOL-C so Jacobians can be generated
  automatically (optionally in compressed/sparse form via ColPack colouring),
  runs the forward solve with TSSolve(), and - unless -forwardonly is given -
  follows it with TSAdjointSolve() for the sensitivities.
*/
int main(int argc, char **argv)
{
  TS             ts;
  Vec            x, r, xdot;
  DM             da;
  AppCtx         appctx;
  AdolcCtx      *adctx;
  Vec            lambda[1]; /* adjoint variable (one cost functional) */
  PetscBool      forwardonly = PETSC_FALSE, implicitform = PETSC_FALSE, byhand = PETSC_FALSE;
  PetscInt       gxm, gym, i, dofs = 2, ctrl[3] = {0, 0, 0}; /* ctrl: options array passed to ColPack's jac_pat */
  PetscScalar  **Seed = NULL, **Rec = NULL, *u_vec;
  unsigned int **JP = NULL; /* row-wise sparsity pattern filled by jac_pat */
  ISColoring     iscoloring;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscNew(&adctx));
  /* Parse the runtime options documented at the top of the file */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  appctx.aijpc = PETSC_FALSE, adctx->no_an = PETSC_FALSE, adctx->sparse = PETSC_FALSE, adctx->sparse_view = PETSC_FALSE;
  adctx->sparse_view_done = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-no_annotation", &adctx->no_an, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-jacobian_by_hand", &byhand, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse", &adctx->sparse, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse_view", &adctx->sparse_view, NULL));
  /* Physical parameters: diffusivities and reaction rates */
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.adctx = adctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));
  PetscCall(VecDuplicate(x, &xdot));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSCN));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  if (!implicitform) {
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionPassive, &appctx));
  } else {
    PetscCall(DMDATSSetIFunctionLocal(da, INSERT_VALUES, (DMDATSIFunctionLocalFn *)IFunctionLocalPassive, &appctx));
  }

  if (!adctx->no_an) {
    /* Problem dimensions for ADOL-C include the ghost region, since ghost
       points are marked as independent/dependent variables during tracing */
    PetscCall(DMDAGetGhostCorners(da, NULL, NULL, NULL, &gxm, &gym, NULL));
    adctx->m = dofs * gxm * gym;
    adctx->n = dofs * gxm * gym; /* Number of dependent and independent variables */

    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Trace function(s) just once
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    if (!implicitform) {
      PetscCall(RHSFunctionActive(ts, 1.0, x, r, &appctx));
    } else {
      PetscCall(IFunctionActive(ts, 1.0, x, xdot, r, &appctx));
    }

    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      In the case where ADOL-C generates the Jacobian in compressed format,
      seed and recovery matrices are required. Since the sparsity structure
      of the Jacobian does not change over the course of the time
      integration, we can save computational effort by only generating
      these objects once.
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    if (adctx->sparse) {
      /*
         Generate sparsity pattern

         One way to do this is to use the Jacobian pattern driver `jac_pat`
         provided by ColPack. Otherwise, it is the responsibility of the user
         to obtain the sparsity pattern.
      */
      PetscCall(PetscMalloc1(adctx->n, &u_vec));
      /* NOTE(review): u_vec is passed to jac_pat without initialization;
         presumably only the sparsity (not the values) is extracted from the
         tape at this basepoint - confirm against the ADOL-C sparse drivers */
      JP = (unsigned int **)malloc(adctx->m * sizeof(unsigned int *));
      jac_pat(1, adctx->m, adctx->n, u_vec, JP, ctrl);
      if (adctx->sparse_view) PetscCall(PrintSparsity(MPI_COMM_WORLD, adctx->m, JP));

      /*
        Extract a column colouring

        For problems using DMDA, colourings can be extracted directly, as
        follows. There are also ColPack drivers available. Otherwise, it is the
        responsibility of the user to obtain a suitable colouring.
      */
      PetscCall(DMCreateColoring(da, IS_COLORING_LOCAL, &iscoloring));
      PetscCall(ISColoringGetIS(iscoloring, PETSC_USE_POINTER, &adctx->p, NULL));

      /* Generate seed matrix to propagate through the forward mode of AD */
      PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
      PetscCall(GenerateSeedMatrix(iscoloring, Seed));
      PetscCall(ISColoringDestroy(&iscoloring));

      /*
        Generate recovery matrix, which is used to recover the Jacobian from
        compressed format */
      PetscCall(AdolcMalloc2(adctx->m, adctx->p, &Rec));
      PetscCall(GetRecoveryMatrix(Seed, JP, adctx->m, adctx->p, Rec));

      /* Store results and free workspace */
      adctx->Rec = Rec;
      for (i = 0; i < adctx->m; i++) free(JP[i]);
      free(JP);
      PetscCall(PetscFree(u_vec));

    } else {
      /*
        In 'full' Jacobian mode, propagate an identity matrix through the
        forward mode of AD.
      */
      adctx->p = adctx->n;
      PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
      PetscCall(Identity(adctx->n, Seed));
    }
    adctx->Seed = Seed;
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set Jacobian
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (!implicitform) {
    if (!byhand) PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianAdolc, &appctx));
    else PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianByHand, &appctx));
  } else {
    if (appctx.aijpc) {
      /* Use a SELL Jacobian but an AIJ matrix to build the preconditioner */
      Mat A, B;

      PetscCall(DMSetMatType(da, MATSELL));
      PetscCall(DMCreateMatrix(da, &A));
      PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      if (!byhand) PetscCall(TSSetIJacobian(ts, A, B, IJacobianAdolc, &appctx));
      else PetscCall(TSSetIJacobian(ts, A, B, IJacobianByHand, &appctx));
      /* TS keeps its own references, so drop ours here */
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      if (!byhand) PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianAdolc, &appctx));
      else PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianByHand, &appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Have the TS save its trajectory so that TSAdjointSolve() may be used
    and set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (!forwardonly) {
    PetscCall(TSSetSaveTrajectory(ts));
    PetscCall(TSSetMaxTime(ts, 200.0));
    PetscCall(TSSetTimeStep(ts, 0.5));
  } else {
    PetscCall(TSSetMaxTime(ts, 2000.0));
    PetscCall(TSSetTimeStep(ts, 10));
  }
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /*   Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    PetscCall(TSAdjointSolve(ts));
    PetscCall(VecDestroy(&lambda[0]));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&xdot));
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  if (!adctx->no_an) {
    if (adctx->sparse) PetscCall(AdolcFree2(Rec));
    PetscCall(AdolcFree2(Seed));
  }
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFree(adctx));
  PetscCall(PetscFinalize());
  return 0;
}

285: PetscErrorCode InitialConditions(DM da, Vec U)
286: {
287:   PetscInt  i, j, xs, ys, xm, ym, Mx, My;
288:   Field   **u;
289:   PetscReal hx, hy, x, y;

291:   PetscFunctionBegin;
292:   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

294:   hx = 2.5 / (PetscReal)Mx;
295:   hy = 2.5 / (PetscReal)My;

297:   /*
298:      Get pointers to vector data
299:   */
300:   PetscCall(DMDAVecGetArray(da, U, &u));

302:   /*
303:      Get local grid boundaries
304:   */
305:   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

307:   /*
308:      Compute function over the locally owned part of the grid
309:   */
310:   for (j = ys; j < ys + ym; j++) {
311:     y = j * hy;
312:     for (i = xs; i < xs + xm; i++) {
313:       x = i * hx;
314:       if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
315:         u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
316:       else u[j][i].v = 0.0;

318:       u[j][i].u = 1.0 - 2.0 * u[j][i].v;
319:     }
320:   }

322:   /*
323:      Restore vectors
324:   */
325:   PetscCall(DMDAVecRestoreArray(da, U, &u));
326:   PetscFunctionReturn(PETSC_SUCCESS);
327: }

329: PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
330: {
331:   PetscInt i, j, Mx, My, xs, ys, xm, ym;
332:   Field  **l;

334:   PetscFunctionBegin;
335:   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
336:   /* locate the global i index for x and j index for y */
337:   i = (PetscInt)(x * (Mx - 1));
338:   j = (PetscInt)(y * (My - 1));
339:   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

341:   if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
342:     /* the i,j vertex is on this process */
343:     PetscCall(DMDAVecGetArray(da, lambda, &l));
344:     l[j][i].u = 1.0;
345:     l[j][i].v = 1.0;
346:     PetscCall(DMDAVecRestoreArray(da, lambda, &l));
347:   }
348:   PetscFunctionReturn(PETSC_SUCCESS);
349: }

351: PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr)
352: {
353:   AppCtx     *appctx = (AppCtx *)ptr;
354:   PetscInt    i, j, xs, ys, xm, ym;
355:   PetscReal   hx, hy, sx, sy;
356:   PetscScalar uc, uxx, uyy, vc, vxx, vyy;

358:   PetscFunctionBegin;
359:   hx = 2.50 / (PetscReal)info->mx;
360:   sx = 1.0 / (hx * hx);
361:   hy = 2.50 / (PetscReal)info->my;
362:   sy = 1.0 / (hy * hy);

364:   /* Get local grid boundaries */
365:   xs = info->xs;
366:   xm = info->xm;
367:   ys = info->ys;
368:   ym = info->ym;

370:   /* Compute function over the locally owned part of the grid */
371:   for (j = ys; j < ys + ym; j++) {
372:     for (i = xs; i < xs + xm; i++) {
373:       uc        = u[j][i].u;
374:       uxx       = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
375:       uyy       = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
376:       vc        = u[j][i].v;
377:       vxx       = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
378:       vyy       = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
379:       f[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
380:       f[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
381:     }
382:   }
383:   PetscCall(PetscLogFlops(16.0 * xm * ym));
384:   PetscFunctionReturn(PETSC_SUCCESS);
385: }

/*
  Active (ADOL-C traced) version of the implicit residual
  F(t, U, Udot) = Udot - D * Laplacian(U) + reaction terms.

  This routine is called exactly once (from main) to record the residual
  evaluation on ADOL-C tape 1; the tape is later replayed by IJacobianAdolc
  to assemble Jacobians. All ghost points are marked as independent and
  dependent variables so that the local Jacobian indexing is uniform.
*/
PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr)
{
  AppCtx       *appctx = (AppCtx *)ptr;
  DM            da;
  DMDALocalInfo info;
  Field       **u, **f, **udot;
  Vec           localU;
  PetscInt      i, j, xs, ys, xm, ym, gxs, gys, gxm, gym;
  PetscReal     hx, hy, sx, sy;
  adouble       uc, uxx, uyy, vc, vxx, vyy; /* active intermediates */
  AField      **f_a, *f_c, **u_a, *u_c;     /* active counterparts of f and u */
  PetscScalar   dummy;                      /* sink for dependents at ghost points */

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetLocalVector(da, &localU));
  /* Grid spacings on the 2.5 x 2.5 domain and the 1/h^2 stencil scalings */
  hx  = 2.50 / (PetscReal)info.mx;
  sx  = 1.0 / (hx * hx);
  hy  = 2.50 / (PetscReal)info.my;
  sy  = 1.0 / (hy * hy);
  xs  = info.xs;
  xm  = info.xm;
  gxs = info.gxs;
  gxm = info.gxm;
  ys  = info.ys;
  ym  = info.ym;
  gys = info.gys;
  gym = info.gym;

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, F, &f));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm * info.gym];
  f_c = new AField[info.gxm * info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField *[info.gym];
  f_a = new AField *[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));

  trace_on(1); /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy         = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy         = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      if ((i < xs) || (i >= xs + xm) || (j < ys) || (j >= ys + ym)) {
        /* Ghost point: its value is owned elsewhere, so route it to a dummy */
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off(); /* End of active section */
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));

  PetscCall(DMRestoreLocalVector(da, &localU));

  /* Destroy AFields appropriately */
  /* NOTE(review): presumably GiveGhostPoints offset the row-pointer arrays by
     -gys; shifting by +gys restores the addresses returned by new[] so that
     delete[] is applied to the original allocations - confirm against
     adolc-utils */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;
  PetscFunctionReturn(PETSC_SUCCESS);
}

517: PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr)
518: {
519:   AppCtx     *appctx = (AppCtx *)ptr;
520:   DM          da;
521:   PetscInt    i, j, xs, ys, xm, ym, Mx, My;
522:   PetscReal   hx, hy, sx, sy;
523:   PetscScalar uc, uxx, uyy, vc, vxx, vyy;
524:   Field     **u, **f;
525:   Vec         localU, localF;

527:   PetscFunctionBegin;
528:   PetscCall(TSGetDM(ts, &da));
529:   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
530:   hx = 2.50 / (PetscReal)Mx;
531:   sx = 1.0 / (hx * hx);
532:   hy = 2.50 / (PetscReal)My;
533:   sy = 1.0 / (hy * hy);
534:   PetscCall(DMGetLocalVector(da, &localU));
535:   PetscCall(DMGetLocalVector(da, &localF));

537:   /*
538:      Scatter ghost points to local vector,using the 2-step process
539:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
540:      By placing code between these two statements, computations can be
541:      done while messages are in transition.
542:   */
543:   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
544:   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
545:   PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
546:   PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
547:   PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));

549:   /*
550:      Get pointers to vector data
551:   */
552:   PetscCall(DMDAVecGetArrayRead(da, localU, &u));
553:   PetscCall(DMDAVecGetArray(da, localF, &f));

555:   /*
556:      Get local grid boundaries
557:   */
558:   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

560:   /*
561:      Compute function over the locally owned part of the grid
562:   */
563:   for (j = ys; j < ys + ym; j++) {
564:     for (i = xs; i < xs + xm; i++) {
565:       uc        = u[j][i].u;
566:       uxx       = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
567:       uyy       = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
568:       vc        = u[j][i].v;
569:       vxx       = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
570:       vyy       = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
571:       f[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
572:       f[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
573:     }
574:   }

576:   /*
577:      Gather global vector, using the 2-step process
578:         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().

580:      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
581:                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
582:   */
583:   PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
584:   PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));

586:   /*
587:      Restore vectors
588:   */
589:   PetscCall(DMDAVecRestoreArray(da, localF, &f));
590:   PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
591:   PetscCall(DMRestoreLocalVector(da, &localF));
592:   PetscCall(DMRestoreLocalVector(da, &localU));
593:   PetscCall(PetscLogFlops(16.0 * xm * ym));
594:   PetscFunctionReturn(PETSC_SUCCESS);
595: }

/*
  Active (ADOL-C traced) version of the explicit RHS
  F(U) = D * Laplacian(U) + reaction terms.

  Called exactly once (from main) to record the evaluation on ADOL-C tape 1,
  which is later replayed by RHSJacobianAdolc to assemble Jacobians. Ghost
  points are marked as both independent and dependent variables to keep the
  local Jacobian indexing uniform.
*/
PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr)
{
  AppCtx   *appctx = (AppCtx *)ptr;
  DM        da;
  PetscInt  i, j, xs, ys, xm, ym, gxs, gys, gxm, gym, Mx, My;
  PetscReal hx, hy, sx, sy;
  AField  **f_a, *f_c, **u_a, *u_c; /* active counterparts of f and u */
  adouble   uc, uxx, uyy, vc, vxx, vyy;
  Field   **u, **f;
  Vec       localU, localF;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* Grid spacings on the 2.5 x 2.5 domain and the 1/h^2 stencil scalings */
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMGetLocalVector(da, &localF));

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
  PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
  PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
  PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, localF, &f));

  /*
     Get local and ghosted grid boundaries
  */
  PetscCall(DMDAGetGhostCorners(da, &gxs, &gys, NULL, &gxm, &gym, NULL));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[gxm * gym];
  f_c = new AField[gxm * gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField *[gym];
  f_a = new AField *[gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));

  /*
     Compute function over the locally owned part of the grid
  */
  trace_on(1); // ----------------------------------------------- Start of active section

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /*
    Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy         = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy         = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
      f_a[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
    }
  }
  /*
    Mark dependence

    NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
          during Jacobian assembly.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      f_a[j][i].u >>= f[j][i].u;
      f_a[j][i].v >>= f[j][i].v;
    }
  }
  trace_off(); // ----------------------------------------------- End of active section

  /* Test zeroth order scalar evaluation in ADOL-C gives the same result */
  //  if (appctx->adctx->zos) {
  //    PetscCall(TestZOS2d(da,f,u,appctx)); // FIXME: Why does this give nonzero?
  //  }

  /*
     Gather global vector, using the 2-step process
        DMLocalToGlobalBegin(),DMLocalToGlobalEnd().

     NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
               DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  */
  PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
  PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, localF, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localF));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Destroy AFields appropriately */
  /* NOTE(review): presumably GiveGhostPoints offset the row-pointer arrays by
     -gys; shifting by +gys restores the addresses returned by new[] so that
     delete[] is applied to the original allocations - confirm against
     adolc-utils */
  f_a += gys;
  u_a += gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;
  PetscFunctionReturn(PETSC_SUCCESS);
}

741: PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, PetscCtx ctx)
742: {
743:   AppCtx            *appctx = (AppCtx *)ctx;
744:   DM                 da;
745:   const PetscScalar *u_vec;
746:   Vec                localU;

748:   PetscFunctionBegin;
749:   PetscCall(TSGetDM(ts, &da));
750:   PetscCall(DMGetLocalVector(da, &localU));

752:   /*
753:      Scatter ghost points to local vector,using the 2-step process
754:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
755:      By placing code between these two statements, computations can be
756:      done while messages are in transition.
757:   */
758:   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
759:   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

761:   /* Get pointers to vector data */
762:   PetscCall(VecGetArrayRead(localU, &u_vec));

764:   /*
765:     Compute Jacobian
766:   */
767:   PetscCall(PetscAdolcComputeIJacobianLocalIDMass(1, A, u_vec, a, appctx->adctx));

769:   /*
770:      Restore vectors
771:   */
772:   PetscCall(VecRestoreArrayRead(localU, &u_vec));
773:   PetscCall(DMRestoreLocalVector(da, &localU));
774:   PetscFunctionReturn(PETSC_SUCCESS);
775: }

777: PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, PetscCtx ctx)
778: {
779:   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
780:   DM          da;
781:   PetscInt    i, j, Mx, My, xs, ys, xm, ym;
782:   PetscReal   hx, hy, sx, sy;
783:   PetscScalar uc, vc;
784:   Field     **u;
785:   Vec         localU;
786:   MatStencil  stencil[6], rowstencil;
787:   PetscScalar entries[6];

789:   PetscFunctionBegin;
790:   PetscCall(TSGetDM(ts, &da));
791:   PetscCall(DMGetLocalVector(da, &localU));
792:   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

794:   hx = 2.50 / (PetscReal)Mx;
795:   sx = 1.0 / (hx * hx);
796:   hy = 2.50 / (PetscReal)My;
797:   sy = 1.0 / (hy * hy);

799:   /*
800:      Scatter ghost points to local vector,using the 2-step process
801:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
802:      By placing code between these two statements, computations can be
803:      done while messages are in transition.
804:   */
805:   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
806:   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

808:   /*
809:      Get pointers to vector data
810:   */
811:   PetscCall(DMDAVecGetArrayRead(da, localU, &u));

813:   /*
814:      Get local grid boundaries
815:   */
816:   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

818:   stencil[0].k = 0;
819:   stencil[1].k = 0;
820:   stencil[2].k = 0;
821:   stencil[3].k = 0;
822:   stencil[4].k = 0;
823:   stencil[5].k = 0;
824:   rowstencil.k = 0;
825:   /*
826:      Compute function over the locally owned part of the grid
827:   */
828:   for (j = ys; j < ys + ym; j++) {
829:     stencil[0].j = j - 1;
830:     stencil[1].j = j + 1;
831:     stencil[2].j = j;
832:     stencil[3].j = j;
833:     stencil[4].j = j;
834:     stencil[5].j = j;
835:     rowstencil.k = 0;
836:     rowstencil.j = j;
837:     for (i = xs; i < xs + xm; i++) {
838:       uc = u[j][i].u;
839:       vc = u[j][i].v;

841:       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
842:       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

844:       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
845:       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
846:        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

848:       stencil[0].i = i;
849:       stencil[0].c = 0;
850:       entries[0]   = -appctx->D1 * sy;
851:       stencil[1].i = i;
852:       stencil[1].c = 0;
853:       entries[1]   = -appctx->D1 * sy;
854:       stencil[2].i = i - 1;
855:       stencil[2].c = 0;
856:       entries[2]   = -appctx->D1 * sx;
857:       stencil[3].i = i + 1;
858:       stencil[3].c = 0;
859:       entries[3]   = -appctx->D1 * sx;
860:       stencil[4].i = i;
861:       stencil[4].c = 0;
862:       entries[4]   = 2.0 * appctx->D1 * (sx + sy) + vc * vc + appctx->gamma + a;
863:       stencil[5].i = i;
864:       stencil[5].c = 1;
865:       entries[5]   = 2.0 * uc * vc;
866:       rowstencil.i = i;
867:       rowstencil.c = 0;

869:       PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
870:       if (appctx->aijpc) PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
871:       stencil[0].c = 1;
872:       entries[0]   = -appctx->D2 * sy;
873:       stencil[1].c = 1;
874:       entries[1]   = -appctx->D2 * sy;
875:       stencil[2].c = 1;
876:       entries[2]   = -appctx->D2 * sx;
877:       stencil[3].c = 1;
878:       entries[3]   = -appctx->D2 * sx;
879:       stencil[4].c = 1;
880:       entries[4]   = 2.0 * appctx->D2 * (sx + sy) - 2.0 * uc * vc + appctx->gamma + appctx->kappa + a;
881:       stencil[5].c = 0;
882:       entries[5]   = -vc * vc;

884:       PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
885:       if (appctx->aijpc) PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
886:       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
887:     }
888:   }

890:   /*
891:      Restore vectors
892:   */
893:   PetscCall(PetscLogFlops(19.0 * xm * ym));
894:   PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
895:   PetscCall(DMRestoreLocalVector(da, &localU));
896:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
897:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
898:   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
899:   if (appctx->aijpc) {
900:     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
901:     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
902:     PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
903:   }
904:   PetscFunctionReturn(PETSC_SUCCESS);
905: }

/*
  Hand-coded Jacobian of the RHS for the two-species reaction-diffusion
  system: assembles J = dRHS/dU on a 2D DMDA using a 5-point stencil with
  two degrees of freedom (u,v) per node.  Used with -jacobian_by_hand in
  the explicit (RHSFunction) formulation.

  Input Parameters:
    ts  - the TS context
    t   - current time (unused; the system is autonomous)
    U   - global state vector
    ctx - user-defined application context (AppCtx)

  Output Parameters:
    A - Jacobian matrix
    B - matrix used to build the preconditioner

  NOTE(review): B is assembled when -aijpc is set, but no values are ever
  inserted into it in this routine -- confirm this is intentional.
*/
PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, PetscCtx ctx)
{
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, vc;
  Field     **u;
  Vec         localU;
  MatStencil  stencil[6], rowstencil;
  PetscScalar entries[6];

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  /* Grid spacing on the 2.5 x 2.5 domain and inverse-square factors */
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* 2D problem: plane index k is always zero */
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    /* Stencil layout: [0] south, [1] north, [2] west, [3] east,
       [4] centre (same dof), [5] centre (other dof, reaction coupling) */
    stencil[0].j = j - 1;
    stencil[1].j = j + 1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; /* (redundant: k already set above) */
    rowstencil.j = j;
    for (i = xs; i < xs + xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

      vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

      /* Row for the u component (c = 0): derivatives of f_u above */
      stencil[0].i = i;
      stencil[0].c = 0;
      entries[0]   = appctx->D1 * sy;
      stencil[1].i = i;
      stencil[1].c = 0;
      entries[1]   = appctx->D1 * sy;
      stencil[2].i = i - 1;
      stencil[2].c = 0;
      entries[2]   = appctx->D1 * sx;
      stencil[3].i = i + 1;
      stencil[3].c = 0;
      entries[3]   = appctx->D1 * sx;
      stencil[4].i = i;
      stencil[4].c = 0;
      entries[4]   = -2.0 * appctx->D1 * (sx + sy) - vc * vc - appctx->gamma; /* d(f_u)/d(uc) */
      stencil[5].i = i;
      stencil[5].c = 1;
      entries[5]   = -2.0 * uc * vc; /* d(f_u)/d(vc) */
      rowstencil.i = i;
      rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));

      /* Row for the v component (c = 1): derivatives of f_v below */
      stencil[0].c = 1;
      entries[0]   = appctx->D2 * sy;
      stencil[1].c = 1;
      entries[1]   = appctx->D2 * sy;
      stencil[2].c = 1;
      entries[2]   = appctx->D2 * sx;
      stencil[3].c = 1;
      entries[3]   = appctx->D2 * sx;
      stencil[4].c = 1;
      entries[4]   = -2.0 * appctx->D2 * (sx + sy) + 2.0 * uc * vc - appctx->gamma - appctx->kappa; /* d(f_v)/d(vc) */
      stencil[5].c = 0;
      entries[5]   = vc * vc; /* d(f_v)/d(uc) */
      rowstencil.c = 1;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19.0 * xm * ym));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  if (appctx->aijpc) {
    PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

1038: PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, PetscCtx ctx)
1039: {
1040:   AppCtx      *appctx = (AppCtx *)ctx;
1041:   DM           da;
1042:   PetscScalar *u_vec;
1043:   Vec          localU;

1045:   PetscFunctionBegin;
1046:   PetscCall(TSGetDM(ts, &da));
1047:   PetscCall(DMGetLocalVector(da, &localU));

1049:   /*
1050:      Scatter ghost points to local vector,using the 2-step process
1051:         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
1052:      By placing code between these two statements, computations can be
1053:      done while messages are in transition.
1054:   */
1055:   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
1056:   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

1058:   /* Get pointers to vector data */
1059:   PetscCall(VecGetArray(localU, &u_vec));

1061:   /*
1062:     Compute Jacobian
1063:   */
1064:   PetscCall(PetscAdolcComputeRHSJacobianLocal(1, A, u_vec, appctx->adctx));

1066:   /*
1067:      Restore vectors
1068:   */
1069:   PetscCall(VecRestoreArray(localU, &u_vec));
1070:   PetscCall(DMRestoreLocalVector(da, &localU));
1071:   PetscFunctionReturn(PETSC_SUCCESS);
1072: }

1074: /*TEST

1076:   build:
1077:     requires: double !complex adolc colpack

1079:   test:
1080:     suffix: 1
1081:     nsize: 1
1082:     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
1083:     output_file: output/adr_ex5adj_1.out

1085:   test:
1086:     suffix: 2
1087:     nsize: 1
1088:     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform
1089:     output_file: output/adr_ex5adj_2.out

1091:   test:
1092:     suffix: 3
1093:     nsize: 4
1094:     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
1095:     output_file: output/adr_ex5adj_3.out

1097:   test:
1098:     suffix: 4
1099:     nsize: 4
1100:     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform
1101:     output_file: output/adr_ex5adj_4.out

1103:   test:
1104:     suffix: 5
1105:     nsize: 4
1106:     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse
1107:     output_file: output/adr_ex5adj_5.out

1109:   test:
1110:     suffix: 6
1111:     nsize: 4
1112:     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform
1113:     output_file: output/adr_ex5adj_6.out

1115: TEST*/