Actual source code: ex5opt_ic.c

static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
   See ex5.c for details on the equation.
   This code demonstrates the TSAdjoint/TAO interface for solving an inverse initial value problem built on a system of
   time-dependent partial differential equations.
   In this problem, the initial value for the PDE is unknown, but the output (the final solution of the PDE) is known.
   We want to determine the initial value that produces the given output.
   We formulate the problem as a nonlinear optimization problem that minimizes the discrepancy between the simulated
   result and the given reference solution, compute the gradient of the objective function with the discrete adjoint
   solver, and solve the optimization problem with TAO.

   Runtime options:
     -forwardonly       - run only the forward simulation
     -implicitform      - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
     -perturbic <1,2,3> - select the perturbed initial condition used as the starting point for the optimization
 */
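
/*
   The optimization problem reads

     min_{U0}  J(U0) = || U(tf; U0) - U_obs ||^2,

   where U(tf; U0) is the final solution of the PDE started from the initial
   value U0 and U_obs is the reference solution stored in the binary file
   ex5opt.ob. The gradient is obtained with the discrete adjoint: the adjoint
   variable is initialized to dJ/dU(tf) = 2 (U(tf) - U_obs) (see
   InitializeLambda() below) and TSAdjointSolve() propagates it backward in
   time; at t0 it equals dJ/dU0 (see FormFunctionAndGradient()).
 */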

#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>
#include <petsctao.h>

/* User-defined routines */
extern PetscErrorCode FormFunctionAndGradient(Tao, Vec, PetscReal *, Vec, void *);

/*
   Set the terminal condition for the adjoint variable: add 2 (U(tf) - U_obs),
   the gradient of the objective with respect to the final state, to lambda
 */
PetscErrorCode InitializeLambda(DM da, Vec lambda, Vec U, AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  Vec         Uob;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(U, &Uob));
  PetscCall(PetscSNPrintf(filename, sizeof filename, "ex5opt.ob"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, filename, FILE_MODE_READ, &viewer));
  PetscCall(VecLoad(Uob, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
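  /* Uob <- U - Uob, then scale by 2: lambda accumulates 2 (U - U_obs), the
     derivative of the squared misfit with respect to the final state */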
  PetscCall(VecAYPX(Uob, -1., U));
  PetscCall(VecScale(Uob, 2.0));
  PetscCall(VecAXPY(lambda, 1., Uob));
  PetscCall(VecDestroy(&Uob));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Set up a viewer that dumps data into a binary file
 */
PetscErrorCode OutputBIN(DM da, const char *filename, PetscViewer *viewer)
{
  PetscFunctionBegin;
  PetscCall(PetscViewerCreate(PetscObjectComm((PetscObject)da), viewer));
  PetscCall(PetscViewerSetType(*viewer, PETSCVIEWERBINARY));
  PetscCall(PetscViewerFileSetMode(*viewer, FILE_MODE_WRITE));
  PetscCall(PetscViewerFileSetName(*viewer, filename));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Solve the forward problem to generate a reference solution and save it to
   the binary file ex5opt.ob
 */
PetscErrorCode GenerateOBs(TS ts, Vec U, AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  DM          da;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(TSSolve(ts, U));
  PetscCall(PetscSNPrintf(filename, sizeof filename, "ex5opt.ob"));
  PetscCall(OutputBIN(da, filename, &viewer));
  PetscCall(VecView(U, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

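/*
   Perturbed initial conditions: the same bump as InitialConditions() but with
   the spatial frequency halved (sin(2 pi x) instead of sin(4 pi x)); selected
   as the TAO starting point by -perturbic 1
 */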
PetscErrorCode PerturbedInitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * 0.5 * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * 0.5 * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

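/*
   Perturbed initial conditions: the x argument of the bump is shifted by 0.5
   and the support test uses approximate floating-point comparisons; selected
   as the TAO starting point by -perturbic 2
 */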
PetscErrorCode PerturbedInitialConditions2(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * (x - 0.5)), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

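/*
   Perturbed initial conditions: the support of the bump is enlarged to
   [0.5, 2.0] x [0.5, 2.0]; selected as the TAO starting point by -perturbic 3
 */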
PetscErrorCode PerturbedInitialConditions3(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((0.5 <= x) && (x <= 2.0) && (0.5 <= y) && (y <= 2.0)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM        da;
  AppCtx    appctx;
  PetscBool forwardonly = PETSC_FALSE, implicitform = PETSC_FALSE;
  PetscInt  perturbic = 1;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-perturbic", &perturbic, NULL));

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* Create distributed array (DMDA) to manage parallel grid and vectors */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types */
  PetscCall(DMCreateGlobalVector(da, &appctx.U));

  /* Create timestepping solver context */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetType(appctx.ts, TSCN));
  PetscCall(TSSetDM(appctx.ts, da));
  PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
  PetscCall(TSSetEquationType(appctx.ts, TS_EQ_ODE_EXPLICIT)); /* fewer Jacobian evaluations when adjoint BEuler is used; otherwise no effect */
  if (!implicitform) {
    PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
    PetscCall(TSSetRHSJacobian(appctx.ts, NULL, NULL, RHSJacobian, &appctx));
  } else {
    PetscCall(TSSetIFunction(appctx.ts, NULL, IFunction, &appctx));
    PetscCall(TSSetIJacobian(appctx.ts, NULL, NULL, IJacobian, &appctx));
  }

  /* Set initial conditions */
  PetscCall(InitialConditions(da, appctx.U));
  PetscCall(TSSetSolution(appctx.ts, appctx.U));

  /* Set solver options */
  PetscCall(TSSetMaxTime(appctx.ts, 2000.0));
  PetscCall(TSSetTimeStep(appctx.ts, 0.5));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(appctx.ts));

  PetscCall(GenerateOBs(appctx.ts, appctx.U, &appctx));

  if (!forwardonly) {
    Tao           tao;
    Vec           P;
    Vec           lambda[1];
    PetscLogStage opt_stage;

    PetscCall(PetscLogStageRegister("Optimization", &opt_stage));
    PetscCall(PetscLogStagePush(opt_stage));
    if (perturbic == 1) {
      PetscCall(PerturbedInitialConditions(da, appctx.U));
    } else if (perturbic == 2) {
      PetscCall(PerturbedInitialConditions2(da, appctx.U));
    } else if (perturbic == 3) {
      PetscCall(PerturbedInitialConditions3(da, appctx.U));
    }

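    /* lambda[0] carries the adjoint variable: TSAdjointSolve() turns the
       terminal condition dJ/dU(tf) into the gradient dJ/dU(t0) */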
    PetscCall(VecDuplicate(appctx.U, &lambda[0]));
    PetscCall(TSSetCostGradients(appctx.ts, 1, lambda, NULL));

    /* Have the TS save its trajectory needed by TSAdjointSolve() */
    PetscCall(TSSetSaveTrajectory(appctx.ts));

    /* Create TAO solver and set desired solution method */
    PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
    PetscCall(TaoSetType(tao, TAOBLMVM));

    /* Set initial guess for TAO */
    PetscCall(VecDuplicate(appctx.U, &P));
    PetscCall(VecCopy(appctx.U, P));
    PetscCall(TaoSetSolution(tao, P));

    /* Set routine for function and gradient evaluation */
    PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionAndGradient, &appctx));

    /* Check for any TAO command line options */
    PetscCall(TaoSetFromOptions(tao));

    PetscCall(TaoSolve(tao));
    PetscCall(TaoDestroy(&tao));
    PetscCall(VecDestroy(&lambda[0]));
    PetscCall(VecDestroy(&P));
    PetscCall(PetscLogStagePop());
  }

  /* Free work space.  All PETSc objects should be destroyed when they
     are no longer needed. */
  PetscCall(VecDestroy(&appctx.U));
  PetscCall(TSDestroy(&appctx.ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
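
/*
   Illustrative command lines (assuming the executable is named ex5opt_ic):
     ./ex5opt_ic -forwardonly                run only the forward solve, which writes the reference solution ex5opt.ob
     ./ex5opt_ic -perturbic 2 -tao_monitor   optimize starting from the second perturbed initial condition
     ./ex5opt_ic -implicitform               use IFunction/IJacobian instead of RHSFunction/RHSJacobian
 */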

/* ------------------------ TAO callbacks ---------------------------- */

/*
   FormFunctionAndGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   P   - the input vector
   ctx - optional user-defined context, as set by TaoSetObjectiveAndGradient()

   Output Parameters:
   f   - the newly evaluated function
   G   - the newly evaluated gradient
*/
PetscErrorCode FormFunctionAndGradient(Tao tao, Vec P, PetscReal *f, Vec G, void *ctx)
{
  AppCtx     *appctx = (AppCtx *)ctx;
  PetscReal   soberr, timestep;
  Vec        *lambda;
  Vec         SDiff;
  DM          da;
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;

  PetscFunctionBeginUser;
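  /* Reset the TS so it can be reused for each TAO function evaluation: rewind
     the time and the step counter, and flip the step size back to positive if
     it was left negative (e.g. by a previous TSAdjointSolve()) */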
  PetscCall(TSSetTime(appctx->ts, 0.0));
  PetscCall(TSGetTimeStep(appctx->ts, &timestep));
  if (timestep < 0) PetscCall(TSSetTimeStep(appctx->ts, -timestep));
  PetscCall(TSSetStepNumber(appctx->ts, 0));
  PetscCall(TSSetFromOptions(appctx->ts));

  PetscCall(VecDuplicate(P, &SDiff));
  PetscCall(VecCopy(P, appctx->U));
  PetscCall(TSGetDM(appctx->ts, &da));
  *f = 0;

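  /* Forward solve, then evaluate the objective J = || U(tf) - U_obs ||^2 by
     reading the reference solution back from ex5opt.ob */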
  PetscCall(TSSolve(appctx->ts, appctx->U));
  PetscCall(PetscSNPrintf(filename, sizeof filename, "ex5opt.ob"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, filename, FILE_MODE_READ, &viewer));
  PetscCall(VecLoad(SDiff, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecAYPX(SDiff, -1., appctx->U));
  PetscCall(VecDot(SDiff, SDiff, &soberr));
  *f += soberr;

  PetscCall(TSGetCostGradients(appctx->ts, NULL, &lambda, NULL));
  PetscCall(VecSet(lambda[0], 0.0));
  PetscCall(InitializeLambda(da, lambda[0], appctx->U, appctx));
  PetscCall(TSAdjointSolve(appctx->ts));

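  /* after the backward sweep lambda[0] holds dJ/dU(t0); hand it to TAO as the gradient */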
  PetscCall(VecCopy(lambda[0], G));

  PetscCall(VecDestroy(&SDiff));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      depends: reaction_diffusion.c
      requires: !complex !single

   test:
      args: -ts_max_steps 5 -ts_type rk -ts_rk_type 3 -ts_trajectory_type memory -tao_monitor -tao_view -tao_gatol 1e-6
      output_file: output/ex5opt_ic_1.out

TEST*/