Actual source code: ex5adj.c

static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
  See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  It computes the sensitivity of a component of the final solution, located at the center of the 2D domain, with respect to the initial conditions.
  The user does not need to provide any additional functions; the functions required by the original simulation are reused in the adjoint run.

  Runtime options:
    -forwardonly  - run the forward simulation without the adjoint
    -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian are used
    -aijpc        - set the preconditioner matrix to AIJ (the Jacobian matrix can be of a different type such as SELL)
*/
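
/*
   Example invocations (a sketch only; the option sets mirror the TEST section
   at the end of this file, and the executable name ex5adj assumes the default
   build target):

     mpiexec -n 2 ./ex5adj -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
     ./ex5adj -forwardonly -ts_max_steps 10 -ts_monitor
*/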
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

PetscErrorCode InitialConditions(DM, Vec);

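/*
   InitializeLambda - sets the terminal condition for the adjoint run.
   TSAdjointSolve() integrates the adjoint system backward in time, starting
   from the gradient of the cost function with respect to the final solution;
   here that gradient has a 1 in the u and v entries at the grid point nearest
   to the normalized coordinates (x, y) and zeros elsewhere.
*/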
PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
{
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* locate the global i index for x and j index for y */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the i,j vertex is on this process */
    PetscCall(DMDAVecGetArray(da, lambda, &l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    PetscCall(DMDAVecRestoreArray(da, lambda, &l));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  TS        ts; /* ODE integrator */
  Vec       x;  /* solution */
  DM        da;
  AppCtx    appctx;
  Vec       lambda[1];
  PetscBool forwardonly = PETSC_FALSE, implicitform = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  appctx.aijpc = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));

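  /* problem parameters: diffusion coefficients and reaction rates of the
     two-species reaction-diffusion model defined in ex5.c */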
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetEquationType(ts, TS_EQ_ODE_EXPLICIT)); /* fewer Jacobian evaluations when the adjoint of backward Euler is used; otherwise no effect */
  if (!implicitform) {
    PetscCall(TSSetType(ts, TSRK));
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
    PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));
  } else {
    PetscCall(TSSetType(ts, TSCN));
    PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
    if (appctx.aijpc) {
      Mat A, B;

      PetscCall(DMSetMatType(da, MATSELL));
      PetscCall(DMCreateMatrix(da, &A));
      PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      PetscCall(TSSetIJacobian(ts, A, B, IJacobian, &appctx));
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobian, &appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /*
    Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));
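  /*
    The saved trajectory provides the checkpoints from which the forward
    states are recovered during the backward (adjoint) sweep; by default the
    checkpoints are written to disk, while -ts_trajectory_type memory (used by
    several tests below) keeps them in memory instead.
  */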

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 200.0));
  PetscCall(TSSetTimeStep(ts, 0.5));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /*   Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    PetscCall(TSAdjointSolve(ts));
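    /* lambda[0] now contains the sensitivity of the cost (the u and v values
       at the center of the domain at the final time) with respect to the
       initial conditions; -ts_adjoint_monitor_draw_sensi (see test 3 below)
       plots it during the adjoint run */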
    PetscCall(VecDestroy(&lambda[0]));
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
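  /* v = sin^2(4*pi*x) * sin^2(4*pi*y) / 4 inside the square [1, 1.5] x [1, 1.5],
     v = 0 elsewhere, and u = 1 - 2*v everywhere */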
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      depends: reaction_diffusion.c
      requires: !complex !single

   test:
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
      output_file: output/ex5adj_1.out

   test:
      suffix: 2
      nsize: 2
      args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -ts_trajectory_dirname Test-dir -ts_trajectory_file_template test-%06D.cp

   test:
      suffix: 3
      nsize: 2
      args: -ts_max_steps 10 -ts_dt 10 -ts_adjoint_monitor_draw_sensi

   test:
      suffix: 4
      nsize: 2
      args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -snes_fd_color
      output_file: output/ex5adj_2.out

   test:
      suffix: 5
      nsize: 2
      args: -ts_max_steps 10 -implicitform 0 -ts_type rk -ts_rk_type 4 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20 -snes_fd_color
      output_file: output/ex5adj_1.out

   test:
      suffix: knl
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -malloc_hbw -ts_trajectory_use_dram 1
      output_file: output/ex5adj_3.out
      requires: knl

   test:
      suffix: sell
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type none
      output_file: output/ex5adj_sell_1.out

   test:
      suffix: aijsell
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type none
      output_file: output/ex5adj_sell_1.out

   test:
      suffix: sell2
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
      output_file: output/ex5adj_sell_2.out

   test:
      suffix: aijsell2
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
      output_file: output/ex5adj_sell_2.out

   test:
      suffix: sell3
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
      output_file: output/ex5adj_sell_3.out

   test:
      suffix: sell4
      nsize: 4
      args: -forwardonly -implicitform -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
      output_file: output/ex5adj_sell_4.out

   test:
      suffix: sell5
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -aijpc
      output_file: output/ex5adj_sell_5.out

   test:
      suffix: aijsell5
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell
      output_file: output/ex5adj_sell_5.out

   test:
      suffix: sell6
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sell -pc_type jacobi
      output_file: output/ex5adj_sell_6.out

   test:
      suffix: sell7
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sellcuda -dm_vec_type cuda -pc_type jacobi
      output_file: output/ex5adj_sell_6.out
      requires: cuda

   test:
      suffix: gamg1
      args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
      output_file: output/ex5adj_gamg.out

   test:
      suffix: gamg2
      args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_use_explicittranspose -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
      output_file: output/ex5adj_gamg.out
TEST*/