Actual source code: tssen.c

  1: #include <petsc/private/tsimpl.h>
  2: #include <petscdraw.h>

  4: PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;

  6: /* #define TSADJOINT_STAGE */

  8: /* ------------------------ Sensitivity Context ---------------------------*/

 10: /*@C
 11:   TSSetRHSJacobianP - Sets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.

 13:   Logically Collective

 15:   Input Parameters:
 16: + ts   - `TS` context obtained from `TSCreate()`
 17: . Amat - JacobianP matrix
 18: . func - function
 19: - ctx  - [optional] function context

 21:   Level: intermediate

 23:   Note:
 24:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

 26: .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()`
 27: @*/
 28: PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, PetscCtx ctx)
 29: {
 30:   PetscFunctionBegin;

 34:   ts->rhsjacobianp    = func;
 35:   ts->rhsjacobianpctx = ctx;
 36:   if (Amat) {
 37:     PetscCall(PetscObjectReference((PetscObject)Amat));
 38:     PetscCall(MatDestroy(&ts->Jacprhs));
 39:     ts->Jacprhs = Amat;
 40:   }
 41:   PetscFunctionReturn(PETSC_SUCCESS);
 42: }

 44: /*@C
 45:   TSGetRHSJacobianP - Gets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.

 47:   Logically Collective

 49:   Input Parameter:
 50: . ts - `TS` context obtained from `TSCreate()`

 52:   Output Parameters:
 53: + Amat - JacobianP matrix
 54: . func - function
 55: - ctx  - [optional] function context

 57:   Level: intermediate

 59:   Note:
 60:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

 62: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn`
 63: @*/
 64: PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, PetscCtxRt ctx)
 65: {
 66:   PetscFunctionBegin;
 67:   if (func) *func = ts->rhsjacobianp;
 68:   if (ctx) *(void **)ctx = ts->rhsjacobianpctx;
 69:   if (Amat) *Amat = ts->Jacprhs;
 70:   PetscFunctionReturn(PETSC_SUCCESS);
 71: }

 73: /*@C
 74:   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.

 76:   Collective

 78:   Input Parameters:
 79: + ts - The `TS` context obtained from `TSCreate()`
 80: . t  - the time
 81: - U  - the solution at which to compute the Jacobian

 83:   Output Parameter:
 84: . Amat - the computed Jacobian

 86:   Level: developer

 88: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
 89: @*/
PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
{
  PetscFunctionBegin;
  /* No output matrix requested: nothing to compute */
  if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

  if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
  else {
    /* No user callback registered: treat the Jacobian w.r.t. the parameters as
       zero, and make sure the matrix is assembled so it is usable in products */
    PetscBool assembled;
    PetscCall(MatZeroEntries(Amat));
    PetscCall(MatAssembled(Amat, &assembled));
    if (!assembled) {
      PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
      PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

110: /*@C
111:   TSSetIJacobianP - Sets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.

113:   Logically Collective

115:   Input Parameters:
116: + ts   - `TS` context obtained from `TSCreate()`
117: . Amat - JacobianP matrix
118: . func - function
119: - ctx  - [optional] function context

121:   Calling sequence of `func`:
122: + ts    - the `TS` context
123: . t     - current timestep
124: . U     - input vector (current ODE solution)
125: . Udot  - time derivative of state vector
126: . shift - shift to apply, see the note in `TSSetIJacobian()`
127: . A     - output matrix
128: - ctx   - [optional] function context

130:   Level: intermediate

132:   Note:
133:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

135: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136: @*/
137: PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, PetscCtx ctx), PetscCtx ctx)
138: {
139:   PetscFunctionBegin;

143:   ts->ijacobianp    = func;
144:   ts->ijacobianpctx = ctx;
145:   if (Amat) {
146:     PetscCall(PetscObjectReference((PetscObject)Amat));
147:     PetscCall(MatDestroy(&ts->Jacp));
148:     ts->Jacp = Amat;
149:   }
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: }

153: /*@C
 154:   TSGetIJacobianP - Gets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.

156:   Logically Collective

158:   Input Parameter:
159: . ts - `TS` context obtained from `TSCreate()`

161:   Output Parameters:
162: + Amat - JacobianP matrix
163: . func - the function that computes the JacobianP
164: - ctx  - [optional] function context

166:   Calling sequence of `func`:
167: + ts    - the `TS` context
168: . t     - current timestep
169: . U     - input vector (current ODE solution)
170: . Udot  - time derivative of state vector
171: . shift - shift to apply, see the note in `TSSetIJacobian()`
172: . A     - output matrix
173: - ctx   - [optional] function context

175:   Level: intermediate

177:   Note:
178:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

180: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSSetIJacobianP()`, `TSGetRHSJacobianP()`
181: @*/
182: PetscErrorCode TSGetIJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, PetscCtx ctx), PetscCtxRt ctx)
183: {
184:   PetscFunctionBegin;

187:   if (func) *func = ts->ijacobianp;
188:   if (ctx) *(void **)ctx = ts->ijacobianpctx;
189:   if (Amat) *Amat = ts->Jacp;
190:   PetscFunctionReturn(PETSC_SUCCESS);
191: }

193: /*@
194:   TSComputeIJacobianP - Runs the user-defined IJacobianP function.

196:   Collective

198:   Input Parameters:
199: + ts    - the `TS` context
200: . t     - current timestep
201: . U     - state vector
202: . Udot  - time derivative of state vector
203: . shift - shift to apply, see note below
204: - imex  - flag indicates if the method is IMEX so that the `RHSJacobianP` should be kept separate

206:   Output Parameter:
207: . Amat - Jacobian matrix

209:   Level: developer

211: .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
212: @*/
PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
{
  PetscFunctionBegin;
  /* No output matrix requested: nothing to compute */
  if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
  /* Evaluate the user-provided IJacobianP callback, if any */
  if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
  if (imex) {
    if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
      /* IMEX keeps the RHS JacobianP separate, so without an IJacobianP the
         implicit part contributes a zero matrix; zero and assemble Amat */
      PetscBool assembled;
      PetscCall(MatZeroEntries(Amat));
      PetscCall(MatAssembled(Amat, &assembled));
      if (!assembled) {
        PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
        PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* Non-IMEX: fold the RHS JacobianP into Amat with a minus sign (the RHS
       contribution is negated when combined with the implicit part) */
    if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      PetscCall(MatScale(Amat, -1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        PetscCall(MatZeroEntries(Amat));
      }
      PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
    }
  }
  PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

249: /*@C
250:   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

252:   Logically Collective

254:   Input Parameters:
255: + ts           - the `TS` context obtained from `TSCreate()`
256: . numcost      - number of gradients to be computed, this is the number of cost functions
257: . costintegral - vector that stores the integral values
258: . rf           - routine for evaluating the integrand function
259: . drduf        - function that computes the gradients of the `r` with respect to `u`
260: . drdpf        - function that computes the gradients of the `r` with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
261: . fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
262: - ctx          - [optional] application context for private data for the function evaluation routine (may be `NULL`)

264:   Calling sequence of `rf`:
265: + ts  - the integrator
266: . t   - the time
267: . U   - the solution
268: . F   - the computed value of the function
269: - ctx - the application context

271:   Calling sequence of `drduf`:
272: + ts   - the integrator
273: . t    - the time
274: . U    - the solution
275: . dRdU - the computed gradients of the `r` with respect to `u`
276: - ctx  - the application context

278:   Calling sequence of `drdpf`:
279: + ts   - the integrator
280: . t    - the time
281: . U    - the solution
282: . dRdP - the computed gradients of the `r` with respect to `p`
283: - ctx  - the application context

285:   Level: deprecated

287:   Notes:
288:   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions

290:   Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead

292: .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
293:           `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
294: @*/
PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, PetscCtx ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, PetscCtx ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, PetscCtx ctx), PetscBool fwd, PetscCtx ctx)
{
  PetscFunctionBegin;
  /* numcost must agree with any value previously fixed by TSSetCostGradients()
     or TSForwardSetIntegralGradients() */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost = numcost;

  if (costintegral) {
    /* take a reference to the user-supplied vector, replacing any previous one */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
    } else {
      PetscCall(VecSet(ts->vec_costintegral, 0.0));
    }
  }
  /* work vector for integrand values, duplicated from the integral vector so
     the layouts match */
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand, 0.0));
  }
  ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand    = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction     = drduf;
  ts->drdpfunction     = drdpf;
  PetscFunctionReturn(PETSC_SUCCESS);
}

327: /*@
328:   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
329:   It is valid to call the routine after a backward run.

331:   Not Collective

333:   Input Parameter:
334: . ts - the `TS` context obtained from `TSCreate()`

336:   Output Parameter:
337: . v - the vector containing the integrals for each cost function

339:   Level: intermediate

341: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
342: @*/
343: PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
344: {
345:   TS quadts;

347:   PetscFunctionBegin;
349:   PetscAssertPointer(v, 2);
350:   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
351:   *v = quadts->vec_sol;
352:   PetscFunctionReturn(PETSC_SUCCESS);
353: }

355: /*@
356:   TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

358:   Input Parameters:
359: + ts - the `TS` context
360: . t  - current time
361: - U  - state vector, i.e. current solution

363:   Output Parameter:
364: . Q - vector of size numcost to hold the outputs

366:   Level: deprecated

368:   Note:
369:   Most users should not need to explicitly call this routine, as it
370:   is used internally within the sensitivity analysis context.

372: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
373: @*/
374: PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
375: {
376:   PetscFunctionBegin;

381:   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
382:   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
383:   else PetscCall(VecZeroEntries(Q));
384:   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
385:   PetscFunctionReturn(PETSC_SUCCESS);
386: }

388: // PetscClangLinter pragma disable: -fdoc-*
389: /*@
390:   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

392:   Level: deprecated

394: @*/
PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
{
  PetscFunctionBegin;
  /* Nothing requested */
  if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);

  /* NOTE(review): assumes ts->drdufunction was set (via TSSetCostIntegrand());
     calling this without it dereferences a NULL function pointer — confirm callers guard */
  PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

406: // PetscClangLinter pragma disable: -fdoc-*
407: /*@
408:   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

410:   Level: deprecated

412: @*/
PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
{
  PetscFunctionBegin;
  /* Nothing requested */
  if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);

  /* NOTE(review): assumes ts->drdpfunction was set (via TSSetCostIntegrand());
     calling this without it dereferences a NULL function pointer — confirm callers guard */
  PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

424: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
425: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
426: /*@C
427:   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of `F` (IFunction) w.r.t. the state variable.

429:   Logically Collective

431:   Input Parameters:
432: + ts                   - `TS` context obtained from `TSCreate()`
433: . ihp1                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{UU}$
434: . ihessianproductfunc1 - vector-Hessian-vector product function for $F_{UU}$
435: . ihp2                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{UP}$
436: . ihessianproductfunc2 - vector-Hessian-vector product function for $F_{UP}$
437: . ihp3                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{PU}$
438: . ihessianproductfunc3 - vector-Hessian-vector product function for $F_{PU}$
439: . ihp4                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{PP}$
440: . ihessianproductfunc4 - vector-Hessian-vector product function for $F_{PP}$
441: - ctx                  - [optional] function context

443:   Calling sequence of `ihessianproductfunc1`:
444: + ts  - the `TS` context
445: . t   - current timestep
446: . U   - input vector (current ODE solution)
447: . Vl  - an array of input vectors to be left-multiplied with the Hessian
448: . Vr  - input vector to be right-multiplied with the Hessian
449: . VHV - an array of output vectors for vector-Hessian-vector product
450: - ctx - [optional] function context

452:   Level: intermediate

454:   Notes:
455:   All other functions have the same calling sequence as `ihessianproductfunc1`, so their
456:   descriptions are omitted for brevity.

458:   The first Hessian function and the working array are required.
459:   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
460:   $Vl_n^T*F_UP*Vr$
461:   where the vector $Vl_n$ (n-th element in the array `Vl`) and `Vr` are of size `N` and `M` respectively, and the Hessian $F_{UP}$ is of size $N x N x M.$
462:   Each entry of $F_{UP}$ corresponds to the derivative
463:   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.$
464:   The result of the vector-Hessian-vector product for $Vl_n$ needs to be stored in vector $VHV_n$ with the j-th entry being
465:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}$
466:   If the cost function is a scalar, there will be only one vector in `Vl` and `VHV`.

468: .seealso: [](ch_ts), `TS`
469: @*/
470: PetscErrorCode TSSetIHessianProduct(TS ts, Vec ihp1[], PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec ihp2[], PetscErrorCode (*ihessianproductfunc2)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec ihp3[], PetscErrorCode (*ihessianproductfunc3)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec ihp4[], PetscErrorCode (*ihessianproductfunc4)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), PetscCtx ctx)
471: {
472:   PetscFunctionBegin;
474:   PetscAssertPointer(ihp1, 2);

476:   ts->ihessianproductctx = ctx;
477:   if (ihp1) ts->vecs_fuu = ihp1;
478:   if (ihp2) ts->vecs_fup = ihp2;
479:   if (ihp3) ts->vecs_fpu = ihp3;
480:   if (ihp4) ts->vecs_fpp = ihp4;
481:   ts->ihessianproduct_fuu = ihessianproductfunc1;
482:   ts->ihessianproduct_fup = ihessianproductfunc2;
483:   ts->ihessianproduct_fpu = ihessianproductfunc3;
484:   ts->ihessianproduct_fpp = ihessianproductfunc4;
485:   PetscFunctionReturn(PETSC_SUCCESS);
486: }

488: /*@
489:   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

491:   Collective

493:   Input Parameters:
494: + ts - The `TS` context obtained from `TSCreate()`
495: . t  - the time
496: . U  - the solution at which to compute the Hessian product
497: . Vl - the array of input vectors to be multiplied with the Hessian from the left
498: - Vr - the input vector to be multiplied with the Hessian from the right

500:   Output Parameter:
501: . VHV - the array of output vectors that store the Hessian product

503:   Level: developer

505:   Note:
506:   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
507:   so most users would not generally call this routine themselves.

509: .seealso: [](ch_ts), `TSSetIHessianProduct()`
510: @*/
511: PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
512: {
513:   PetscFunctionBegin;
514:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

518:   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

520:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
521:   if (ts->rhshessianproduct_guu) {
522:     PetscInt nadj;
523:     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
524:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
525:   }
526:   PetscFunctionReturn(PETSC_SUCCESS);
527: }

529: /*@
530:   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

532:   Collective

534:   Input Parameters:
535: + ts - The `TS` context obtained from `TSCreate()`
536: . t  - the time
537: . U  - the solution at which to compute the Hessian product
538: . Vl - the array of input vectors to be multiplied with the Hessian from the left
539: - Vr - the input vector to be multiplied with the Hessian from the right

541:   Output Parameter:
542: . VHV - the array of output vectors that store the Hessian product

544:   Level: developer

546:   Note:
547:   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
548:   so most users would not generally call this routine themselves.

550: .seealso: [](ch_ts), `TSSetIHessianProduct()`
551: @*/
552: PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
553: {
554:   PetscFunctionBegin;
555:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

559:   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

561:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
562:   if (ts->rhshessianproduct_gup) {
563:     PetscInt nadj;
564:     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
565:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
566:   }
567:   PetscFunctionReturn(PETSC_SUCCESS);
568: }

570: /*@
571:   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

573:   Collective

575:   Input Parameters:
576: + ts - The `TS` context obtained from `TSCreate()`
577: . t  - the time
578: . U  - the solution at which to compute the Hessian product
579: . Vl - the array of input vectors to be multiplied with the Hessian from the left
580: - Vr - the input vector to be multiplied with the Hessian from the right

582:   Output Parameter:
583: . VHV - the array of output vectors that store the Hessian product

585:   Level: developer

587:   Note:
588:   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
589:   so most users would not generally call this routine themselves.

591: .seealso: [](ch_ts), `TSSetIHessianProduct()`
592: @*/
593: PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
594: {
595:   PetscFunctionBegin;
596:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

600:   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

602:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
603:   if (ts->rhshessianproduct_gpu) {
604:     PetscInt nadj;
605:     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
606:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
607:   }
608:   PetscFunctionReturn(PETSC_SUCCESS);
609: }

611: /*@C
612:   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

614:   Collective

616:   Input Parameters:
617: + ts - The `TS` context obtained from `TSCreate()`
618: . t  - the time
619: . U  - the solution at which to compute the Hessian product
620: . Vl - the array of input vectors to be multiplied with the Hessian from the left
621: - Vr - the input vector to be multiplied with the Hessian from the right

623:   Output Parameter:
624: . VHV - the array of output vectors that store the Hessian product

626:   Level: developer

628:   Note:
629:   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
630:   so most users would not generally call this routine themselves.

632: .seealso: [](ch_ts), `TSSetIHessianProduct()`
633: @*/
634: PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
635: {
636:   PetscFunctionBegin;
637:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

641:   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

643:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
644:   if (ts->rhshessianproduct_gpp) {
645:     PetscInt nadj;
646:     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
647:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
648:   }
649:   PetscFunctionReturn(PETSC_SUCCESS);
650: }

652: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
653: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
654: /*@C
655:   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
656:   product. The Hessian is the second-order derivative of `G` (RHSFunction) w.r.t. the state
657:   variable.

659:   Logically Collective

661:   Input Parameters:
662: + ts                     - `TS` context obtained from `TSCreate()`
663: . rhshp1                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{UU}$
664: . rhshessianproductfunc1 - vector-Hessian-vector product function for $G_{UU}$
665: . rhshp2                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{UP}$
666: . rhshessianproductfunc2 - vector-Hessian-vector product function for $G_{UP}$
667: . rhshp3                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{PU}$
668: . rhshessianproductfunc3 - vector-Hessian-vector product function for $G_{PU}$
669: . rhshp4                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{PP}$
670: . rhshessianproductfunc4 - vector-Hessian-vector product function for $G_{PP}$
671: - ctx                    - [optional] function context

673:   Calling sequence of `rhshessianproductfunc1`:
674: + ts  - the `TS` context
675: . t   - current timestep
676: . U   - input vector (current ODE solution)
677: . Vl  - an array of input vectors to be left-multiplied with the Hessian
678: . Vr  - input vector to be right-multiplied with the Hessian
679: . VHV - an array of output vectors for vector-Hessian-vector product
680: - ctx - [optional] function context

682:   Level: intermediate

684:   Notes:
685:   All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
686:   descriptions are omitted for brevity.

688:   The first Hessian function and the working array are required.

 690:   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
 691:   $Vl_n^T G_{UP} Vr$
 692:   where the vector $Vl_n$ (n-th element in the array $Vl$) and $Vr$ are of size $N$ and $M$ respectively, and the Hessian $G_{UP}$ is of size $N \times N \times M$.
 693:   Each entry of $G_{UP}$ corresponds to the derivative
 694:   $G_{UP}[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.$
 695:   The result of the vector-Hessian-vector product for $Vl_n$ needs to be stored in vector $VHV_n$ with j-th entry being
 696:   $VHV_n[j] = \sum_i \sum_k Vl_n[i] \, G_{UP}[i][j][k] \, Vr[k]$
697:   If the cost function is a scalar, there will be only one vector in $Vl$ and $VHV$.

699: .seealso: `TS`, `TSAdjoint`
700: @*/
701: PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec rhshp1[], PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec rhshp2[], PetscErrorCode (*rhshessianproductfunc2)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec rhshp3[], PetscErrorCode (*rhshessianproductfunc3)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec rhshp4[], PetscErrorCode (*rhshessianproductfunc4)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), PetscCtx ctx)
702: {
703:   PetscFunctionBegin;
705:   PetscAssertPointer(rhshp1, 2);

707:   ts->rhshessianproductctx = ctx;
708:   if (rhshp1) ts->vecs_guu = rhshp1;
709:   if (rhshp2) ts->vecs_gup = rhshp2;
710:   if (rhshp3) ts->vecs_gpu = rhshp3;
711:   if (rhshp4) ts->vecs_gpp = rhshp4;
712:   ts->rhshessianproduct_guu = rhshessianproductfunc1;
713:   ts->rhshessianproduct_gup = rhshessianproductfunc2;
714:   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
715:   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
716:   PetscFunctionReturn(PETSC_SUCCESS);
717: }

719: /*@
720:   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for $G_{uu}$.

722:   Collective

724:   Input Parameters:
725: + ts - The `TS` context obtained from `TSCreate()`
726: . t  - the time
727: . U  - the solution at which to compute the Hessian product
728: . Vl - the array of input vectors to be multiplied with the Hessian from the left
729: - Vr - the input vector to be multiplied with the Hessian from the right

731:   Output Parameter:
732: . VHV - the array of output vectors that store the Hessian product

734:   Level: developer

736:   Note:
737:   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
738:   so most users would not generally call this routine themselves.

740: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
741: @*/
PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
{
  PetscFunctionBegin;
  /* Nothing requested */
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

  /* NOTE(review): assumes ts->rhshessianproduct_guu is non-NULL; the caller in
     TSComputeIHessianProductFunctionUU() checks it before invoking this routine */
  PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

753: /*@
754:   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for $G_{up}$.

756:   Collective

758:   Input Parameters:
759: + ts - The `TS` context obtained from `TSCreate()`
760: . t  - the time
761: . U  - the solution at which to compute the Hessian product
762: . Vl - the array of input vectors to be multiplied with the Hessian from the left
763: - Vr - the input vector to be multiplied with the Hessian from the right

765:   Output Parameter:
766: . VHV - the array of output vectors that store the Hessian product

768:   Level: developer

770:   Note:
771:   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
772:   so most users would not generally call this routine themselves.

774: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
775: @*/
PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
{
  PetscFunctionBegin;
  /* Nothing requested */
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

  /* NOTE(review): assumes ts->rhshessianproduct_gup is non-NULL; the caller in
     TSComputeIHessianProductFunctionUP() checks it before invoking this routine */
  PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

787: /*@
788:   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for $G_{pu}.$

790:   Collective

792:   Input Parameters:
793: + ts - The `TS` context obtained from `TSCreate()`
794: . t  - the time
795: . U  - the solution at which to compute the Hessian product
796: . Vl - the array of input vectors to be multiplied with the Hessian from the left
797: - Vr - the input vector to be multiplied with the Hessian from the right

799:   Output Parameter:
800: . VHV - the array of output vectors that store the Hessian product

802:   Level: developer

804:   Note:
805:   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
806:   so most users would not generally call this routine themselves.

808: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
809: @*/
810: PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
811: {
812:   PetscFunctionBegin;
813:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output vectors requested: treated as a no-op */

817:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); /* invoke the user's G_pu callback registered via TSSetRHSHessianProduct() */
818:   PetscFunctionReturn(PETSC_SUCCESS);
819: }

821: /*@
822:   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for $G_{pp}$.

824:   Collective

826:   Input Parameters:
827: + ts - The `TS` context obtained from `TSCreate()`
828: . t  - the time
829: . U  - the solution at which to compute the Hessian product
830: . Vl - the array of input vectors to be multiplied with the Hessian from the left
831: - Vr - the input vector to be multiplied with the Hessian from the right

833:   Output Parameter:
834: . VHV - the array of output vectors that store the Hessian product

836:   Level: developer

838:   Note:
839:   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
840:   so most users would not generally call this routine themselves.

842: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
843: @*/
844: PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
845: {
846:   PetscFunctionBegin;
847:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output vectors requested: treated as a no-op */

851:   PetscCallBack("TS callback RHSHessianProduct 4 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); /* G_pp is the 4th of the four Hessian blocks (UU=1, UP=2, PU=3, PP=4); the label previously said "3", duplicating the PU label */
852:   PetscFunctionReturn(PETSC_SUCCESS);
853: }

855: /* --------------------------- Adjoint sensitivity ---------------------------*/

857: /*@
858:   TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
859:   for use by the `TS` adjoint routines.

861:   Logically Collective

863:   Input Parameters:
864: + ts      - the `TS` context obtained from `TSCreate()`
865: . numcost - number of gradients to be computed, this is the number of cost functions
866: . lambda  - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
867: - mu      - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

869:   Level: beginner

871:   Notes:
872:   the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime  mu_i = df/dp|finaltime

874:   After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities

876: .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
877: @*/
878: PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec lambda[], Vec mu[])
879: {
880:   PetscFunctionBegin;
882:   PetscAssertPointer(lambda, 3);
883:   ts->vecs_sensi  = lambda;  /* arrays are borrowed, not referenced: the caller retains ownership of lambda/mu */
884:   ts->vecs_sensip = mu;
885:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
886:   ts->numcost = numcost;
887:   PetscFunctionReturn(PETSC_SUCCESS);
888: }

890: /*@
891:   TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()`

893:   Not Collective, but the vectors returned are parallel if `TS` is parallel

895:   Input Parameter:
896: . ts - the `TS` context obtained from `TSCreate()`

898:   Output Parameters:
899: + numcost - size of returned arrays
900: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
901: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

903:   Level: intermediate

905: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
906: @*/
907: PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec *lambda[], Vec *mu[])
908: {
909:   PetscFunctionBegin;
911:   if (numcost) *numcost = ts->numcost;  /* each output is optional; pass NULL to skip it */
912:   if (lambda) *lambda = ts->vecs_sensi;
913:   if (mu) *mu = ts->vecs_sensip;
914:   PetscFunctionReturn(PETSC_SUCCESS);
915: }

917: /*@
918:   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
919:   for use by the `TS` adjoint routines.

921:   Logically Collective

923:   Input Parameters:
924: + ts      - the `TS` context obtained from `TSCreate()`
925: . numcost - number of cost functions
926: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
927: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
928: - dir     - the direction vector that are multiplied with the Hessian of the cost functions

930:   Level: beginner

932:   Notes:
933:   Hessian of the cost function is completely different from Hessian of the ODE/DAE system

935:   For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

937:   After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.

939:   Passing `NULL` for `lambda2` disables the second-order calculation.

941: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
942: @*/
943: PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec lambda2[], Vec mu2[], Vec dir)
944: {
945:   PetscFunctionBegin;
947:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
948:   ts->numcost      = numcost;
949:   ts->vecs_sensi2  = lambda2;  /* borrowed pointers; caller retains ownership of lambda2/mu2/dir */
950:   ts->vecs_sensi2p = mu2;
951:   ts->vec_dir      = dir;
952:   PetscFunctionReturn(PETSC_SUCCESS);
953: }

955: /*@
956:   TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()`

958:   Not Collective, but vectors returned are parallel if `TS` is parallel

960:   Input Parameter:
961: . ts - the `TS` context obtained from `TSCreate()`

963:   Output Parameters:
964: + numcost - number of cost functions
965: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
966: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
967: - dir     - the direction vector that are multiplied with the Hessian of the cost functions

969:   Level: intermediate

971: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
972: @*/
973: PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec *lambda2[], Vec *mu2[], Vec *dir)
974: {
975:   PetscFunctionBegin;
977:   if (numcost) *numcost = ts->numcost;  /* each output is optional; pass NULL to skip it */
978:   if (lambda2) *lambda2 = ts->vecs_sensi2;
979:   if (mu2) *mu2 = ts->vecs_sensi2p;
980:   if (dir) *dir = ts->vec_dir;
981:   PetscFunctionReturn(PETSC_SUCCESS);
982: }

984: /*@
985:   TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities

987:   Logically Collective

989:   Input Parameters:
990: + ts   - the `TS` context obtained from `TSCreate()`
991: - didp - the derivative of initial values w.r.t. parameters

993:   Level: intermediate

995:   Notes:
996:   When computing sensitivities w.r.t. initial condition, set didp to `NULL` so that the solver will take it as an identity matrix mathematically.
997:   `TSAdjoint` does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver.

999: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
1000: @*/
1001: PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
1002: {
1003:   Mat          A;     /* single-column dense matrix holding the initial tangent linear sensitivity */
1004:   Vec          sp;    /* work vector that temporarily aliases column 0 of A */
1005:   PetscScalar *xarr;
1006:   PetscInt     lsize;

1008:   PetscFunctionBegin;
1009:   ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
1010:   PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
1011:   PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
1012:   /* create a single-column dense matrix */
1013:   PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
1014:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

1016:   PetscCall(VecDuplicate(ts->vec_sol, &sp));
1017:   PetscCall(MatDenseGetColumn(A, 0, &xarr));
1018:   PetscCall(VecPlaceArray(sp, xarr)); /* Vec operations on sp below write straight into column 0 of A */
1019:   if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
1020:     if (didp) {
1021:       PetscCall(MatMult(didp, ts->vec_dir, sp));
1022:       PetscCall(VecScale(sp, 2.));
1023:     } else {
1024:       PetscCall(VecZeroEntries(sp));
1025:     }
1026:   } else { /* tangent linear variable initialized as dir */
1027:     PetscCall(VecCopy(ts->vec_dir, sp));
1028:   }
1029:   PetscCall(VecResetArray(sp)); /* must detach the borrowed array before restoring the matrix column */
1030:   PetscCall(MatDenseRestoreColumn(A, &xarr));
1031:   PetscCall(VecDestroy(&sp));

1033:   PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

1035:   PetscCall(MatDestroy(&A)); /* drop local reference; TSForwardSetInitialSensitivities keeps its own */
1036:   PetscFunctionReturn(PETSC_SUCCESS);
1037: }

1039: /*@
1040:   TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context

1042:   Logically Collective

1044:   Input Parameter:
1045: . ts - the `TS` context obtained from `TSCreate()`

1047:   Level: intermediate

1049: .seealso: [](ch_ts), `TSAdjointSetForward()`
1050: @*/
1051: PetscErrorCode TSAdjointResetForward(TS ts)
1052: {
1053:   PetscFunctionBegin;
1054:   ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
1055:   PetscCall(TSForwardReset(ts));   /* releases the tangent linear work objects */
1056:   PetscFunctionReturn(PETSC_SUCCESS);
1057: }

1059: /*@
1060:   TSAdjointSetUp - Sets up the internal data structures for the later use
1061:   of an adjoint solver

1063:   Collective

1065:   Input Parameter:
1066: . ts - the `TS` context obtained from `TSCreate()`

1068:   Level: advanced

1070: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1071: @*/
1072: PetscErrorCode TSAdjointSetUp(TS ts)
1073: {
1074:   TSTrajectory tj;
1075:   PetscBool    match;

1077:   PetscFunctionBegin;
1079:   if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); /* idempotent: safe to call repeatedly */
1080:   PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
1081:   PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
1082:   PetscCall(TSGetTrajectory(ts, &tj));
1083:   PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
1084:   if (match) {
1085:     PetscBool solution_only;
1086:     PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
1087:     PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
1088:   }
1089:   PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */

1091:   if (ts->quadraturets) { /* if there is integral in the cost function */
1092:     PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
1093:     if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
1094:   }

1096:   PetscTryTypeMethod(ts, adjointsetup); /* implementation-specific setup, if the TS type provides one */
1097:   ts->adjointsetupcalled = PETSC_TRUE;
1098:   PetscFunctionReturn(PETSC_SUCCESS);
1099: }

1101: /*@
1102:   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.

1104:   Collective

1106:   Input Parameter:
1107: . ts - the `TS` context obtained from `TSCreate()`

1109:   Level: beginner

1111: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1112: @*/
1113: PetscErrorCode TSAdjointReset(TS ts)
1114: {
1115:   PetscFunctionBegin;
1117:   PetscTryTypeMethod(ts, adjointreset);
1118:   if (ts->quadraturets) { /* if there is integral in the cost function */
1119:     PetscCall(VecDestroy(&ts->vec_drdu_col));
1120:     if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1121:   }
1122:   ts->vecs_sensi         = NULL; /* user-owned arrays were only borrowed; just drop the pointers */
1123:   ts->vecs_sensip        = NULL;
1124:   ts->vecs_sensi2        = NULL;
1125:   ts->vecs_sensi2p       = NULL;
1126:   ts->vec_dir            = NULL;
1127:   ts->adjointsetupcalled = PETSC_FALSE; /* allow TSAdjointSetUp() to run again */
1128:   PetscFunctionReturn(PETSC_SUCCESS);
1129: }

1131: /*@
1132:   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

1134:   Logically Collective

1136:   Input Parameters:
1137: + ts    - the `TS` context obtained from `TSCreate()`
1138: - steps - number of steps to use

1140:   Level: intermediate

1142:   Notes:
1143:   Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. One can call this
1144:   so as to integrate back to less than the original timestep

1146: .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1147: @*/
1148: PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
1149: {
1150:   PetscFunctionBegin;
1153:   PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
1154:   PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
1155:   ts->adjoint_max_steps = steps; /* backward step budget consumed by TSAdjointStep() */
1156:   PetscFunctionReturn(PETSC_SUCCESS);
1157: }

1159: // PetscClangLinter pragma disable: -fdoc-*
1160: /*@C
1161:   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`

1163:   Level: deprecated
1164: @*/
1165: PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), PetscCtx ctx)
1166: {
1167:   PetscFunctionBegin;

1171:   ts->rhsjacobianp    = func;
1172:   ts->rhsjacobianpctx = ctx;
1173:   if (Amat) {
1174:     PetscCall(PetscObjectReference((PetscObject)Amat)); /* take a reference before releasing any previous matrix */
1175:     PetscCall(MatDestroy(&ts->Jacp)); /* NOTE(review): stores into Jacp, whereas TSSetRHSJacobianP() uses Jacprhs — presumably intentional for the deprecated path; confirm against callers */
1176:     ts->Jacp = Amat;
1177:   }
1178:   PetscFunctionReturn(PETSC_SUCCESS);
1179: }

1181: // PetscClangLinter pragma disable: -fdoc-*
1182: /*@C
1183:   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`

1185:   Level: deprecated
1186: @*/
1187: PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
1188: {
1189:   PetscFunctionBegin;

1194:   PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); /* forwards to the callback set by TSSetRHSJacobianP()/TSAdjointSetRHSJacobian() */
1195:   PetscFunctionReturn(PETSC_SUCCESS);
1196: }

1198: // PetscClangLinter pragma disable: -fdoc-*
1199: /*@
1200:   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

1202:   Level: deprecated
1203: @*/
1204: PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
1205: {
1206:   PetscFunctionBegin;

1210:   PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); /* user-supplied dr/du for the cost integrand */
1211:   PetscFunctionReturn(PETSC_SUCCESS);
1212: }

1214: // PetscClangLinter pragma disable: -fdoc-*
1215: /*@
1216:   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

1218:   Level: deprecated
1219: @*/
1220: PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
1221: {
1222:   PetscFunctionBegin;

1226:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); /* user-supplied dr/dp for the cost integrand */
1227:   PetscFunctionReturn(PETSC_SUCCESS);
1228: }

1230: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1231: /*@C
1232:   TSAdjointMonitorSensi - monitors the first lambda sensitivity

1234:   Level: intermediate

1236: .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1237: @*/
1238: static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1239: {
1240:   PetscViewer viewer = vf->viewer;

1242:   PetscFunctionBegin;
1244:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1245:   PetscCall(VecView(lambda[0], viewer)); /* only lambda[0] (first cost function) is shown; mu is ignored */
1246:   PetscCall(PetscViewerPopFormat(viewer));
1247:   PetscFunctionReturn(PETSC_SUCCESS);
1248: }

1250: /*@C
1251:   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user

1253:   Collective

1255:   Input Parameters:
1256: + ts           - `TS` object you wish to monitor
1257: . name         - the monitor type one is seeking
1258: . help         - message indicating what monitoring is done
1259: . manual       - manual page for the monitor
1260: . monitor      - the monitor function, its context must be a `PetscViewerAndFormat`
1261: - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects

1263:   Calling sequence of `monitor`:
1264: + ts      - the `TS` context
1265: . step    - iteration number (after the final time step the monitor routine is called with
1266:                 a step of -1, this is at the final time which may have been interpolated to)
1267: . time    - current time
1268: . u       - current iterate
1269: . numcost - number of cost functions
1270: . lambda  - sensitivities to initial conditions
1271: . mu      - sensitivities to parameters
1272: - vf      - the `PetscViewer` and format the monitor is using

1274:   Calling sequence of `monitorsetup`:
1275: + ts - the `TS` object being monitored
1276: - vf - the `PetscViewer` and format the monitor is using

1278:   Level: developer

1280: .seealso: [](ch_ts), `PetscOptionsCreateViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1281:           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1282:           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
1283:           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1284:           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1285:           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1286:           `PetscOptionsFList()`, `PetscOptionsEList()`, `PetscViewerAndFormat`
1287: @*/
1288: PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS ts, PetscInt step, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf), PetscErrorCode (*monitorsetup)(TS ts, PetscViewerAndFormat *vf))
1289: {
1290:   PetscViewer       viewer;
1291:   PetscViewerFormat format;
1292:   PetscBool         flg;

1294:   PetscFunctionBegin;
1295:   PetscCall(PetscOptionsCreateViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
1296:   if (flg) { /* the user requested this monitor via the options database */
1297:     PetscViewerAndFormat *vf;
1298:     PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
1299:     PetscCall(PetscViewerDestroy(&viewer)); /* vf holds its own reference to the viewer */
1300:     if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
1301:     PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscCtx))monitor, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
1302:   }
1303:   PetscFunctionReturn(PETSC_SUCCESS);
1304: }

1306: /*@C
1307:   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1308:   timestep to display the iteration's  progress.

1310:   Logically Collective

1312:   Input Parameters:
1313: + ts              - the `TS` context obtained from `TSCreate()`
1314: . adjointmonitor  - monitoring routine
1315: . adjointmctx     - [optional] context for private data for the monitor routine (use `NULL` if no context is desired)
1316: - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for its calling sequence

1318:   Calling sequence of `adjointmonitor`:
1319: + ts          - the `TS` context
1320: . steps       - iteration number (after the final time step the monitor routine is called with
1321:                 a step of -1, this is at the final time which may have been interpolated to)
1322: . time        - current time
1323: . u           - current iterate
1324: . numcost     - number of cost functions
1325: . lambda      - sensitivities to initial conditions
1326: . mu          - sensitivities to parameters
1327: - adjointmctx - [optional] adjoint monitoring context

1329:   Level: intermediate

1331:   Note:
1332:   This routine adds an additional monitor to the list of monitors that
1333:   already has been loaded.

1335:   Fortran Notes:
1336:   Only a single monitor function can be set for each `TS` object

1338: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`, `PetscCtxDestroyFn`
1339: @*/
1340: PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, PetscCtx adjointmctx), PetscCtx adjointmctx, PetscCtxDestroyFn *adjointmdestroy)
1341: {
1342:   PetscFunctionBegin;
1344:   /* scan the ADJOINT monitor list for a duplicate; previously this looped over
1345:      ts->numbermonitors (the forward-solve monitor count), indexing past the valid
1346:      entries of the adjoint arrays */
1344:   for (PetscInt i = 0; i < ts->numberadjointmonitors; i++) {
1345:     PetscBool identical;

1347:     PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1348:     if (identical) PetscFunctionReturn(PETSC_SUCCESS); /* same monitor already installed: do not add twice */
1349:   }
1350:   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1351:   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1352:   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1353:   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = adjointmctx;
1354:   PetscFunctionReturn(PETSC_SUCCESS);
1355: }

1357: /*@C
1358:   TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.

1360:   Logically Collective

1362:   Input Parameter:
1363: . ts - the `TS` context obtained from `TSCreate()`

1365:   Notes:
1366:   There is no way to remove a single, specific monitor.

1368:   Level: intermediate

1370: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1371: @*/
1372: PetscErrorCode TSAdjointMonitorCancel(TS ts)
1373: {
1374:   PetscInt i;

1376:   PetscFunctionBegin;
1378:   for (i = 0; i < ts->numberadjointmonitors; i++) {
1379:     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i])); /* free each context with its registered destroy routine */
1380:   }
1381:   ts->numberadjointmonitors = 0;
1382:   PetscFunctionReturn(PETSC_SUCCESS);
1383: }

1385: /*@C
1386:   TSAdjointMonitorDefault - the default monitor of adjoint computations

1388:   Input Parameters:
1389: + ts      - the `TS` context
1390: . step    - iteration number (after the final time step the monitor routine is called with a
1391: step of -1, this is at the final time which may have been interpolated to)
1392: . time    - current time
1393: . v       - current iterate
1394: . numcost - number of cost functions
1395: . lambda  - sensitivities to initial conditions
1396: . mu      - sensitivities to parameters
1397: - vf      - the viewer and format

1399:   Level: intermediate

1401: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1402: @*/
1403: PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec lambda[], Vec mu[], PetscViewerAndFormat *vf)
1404: {
1405:   PetscViewer viewer = vf->viewer;

1407:   PetscFunctionBegin;
1408:   (void)v;       /* unused; casts keep -Wunused-parameter quiet while preserving the monitor signature */
1409:   (void)numcost;
1410:   (void)lambda;
1411:   (void)mu;
1413:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1414:   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1415:   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
1416:   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1417:   PetscCall(PetscViewerPopFormat(viewer));
1418:   PetscFunctionReturn(PETSC_SUCCESS);
1419: }

1421: /*@C
1422:   TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1423:   `VecView()` for the sensitivities to initial states at each timestep

1425:   Collective

1427:   Input Parameters:
1428: + ts      - the `TS` context
1429: . step    - current time-step
1430: . ptime   - current time
1431: . u       - current state
1432: . numcost - number of cost functions
1433: . lambda  - sensitivities to initial conditions
1434: . mu      - sensitivities to parameters
1435: - dummy   - either a viewer or `NULL`

1437:   Level: intermediate

1439: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1440: @*/
1441: PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[], void *dummy)
1442: {
1443:   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1444:   PetscDraw        draw;
1445:   PetscReal        xl, yl, xr, yr, h;
1446:   char             time[32];

1448:   PetscFunctionBegin;
1449:   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS); /* draw every howoften steps, or only at convergence when howoften == -1 */

1451:   PetscCall(VecView(lambda[0], ictx->viewer)); /* only the first cost function's lambda is drawn */
1452:   PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1453:   PetscCall(PetscSNPrintf(time, 32, "Timestep %" PetscInt_FMT " Time %g", step, (double)ptime));
1454:   PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1455:   h = yl + .95 * (yr - yl); /* place the caption near the top of the drawing area */
1456:   PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1457:   PetscCall(PetscDrawFlush(draw));
1458:   PetscFunctionReturn(PETSC_SUCCESS);
1459: }

1461: /*@C
1462:   TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from options database.

1464:   Collective

1466:   Input Parameters:
1467: + ts                 - the `TS` context
1468: - PetscOptionsObject - the options context

1470:   Options Database Keys:
1471: + -ts_adjoint_solve <yes,no>     - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`)
1472: . -ts_adjoint_monitor            - print information at each adjoint time step
1473: - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically
1474: 
1475:   Level: developer

1477:   Note:
1478:   This is not normally called directly by users

1480: .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1481: @*/
1482: PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems PetscOptionsObject)
1483: {
1484:   PetscBool tflg, opt;

1486:   PetscFunctionBegin;
1488:   PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
1489:   tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
1490:   PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
1491:   if (opt) { /* only change state when the option was explicitly given */
1492:     PetscCall(TSSetSaveTrajectory(ts)); /* adjoint requires the forward trajectory to be stored */
1493:     ts->adjoint_solve = tflg;
1494:   }
1495:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
1496:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
1497:   opt = PETSC_FALSE;
1498:   PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
1499:   if (opt) {
1500:     TSMonitorDrawCtx ctx;
1501:     PetscInt         howoften = 1;

1503:     PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
1504:     PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
1505:     PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscCtxDestroyFn *)TSMonitorDrawCtxDestroy));
1506:   }
1507:   PetscFunctionReturn(PETSC_SUCCESS);
1508: }

1510: /*@
1511:   TSAdjointStep - Steps one time step backward in the adjoint run

1513:   Collective

1515:   Input Parameter:
1516: . ts - the `TS` context obtained from `TSCreate()`

1518:   Level: intermediate

1520: .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1521: @*/
1522: PetscErrorCode TSAdjointStep(TS ts)
1523: {
1524:   DM dm;

1526:   PetscFunctionBegin;
1528:   PetscCall(TSGetDM(ts, &dm));
1529:   PetscCall(TSAdjointSetUp(ts)); /* no-op if already set up */
1530:   ts->steps--; /* must decrease the step index before the adjoint step is taken. */

1532:   ts->reason     = TS_CONVERGED_ITERATING;
1533:   ts->ptime_prev = ts->ptime;
1534:   PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
1535:   PetscUseTypeMethod(ts, adjointstep); /* the TS implementation must provide adjointstep; error otherwise */
1536:   PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
1537:   ts->adjoint_steps++;

1539:   if (ts->reason < 0) { /* step failed */
1540:     PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
1541:   } else if (!ts->reason) { /* still iterating: stop once the backward step budget is exhausted */
1542:     if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1543:   }
1544:   PetscFunctionReturn(PETSC_SUCCESS);
1545: }

1547: /*@
1548:   TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

1550:   Collective

1553:   Input Parameter:
1554: . ts - the `TS` context obtained from `TSCreate()`

1556:   Options Database Key:
1557: . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values

1559:   Level: intermediate

1561:   Notes:
1562:   This must be called after a call to `TSSolve()` that solves the forward problem

1564:   By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time

1566: .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1567: @*/
PetscErrorCode TSAdjointSolve(TS ts)
{
  static PetscBool cite = PETSC_FALSE;
#if defined(TSADJOINT_STAGE)
  PetscLogStage adjoint_stage;
#endif

  PetscFunctionBegin;
  /* register the TSAdjoint citation once per run (cite is static) */
  PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
                                   "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
                                   "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
                                   "  journal       = {SIAM Journal on Scientific Computing},\n"
                                   "  volume        = {44},\n"
                                   "  number        = {1},\n"
                                   "  pages         = {C1-C24},\n"
                                   "  doi           = {10.1137/21M140078X},\n"
                                   "  year          = {2022}\n}\n",
                                   &cite));
#if defined(TSADJOINT_STAGE)
  /* optional dedicated logging stage for the whole adjoint run (compile-time switch) */
  PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
  PetscCall(PetscLogStagePush(adjoint_stage));
#endif
  PetscCall(TSAdjointSetUp(ts));

  /* reset time step and iteration counters */
  ts->adjoint_steps     = 0;
  ts->ksp_its           = 0;
  ts->snes_its          = 0;
  ts->num_snes_failures = 0;
  ts->reject            = 0;
  ts->reason            = TS_CONVERGED_ITERATING;

  /* by default step all the way back over the steps taken by the forward TSSolve() */
  if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
  if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

  /* main backward loop: restore the forward state from the trajectory, monitor, then step backward */
  while (!ts->reason) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
    PetscCall(TSAdjointEventHandler(ts));
    PetscCall(TSAdjointStep(ts));
    if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
  }
  /* if we stepped all the way back to step 0, restore and monitor the initial state as well */
  if (!ts->steps) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
  }
  ts->solvetime = ts->ptime;
  PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
  PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
  /* reset so a subsequent forward+adjoint solve recomputes the step count */
  ts->adjoint_max_steps = 0;
#if defined(TSADJOINT_STAGE)
  PetscCall(PetscLogStagePop());
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

1625: /*@C
1626:   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`

1628:   Collective

1630:   Input Parameters:
1631: + ts      - time stepping context obtained from `TSCreate()`
1632: . step    - step number that has just completed
1633: . ptime   - model time of the state
1634: . u       - state at the current model time
1635: . numcost - number of cost functions (dimension of lambda  or mu)
1636: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1637: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

1639:   Level: developer

1641:   Note:
1642:   `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1643:   Users would almost never call this routine directly.

1645: .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1646: @*/
1647: PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[])
1648: {
1649:   PetscInt i, n = ts->numberadjointmonitors;

1651:   PetscFunctionBegin;
1654:   PetscCall(VecLockReadPush(u));
1655:   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1656:   PetscCall(VecLockReadPop(u));
1657:   PetscFunctionReturn(PETSC_SUCCESS);
1658: }

1660: /*@
1661:   TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

1663:   Collective

1665:   Input Parameter:
1666: . ts - time stepping context

1668:   Level: advanced

1670:   Notes:
1671:   This function cannot be called until `TSAdjointStep()` has been completed.

1673: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
1674:  @*/
PetscErrorCode TSAdjointCostIntegral(TS ts)
{
  PetscFunctionBegin;
  /* dispatch to the implementation's adjoint quadrature routine; errors if the TS type does not provide one */
  PetscUseTypeMethod(ts, adjointintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}

1683: /* ------------------ Forward (tangent linear) sensitivity  ------------------*/

1685: /*@
1686:   TSForwardSetUp - Sets up the internal data structures for the later use
1687:   of forward sensitivity analysis

1689:   Collective

1691:   Input Parameter:
1692: . ts - the `TS` context obtained from `TSCreate()`

1694:   Level: advanced

1696: .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1697: @*/
1698: PetscErrorCode TSForwardSetUp(TS ts)
1699: {
1700:   PetscFunctionBegin;
1702:   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1703:   PetscTryTypeMethod(ts, forwardsetup);
1704:   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1705:   ts->forwardsetupcalled = PETSC_TRUE;
1706:   PetscFunctionReturn(PETSC_SUCCESS);
1707: }

1709: /*@
1710:   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

1712:   Collective

1714:   Input Parameter:
1715: . ts - the `TS` context obtained from `TSCreate()`

1717:   Level: advanced

1719: .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1720: @*/
1721: PetscErrorCode TSForwardReset(TS ts)
1722: {
1723:   TS quadts = ts->quadraturets;

1725:   PetscFunctionBegin;
1727:   PetscTryTypeMethod(ts, forwardreset);
1728:   PetscCall(MatDestroy(&ts->mat_sensip));
1729:   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1730:   PetscCall(VecDestroy(&ts->vec_sensip_col));
1731:   ts->forward_solve      = PETSC_FALSE;
1732:   ts->forwardsetupcalled = PETSC_FALSE;
1733:   PetscFunctionReturn(PETSC_SUCCESS);
1734: }

1736: /*@
1737:   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

1739:   Input Parameters:
1740: + ts        - the `TS` context obtained from `TSCreate()`
1741: . numfwdint - number of integrals
1742: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1744:   Level: deprecated

1746: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1747: @*/
1748: PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec vp[])
1749: {
1750:   PetscFunctionBegin;
1752:   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()");
1753:   if (!ts->numcost) ts->numcost = numfwdint;

1755:   ts->vecs_integral_sensip = vp;
1756:   PetscFunctionReturn(PETSC_SUCCESS);
1757: }

1759: /*@
1760:   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

1762:   Input Parameter:
1763: . ts - the `TS` context obtained from `TSCreate()`

1765:   Output Parameters:
1766: + numfwdint - number of integrals
1767: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1769:   Level: deprecated

1771: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1772: @*/
PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec *vp[])
{
  PetscFunctionBegin;
  /* NOTE(review): vp is asserted non-NULL here yet also guarded with `if (vp)` below;
     one of the two is redundant -- confirm whether vp is meant to be optional */
  PetscAssertPointer(vp, 3);
  if (numfwdint) *numfwdint = ts->numcost;
  if (vp) *vp = ts->vecs_integral_sensip;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1783: /*@
1784:   TSForwardStep - Compute the forward sensitivity for one time step.

1786:   Collective

1788:   Input Parameter:
1789: . ts - time stepping context

1791:   Level: advanced

1793:   Notes:
1794:   This function cannot be called until `TSStep()` has been completed.

1796: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1797: @*/
1798: PetscErrorCode TSForwardStep(TS ts)
1799: {
1800:   PetscFunctionBegin;
1802:   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1803:   PetscUseTypeMethod(ts, forwardstep);
1804:   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1805:   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1806:   PetscFunctionReturn(PETSC_SUCCESS);
1807: }

1809: /*@
1810:   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution  w.r.t. the problem parameters and initial values.

1812:   Logically Collective

1814:   Input Parameters:
1815: + ts   - the `TS` context obtained from `TSCreate()`
1816: . nump - number of parameters
1817: - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

1819:   Level: beginner

1821:   Notes:
1822:   Use `PETSC_DETERMINE` to use the number of columns of `Smat` for `nump`

1824:   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1825:   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1826:   You must call this function before `TSSolve()`.
1827:   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.

1829: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1830: @*/
1831: PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1832: {
1833:   PetscFunctionBegin;
1836:   ts->forward_solve = PETSC_TRUE;
1837:   if (nump == PETSC_DEFAULT || nump == PETSC_DETERMINE) PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1838:   else ts->num_parameters = nump;
1839:   PetscCall(PetscObjectReference((PetscObject)Smat));
1840:   PetscCall(MatDestroy(&ts->mat_sensip));
1841:   ts->mat_sensip = Smat;
1842:   PetscFunctionReturn(PETSC_SUCCESS);
1843: }

1845: /*@
1846:   TSForwardGetSensitivities - Returns the trajectory sensitivities

1848:   Not Collective, but Smat returned is parallel if ts is parallel

  Input Parameter:
. ts   - the `TS` context obtained from `TSCreate()`

  Output Parameters:
+ nump - number of parameters
- Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

1855:   Level: intermediate

1857: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1858: @*/
1859: PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1860: {
1861:   PetscFunctionBegin;
1863:   if (nump) *nump = ts->num_parameters;
1864:   if (Smat) *Smat = ts->mat_sensip;
1865:   PetscFunctionReturn(PETSC_SUCCESS);
1866: }

1868: /*@
1869:   TSForwardCostIntegral - Evaluate the cost integral in the forward run.

1871:   Collective

1873:   Input Parameter:
1874: . ts - time stepping context

1876:   Level: advanced

1878:   Note:
1879:   This function cannot be called until `TSStep()` has been completed.

1881: .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
1882: @*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  /* dispatch to the implementation's forward quadrature routine; errors if the TS type does not provide one */
  PetscUseTypeMethod(ts, forwardintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}

1891: /*@
1892:   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

1894:   Collective

1896:   Input Parameters:
1897: + ts   - the `TS` context obtained from `TSCreate()`
1898: - didp - parametric sensitivities of the initial condition

1900:   Level: intermediate

1902:   Notes:
1903:   `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way.
1904:   This function is used to set initial values for tangent linear variables.

1906: .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
1907: @*/
1908: PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
1909: {
1910:   PetscFunctionBegin;
1913:   if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DETERMINE, didp));
1914:   PetscFunctionReturn(PETSC_SUCCESS);
1915: }

1917: /*@
1918:   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

1920:   Input Parameter:
1921: . ts - the `TS` context obtained from `TSCreate()`

1923:   Output Parameters:
1924: + ns - number of stages
1925: - S  - tangent linear sensitivities at the intermediate stages

1927:   Level: advanced

1929: .seealso: `TS`
1930: @*/
1931: PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1932: {
1933:   PetscFunctionBegin;

1936:   if (!ts->ops->getstages) *S = NULL;
1937:   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1938:   PetscFunctionReturn(PETSC_SUCCESS);
1939: }

1941: /*@
1942:   TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time

1944:   Input Parameters:
1945: + ts  - the `TS` context obtained from `TSCreate()`
1946: - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run

1948:   Output Parameter:
1949: . quadts - the child `TS` context

1951:   Level: intermediate

1953: .seealso: [](ch_ts), `TSGetQuadratureTS()`
1954: @*/
1955: PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1956: {
1957:   char prefix[128];

1959:   PetscFunctionBegin;
1961:   PetscAssertPointer(quadts, 3);
1962:   PetscCall(TSDestroy(&ts->quadraturets));
1963:   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1964:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1965:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1966:   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1967:   *quadts = ts->quadraturets;

1969:   if (ts->numcost) {
1970:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1971:   } else {
1972:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1973:   }
1974:   ts->costintegralfwd = fwd;
1975:   PetscFunctionReturn(PETSC_SUCCESS);
1976: }

1978: /*@
1979:   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

1981:   Input Parameter:
1982: . ts - the `TS` context obtained from `TSCreate()`

1984:   Output Parameters:
1985: + fwd    - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1986: - quadts - the child `TS` context

1988:   Level: intermediate

1990: .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1991: @*/
1992: PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1993: {
1994:   PetscFunctionBegin;
1996:   if (fwd) *fwd = ts->costintegralfwd;
1997:   if (quadts) *quadts = ts->quadraturets;
1998:   PetscFunctionReturn(PETSC_SUCCESS);
1999: }

2001: /*@
2002:   TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

2004:   Collective

2006:   Input Parameters:
2007: + ts - the `TS` context obtained from `TSCreate()`
2008: - x  - state vector

2010:   Output Parameters:
2011: + J    - Jacobian matrix
2012: - Jpre - matrix used to compute the preconditioner for `J` (may be same as `J`)

2014:   Level: developer

2016:   Note:
2017:   Uses finite differencing when `TS` Jacobian is not available.

2019: .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
2020: @*/
PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
{
  SNES snes                                          = ts->snes;
  PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

  PetscFunctionBegin;
  /*
    Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
    because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
    explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
    coloring is used.
  */
  PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
  if (jac == SNESComputeJacobianDefaultColor) {
    Vec f;
    /* make x the SNES base point so the coloring differencing is taken about the requested state */
    PetscCall(SNESSetSolution(snes, x));
    PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
    /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
    PetscCall(SNESComputeFunction(snes, x, f));
  }
  PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
  PetscFunctionReturn(PETSC_SUCCESS);
}