Actual source code: tssen.c

  1: #include <petsc/private/tsimpl.h>
  2: #include <petscdraw.h>

  4: PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;

  6: /* #define TSADJOINT_STAGE */

  8: /* ------------------------ Sensitivity Context ---------------------------*/

 10: /*@C
 11:   TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

 13:   Logically Collective

 15:   Input Parameters:
 16: + ts   - `TS` context obtained from `TSCreate()`
 17: . Amat - JacobianP matrix
 18: . func - function
 19: - ctx  - [optional] user-defined function context

 21:   Level: intermediate

 23:   Note:
 24:   `Amat` has the same number of rows and the same row parallel layout as `u`, and the same number of columns and parallel layout as `p`
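
  Example usage, a minimal sketch; `MyRHSJacobianP`, `n` (state dimension), `p` (number of parameters), and `user` are hypothetical user-side names:
.vb
  PetscErrorCode MyRHSJacobianP(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
  {
    PetscFunctionBeginUser;
    /* fill A = dG/dP at (t, U), e.g. with MatSetValues(), then assemble */
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  Mat Jacp;
  PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, n, p, NULL, &Jacp));
  PetscCall(TSSetRHSJacobianP(ts, Jacp, MyRHSJacobianP, &user));
.ve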

 26: .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()`
 27: @*/
 28: PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, void *ctx)
 29: {
 30:   PetscFunctionBegin;

 34:   ts->rhsjacobianp    = func;
 35:   ts->rhsjacobianpctx = ctx;
 36:   if (Amat) {
 37:     PetscCall(PetscObjectReference((PetscObject)Amat));
 38:     PetscCall(MatDestroy(&ts->Jacprhs));
 39:     ts->Jacprhs = Amat;
 40:   }
 41:   PetscFunctionReturn(PETSC_SUCCESS);
 42: }

 44: /*@C
 45:   TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

 47:   Logically Collective

 49:   Input Parameter:
 50: . ts - `TS` context obtained from `TSCreate()`

 52:   Output Parameters:
 53: + Amat - JacobianP matrix
 54: . func - function
 55: - ctx  - [optional] user-defined function context

 57:   Level: intermediate

 59:   Note:
 60:   `Amat` has the same number of rows and the same row parallel layout as `u`, and the same number of columns and parallel layout as `p`

 62: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn`
 63: @*/
 64: PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, void **ctx)
 65: {
 66:   PetscFunctionBegin;
 67:   if (func) *func = ts->rhsjacobianp;
 68:   if (ctx) *ctx = ts->rhsjacobianpctx;
 69:   if (Amat) *Amat = ts->Jacprhs;
 70:   PetscFunctionReturn(PETSC_SUCCESS);
 71: }

 73: /*@C
 74:   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.

 76:   Collective

 78:   Input Parameters:
 79: + ts - The `TS` context obtained from `TSCreate()`
 80: . t  - the time
 81: - U  - the solution at which to compute the Jacobian

 83:   Output Parameter:
 84: . Amat - the computed Jacobian

 86:   Level: developer

 88: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
 89: @*/
 90: PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
 91: {
 92:   PetscFunctionBegin;
 93:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

 97:   if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
 98:   else {
 99:     PetscBool assembled;
100:     PetscCall(MatZeroEntries(Amat));
101:     PetscCall(MatAssembled(Amat, &assembled));
102:     if (!assembled) {
103:       PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
104:       PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
105:     }
106:   }
107:   PetscFunctionReturn(PETSC_SUCCESS);
108: }

110: /*@C
111:   TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix.

113:   Logically Collective

115:   Input Parameters:
116: + ts   - `TS` context obtained from `TSCreate()`
117: . Amat - JacobianP matrix
118: . func - function
119: - ctx  - [optional] user-defined function context

121:   Calling sequence of `func`:
122: + ts    - the `TS` context
123: . t     - current timestep
124: . U     - input vector (current ODE solution)
125: . Udot  - time derivative of state vector
126: . shift - shift to apply, as in `TSSetIJacobian()`
127: . A     - output matrix
128: - ctx   - [optional] user-defined function context

130:   Level: intermediate

132:   Note:
133:   `Amat` has the same number of rows and the same row parallel layout as `u`, and the same number of columns and parallel layout as `p`
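
  Example usage, a minimal sketch; `MyIJacobianP`, `Jacp`, and `user` are hypothetical user-side names:
.vb
  PetscErrorCode MyIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx)
  {
    PetscFunctionBeginUser;
    /* fill A = dF/dP at (t, U, Udot), then assemble; shift plays the same role as in TSSetIJacobian() */
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSSetIJacobianP(ts, Jacp, MyIJacobianP, &user));
.ve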

135: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136: @*/
137: PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void *ctx)
138: {
139:   PetscFunctionBegin;

143:   ts->ijacobianp    = func;
144:   ts->ijacobianpctx = ctx;
145:   if (Amat) {
146:     PetscCall(PetscObjectReference((PetscObject)Amat));
147:     PetscCall(MatDestroy(&ts->Jacp));
148:     ts->Jacp = Amat;
149:   }
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: }

153: /*@
154:   TSComputeIJacobianP - Runs the user-defined IJacobianP function.

156:   Collective

158:   Input Parameters:
159: + ts    - the `TS` context
160: . t     - current timestep
161: . U     - state vector
162: . Udot  - time derivative of state vector
163: . shift - shift to apply, as in `TSSetIJacobian()`
164: - imex  - flag indicates if the method is IMEX so that the RHSJacobianP should be kept separate

166:   Output Parameter:
167: . Amat - Jacobian matrix

169:   Level: developer

171: .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
172: @*/
173: PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
174: {
175:   PetscFunctionBegin;
176:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

181:   PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
182:   if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
183:   if (imex) {
184:     if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
185:       PetscBool assembled;
186:       PetscCall(MatZeroEntries(Amat));
187:       PetscCall(MatAssembled(Amat, &assembled));
188:       if (!assembled) {
189:         PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
190:         PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
191:       }
192:     }
193:   } else {
194:     if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
195:     if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
196:       PetscCall(MatScale(Amat, -1));
197:     } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
198:       MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
199:       if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
200:         PetscCall(MatZeroEntries(Amat));
201:       }
202:       PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
203:     }
204:   }
205:   PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
206:   PetscFunctionReturn(PETSC_SUCCESS);
207: }

209: /*@C
210:   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

212:   Logically Collective

214:   Input Parameters:
215: + ts           - the `TS` context obtained from `TSCreate()`
216: . numcost      - number of gradients to be computed, this is the number of cost functions
217: . costintegral - vector that stores the integral values
218: . rf           - routine for evaluating the integrand function
219: . drduf        - function that computes the gradients of r with respect to u
220: . drdpf        - function that computes the gradients of r with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
221: . fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
222: - ctx          - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)

224:   Calling sequence of `rf`:
225: + ts  - the integrator
226: . t   - the time
227: . U   - the solution
228: . F   - the computed value of the function
229: - ctx - the user context

231:   Calling sequence of `drduf`:
232: + ts   - the integrator
233: . t    - the time
234: . U    - the solution
235: . dRdU - the computed gradients of r with respect to u
236: - ctx  - the user context

238:   Calling sequence of `drdpf`:
239: + ts   - the integrator
240: . t    - the time
241: . U    - the solution
242: . dRdP - the computed gradients of r with respect to p
243: - ctx  - the user context

245:   Level: deprecated

247:   Notes:
248:   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions

250:   Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead
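
  A minimal sketch of the recommended replacement for evaluating an integral cost in the forward run; `QuadRHSFunction` is a hypothetical user routine that evaluates the integrand r(U,P,t):
.vb
  TS quadts;
  PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts)); /* PETSC_TRUE: evaluate in the forward run */
  PetscCall(TSSetRHSFunction(quadts, NULL, QuadRHSFunction, &user));
.ve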

252: .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
253:           `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
254: @*/
255: PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, void *ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx), PetscBool fwd, void *ctx)
256: {
257:   PetscFunctionBegin;
260:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
261:   if (!ts->numcost) ts->numcost = numcost;

263:   if (costintegral) {
264:     PetscCall(PetscObjectReference((PetscObject)costintegral));
265:     PetscCall(VecDestroy(&ts->vec_costintegral));
266:     ts->vec_costintegral = costintegral;
267:   } else {
268:     if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
269:       PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
270:     } else {
271:       PetscCall(VecSet(ts->vec_costintegral, 0.0));
272:     }
273:   }
274:   if (!ts->vec_costintegrand) {
275:     PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
276:   } else {
277:     PetscCall(VecSet(ts->vec_costintegrand, 0.0));
278:   }
279:   ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
280:   ts->costintegrand    = rf;
281:   ts->costintegrandctx = ctx;
282:   ts->drdufunction     = drduf;
283:   ts->drdpfunction     = drdpf;
284:   PetscFunctionReturn(PETSC_SUCCESS);
285: }

287: /*@
288:   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
289:   It is valid to call the routine after a backward run.

291:   Not Collective

293:   Input Parameter:
294: . ts - the `TS` context obtained from `TSCreate()`

296:   Output Parameter:
297: . v - the vector containing the integrals for each cost function

299:   Level: intermediate
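
  Example usage, a minimal sketch printing the integral values after an adjoint run:
.vb
  Vec q;
  PetscCall(TSAdjointSolve(ts));
  PetscCall(TSGetCostIntegral(ts, &q));
  PetscCall(VecView(q, PETSC_VIEWER_STDOUT_WORLD));
.ve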

301: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
302: @*/
303: PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
304: {
305:   TS quadts;

307:   PetscFunctionBegin;
309:   PetscAssertPointer(v, 2);
310:   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
311:   *v = quadts->vec_sol;
312:   PetscFunctionReturn(PETSC_SUCCESS);
313: }

315: /*@
316:   TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

318:   Input Parameters:
319: + ts - the `TS` context
320: . t  - current time
321: - U  - state vector, i.e. current solution

323:   Output Parameter:
324: . Q - vector of size numcost to hold the outputs

326:   Level: deprecated

328:   Note:
329:   Most users should not need to explicitly call this routine, as it
330:   is used internally within the sensitivity analysis context.

332: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
333: @*/
334: PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
335: {
336:   PetscFunctionBegin;

341:   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
342:   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
343:   else PetscCall(VecZeroEntries(Q));
344:   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
345:   PetscFunctionReturn(PETSC_SUCCESS);
346: }

348: // PetscClangLinter pragma disable: -fdoc-*
349: /*@
350:   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

352:   Level: deprecated

354: @*/
355: PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
356: {
357:   PetscFunctionBegin;
358:   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);

362:   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
363:   PetscFunctionReturn(PETSC_SUCCESS);
364: }

366: // PetscClangLinter pragma disable: -fdoc-*
367: /*@
368:   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

370:   Level: deprecated

372: @*/
373: PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
374: {
375:   PetscFunctionBegin;
376:   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);

380:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
381:   PetscFunctionReturn(PETSC_SUCCESS);
382: }

384: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
385: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
386: /*@C
387:   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.

389:   Logically Collective

391:   Input Parameters:
392: + ts   - `TS` context obtained from `TSCreate()`
393: . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
394: . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
395: . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
396: . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
397: . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
398: . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
399: . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
400: - hessianproductfunc4 - vector-Hessian-vector product function for F_PP

402:   Calling sequence of `ihessianproductfunc1`:
403: + ts  - the `TS` context
404: . t   - current timestep
405: . U   - input vector (current ODE solution)
406: . Vl  - an array of input vectors to be left-multiplied with the Hessian
407: . Vr  - input vector to be right-multiplied with the Hessian
408: . VHV - an array of output vectors for vector-Hessian-vector product
409: - ctx - [optional] user-defined function context

411:   Level: intermediate

413:   Notes:
414:   All other functions have the same calling sequence as `ihessianproductfunc1`, so their
415:   descriptions are omitted for brevity.

417:   The first Hessian function and the working array are required.
418:   As an example of how to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
419:   $ Vl_n^T*F_UP*Vr
420:   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
421:   Each entry of F_UP corresponds to the derivative
422:   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
423:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
424:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
425:   If the cost function is a scalar, there will be only one vector in Vl and VHV.

427: .seealso: [](ch_ts), `TS`
428: @*/
429: PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
430: {
431:   PetscFunctionBegin;
433:   PetscAssertPointer(ihp1, 2);

435:   ts->ihessianproductctx = ctx;
436:   if (ihp1) ts->vecs_fuu = ihp1;
437:   if (ihp2) ts->vecs_fup = ihp2;
438:   if (ihp3) ts->vecs_fpu = ihp3;
439:   if (ihp4) ts->vecs_fpp = ihp4;
440:   ts->ihessianproduct_fuu = ihessianproductfunc1;
441:   ts->ihessianproduct_fup = ihessianproductfunc2;
442:   ts->ihessianproduct_fpu = ihessianproductfunc3;
443:   ts->ihessianproduct_fpp = ihessianproductfunc4;
444:   PetscFunctionReturn(PETSC_SUCCESS);
445: }

447: /*@
448:   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

450:   Collective

452:   Input Parameters:
453: + ts - The `TS` context obtained from `TSCreate()`
454: . t  - the time
455: . U  - the solution at which to compute the Hessian product
456: . Vl - the array of input vectors to be multiplied with the Hessian from the left
457: - Vr - the input vector to be multiplied with the Hessian from the right

459:   Output Parameter:
460: . VHV - the array of output vectors that store the Hessian product

462:   Level: developer

464:   Note:
465:   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
466:   so most users would not generally call this routine themselves.

468: .seealso: [](ch_ts), `TSSetIHessianProduct()`
469: @*/
470: PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
471: {
472:   PetscFunctionBegin;
473:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

477:   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

479:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
480:   if (ts->rhshessianproduct_guu) {
481:     PetscInt nadj;
482:     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
483:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
484:   }
485:   PetscFunctionReturn(PETSC_SUCCESS);
486: }

488: /*@
489:   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

491:   Collective

493:   Input Parameters:
494: + ts - The `TS` context obtained from `TSCreate()`
495: . t  - the time
496: . U  - the solution at which to compute the Hessian product
497: . Vl - the array of input vectors to be multiplied with the Hessian from the left
498: - Vr - the input vector to be multiplied with the Hessian from the right

500:   Output Parameter:
501: . VHV - the array of output vectors that store the Hessian product

503:   Level: developer

505:   Note:
506:   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
507:   so most users would not generally call this routine themselves.

509: .seealso: [](ch_ts), `TSSetIHessianProduct()`
510: @*/
511: PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
512: {
513:   PetscFunctionBegin;
514:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

518:   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

520:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
521:   if (ts->rhshessianproduct_gup) {
522:     PetscInt nadj;
523:     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
524:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
525:   }
526:   PetscFunctionReturn(PETSC_SUCCESS);
527: }

529: /*@
530:   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

532:   Collective

534:   Input Parameters:
535: + ts - The `TS` context obtained from `TSCreate()`
536: . t  - the time
537: . U  - the solution at which to compute the Hessian product
538: . Vl - the array of input vectors to be multiplied with the Hessian from the left
539: - Vr - the input vector to be multiplied with the Hessian from the right

541:   Output Parameter:
542: . VHV - the array of output vectors that store the Hessian product

544:   Level: developer

546:   Note:
547:   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
548:   so most users would not generally call this routine themselves.

550: .seealso: [](ch_ts), `TSSetIHessianProduct()`
551: @*/
552: PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
553: {
554:   PetscFunctionBegin;
555:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

559:   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

561:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
562:   if (ts->rhshessianproduct_gpu) {
563:     PetscInt nadj;
564:     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
565:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
566:   }
567:   PetscFunctionReturn(PETSC_SUCCESS);
568: }

570: /*@C
571:   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

573:   Collective

575:   Input Parameters:
576: + ts - The `TS` context obtained from `TSCreate()`
577: . t  - the time
578: . U  - the solution at which to compute the Hessian product
579: . Vl - the array of input vectors to be multiplied with the Hessian from the left
580: - Vr - the input vector to be multiplied with the Hessian from the right

582:   Output Parameter:
583: . VHV - the array of output vectors that store the Hessian product

585:   Level: developer

587:   Note:
588:   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
589:   so most users would not generally call this routine themselves.

591: .seealso: [](ch_ts), `TSSetIHessianProduct()`
592: @*/
593: PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
594: {
595:   PetscFunctionBegin;
596:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

600:   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 4 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

602:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
603:   if (ts->rhshessianproduct_gpp) {
604:     PetscInt nadj;
605:     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
606:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
607:   }
608:   PetscFunctionReturn(PETSC_SUCCESS);
609: }

611: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
612: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
613: /*@C
614:   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
615:   product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state
616:   variable.

618:   Logically Collective

620:   Input Parameters:
621: + ts     - `TS` context obtained from `TSCreate()`
622: . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
623: . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
624: . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
625: . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
626: . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
627: . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
628: . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
629: . hessianproductfunc4 - vector-Hessian-vector product function for G_PP
630: - ctx    - [optional] user-defined function context

632:   Calling sequence of `rhshessianproductfunc1`:
633: + ts  - the `TS` context
634: . t   - current timestep
635: . U   - input vector (current ODE solution)
636: . Vl  - an array of input vectors to be left-multiplied with the Hessian
637: . Vr  - input vector to be right-multiplied with the Hessian
638: . VHV - an array of output vectors for vector-Hessian-vector product
639: - ctx - [optional] user-defined function context

641:   Level: intermediate

643:   Notes:
644:   All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
645:   descriptions are omitted for brevity.

647:   The first Hessian function and the working array are required.

649:   As an example of how to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
650:   $ Vl_n^T*G_UP*Vr
651:   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
652:   Each entry of G_UP corresponds to the derivative
653:   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
654:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
655:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
656:   If the cost function is a scalar, there will be only one vector in Vl and VHV.
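
  A minimal sketch of a G_UU callback for a hypothetical two-component ODE whose only nonzero second derivative is G_UU[0][1][1] = 6*u[1]; the indexing follows the formula above:
.vb
  PetscErrorCode MyRHSHessianProductUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx)
  {
    const PetscScalar *vl, *vr, *u;
    PetscScalar       *vhv;

    PetscFunctionBeginUser;
    PetscCall(VecGetArrayRead(Vl[0], &vl));
    PetscCall(VecGetArrayRead(Vr, &vr));
    PetscCall(VecGetArrayRead(U, &u));
    PetscCall(VecGetArray(VHV[0], &vhv));
    vhv[0] = 0.0;                        /* VHV[0][j] = sum_i sum_k vl[i]*G_UU[i][j][k]*vr[k] */
    vhv[1] = vl[0] * 6.0 * u[1] * vr[1];
    PetscCall(VecRestoreArray(VHV[0], &vhv));
    PetscCall(VecRestoreArrayRead(U, &u));
    PetscCall(VecRestoreArrayRead(Vr, &vr));
    PetscCall(VecRestoreArrayRead(Vl[0], &vl));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
.ve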

658: .seealso: `TS`, `TSAdjoint`
659: @*/
660: PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
661: {
662:   PetscFunctionBegin;
664:   PetscAssertPointer(rhshp1, 2);

666:   ts->rhshessianproductctx = ctx;
667:   if (rhshp1) ts->vecs_guu = rhshp1;
668:   if (rhshp2) ts->vecs_gup = rhshp2;
669:   if (rhshp3) ts->vecs_gpu = rhshp3;
670:   if (rhshp4) ts->vecs_gpp = rhshp4;
671:   ts->rhshessianproduct_guu = rhshessianproductfunc1;
672:   ts->rhshessianproduct_gup = rhshessianproductfunc2;
673:   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
674:   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
675:   PetscFunctionReturn(PETSC_SUCCESS);
676: }

678: /*@
679:   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.

681:   Collective

683:   Input Parameters:
684: + ts - The `TS` context obtained from `TSCreate()`
685: . t  - the time
686: . U  - the solution at which to compute the Hessian product
687: . Vl - the array of input vectors to be multiplied with the Hessian from the left
688: - Vr - the input vector to be multiplied with the Hessian from the right

690:   Output Parameter:
691: . VHV - the array of output vectors that store the Hessian product

693:   Level: developer

695:   Note:
696:   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
697:   so most users would not generally call this routine themselves.

699: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
700: @*/
701: PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
702: {
703:   PetscFunctionBegin;
704:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

708:   PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
709:   PetscFunctionReturn(PETSC_SUCCESS);
710: }

712: /*@
713:   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.

715:   Collective

717:   Input Parameters:
718: + ts - The `TS` context obtained from `TSCreate()`
719: . t  - the time
720: . U  - the solution at which to compute the Hessian product
721: . Vl - the array of input vectors to be multiplied with the Hessian from the left
722: - Vr - the input vector to be multiplied with the Hessian from the right

724:   Output Parameter:
725: . VHV - the array of output vectors that store the Hessian product

727:   Level: developer

729:   Note:
730:   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
731:   so most users would not generally call this routine themselves.

733: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
734: @*/
735: PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
736: {
737:   PetscFunctionBegin;
738:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

742:   PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
743:   PetscFunctionReturn(PETSC_SUCCESS);
744: }

746: /*@
747:   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.

749:   Collective

751:   Input Parameters:
752: + ts - The `TS` context obtained from `TSCreate()`
753: . t  - the time
754: . U  - the solution at which to compute the Hessian product
755: . Vl - the array of input vectors to be multiplied with the Hessian from the left
756: - Vr - the input vector to be multiplied with the Hessian from the right

758:   Output Parameter:
759: . VHV - the array of output vectors that store the Hessian product

761:   Level: developer

763:   Note:
764:   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
765:   so most users would not generally call this routine themselves.

767: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
768: @*/
769: PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
770: {
771:   PetscFunctionBegin;
772:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

776:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
777:   PetscFunctionReturn(PETSC_SUCCESS);
778: }

780: /*@
781:   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.

783:   Collective

785:   Input Parameters:
786: + ts - The `TS` context obtained from `TSCreate()`
787: . t  - the time
788: . U  - the solution at which to compute the Hessian product
789: . Vl - the array of input vectors to be multiplied with the Hessian from the left
790: - Vr - the input vector to be multiplied with the Hessian from the right

792:   Output Parameter:
793: . VHV - the array of output vectors that store the Hessian product

795:   Level: developer

797:   Note:
798:   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
799:   so most users would not generally call this routine themselves.

801: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
802: @*/
803: PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
804: {
805:   PetscFunctionBegin;
806:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

810:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
811:   PetscFunctionReturn(PETSC_SUCCESS);
812: }

814: /* --------------------------- Adjoint sensitivity ---------------------------*/

816: /*@
817:   TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
818:   for use by the `TS` adjoint routines.

820:   Logically Collective

822:   Input Parameters:
823: + ts      - the `TS` context obtained from `TSCreate()`
824: . numcost - number of gradients to be computed, this is the number of cost functions
825: . lambda  - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
826: - mu      - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

828:   Level: beginner

830:   Notes:
831:   The entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime and mu_i = df/dp|finaltime

833:   After `TSAdjointSolve()` is called, `lambda` and `mu` contain the computed sensitivities
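
  Example usage, a minimal sketch for a single cost function; `U` is the ODE solution vector, `P` is a hypothetical parameter vector, and the terminal values of df/dy and df/dp are problem dependent:
.vb
  Vec lambda[1], mu[1];
  PetscCall(VecDuplicate(U, &lambda[0]));   /* same layout as the ODE solution */
  PetscCall(VecDuplicate(P, &mu[0]));       /* one entry per parameter */
  /* initialize lambda[0] = df/dy and mu[0] = df/dp at the final time */
  PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
  PetscCall(TSAdjointSolve(ts));            /* lambda[0] and mu[0] now hold the sensitivities */
.ve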

835: .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
836: @*/
837: PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu)
838: {
839:   PetscFunctionBegin;
841:   PetscAssertPointer(lambda, 3);
842:   ts->vecs_sensi  = lambda;
843:   ts->vecs_sensip = mu;
844:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostGradients()) is inconsistent with the one set by TSSetCostIntegrand()");
845:   ts->numcost = numcost;
846:   PetscFunctionReturn(PETSC_SUCCESS);
847: }

849: /*@
850:   TSGetCostGradients - Returns the gradients from `TSAdjointSolve()`

852:   Not Collective, but the vectors returned are parallel if `TS` is parallel

854:   Input Parameter:
855: . ts - the `TS` context obtained from `TSCreate()`

857:   Output Parameters:
858: + numcost - size of returned arrays
859: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
860: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

862:   Level: intermediate

864: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
865: @*/
866: PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu)
867: {
868:   PetscFunctionBegin;
870:   if (numcost) *numcost = ts->numcost;
871:   if (lambda) *lambda = ts->vecs_sensi;
872:   if (mu) *mu = ts->vecs_sensip;
873:   PetscFunctionReturn(PETSC_SUCCESS);
874: }

876: /*@
877:   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
878:   for use by the `TS` adjoint routines.

880:   Logically Collective

882:   Input Parameters:
883: + ts      - the `TS` context obtained from `TSCreate()`
884: . numcost - number of cost functions
885: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors are the same as the ODE solution vector
886: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
887: - dir     - the direction vector that is multiplied with the Hessian of the cost functions

889:   Level: beginner

891:   Notes:
892:   The Hessian of the cost function is completely different from the Hessian of the ODE/DAE system

894:   For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

896:   After `TSAdjointSolve()` is called, `lambda2` and `mu2` will contain the computed second-order adjoint sensitivities and can be used to produce a Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.

898:   Passing `NULL` for `lambda2` disables the second-order calculation.
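
  Example usage, a minimal sketch for a single cost function; `U` is the ODE solution vector, `P` is a hypothetical parameter vector, and `dir` is a direction vector typically supplied by an optimization solver:
.vb
  Vec lambda2[1], mu2[1];
  PetscCall(VecDuplicate(U, &lambda2[0]));
  PetscCall(VecDuplicate(P, &mu2[0]));
  PetscCall(TSSetCostHessianProducts(ts, 1, lambda2, mu2, dir));
  PetscCall(TSAdjointSetForward(ts, NULL)); /* turn on the tangent linear solver */
  PetscCall(TSSolve(ts, U));
  PetscCall(TSAdjointSolve(ts));            /* lambda2[0] and mu2[0] now hold Hessian-vector products */
  PetscCall(TSAdjointResetForward(ts));
.ve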

900: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
901: @*/
902: PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir)
903: {
904:   PetscFunctionBegin;
906:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostHessianProducts()) is inconsistent with the one set by TSSetCostIntegrand()");
907:   ts->numcost      = numcost;
908:   ts->vecs_sensi2  = lambda2;
909:   ts->vecs_sensi2p = mu2;
910:   ts->vec_dir      = dir;
911:   PetscFunctionReturn(PETSC_SUCCESS);
912: }

914: /*@
915:   TSGetCostHessianProducts - Returns the Hessian-vector products from `TSAdjointSolve()`

917:   Not Collective, but vectors returned are parallel if `TS` is parallel

919:   Input Parameter:
920: . ts - the `TS` context obtained from `TSCreate()`

922:   Output Parameters:
923: + numcost - number of cost functions
924: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors are the same as the ODE solution vector
925: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
926: - dir     - the direction vector that is multiplied with the Hessian of the cost functions

928:   Level: intermediate

930: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
931: @*/
932: PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir)
933: {
934:   PetscFunctionBegin;
936:   if (numcost) *numcost = ts->numcost;
937:   if (lambda2) *lambda2 = ts->vecs_sensi2;
938:   if (mu2) *mu2 = ts->vecs_sensi2p;
939:   if (dir) *dir = ts->vec_dir;
940:   PetscFunctionReturn(PETSC_SUCCESS);
941: }

943: /*@
944:   TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities

946:   Logically Collective

948:   Input Parameters:
949: + ts   - the `TS` context obtained from `TSCreate()`
950: - didp - the derivative of initial values w.r.t. parameters

952:   Level: intermediate

954:   Notes:
955:   When computing sensitivities w.r.t. the initial condition, set `didp` to `NULL` so that the solver treats it mathematically as an identity matrix.
956:   `TSAdjoint` does not reset the tangent linear solver automatically; call `TSAdjointResetForward()` to reset it.

958: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
959: @*/
960: PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
961: {
962:   Mat          A;
963:   Vec          sp;
964:   PetscScalar *xarr;
965:   PetscInt     lsize;

967:   PetscFunctionBegin;
968:   ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
969:   PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
970:   PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
971:   /* create a single-column dense matrix */
972:   PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
973:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

975:   PetscCall(VecDuplicate(ts->vec_sol, &sp));
976:   PetscCall(MatDenseGetColumn(A, 0, &xarr));
977:   PetscCall(VecPlaceArray(sp, xarr));
978:   if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
979:     if (didp) {
980:       PetscCall(MatMult(didp, ts->vec_dir, sp));
981:       PetscCall(VecScale(sp, 2.));
982:     } else {
983:       PetscCall(VecZeroEntries(sp));
984:     }
985:   } else { /* tangent linear variable initialized as dir */
986:     PetscCall(VecCopy(ts->vec_dir, sp));
987:   }
988:   PetscCall(VecResetArray(sp));
989:   PetscCall(MatDenseRestoreColumn(A, &xarr));
990:   PetscCall(VecDestroy(&sp));

992:   PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

994:   PetscCall(MatDestroy(&A));
995:   PetscFunctionReturn(PETSC_SUCCESS);
996: }

998: /*@
999:   TSAdjointResetForward - Resets the tangent linear solver and destroys the tangent linear context

1001:   Logically Collective

1003:   Input Parameter:
1004: . ts - the `TS` context obtained from `TSCreate()`

1006:   Level: intermediate

1008: .seealso: [](ch_ts), `TSAdjointSetForward()`
1009: @*/
1010: PetscErrorCode TSAdjointResetForward(TS ts)
1011: {
1012:   PetscFunctionBegin;
1013:   ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
1014:   PetscCall(TSForwardReset(ts));
1015:   PetscFunctionReturn(PETSC_SUCCESS);
1016: }

1018: /*@
1019:   TSAdjointSetUp - Sets up the internal data structures for the later use
1020:   of an adjoint solver

1022:   Collective

1024:   Input Parameter:
1025: . ts - the `TS` context obtained from `TSCreate()`

1027:   Level: advanced

1029: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1030: @*/
1031: PetscErrorCode TSAdjointSetUp(TS ts)
1032: {
1033:   TSTrajectory tj;
1034:   PetscBool    match;

1036:   PetscFunctionBegin;
1038:   if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1039:   PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
1040:   PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
1041:   PetscCall(TSGetTrajectory(ts, &tj));
1042:   PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
1043:   if (match) {
1044:     PetscBool solution_only;
1045:     PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
1046:     PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
1047:   }
1048:   PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* do not use TSHistory */

1050:   if (ts->quadraturets) { /* if there is integral in the cost function */
1051:     PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
1052:     if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
1053:   }

1055:   PetscTryTypeMethod(ts, adjointsetup);
1056:   ts->adjointsetupcalled = PETSC_TRUE;
1057:   PetscFunctionReturn(PETSC_SUCCESS);
1058: }

1060: /*@
1061:   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.

1063:   Collective

1065:   Input Parameter:
1066: . ts - the `TS` context obtained from `TSCreate()`

1068:   Level: beginner

1070: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1071: @*/
1072: PetscErrorCode TSAdjointReset(TS ts)
1073: {
1074:   PetscFunctionBegin;
1076:   PetscTryTypeMethod(ts, adjointreset);
1077:   if (ts->quadraturets) { /* if there is integral in the cost function */
1078:     PetscCall(VecDestroy(&ts->vec_drdu_col));
1079:     if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1080:   }
1081:   ts->vecs_sensi         = NULL;
1082:   ts->vecs_sensip        = NULL;
1083:   ts->vecs_sensi2        = NULL;
1084:   ts->vecs_sensi2p       = NULL;
1085:   ts->vec_dir            = NULL;
1086:   ts->adjointsetupcalled = PETSC_FALSE;
1087:   PetscFunctionReturn(PETSC_SUCCESS);
1088: }

1090: /*@
1091:   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

1093:   Logically Collective

1095:   Input Parameters:
1096: + ts    - the `TS` context obtained from `TSCreate()`
1097: - steps - number of steps to use

1099:   Level: intermediate

1101:   Notes:
1102:   Normally one does not call this, and `TSAdjointSolve()` integrates all the way back to the original timestep. One can call this
1103:   to integrate back over fewer steps than the forward run took

1105: .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1106: @*/
1107: PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
1108: {
1109:   PetscFunctionBegin;
1112:   PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
1113:   PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
1114:   ts->adjoint_max_steps = steps;
1115:   PetscFunctionReturn(PETSC_SUCCESS);
1116: }

1118: // PetscClangLinter pragma disable: -fdoc-*
1119: /*@C
1120:   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`

1122:   Level: deprecated
1123: @*/
1124: PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
1125: {
1126:   PetscFunctionBegin;

1130:   ts->rhsjacobianp    = func;
1131:   ts->rhsjacobianpctx = ctx;
1132:   if (Amat) {
1133:     PetscCall(PetscObjectReference((PetscObject)Amat));
1134:     PetscCall(MatDestroy(&ts->Jacp));
1135:     ts->Jacp = Amat;
1136:   }
1137:   PetscFunctionReturn(PETSC_SUCCESS);
1138: }

1140: // PetscClangLinter pragma disable: -fdoc-*
1141: /*@C
1142:   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`

1144:   Level: deprecated
1145: @*/
1146: PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
1147: {
1148:   PetscFunctionBegin;

1153:   PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
1154:   PetscFunctionReturn(PETSC_SUCCESS);
1155: }

1157: // PetscClangLinter pragma disable: -fdoc-*
1158: /*@
1159:   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

1161:   Level: deprecated
1162: @*/
1163: PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
1164: {
1165:   PetscFunctionBegin;

1169:   PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
1170:   PetscFunctionReturn(PETSC_SUCCESS);
1171: }

1173: // PetscClangLinter pragma disable: -fdoc-*
1174: /*@
1175:   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

1177:   Level: deprecated
1178: @*/
1179: PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
1180: {
1181:   PetscFunctionBegin;

1185:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
1186:   PetscFunctionReturn(PETSC_SUCCESS);
1187: }

1189: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1190: /*@C
1191:   TSAdjointMonitorSensi - Monitors the first lambda sensitivity

1193:   Level: intermediate

1195: .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1196: @*/
1197: static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1198: {
1199:   PetscViewer viewer = vf->viewer;

1201:   PetscFunctionBegin;
1203:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1204:   PetscCall(VecView(lambda[0], viewer));
1205:   PetscCall(PetscViewerPopFormat(viewer));
1206:   PetscFunctionReturn(PETSC_SUCCESS);
1207: }

1209: /*@C
1210:   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user

1212:   Collective

1214:   Input Parameters:
1215: + ts           - `TS` object you wish to monitor
1216: . name         - the monitor type one is seeking
1217: . help         - message indicating what monitoring is done
1218: . manual       - manual page for the monitor
1219: . monitor      - the monitor function, its context must be a `PetscViewerAndFormat`
1220: - monitorsetup - a function that is called once, ONLY if the user selected this monitor, and that may set additional features of the `TS` or `PetscViewer` objects

1222:   Level: developer

1224: .seealso: [](ch_ts), `PetscOptionsCreateViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1225:           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1226:           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
1227:           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1228:           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1229:           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1230:           `PetscOptionsFList()`, `PetscOptionsEList()`, `PetscViewerAndFormat`
1231: @*/
1232: PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
1233: {
1234:   PetscViewer       viewer;
1235:   PetscViewerFormat format;
1236:   PetscBool         flg;

1238:   PetscFunctionBegin;
1239:   PetscCall(PetscOptionsCreateViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
1240:   if (flg) {
1241:     PetscViewerAndFormat *vf;
1242:     PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
1243:     PetscCall(PetscViewerDestroy(&viewer));
1244:     if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
1245:     PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
1246:   }
1247:   PetscFunctionReturn(PETSC_SUCCESS);
1248: }

1250: /*@C
1251:   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1252:   timestep to display the iteration's progress.

1254:   Logically Collective

1256:   Input Parameters:
1257: + ts              - the `TS` context obtained from `TSCreate()`
1258: . adjointmonitor  - monitoring routine
1259: . adjointmctx     - [optional] user-defined context for private data for the monitor routine
1260:                     (use `NULL` if no context is desired)
1261: - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for its calling sequence

1263:   Calling sequence of `adjointmonitor`:
1264: + ts          - the `TS` context
1265: . steps       - iteration number (after the final time step the monitor routine is called with
1266:                 a step of -1; this is at the final time, which may have been interpolated to)
1267: . time        - current time
1268: . u           - current iterate
1269: . numcost     - number of cost functions
1270: . lambda      - sensitivities to initial conditions
1271: . mu          - sensitivities to parameters
1272: - adjointmctx - [optional] adjoint monitoring context

1274:   Level: intermediate

1276:   Note:
1277:   This routine adds an additional monitor to the list of monitors that
1278:   already has been loaded.

1280:   Fortran Notes:
1281:   Only a single monitor function can be set for each `TS` object

1283: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`, `PetscCtxDestroyFn`
1284: @*/
1285: PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx), void *adjointmctx, PetscCtxDestroyFn *adjointmdestroy)
1286: {
1287:   PetscInt  i;
1288:   PetscBool identical;

1290:   PetscFunctionBegin;
1292:   for (i = 0; i < ts->numbermonitors; i++) {
1293:     PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode (*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1294:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1295:   }
1296:   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1297:   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1298:   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1299:   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = adjointmctx;
1300:   PetscFunctionReturn(PETSC_SUCCESS);
1301: }

1303: /*@C
1304:   TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.

1306:   Logically Collective

1308:   Input Parameter:
1309: . ts - the `TS` context obtained from `TSCreate()`

1311:   Notes:
1312:   There is no way to remove a single, specific monitor.

1314:   Level: intermediate

1316: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1317: @*/
1318: PetscErrorCode TSAdjointMonitorCancel(TS ts)
1319: {
1320:   PetscInt i;

1322:   PetscFunctionBegin;
1324:   for (i = 0; i < ts->numberadjointmonitors; i++) {
1325:     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1326:   }
1327:   ts->numberadjointmonitors = 0;
1328:   PetscFunctionReturn(PETSC_SUCCESS);
1329: }

1331: /*@C
1332:   TSAdjointMonitorDefault - the default monitor of adjoint computations

1334:   Input Parameters:
1335: + ts      - the `TS` context
1336: . step    - iteration number (after the final time step the monitor routine is called with a
1337:             step of -1; this is at the final time, which may have been interpolated to)
1338: . time    - current time
1339: . v       - current iterate
1340: . numcost - number of cost functions
1341: . lambda  - sensitivities to initial conditions
1342: . mu      - sensitivities to parameters
1343: - vf      - the viewer and format

1345:   Level: intermediate

1347: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1348: @*/
1349: PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1350: {
1351:   PetscViewer viewer = vf->viewer;

1353:   PetscFunctionBegin;
1354:   (void)v;
1355:   (void)numcost;
1356:   (void)lambda;
1357:   (void)mu;
1359:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1360:   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1361:   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
1362:   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1363:   PetscCall(PetscViewerPopFormat(viewer));
1364:   PetscFunctionReturn(PETSC_SUCCESS);
1365: }

1367: /*@C
1368:   TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1369:   `VecView()` for the sensitivities to initial states at each timestep

1371:   Collective

1373:   Input Parameters:
1374: + ts      - the `TS` context
1375: . step    - current time-step
1376: . ptime   - current time
1377: . u       - current state
1378: . numcost - number of cost functions
1379: . lambda  - sensitivities to initial conditions
1380: . mu      - sensitivities to parameters
1381: - dummy   - either a viewer or `NULL`

1383:   Level: intermediate

1385: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1386: @*/
1387: PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy)
1388: {
1389:   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1390:   PetscDraw        draw;
1391:   PetscReal        xl, yl, xr, yr, h;
1392:   char             time[32];

1394:   PetscFunctionBegin;
1395:   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);

1397:   PetscCall(VecView(lambda[0], ictx->viewer));
1398:   PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1399:   PetscCall(PetscSNPrintf(time, 32, "Timestep %" PetscInt_FMT " Time %g", step, (double)ptime));
1400:   PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1401:   h = yl + .95 * (yr - yl);
1402:   PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1403:   PetscCall(PetscDrawFlush(draw));
1404:   PetscFunctionReturn(PETSC_SUCCESS);
1405: }

1407: /*@C
1408:   TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from the options database.

1410:   Collective

1412:   Input Parameters:
1413: + ts                 - the `TS` context
1414: - PetscOptionsObject - the options context

1416:   Options Database Keys:
1417: + -ts_adjoint_solve <yes,no>     - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`)
1418: . -ts_adjoint_monitor            - print information at each adjoint time step
1419: - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function w.r.t. the initial conditions (lambda[0]) graphically

1421:   Level: developer

1423:   Note:
1424:   This is not normally called directly by users

1426: .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1427: @*/
1428: PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject)
1429: {
1430:   PetscBool tflg, opt;

1432:   PetscFunctionBegin;
1434:   PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
1435:   tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
1436:   PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
1437:   if (opt) {
1438:     PetscCall(TSSetSaveTrajectory(ts));
1439:     ts->adjoint_solve = tflg;
1440:   }
1441:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
1442:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
1443:   opt = PETSC_FALSE;
1444:   PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
1445:   if (opt) {
1446:     TSMonitorDrawCtx ctx;
1447:     PetscInt         howoften = 1;

1449:     PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
1450:     PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
1451:     PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscCtxDestroyFn *)TSMonitorDrawCtxDestroy));
1452:   }
1453:   PetscFunctionReturn(PETSC_SUCCESS);
1454: }
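
/*
   Illustrative command line (the executable name is hypothetical): assuming the application
   calls TSSetFromOptions(), running

     ./app -ts_adjoint_solve -ts_adjoint_monitor -ts_adjoint_monitor_draw_sensi

   solves the adjoint problem immediately after the forward solve, prints the adjoint step
   sizes, and draws lambda[0] at each adjoint step.
*/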

1456: /*@
1457:   TSAdjointStep - Steps one time step backward in the adjoint run

1459:   Collective

1461:   Input Parameter:
1462: . ts - the `TS` context obtained from `TSCreate()`

1464:   Level: intermediate

1466: .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1467: @*/
1468: PetscErrorCode TSAdjointStep(TS ts)
1469: {
1470:   DM dm;

1472:   PetscFunctionBegin;
1474:   PetscCall(TSGetDM(ts, &dm));
1475:   PetscCall(TSAdjointSetUp(ts));
1476:   ts->steps--; /* must decrease the step index before the adjoint step is taken. */

1478:   ts->reason     = TS_CONVERGED_ITERATING;
1479:   ts->ptime_prev = ts->ptime;
1480:   PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
1481:   PetscUseTypeMethod(ts, adjointstep);
1482:   PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
1483:   ts->adjoint_steps++;

1485:   if (ts->reason < 0) {
1486:     PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
1487:   } else if (!ts->reason) {
1488:     if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1489:   }
1490:   PetscFunctionReturn(PETSC_SUCCESS);
1491: }

1493: /*@
1494:   TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

1496:   Collective

1499:   Input Parameter:
1500: . ts - the `TS` context obtained from `TSCreate()`

1502:   Options Database Key:
1503: . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values

1505:   Level: intermediate

1507:   Notes:
1508:   This must be called after a call to `TSSolve()` that solves the forward problem

1510:   By default this will integrate back to the initial time; one can use `TSAdjointSetSteps()` to take fewer backward steps so that the adjoint run stops at a later time

1512: .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1513: @*/
1514: PetscErrorCode TSAdjointSolve(TS ts)
1515: {
1516:   static PetscBool cite = PETSC_FALSE;
1517: #if defined(TSADJOINT_STAGE)
1518:   PetscLogStage adjoint_stage;
1519: #endif

1521:   PetscFunctionBegin;
1523:   PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
1524:                                    "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
1525:                                    "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
1526:                                    "  journal       = {SIAM Journal on Scientific Computing},\n"
1527:                                    "  volume        = {44},\n"
1528:                                    "  number        = {1},\n"
1529:                                    "  pages         = {C1-C24},\n"
1530:                                    "  doi           = {10.1137/21M140078X},\n"
1531:                                    "  year          = {2022}\n}\n",
1532:                                    &cite));
1533: #if defined(TSADJOINT_STAGE)
1534:   PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
1535:   PetscCall(PetscLogStagePush(adjoint_stage));
1536: #endif
1537:   PetscCall(TSAdjointSetUp(ts));

1539:   /* reset time step and iteration counters */
1540:   ts->adjoint_steps     = 0;
1541:   ts->ksp_its           = 0;
1542:   ts->snes_its          = 0;
1543:   ts->num_snes_failures = 0;
1544:   ts->reject            = 0;
1545:   ts->reason            = TS_CONVERGED_ITERATING;

1547:   if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
1548:   if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

1550:   while (!ts->reason) {
1551:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1552:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1553:     PetscCall(TSAdjointEventHandler(ts));
1554:     PetscCall(TSAdjointStep(ts));
1555:     if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
1556:   }
1557:   if (!ts->steps) {
1558:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1559:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1560:   }
1561:   ts->solvetime = ts->ptime;
1562:   PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
1563:   PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
1564:   ts->adjoint_max_steps = 0;
1565: #if defined(TSADJOINT_STAGE)
1566:   PetscCall(PetscLogStagePop());
1567: #endif
1568:   PetscFunctionReturn(PETSC_SUCCESS);
1569: }
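
/*
   Typical calling sequence (a minimal sketch under assumed setup; "ts", "u", "lambda" and "mu"
   are user-created objects, not defined in this file):

     PetscCall(TSSetSaveTrajectory(ts));               // the adjoint run needs the forward trajectory
     PetscCall(TSSolve(ts, u));                        // forward solve
     // seed lambda[0] = dPhi/dy(T) and mu[0] = dPhi/dp(T) for the cost function Phi, then
     PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
     PetscCall(TSAdjointSolve(ts));                    // backward (adjoint) solve
     // lambda[0] now holds dPhi/dy0 and mu[0] holds the total gradient dPhi/dp
*/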

1571: /*@C
1572:   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`

1574:   Collective

1576:   Input Parameters:
1577: + ts      - time stepping context obtained from `TSCreate()`
1578: . step    - step number that has just completed
1579: . ptime   - model time of the state
1580: . u       - state at the current model time
1581: . numcost - number of cost functions (dimension of lambda or mu)
1582: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1583: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

1585:   Level: developer

1587:   Note:
1588:   `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1589:   Users would almost never call this routine directly.

1591: .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1592: @*/
1593: PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu)
1594: {
1595:   PetscInt i, n = ts->numberadjointmonitors;

1597:   PetscFunctionBegin;
1600:   PetscCall(VecLockReadPush(u));
1601:   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1602:   PetscCall(VecLockReadPop(u));
1603:   PetscFunctionReturn(PETSC_SUCCESS);
1604: }

1606: /*@
1607:   TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

1609:   Collective

1611:   Input Parameter:
1612: . ts - time stepping context

1614:   Level: advanced

1616:   Notes:
1617:   This function cannot be called until `TSAdjointStep()` has been completed.

1619: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
1620: @*/
1621: PetscErrorCode TSAdjointCostIntegral(TS ts)
1622: {
1623:   PetscFunctionBegin;
1625:   PetscUseTypeMethod(ts, adjointintegral);
1626:   PetscFunctionReturn(PETSC_SUCCESS);
1627: }

1629: /* ------------------ Forward (tangent linear) sensitivity  ------------------*/

1631: /*@
1632:   TSForwardSetUp - Sets up the internal data structures for the later use
1633:   of forward sensitivity analysis

1635:   Collective

1637:   Input Parameter:
1638: . ts - the `TS` context obtained from `TSCreate()`

1640:   Level: advanced

1642: .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1643: @*/
1644: PetscErrorCode TSForwardSetUp(TS ts)
1645: {
1646:   PetscFunctionBegin;
1648:   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1649:   PetscTryTypeMethod(ts, forwardsetup);
1650:   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1651:   ts->forwardsetupcalled = PETSC_TRUE;
1652:   PetscFunctionReturn(PETSC_SUCCESS);
1653: }

1655: /*@
1656:   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

1658:   Collective

1660:   Input Parameter:
1661: . ts - the `TS` context obtained from `TSCreate()`

1663:   Level: advanced

1665: .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1666: @*/
1667: PetscErrorCode TSForwardReset(TS ts)
1668: {
1669:   TS quadts = ts->quadraturets;

1671:   PetscFunctionBegin;
1673:   PetscTryTypeMethod(ts, forwardreset);
1674:   PetscCall(MatDestroy(&ts->mat_sensip));
1675:   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1676:   PetscCall(VecDestroy(&ts->vec_sensip_col));
1677:   ts->forward_solve      = PETSC_FALSE;
1678:   ts->forwardsetupcalled = PETSC_FALSE;
1679:   PetscFunctionReturn(PETSC_SUCCESS);
1680: }

1682: /*@
1683:   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

1685:   Input Parameters:
1686: + ts        - the `TS` context obtained from `TSCreate()`
1687: . numfwdint - number of integrals
1688: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1690:   Level: deprecated

1692: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1693: @*/
1694: PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp)
1695: {
1696:   PetscFunctionBegin;
1698:   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of integrals (2nd parameter of TSForwardSetIntegralGradients()) is inconsistent with the number of cost functions set by TSSetCostIntegrand()");
1699:   if (!ts->numcost) ts->numcost = numfwdint;

1701:   ts->vecs_integral_sensip = vp;
1702:   PetscFunctionReturn(PETSC_SUCCESS);
1703: }

1705: /*@
1706:   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

1708:   Input Parameter:
1709: . ts - the `TS` context obtained from `TSCreate()`

1711:   Output Parameters:
1712: + numfwdint - number of integrals
1713: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1715:   Level: deprecated

1717: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1718: @*/
1719: PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp)
1720: {
1721:   PetscFunctionBegin;
1723:   PetscAssertPointer(vp, 3);
1724:   if (numfwdint) *numfwdint = ts->numcost;
1725:   if (vp) *vp = ts->vecs_integral_sensip;
1726:   PetscFunctionReturn(PETSC_SUCCESS);
1727: }

1729: /*@
1730:   TSForwardStep - Compute the forward sensitivity for one time step.

1732:   Collective

1734:   Input Parameter:
1735: . ts - time stepping context

1737:   Level: advanced

1739:   Notes:
1740:   This function cannot be called until `TSStep()` has been completed.

1742: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1743: @*/
1744: PetscErrorCode TSForwardStep(TS ts)
1745: {
1746:   PetscFunctionBegin;
1748:   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1749:   PetscUseTypeMethod(ts, forwardstep);
1750:   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1751:   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSForwardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1752:   PetscFunctionReturn(PETSC_SUCCESS);
1753: }

1755: /*@
1756:   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of the solution w.r.t. the problem parameters and initial values.

1758:   Logically Collective

1760:   Input Parameters:
1761: + ts   - the `TS` context obtained from `TSCreate()`
1762: . nump - number of parameters
1763: - Smat - sensitivities with respect to the parameters; the number of columns of this matrix is the same as the number of parameters

1765:   Level: beginner

1767:   Notes:
1768:   Use `PETSC_DETERMINE` to use the number of columns of `Smat` for `nump`

1770:   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1771:   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1772:   You must call this function before `TSSolve()`.
1773:   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp evaluated at the starting time.

1775: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1776: @*/
1777: PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1778: {
1779:   PetscFunctionBegin;
1782:   ts->forward_solve = PETSC_TRUE;
1783:   if (nump == PETSC_DEFAULT || nump == PETSC_DETERMINE) {
1784:     PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1785:   } else ts->num_parameters = nump;
1786:   PetscCall(PetscObjectReference((PetscObject)Smat));
1787:   PetscCall(MatDestroy(&ts->mat_sensip));
1788:   ts->mat_sensip = Smat;
1789:   PetscFunctionReturn(PETSC_SUCCESS);
1790: }
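
/*
   Illustrative setup (a minimal sketch; "ts", "u", the state size "N" and the number of
   parameters "P" are assumptions supplied by the caller):

     Mat Smat;
     PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, N, P, NULL, &Smat));
     PetscCall(MatZeroEntries(Smat)); // dy/dp = 0 at the starting time when the IC does not depend on p
     PetscCall(TSForwardSetSensitivities(ts, P, Smat));
     PetscCall(TSSolve(ts, u));       // the columns of Smat are propagated along with the solution
     // Smat now holds dy/dp at the final time; see also TSForwardGetSensitivities()
*/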

1792: /*@
1793:   TSForwardGetSensitivities - Returns the trajectory sensitivities

1795:   Not Collective, but the `Smat` returned is parallel if `ts` is parallel

1797:   Input Parameter:
1798: . ts   - the `TS` context obtained from `TSCreate()`

  Output Parameters:
1799: + nump - number of parameters
1800: - Smat - sensitivities with respect to the parameters; the number of columns of this matrix is the same as the number of parameters

1802:   Level: intermediate

1804: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1805: @*/
1806: PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1807: {
1808:   PetscFunctionBegin;
1810:   if (nump) *nump = ts->num_parameters;
1811:   if (Smat) *Smat = ts->mat_sensip;
1812:   PetscFunctionReturn(PETSC_SUCCESS);
1813: }

1815: /*@
1816:   TSForwardCostIntegral - Evaluate the cost integral in the forward run.

1818:   Collective

1820:   Input Parameter:
1821: . ts - time stepping context

1823:   Level: advanced

1825:   Note:
1826:   This function cannot be called until `TSStep()` has been completed.

1828: .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
1829: @*/
1830: PetscErrorCode TSForwardCostIntegral(TS ts)
1831: {
1832:   PetscFunctionBegin;
1834:   PetscUseTypeMethod(ts, forwardintegral);
1835:   PetscFunctionReturn(PETSC_SUCCESS);
1836: }

1838: /*@
1839:   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

1841:   Collective

1843:   Input Parameters:
1844: + ts   - the `TS` context obtained from `TSCreate()`
1845: - didp - parametric sensitivities of the initial condition

1847:   Level: intermediate

1849:   Notes:
1850:   `TSSolve()` allows users to pass the initial solution directly to `TS`, but the tangent linear variables cannot be initialized in this way.
1851:   This function is used to set initial values for tangent linear variables.

1853: .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
1854: @*/
1855: PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
1856: {
1857:   PetscFunctionBegin;
1860:   if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DETERMINE, didp));
1861:   PetscFunctionReturn(PETSC_SUCCESS);
1862: }
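
/*
   Illustrative use (a minimal sketch; "didp" is assumed to be a user-created dense matrix
   holding d(y0)/dp, with one column per parameter):

     PetscCall(TSForwardSetInitialSensitivities(ts, didp)); // also activates forward sensitivities if not yet set
     PetscCall(TSSolve(ts, u));
*/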

1864: /*@
1865:   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

1867:   Input Parameter:
1868: . ts - the `TS` context obtained from `TSCreate()`

1870:   Output Parameters:
1871: + ns - number of stages
1872: - S  - tangent linear sensitivities at the intermediate stages

1874:   Level: advanced

1876: .seealso: `TS`
1877: @*/
1878: PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1879: {
1880:   PetscFunctionBegin;

1883:   if (!ts->ops->forwardgetstages) *S = NULL;
1884:   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1885:   PetscFunctionReturn(PETSC_SUCCESS);
1886: }

1888: /*@
1889:   TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time

1891:   Input Parameters:
1892: + ts  - the `TS` context obtained from `TSCreate()`
1893: - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run

1895:   Output Parameter:
1896: . quadts - the child `TS` context

1898:   Level: intermediate

1900: .seealso: [](ch_ts), `TSGetQuadratureTS()`
1901: @*/
1902: PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1903: {
1904:   char prefix[128];

1906:   PetscFunctionBegin;
1908:   PetscAssertPointer(quadts, 3);
1909:   PetscCall(TSDestroy(&ts->quadraturets));
1910:   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1911:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1912:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1913:   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1914:   *quadts = ts->quadraturets;

1916:   if (ts->numcost) {
1917:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1918:   } else {
1919:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1920:   }
1921:   ts->costintegralfwd = fwd;
1922:   PetscFunctionReturn(PETSC_SUCCESS);
1923: }
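
/*
   Illustrative use (a minimal sketch; "CostIntegrand" and "user" are hypothetical, supplied by
   the caller): the quadrature sub-TS integrates a user-defined integrand r(u,p,t) along with
   the forward run, and the running integral can be read back with TSGetCostIntegral().

     TS quadts;
     PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));
     PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user));
*/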

1925: /*@
1926:   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

1928:   Input Parameter:
1929: . ts - the `TS` context obtained from `TSCreate()`

1931:   Output Parameters:
1932: + fwd    - flag indicating whether the cost integral is evaluated in the forward run or the adjoint run
1933: - quadts - the child `TS` context

1935:   Level: intermediate

1937: .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1938: @*/
1939: PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1940: {
1941:   PetscFunctionBegin;
1943:   if (fwd) *fwd = ts->costintegralfwd;
1944:   if (quadts) *quadts = ts->quadraturets;
1945:   PetscFunctionReturn(PETSC_SUCCESS);
1946: }

1948: /*@
1949:   TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

1951:   Collective

1953:   Input Parameters:
1954: + ts - the `TS` context obtained from `TSCreate()`
1955: - x  - state vector

1957:   Output Parameters:
1958: + J    - Jacobian matrix
1959: - Jpre - preconditioning matrix for J (may be the same as J)

1961:   Level: developer

1963:   Note:
1964:   Uses finite differencing when `TS` Jacobian is not available.

1966: .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
1967: @*/
1968: PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
1969: {
1970:   SNES snes                                          = ts->snes;
1971:   PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

1973:   PetscFunctionBegin;
1974:   /*
1975:     Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
1976:     because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
1977:     explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
1978:     coloring is used.
1979:   */
1980:   PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
1981:   if (jac == SNESComputeJacobianDefaultColor) {
1982:     Vec f;
1983:     PetscCall(SNESSetSolution(snes, x));
1984:     PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
1985:     /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
1986:     PetscCall(SNESComputeFunction(snes, x, f));
1987:   }
1988:   PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
1989:   PetscFunctionReturn(PETSC_SUCCESS);
1990: }