Actual source code: tssen.c

  1: #include <petsc/private/tsimpl.h>
  2: #include <petscdraw.h>

  4: PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval; /* profiling events for sensitivity analysis; TS_JacobianPEval brackets TSComputeIJacobianP() below */

  6: /* #define TSADJOINT_STAGE */

  8: /* ------------------------ Sensitivity Context ---------------------------*/

 10: /*@C
 11:   TSSetRHSJacobianP - Sets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.

 13:   Logically Collective

 15:   Input Parameters:
 16: + ts   - `TS` context obtained from `TSCreate()`
 17: . Amat - JacobianP matrix
 18: . func - function that evaluates the JacobianP, see `TSRHSJacobianPFn` for the calling sequence
 19: - ctx  - [optional] function context

 21:   Level: intermediate

 23:   Note:
 24:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

 26: .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()`
 27: @*/
 28: PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, PetscCtx ctx)
 29: {
 30:   PetscFunctionBegin;

 34:   ts->rhsjacobianp    = func;
 35:   ts->rhsjacobianpctx = ctx;
 36:   if (Amat) {
 37:     PetscCall(PetscObjectReference((PetscObject)Amat)); /* reference first so the swap is safe even when Amat == ts->Jacprhs */
 38:     PetscCall(MatDestroy(&ts->Jacprhs));
 39:     ts->Jacprhs = Amat;
 40:   }
 41:   PetscFunctionReturn(PETSC_SUCCESS);
 42: }

 44: /*@C
 45:   TSGetRHSJacobianP - Gets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.

 47:   Logically Collective

 49:   Input Parameter:
 50: . ts - `TS` context obtained from `TSCreate()`

 52:   Output Parameters:
 53: + Amat - JacobianP matrix
 54: . func - function that evaluates the JacobianP, see `TSRHSJacobianPFn` for the calling sequence
 55: - ctx  - [optional] function context

 57:   Level: intermediate

 59:   Note:
 60:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

 62: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn`
 63: @*/
 64: PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, PetscCtxRt ctx)
 65: {
 66:   PetscFunctionBegin;
 67:   if (func) *func = ts->rhsjacobianp;
 68:   if (ctx) *(void **)ctx = ts->rhsjacobianpctx; /* ctx is an opaque output slot; written through as a raw pointer */
 69:   if (Amat) *Amat = ts->Jacprhs; /* no new reference taken: the TS retains ownership (it referenced the matrix in TSSetRHSJacobianP()) */
 70:   PetscFunctionReturn(PETSC_SUCCESS);
 71: }

 73: /*@C
 74:   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.

 76:   Collective

 78:   Input Parameters:
 79: + ts - The `TS` context obtained from `TSCreate()`
 80: . t  - the time
 81: - U  - the solution at which to compute the Jacobian

 83:   Output Parameter:
 84: . Amat - the computed Jacobian

 86:   Level: developer

 88: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
 89: @*/
 90: PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
 91: {
 92:   PetscFunctionBegin;
 93:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); /* no output matrix requested, nothing to compute */

 97:   if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
 98:   else {
 99:     PetscBool assembled;
100:     PetscCall(MatZeroEntries(Amat)); /* no user callback provided: the JacobianP defaults to the zero matrix */
101:     PetscCall(MatAssembled(Amat, &assembled));
102:     if (!assembled) {
103:       PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
104:       PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
105:     }
106:   }
107:   PetscFunctionReturn(PETSC_SUCCESS);
108: }

110: /*@C
111:   TSSetIJacobianP - Sets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.

113:   Logically Collective

115:   Input Parameters:
116: + ts   - `TS` context obtained from `TSCreate()`
117: . Amat - JacobianP matrix
118: . func - function that evaluates the JacobianP
119: - ctx  - [optional] function context

121:   Calling sequence of `func`:
122: + ts    - the `TS` context
123: . t     - current timestep
124: . U     - input vector (current ODE solution)
125: . Udot  - time derivative of state vector
126: . shift - shift to apply, see the note in `TSSetIJacobian()`
127: . A     - output matrix
128: - ctx   - [optional] function context

130:   Level: intermediate

132:   Note:
133:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

135: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136: @*/
137: PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, PetscCtx ctx), PetscCtx ctx)
138: {
139:   PetscFunctionBegin;

143:   ts->ijacobianp    = func;
144:   ts->ijacobianpctx = ctx;
145:   if (Amat) {
146:     PetscCall(PetscObjectReference((PetscObject)Amat)); /* reference first so the swap is safe even when Amat == ts->Jacp */
147:     PetscCall(MatDestroy(&ts->Jacp));
148:     ts->Jacp = Amat;
149:   }
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: }

153: /*@C
154:   TSGetIJacobianP - Gets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.

156:   Logically Collective

158:   Input Parameter:
159: . ts - `TS` context obtained from `TSCreate()`

161:   Output Parameters:
162: + Amat - JacobianP matrix
163: . func - the function that computes the JacobianP
164: - ctx  - [optional] function context

166:   Calling sequence of `func`:
167: + ts    - the `TS` context
168: . t     - current timestep
169: . U     - input vector (current ODE solution)
170: . Udot  - time derivative of state vector
171: . shift - shift to apply, see the note in `TSSetIJacobian()`
172: . A     - output matrix
173: - ctx   - [optional] function context

175:   Level: intermediate

177:   Note:
178:   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`

180: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSSetIJacobianP()`, `TSGetRHSJacobianP()`
181: @*/
182: PetscErrorCode TSGetIJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, PetscCtx ctx), PetscCtxRt ctx)
183: {
184:   PetscFunctionBegin;

187:   if (func) *func = ts->ijacobianp;
188:   if (ctx) *(void **)ctx = ts->ijacobianpctx; /* ctx is an opaque output slot; written through as a raw pointer */
189:   if (Amat) *Amat = ts->Jacp; /* no new reference taken: the TS retains ownership */
190:   PetscFunctionReturn(PETSC_SUCCESS);
191: }

193: /*@
194:   TSComputeIJacobianP - Runs the user-defined IJacobianP function.

196:   Collective

198:   Input Parameters:
199: + ts    - the `TS` context
200: . t     - current timestep
201: . U     - state vector
202: . Udot  - time derivative of state vector
203: . shift - shift to apply, see the note in `TSSetIJacobian()`
204: - imex  - flag indicates if the method is IMEX so that the `RHSJacobianP` should be kept separate

206:   Output Parameter:
207: . Amat - Jacobian matrix

209:   Level: developer

211: .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
212: @*/
213: PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
214: {
215:   PetscFunctionBegin;
216:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); /* no output matrix requested, nothing to compute */

221:   PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
222:   if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
223:   if (imex) { /* IMEX: keep the RHS contribution out of Amat; only the IFunction part lives here */
224:     if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
225:       PetscBool assembled;
226:       PetscCall(MatZeroEntries(Amat));
227:       PetscCall(MatAssembled(Amat, &assembled));
228:       if (!assembled) {
229:         PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
230:         PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
231:       }
232:     }
233:   } else {
234:     if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs)); /* evaluate the RHS JacobianP into its own matrix */
235:     if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
236:       PetscCall(MatScale(Amat, -1));
237:     } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
238:       MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
239:       if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
240:         PetscCall(MatZeroEntries(Amat));
241:       }
242:       PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy)); /* subtract the RHS contribution, same sign convention as the MatScale above */
243:     }
244:   }
245:   PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
246:   PetscFunctionReturn(PETSC_SUCCESS);
247: }

249: /*@C
250:   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

252:   Logically Collective

254:   Input Parameters:
255: + ts           - the `TS` context obtained from `TSCreate()`
256: . numcost      - number of gradients to be computed, this is the number of cost functions
257: . costintegral - vector that stores the integral values
258: . rf           - routine for evaluating the integrand function
259: . drduf        - function that computes the gradients of `r` with respect to `u`
260: . drdpf        - function that computes the gradients of `r` with respect to `p`, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
261: . fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
262: - ctx          - [optional] application context for private data for the function evaluation routine (may be `NULL`)

264:   Calling sequence of `rf`:
265: + ts  - the integrator
266: . t   - the time
267: . U   - the solution
268: . F   - the computed value of the function
269: - ctx - the application context

271:   Calling sequence of `drduf`:
272: + ts   - the integrator
273: . t    - the time
274: . U    - the solution
275: . dRdU - the computed gradients of `r` with respect to `u`
276: - ctx  - the application context

278:   Calling sequence of `drdpf`:
279: + ts   - the integrator
280: . t    - the time
281: . U    - the solution
282: . dRdP - the computed gradients of `r` with respect to `p`
283: - ctx  - the application context

285:   Level: deprecated

287:   Notes:
288:   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions

290:   Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead

292: .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
293:           `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
294: @*/
295: PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, PetscCtx ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, PetscCtx ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, PetscCtx ctx), PetscBool fwd, PetscCtx ctx)
296: {
297:   PetscFunctionBegin;
300:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
301:   if (!ts->numcost) ts->numcost = numcost;

303:   if (costintegral) {
304:     PetscCall(PetscObjectReference((PetscObject)costintegral)); /* reference first so the swap is safe even when costintegral == ts->vec_costintegral */
305:     PetscCall(VecDestroy(&ts->vec_costintegral));
306:     ts->vec_costintegral = costintegral;
307:   } else {
308:     if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
309:       PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
310:     } else {
311:       PetscCall(VecSet(ts->vec_costintegral, 0.0));
312:     }
313:   }
314:   if (!ts->vec_costintegrand) {
315:     PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand)); /* work vector for integrand values, same layout as the integral vector */
316:   } else {
317:     PetscCall(VecSet(ts->vec_costintegrand, 0.0));
318:   }
319:   ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
320:   ts->costintegrand    = rf;
321:   ts->costintegrandctx = ctx;
322:   ts->drdufunction     = drduf;
323:   ts->drdpfunction     = drdpf;
324:   PetscFunctionReturn(PETSC_SUCCESS);
325: }

327: /*@
328:   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
329:   It is valid to call the routine after a backward run.

331:   Not Collective

333:   Input Parameter:
334: . ts - the `TS` context obtained from `TSCreate()`

336:   Output Parameter:
337: . v - the vector containing the integrals for each cost function

339:   Level: intermediate

341: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
342: @*/
343: PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
344: {
345:   TS quadts;

347:   PetscFunctionBegin;
349:   PetscAssertPointer(v, 2);
350:   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
351:   *v = quadts->vec_sol; /* the quadrature TS solution vector holds the accumulated integrals; returned as a borrowed reference */
352:   PetscFunctionReturn(PETSC_SUCCESS);
353: }

355: /*@
356:   TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

358:   Input Parameters:
359: + ts - the `TS` context
360: . t  - current time
361: - U  - state vector, i.e. current solution

363:   Output Parameter:
364: . Q - vector of size numcost to hold the outputs

366:   Level: deprecated

368:   Note:
369:   Most users should not need to explicitly call this routine, as it
370:   is used internally within the sensitivity analysis context.

372: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
373: @*/
374: PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
375: {
376:   PetscFunctionBegin;

381:   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
382:   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
383:   else PetscCall(VecZeroEntries(Q)); /* no integrand set: the integral contribution is zero */
384:   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
385:   PetscFunctionReturn(PETSC_SUCCESS);
386: }

388: // PetscClangLinter pragma disable: -fdoc-*
389: /*@
390:   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

392:   Level: deprecated

394: @*/
395: PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
396: {
397:   PetscFunctionBegin;
398:   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);

402:   /* NOTE(review): ts->drdufunction is dereferenced unchecked; assumes TSSetCostIntegrand() supplied a non-NULL drduf */
402:   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
403:   PetscFunctionReturn(PETSC_SUCCESS);
404: }

406: // PetscClangLinter pragma disable: -fdoc-*
407: /*@
408:   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

410:   Level: deprecated

412: @*/
413: PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
414: {
415:   PetscFunctionBegin;
416:   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);

420:   /* NOTE(review): ts->drdpfunction is dereferenced unchecked; assumes TSSetCostIntegrand() supplied a non-NULL drdpf */
420:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
421:   PetscFunctionReturn(PETSC_SUCCESS);
422: }

424: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
425: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
426: /*@C
427:   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of `F` (IFunction) w.r.t. the state variable.

429:   Logically Collective

431:   Input Parameters:
432: + ts                   - `TS` context obtained from `TSCreate()`
433: . ihp1                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{UU}$
434: . ihessianproductfunc1 - vector-Hessian-vector product function for $F_{UU}$
435: . ihp2                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{UP}$
436: . ihessianproductfunc2 - vector-Hessian-vector product function for $F_{UP}$
437: . ihp3                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{PU}$
438: . ihessianproductfunc3 - vector-Hessian-vector product function for $F_{PU}$
439: . ihp4                 - an array of vectors storing the result of vector-Hessian-vector product for $F_{PP}$
440: . ihessianproductfunc4 - vector-Hessian-vector product function for $F_{PP}$
441: - ctx                  - [optional] function context

443:   Calling sequence of `ihessianproductfunc1`:
444: + ts  - the `TS` context
445: . t   - current timestep
446: . U   - input vector (current ODE solution)
447: . Vl  - an array of input vectors to be left-multiplied with the Hessian
448: . Vr  - input vector to be right-multiplied with the Hessian
449: . VHV - an array of output vectors for vector-Hessian-vector product
450: - ctx - [optional] function context

452:   Level: intermediate

454:   Notes:
455:   All other functions have the same calling sequence as `ihessianproductfunc1`, so their
456:   descriptions are omitted for brevity.

458:   The first Hessian function and the working array are required.
459:   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
460:   $Vl_n^T F_{UP} Vr$
461:   where the vector $Vl_n$ (n-th element in the array `Vl`) and `Vr` are of size $N$ and $M$ respectively, and the Hessian $F_{UP}$ is of size $N \times N \times M$.
462:   Each entry of $F_{UP}$ corresponds to the derivative
463:   $F_{UP}[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}$.
464:   The result of the vector-Hessian-vector product for $Vl_n$ needs to be stored in vector $VHV_n$ with the j-th entry being
465:   $VHV_n[j] = \sum_i \sum_k Vl_n[i] \, F_{UP}[i][j][k] \, Vr[k]$
466:   If the cost function is a scalar, there will be only one vector in `Vl` and `VHV`.

468: .seealso: [](ch_ts), `TS`
469: @*/
470: PetscErrorCode TSSetIHessianProduct(TS ts, Vec ihp1[], PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec ihp2[], PetscErrorCode (*ihessianproductfunc2)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec ihp3[], PetscErrorCode (*ihessianproductfunc3)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec ihp4[], PetscErrorCode (*ihessianproductfunc4)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), PetscCtx ctx)
471: {
472:   PetscFunctionBegin;
474:   PetscAssertPointer(ihp1, 2); /* the first working array is mandatory, so the ihp1 guard below is vestigial but kept for symmetry */

476:   ts->ihessianproductctx = ctx;
477:   if (ihp1) ts->vecs_fuu = ihp1;
478:   if (ihp2) ts->vecs_fup = ihp2;
479:   if (ihp3) ts->vecs_fpu = ihp3;
480:   if (ihp4) ts->vecs_fpp = ihp4;
481:   ts->ihessianproduct_fuu = ihessianproductfunc1;
482:   ts->ihessianproduct_fup = ihessianproductfunc2;
483:   ts->ihessianproduct_fpu = ihessianproductfunc3;
484:   ts->ihessianproduct_fpp = ihessianproductfunc4;
485:   PetscFunctionReturn(PETSC_SUCCESS);
486: }

488: /*@
489:   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

491:   Collective

493:   Input Parameters:
494: + ts - The `TS` context obtained from `TSCreate()`
495: . t  - the time
496: . U  - the solution at which to compute the Hessian product
497: . Vl - the array of input vectors to be multiplied with the Hessian from the left
498: - Vr - the input vector to be multiplied with the Hessian from the right

500:   Output Parameter:
501: . VHV - the array of output vectors that store the Hessian product

503:   Level: developer

505:   Note:
506:   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
507:   so most users would not generally call this routine themselves.

509: .seealso: [](ch_ts), `TSSetIHessianProduct()`
510: @*/
511: PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
512: {
513:   PetscFunctionBegin;
514:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output requested */

518:   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

520:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
521:   if (ts->rhshessianproduct_guu) {
522:     PetscInt nadj;
523:     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
524:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); /* flip sign: the RHS enters the implicit form with a minus, cf. TSComputeIJacobianP() */
525:   }
526:   PetscFunctionReturn(PETSC_SUCCESS);
527: }

529: /*@
530:   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

532:   Collective

534:   Input Parameters:
535: + ts - The `TS` context obtained from `TSCreate()`
536: . t  - the time
537: . U  - the solution at which to compute the Hessian product
538: . Vl - the array of input vectors to be multiplied with the Hessian from the left
539: - Vr - the input vector to be multiplied with the Hessian from the right

541:   Output Parameter:
542: . VHV - the array of output vectors that store the Hessian product

544:   Level: developer

546:   Note:
547:   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
548:   so most users would not generally call this routine themselves.

550: .seealso: [](ch_ts), `TSSetIHessianProduct()`
551: @*/
552: PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
553: {
554:   PetscFunctionBegin;
555:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output requested */

559:   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

561:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
562:   if (ts->rhshessianproduct_gup) {
563:     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
564:     for (PetscInt nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); /* flip sign: the RHS enters the implicit form with a minus */
565:   }
566:   PetscFunctionReturn(PETSC_SUCCESS);
567: }

569: /*@
570:   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

572:   Collective

574:   Input Parameters:
575: + ts - The `TS` context obtained from `TSCreate()`
576: . t  - the time
577: . U  - the solution at which to compute the Hessian product
578: . Vl - the array of input vectors to be multiplied with the Hessian from the left
579: - Vr - the input vector to be multiplied with the Hessian from the right

581:   Output Parameter:
582: . VHV - the array of output vectors that store the Hessian product

584:   Level: developer

586:   Note:
587:   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
588:   so most users would not generally call this routine themselves.

590: .seealso: [](ch_ts), `TSSetIHessianProduct()`
591: @*/
592: PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
593: {
594:   PetscFunctionBegin;
595:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output requested */

599:   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

601:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
602:   if (ts->rhshessianproduct_gpu) {
603:     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
604:     for (PetscInt nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); /* flip sign: the RHS enters the implicit form with a minus */
605:   }
606:   PetscFunctionReturn(PETSC_SUCCESS);
607: }

609: /*@C
610:   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

612:   Collective

614:   Input Parameters:
615: + ts - The `TS` context obtained from `TSCreate()`
616: . t  - the time
617: . U  - the solution at which to compute the Hessian product
618: . Vl - the array of input vectors to be multiplied with the Hessian from the left
619: - Vr - the input vector to be multiplied with the Hessian from the right

621:   Output Parameter:
622: . VHV - the array of output vectors that store the Hessian product

624:   Level: developer

626:   Note:
627:   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
628:   so most users would not generally call this routine themselves.

630: .seealso: [](ch_ts), `TSSetIHessianProduct()`
631: @*/
632: PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
633: {
634:   PetscFunctionBegin;
635:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output requested */

639:   /* label fixed from "3" to "4": fpp is the fourth product callback (ihessianproductfunc4); "3" was a copy-paste from the PU variant */
639:   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 4 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

641:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
642:   if (ts->rhshessianproduct_gpp) {
643:     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
644:     for (PetscInt nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); /* flip sign: the RHS enters the implicit form with a minus */
645:   }
646:   PetscFunctionReturn(PETSC_SUCCESS);
647: }

649: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
650: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
651: /*@C
652:   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
653:   product. The Hessian is the second-order derivative of `G` (RHSFunction) w.r.t. the state
654:   variable.

656:   Logically Collective

658:   Input Parameters:
659: + ts                     - `TS` context obtained from `TSCreate()`
660: . rhshp1                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{UU}$
661: . rhshessianproductfunc1 - vector-Hessian-vector product function for $G_{UU}$
662: . rhshp2                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{UP}$
663: . rhshessianproductfunc2 - vector-Hessian-vector product function for $G_{UP}$
664: . rhshp3                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{PU}$
665: . rhshessianproductfunc3 - vector-Hessian-vector product function for $G_{PU}$
666: . rhshp4                 - an array of vectors storing the result of vector-Hessian-vector product for $G_{PP}$
667: . rhshessianproductfunc4 - vector-Hessian-vector product function for $G_{PP}$
668: - ctx                    - [optional] function context

670:   Calling sequence of `rhshessianproductfunc1`:
671: + ts  - the `TS` context
672: . t   - current timestep
673: . U   - input vector (current ODE solution)
674: . Vl  - an array of input vectors to be left-multiplied with the Hessian
675: . Vr  - input vector to be right-multiplied with the Hessian
676: . VHV - an array of output vectors for vector-Hessian-vector product
677: - ctx - [optional] function context

679:   Level: intermediate

681:   Notes:
682:   All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
683:   descriptions are omitted for brevity.

685:   The first Hessian function and the working array are required.

687:   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
688:   $Vl_n^T G_{UP} Vr$
689:   where the vector $Vl_n$ (n-th element in the array $Vl$) and $Vr$ are of size $N$ and $M$ respectively, and the Hessian $G_{UP}$ is of size $N \times N \times M$.
690:   Each entry of $G_{UP}$ corresponds to the derivative
691:   $G_{UP}[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}$.
692:   The result of the vector-Hessian-vector product for $Vl_n$ needs to be stored in vector $VHV_n$ with j-th entry being
693:   $VHV_n[j] = \sum_i \sum_k Vl_n[i] \, G_{UP}[i][j][k] \, Vr[k]$
694:   If the cost function is a scalar, there will be only one vector in $Vl$ and $VHV$.

696: .seealso: `TS`, `TSAdjoint`
697: @*/
698: PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec rhshp1[], PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec rhshp2[], PetscErrorCode (*rhshessianproductfunc2)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec rhshp3[], PetscErrorCode (*rhshessianproductfunc3)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), Vec rhshp4[], PetscErrorCode (*rhshessianproductfunc4)(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[], PetscCtx ctx), PetscCtx ctx)
699: {
700:   PetscFunctionBegin;
702:   PetscAssertPointer(rhshp1, 2); /* the first working array is mandatory, so the rhshp1 guard below is vestigial but kept for symmetry */

704:   ts->rhshessianproductctx = ctx;
705:   if (rhshp1) ts->vecs_guu = rhshp1;
706:   if (rhshp2) ts->vecs_gup = rhshp2;
707:   if (rhshp3) ts->vecs_gpu = rhshp3;
708:   if (rhshp4) ts->vecs_gpp = rhshp4;
709:   ts->rhshessianproduct_guu = rhshessianproductfunc1;
710:   ts->rhshessianproduct_gup = rhshessianproductfunc2;
711:   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
712:   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
713:   PetscFunctionReturn(PETSC_SUCCESS);
714: }

716: /*@
717:   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for $G_{uu}$.

719:   Collective

721:   Input Parameters:
722: + ts - The `TS` context obtained from `TSCreate()`
723: . t  - the time
724: . U  - the solution at which to compute the Hessian product
725: . Vl - the array of input vectors to be multiplied with the Hessian from the left
726: - Vr - the input vector to be multiplied with the Hessian from the right

728:   Output Parameter:
729: . VHV - the array of output vectors that store the Hessian product

731:   Level: developer

733:   Note:
734:   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
735:   so most users would not generally call this routine themselves.

737: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
738: @*/
739: PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
740: {
741:   PetscFunctionBegin;
742:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output requested */

746:   /* NOTE(review): the callback pointer is dereferenced unchecked; callers guard on ts->rhshessianproduct_guu (see TSComputeIHessianProductFunctionUU()) */
746:   PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
747:   PetscFunctionReturn(PETSC_SUCCESS);
748: }

750: /*@
751:   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for $G_{up}$.

753:   Collective

755:   Input Parameters:
756: + ts - The `TS` context obtained from `TSCreate()`
757: . t  - the time
758: . U  - the solution at which to compute the Hessian product
759: . Vl - the array of input vectors to be multiplied with the Hessian from the left
760: - Vr - the input vector to be multiplied with the Hessian from the right

762:   Output Parameter:
763: . VHV - the array of output vectors that store the Hessian product

765:   Level: developer

767:   Note:
768:   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
769:   so most users would not generally call this routine themselves.

771: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
772: @*/
773: PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
774: {
775:   PetscFunctionBegin;
776:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output requested */

780:   /* NOTE(review): the callback pointer is dereferenced unchecked; callers guard on ts->rhshessianproduct_gup */
780:   PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
781:   PetscFunctionReturn(PETSC_SUCCESS);
782: }

784: /*@
785:   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for $G_{pu}$.

787:   Collective

789:   Input Parameters:
790: + ts - The `TS` context obtained from `TSCreate()`
791: . t  - the time
792: . U  - the solution at which to compute the Hessian product
793: . Vl - the array of input vectors to be multiplied with the Hessian from the left
794: - Vr - the input vector to be multiplied with the Hessian from the right

796:   Output Parameter:
797: . VHV - the array of output vectors that store the Hessian product

799:   Level: developer

801:   Note:
802:   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
803:   so most users would not generally call this routine themselves.

805: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
806: @*/
807: PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
808: {
809:   PetscFunctionBegin;
810:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* no output requested */

814:   /* NOTE(review): the callback pointer is dereferenced unchecked; callers guard on ts->rhshessianproduct_gpu */
814:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
815:   PetscFunctionReturn(PETSC_SUCCESS);
816: }

818: /*@
819:   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for $G_{pp}$.

821:   Collective

823:   Input Parameters:
824: + ts - The `TS` context obtained from `TSCreate()`
825: . t  - the time
826: . U  - the solution at which to compute the Hessian product
827: . Vl - the array of input vectors to be multiplied with the Hessian from the left
828: - Vr - the input vector to be multiplied with the Hessian from the right

830:   Output Parameter:
831: . VHV - the array of output vectors that store the Hessian product

833:   Level: developer

835:   Note:
836:   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
837:   so most users would not generally call this routine themselves.

839: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
840: @*/
841: PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
842: {
843:   PetscFunctionBegin;
844:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

848:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
849:   PetscFunctionReturn(PETSC_SUCCESS);
850: }

852: /* --------------------------- Adjoint sensitivity ---------------------------*/

854: /*@
855:   TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
856:   for use by the `TS` adjoint routines.

858:   Logically Collective

860:   Input Parameters:
861: + ts      - the `TS` context obtained from `TSCreate()`
862: . numcost - number of gradients to be computed, this is the number of cost functions
863: . lambda  - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
864: - mu      - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

866:   Level: beginner

868:   Notes:
869:   the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime  mu_i = df/dp|finaltime

871:   After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities

873: .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
874: @*/
875: PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec lambda[], Vec mu[])
876: {
877:   PetscFunctionBegin;
879:   PetscAssertPointer(lambda, 3);
880:   ts->vecs_sensi  = lambda;
881:   ts->vecs_sensip = mu;
882:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
883:   ts->numcost = numcost;
884:   PetscFunctionReturn(PETSC_SUCCESS);
885: }

887: /*@
888:   TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()`

890:   Not Collective, but the vectors returned are parallel if `TS` is parallel

892:   Input Parameter:
893: . ts - the `TS` context obtained from `TSCreate()`

895:   Output Parameters:
896: + numcost - size of returned arrays
897: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
898: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

900:   Level: intermediate

902: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
903: @*/
904: PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec *lambda[], Vec *mu[])
905: {
906:   PetscFunctionBegin;
908:   if (numcost) *numcost = ts->numcost;
909:   if (lambda) *lambda = ts->vecs_sensi;
910:   if (mu) *mu = ts->vecs_sensip;
911:   PetscFunctionReturn(PETSC_SUCCESS);
912: }

914: /*@
915:   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
916:   for use by the `TS` adjoint routines.

918:   Logically Collective

920:   Input Parameters:
921: + ts      - the `TS` context obtained from `TSCreate()`
922: . numcost - number of cost functions
923: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
924: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
925: - dir     - the direction vector that are multiplied with the Hessian of the cost functions

927:   Level: beginner

929:   Notes:
930:   Hessian of the cost function is completely different from Hessian of the ODE/DAE system

932:   For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

934:   After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.

936:   Passing `NULL` for `lambda2` disables the second-order calculation.

938: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
939: @*/
940: PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec lambda2[], Vec mu2[], Vec dir)
941: {
942:   PetscFunctionBegin;
944:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
945:   ts->numcost      = numcost;
946:   ts->vecs_sensi2  = lambda2;
947:   ts->vecs_sensi2p = mu2;
948:   ts->vec_dir      = dir;
949:   PetscFunctionReturn(PETSC_SUCCESS);
950: }

952: /*@
953:   TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()`

955:   Not Collective, but vectors returned are parallel if `TS` is parallel

957:   Input Parameter:
958: . ts - the `TS` context obtained from `TSCreate()`

960:   Output Parameters:
961: + numcost - number of cost functions
962: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
963: . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
964: - dir     - the direction vector that are multiplied with the Hessian of the cost functions

966:   Level: intermediate

968: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
969: @*/
970: PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec *lambda2[], Vec *mu2[], Vec *dir)
971: {
972:   PetscFunctionBegin;
974:   if (numcost) *numcost = ts->numcost;
975:   if (lambda2) *lambda2 = ts->vecs_sensi2;
976:   if (mu2) *mu2 = ts->vecs_sensi2p;
977:   if (dir) *dir = ts->vec_dir;
978:   PetscFunctionReturn(PETSC_SUCCESS);
979: }

/*@
  TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities

  Logically Collective

  Input Parameters:
+ ts   - the `TS` context obtained from `TSCreate()`
- didp - the derivative of initial values w.r.t. parameters

  Level: intermediate

  Notes:
  When computing sensitivities w.r.t. initial condition, set didp to `NULL` so that the solver will take it as an identity matrix mathematically.
  `TSAdjoint` does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver.

.seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
@*/
PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
{
  Mat          A;    /* single-column dense matrix that carries the initial tangent linear variable */
  Vec          sp;   /* work vector aliased onto the column of A */
  PetscScalar *xarr; /* raw storage of A's column, borrowed via MatDenseGetColumn() */
  PetscInt     lsize;

  PetscFunctionBegin;
  ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
  PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
  PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
  /* create a single-column dense matrix */
  PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
  PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

  /* Fill the column in place: sp borrows the column's storage, so writing to sp writes into A */
  PetscCall(VecDuplicate(ts->vec_sol, &sp));
  PetscCall(MatDenseGetColumn(A, 0, &xarr));
  PetscCall(VecPlaceArray(sp, xarr));
  if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
    if (didp) {
      PetscCall(MatMult(didp, ts->vec_dir, sp));
      PetscCall(VecScale(sp, 2.));
    } else {
      PetscCall(VecZeroEntries(sp));
    }
  } else { /* tangent linear variable initialized as dir */
    PetscCall(VecCopy(ts->vec_dir, sp));
  }
  /* Undo the aliasing before returning the column and destroying the work vector; order matters here */
  PetscCall(VecResetArray(sp));
  PetscCall(MatDenseRestoreColumn(A, &xarr));
  PetscCall(VecDestroy(&sp));

  PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

  PetscCall(MatDestroy(&A)); /* TSForwardSetInitialSensitivities() keeps its own reference -- TODO confirm */
  PetscFunctionReturn(PETSC_SUCCESS);
}

1036: /*@
1037:   TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context

1039:   Logically Collective

1041:   Input Parameter:
1042: . ts - the `TS` context obtained from `TSCreate()`

1044:   Level: intermediate

1046: .seealso: [](ch_ts), `TSAdjointSetForward()`
1047: @*/
1048: PetscErrorCode TSAdjointResetForward(TS ts)
1049: {
1050:   PetscFunctionBegin;
1051:   ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
1052:   PetscCall(TSForwardReset(ts));
1053:   PetscFunctionReturn(PETSC_SUCCESS);
1054: }

/*@
  TSAdjointSetUp - Sets up the internal data structures for the later use
  of an adjoint solver

  Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Level: advanced

.seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
@*/
PetscErrorCode TSAdjointSetUp(TS ts)
{
  TSTrajectory tj;
  PetscBool    match;

  PetscFunctionBegin;
  if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); /* idempotent: set up at most once */
  PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
  /* parameter sensitivities additionally require a JacobianP matrix (Jacp or Jacprhs) */
  PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
  PetscCall(TSGetTrajectory(ts, &tj));
  PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
  if (match) {
    PetscBool solution_only;
    PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
    /* the basic trajectory type in solution-only mode does not record enough
       checkpoint data for the adjoint run, so reject that combination */
    PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
  }
  PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */

  if (ts->quadraturets) { /* if there is integral in the cost function */
    /* work vectors for one column of dR/du (and dR/dp when parameter sensitivities are requested) */
    PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
    if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
  }

  /* let the TS implementation do its own adjoint setup, if it provides one */
  PetscTryTypeMethod(ts, adjointsetup);
  ts->adjointsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1098: /*@
1099:   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.

1101:   Collective

1103:   Input Parameter:
1104: . ts - the `TS` context obtained from `TSCreate()`

1106:   Level: beginner

1108: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1109: @*/
1110: PetscErrorCode TSAdjointReset(TS ts)
1111: {
1112:   PetscFunctionBegin;
1114:   PetscTryTypeMethod(ts, adjointreset);
1115:   if (ts->quadraturets) { /* if there is integral in the cost function */
1116:     PetscCall(VecDestroy(&ts->vec_drdu_col));
1117:     if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1118:   }
1119:   ts->vecs_sensi         = NULL;
1120:   ts->vecs_sensip        = NULL;
1121:   ts->vecs_sensi2        = NULL;
1122:   ts->vecs_sensi2p       = NULL;
1123:   ts->vec_dir            = NULL;
1124:   ts->adjointsetupcalled = PETSC_FALSE;
1125:   PetscFunctionReturn(PETSC_SUCCESS);
1126: }

/*@
  TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

  Logically Collective

  Input Parameters:
+ ts    - the `TS` context obtained from `TSCreate()`
- steps - number of steps to use

  Level: intermediate

  Notes:
  Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. One can call this
  so as to integrate back to less than the original timestep

.seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
@*/
PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
{
  PetscFunctionBegin;

  /* the adjoint can only retrace steps the forward solve actually took */
  PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
  PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
  ts->adjoint_max_steps = steps;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// PetscClangLinter pragma disable: -fdoc-*
/*@C
  TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`

  Level: deprecated

  Note:
  NOTE(review): this deprecated routine stores the matrix in `ts->Jacp`, while
  `TSSetRHSJacobianP()` stores it in `ts->Jacprhs` -- confirm the difference is intentional.
@*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), PetscCtx ctx)
{
  PetscFunctionBegin;

  ts->rhsjacobianp    = func;
  ts->rhsjacobianpctx = ctx;
  if (Amat) {
    /* reference first so replacing ts->Jacp with the same matrix is safe */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// PetscClangLinter pragma disable: -fdoc-*
/*@C
  TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`

  Level: deprecated
@*/
PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
{
  PetscFunctionBegin;

  /* forward directly to the user-registered JacobianP callback */
  PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// PetscClangLinter pragma disable: -fdoc-*
/*@
  TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

  Level: deprecated
@*/
PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
{
  PetscFunctionBegin;

  /* forward directly to the user-registered DRDY callback */
  PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// PetscClangLinter pragma disable: -fdoc-*
/*@
  TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

  Level: deprecated
@*/
PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
{
  PetscFunctionBegin;

  /* forward directly to the user-registered DRDP callback */
  PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1227: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1228: /*@C
1229:   TSAdjointMonitorSensi - monitors the first lambda sensitivity

1231:   Level: intermediate

1233: .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1234: @*/
1235: static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1236: {
1237:   PetscViewer viewer = vf->viewer;

1239:   PetscFunctionBegin;
1241:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1242:   PetscCall(VecView(lambda[0], viewer));
1243:   PetscCall(PetscViewerPopFormat(viewer));
1244:   PetscFunctionReturn(PETSC_SUCCESS);
1245: }

/*@C
  TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user

  Collective

  Input Parameters:
+ ts           - `TS` object you wish to monitor
. name         - the monitor type one is seeking
. help         - message indicating what monitoring is done
. manual       - manual page for the monitor
. monitor      - the monitor function, its context must be a `PetscViewerAndFormat`
- monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects

  Calling sequence of `monitor`:
+ ts      - the `TS` context
. step    - iteration number (after the final time step the monitor routine is called with
                a step of -1, this is at the final time which may have been interpolated to)
. time    - current time
. u       - current iterate
. numcost - number of cost functions
. lambda  - sensitivities to initial conditions
. mu      - sensitivities to parameters
- vf      - the `PetscViewer` and format the monitor is using

  Calling sequence of `monitorsetup`:
+ ts - the `TS` object being monitored
- vf - the `PetscViewer` and format the monitor is using

  Level: developer

.seealso: [](ch_ts), `PetscOptionsCreateViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
          `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`,
          `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
          `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
          `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
          `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
          `PetscOptionsFList()`, `PetscOptionsEList()`, `PetscViewerAndFormat`
@*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS ts, PetscInt step, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf), PetscErrorCode (*monitorsetup)(TS ts, PetscViewerAndFormat *vf))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  /* ask the options database whether the user requested this monitor, and with which viewer/format */
  PetscCall(PetscOptionsCreateViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
  if (flg) {
    PetscViewerAndFormat *vf;
    PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
    /* drop our reference; the PetscViewerAndFormat keeps the viewer alive */
    PetscCall(PetscViewerDestroy(&viewer));
    if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
    /* register with vf as the context; it is destroyed along with the monitor */
    PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscCtx))monitor, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

1303: /*@C
1304:   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1305:   timestep to display the iteration's  progress.

1307:   Logically Collective

1309:   Input Parameters:
1310: + ts              - the `TS` context obtained from `TSCreate()`
1311: . adjointmonitor  - monitoring routine
1312: . adjointmctx     - [optional] context for private data for the monitor routine (use `NULL` if no context is desired)
1313: - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for its calling sequence

1315:   Calling sequence of `adjointmonitor`:
1316: + ts          - the `TS` context
1317: . steps       - iteration number (after the final time step the monitor routine is called with
1318:                 a step of -1, this is at the final time which may have been interpolated to)
1319: . time        - current time
1320: . u           - current iterate
1321: . numcost     - number of cost functions
1322: . lambda      - sensitivities to initial conditions
1323: . mu          - sensitivities to parameters
1324: - adjointmctx - [optional] adjoint monitoring context

1326:   Level: intermediate

1328:   Note:
1329:   This routine adds an additional monitor to the list of monitors that
1330:   already has been loaded.

1332:   Fortran Notes:
1333:   Only a single monitor function can be set for each `TS` object

1335: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`, `PetscCtxDestroyFn`
1336: @*/
1337: PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, PetscCtx adjointmctx), PetscCtx adjointmctx, PetscCtxDestroyFn *adjointmdestroy)
1338: {
1339:   PetscFunctionBegin;
1341:   for (PetscInt i = 0; i < ts->numbermonitors; i++) {
1342:     PetscBool identical;

1344:     PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1345:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1346:   }
1347:   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1348:   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1349:   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1350:   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = adjointmctx;
1351:   PetscFunctionReturn(PETSC_SUCCESS);
1352: }

1354: /*@C
1355:   TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.

1357:   Logically Collective

1359:   Input Parameter:
1360: . ts - the `TS` context obtained from `TSCreate()`

1362:   Notes:
1363:   There is no way to remove a single, specific monitor.

1365:   Level: intermediate

1367: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1368: @*/
1369: PetscErrorCode TSAdjointMonitorCancel(TS ts)
1370: {
1371:   PetscFunctionBegin;
1373:   for (PetscInt i = 0; i < ts->numberadjointmonitors; i++) {
1374:     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1375:   }
1376:   ts->numberadjointmonitors = 0;
1377:   PetscFunctionReturn(PETSC_SUCCESS);
1378: }

1380: /*@C
1381:   TSAdjointMonitorDefault - the default monitor of adjoint computations

1383:   Input Parameters:
1384: + ts      - the `TS` context
1385: . step    - iteration number (after the final time step the monitor routine is called with a
1386: step of -1, this is at the final time which may have been interpolated to)
1387: . time    - current time
1388: . v       - current iterate
1389: . numcost - number of cost functions
1390: . lambda  - sensitivities to initial conditions
1391: . mu      - sensitivities to parameters
1392: - vf      - the viewer and format

1394:   Level: intermediate

1396: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1397: @*/
1398: PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec lambda[], Vec mu[], PetscViewerAndFormat *vf)
1399: {
1400:   PetscViewer viewer = vf->viewer;

1402:   PetscFunctionBegin;
1403:   (void)v;
1404:   (void)numcost;
1405:   (void)lambda;
1406:   (void)mu;
1408:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1409:   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1410:   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
1411:   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1412:   PetscCall(PetscViewerPopFormat(viewer));
1413:   PetscFunctionReturn(PETSC_SUCCESS);
1414: }

/*@C
  TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
  `VecView()` for the sensitivities to initial states at each timestep

  Collective

  Input Parameters:
+ ts      - the `TS` context
. step    - current time-step
. ptime   - current time
. u       - current state
. numcost - number of cost functions
. lambda  - sensitivities to initial conditions
. mu      - sensitivities to parameters
- dummy   - either a viewer or `NULL`

  Level: intermediate

.seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
@*/
PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[], void *dummy)
{
  TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
  PetscDraw        draw;
  PetscReal        xl, yl, xr, yr, h;
  char             time[32]; /* caption text drawn above the plot */

  PetscFunctionBegin;
  /* draw only every howoften steps; howoften == -1 means draw only once the solver has a converged/diverged reason */
  if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);

  /* only the first initial-condition sensitivity is displayed */
  PetscCall(VecView(lambda[0], ictx->viewer));
  PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
  PetscCall(PetscSNPrintf(time, 32, "Timestep %" PetscInt_FMT " Time %g", step, (double)ptime));
  PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
  h = yl + .95 * (yr - yl); /* place the caption near the top of the drawing area */
  PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
  PetscCall(PetscDrawFlush(draw));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from options database.

  Collective

  Input Parameters:
+ ts                 - the `TS` context
- PetscOptionsObject - the options context

  Options Database Keys:
+ -ts_adjoint_solve (yes|no)     - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`)
. -ts_adjoint_monitor            - print information at each adjoint time step
- -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically

  Level: developer

  Note:
  This is not normally called directly by users

.seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
@*/
PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems PetscOptionsObject)
{
  PetscBool tflg, opt;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
  tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
  if (opt) {
    /* the adjoint run replays the forward trajectory, so turn on trajectory saving */
    PetscCall(TSSetSaveTrajectory(ts));
    ts->adjoint_solve = tflg;
  }
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
  opt = PETSC_FALSE;
  PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
  if (opt) {
    TSMonitorDrawCtx ctx;
    PetscInt         howoften = 1; /* default: draw at every adjoint step */

    PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
    PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
    /* ctx ownership passes to the monitor list; it is freed by the destroy routine */
    PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscCtxDestroyFn *)TSMonitorDrawCtxDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSAdjointStep - Steps one time step backward in the adjoint run

  Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Level: intermediate

.seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
@*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM dm; /* NOTE(review): dm appears unused in the visible body; TSGetDM() may be kept for its side effect of creating a default DM -- confirm */

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &dm));
  PetscCall(TSAdjointSetUp(ts)); /* no-op if setup was already done */
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason     = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
  /* the actual backward step is implementation-specific */
  PetscUseTypeMethod(ts, adjointstep);
  PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
  ts->adjoint_steps++;

  if (ts->reason < 0) {
    /* a negative reason means divergence; optionally escalate to a hard error */
    PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    /* stop once the requested number of backward steps has been taken */
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

1542: /*@
  TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

  Collective

1548:   Input Parameter:
1549: . ts - the `TS` context obtained from `TSCreate()`

1551:   Options Database Key:
1552: . -ts_adjoint_view_solution viewerinfo - views the first gradient with respect to the initial values

1554:   Level: intermediate

1556:   Notes:
1557:   This must be called after a call to `TSSolve()` that solves the forward problem

1559:   By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time

1561: .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1562: @*/
1563: PetscErrorCode TSAdjointSolve(TS ts)
1564: {
1565:   static PetscBool cite = PETSC_FALSE;
1566: #if defined(TSADJOINT_STAGE)
1567:   PetscLogStage adjoint_stage;
1568: #endif

1570:   PetscFunctionBegin;
1572:   PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
1573:                                    "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
1574:                                    "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
1575:                                    "  journal       = {SIAM Journal on Scientific Computing},\n"
1576:                                    "  volume        = {44},\n"
1577:                                    "  number        = {1},\n"
1578:                                    "  pages         = {C1-C24},\n"
1579:                                    "  doi           = {10.1137/21M140078X},\n"
1580:                                    "  year          = {2022}\n}\n",
1581:                                    &cite));
1582: #if defined(TSADJOINT_STAGE)
1583:   PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
1584:   PetscCall(PetscLogStagePush(adjoint_stage));
1585: #endif
1586:   PetscCall(TSAdjointSetUp(ts));

1588:   /* reset time step and iteration counters */
1589:   ts->adjoint_steps     = 0;
1590:   ts->ksp_its           = 0;
1591:   ts->snes_its          = 0;
1592:   ts->num_snes_failures = 0;
1593:   ts->reject            = 0;
1594:   ts->reason            = TS_CONVERGED_ITERATING;

1596:   if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
1597:   if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

1599:   while (!ts->reason) {
1600:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1601:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1602:     PetscCall(TSAdjointEventHandler(ts));
1603:     PetscCall(TSAdjointStep(ts));
1604:     if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
1605:   }
1606:   if (!ts->steps) {
1607:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1608:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1609:   }
1610:   ts->solvetime = ts->ptime;
1611:   PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
1612:   PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
1613:   ts->adjoint_max_steps = 0;
1614: #if defined(TSADJOINT_STAGE)
1615:   PetscCall(PetscLogStagePop());
1616: #endif
1617:   PetscFunctionReturn(PETSC_SUCCESS);
1618: }

1620: /*@C
1621:   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`

1623:   Collective

1625:   Input Parameters:
1626: + ts      - time stepping context obtained from `TSCreate()`
1627: . step    - step number that has just completed
1628: . ptime   - model time of the state
1629: . u       - state at the current model time
1630: . numcost - number of cost functions (dimension of lambda  or mu)
1631: . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1632: - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters

1634:   Level: developer

1636:   Note:
1637:   `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1638:   Users would almost never call this routine directly.

1640: .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1641: @*/
1642: PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[])
1643: {
1644:   PetscInt i, n = ts->numberadjointmonitors;

1646:   PetscFunctionBegin;
1649:   PetscCall(VecLockReadPush(u));
1650:   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1651:   PetscCall(VecLockReadPop(u));
1652:   PetscFunctionReturn(PETSC_SUCCESS);
1653: }

/*@
  TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

  Collective

  Input Parameter:
. ts - time stepping context

  Level: advanced

  Notes:
  This function cannot be called until `TSAdjointStep()` has been completed.

.seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
@*/
PetscErrorCode TSAdjointCostIntegral(TS ts)
{
  PetscFunctionBegin;
  /* dispatch to the TS implementation; errors if the type provides no adjointintegral method */
  PetscUseTypeMethod(ts, adjointintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}

1678: /* ------------------ Forward (tangent linear) sensitivity  ------------------*/

1680: /*@
1681:   TSForwardSetUp - Sets up the internal data structures for the later use
1682:   of forward sensitivity analysis

1684:   Collective

1686:   Input Parameter:
1687: . ts - the `TS` context obtained from `TSCreate()`

1689:   Level: advanced

1691: .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1692: @*/
1693: PetscErrorCode TSForwardSetUp(TS ts)
1694: {
1695:   PetscFunctionBegin;
1697:   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1698:   PetscTryTypeMethod(ts, forwardsetup);
1699:   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1700:   ts->forwardsetupcalled = PETSC_TRUE;
1701:   PetscFunctionReturn(PETSC_SUCCESS);
1702: }

1704: /*@
1705:   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

1707:   Collective

1709:   Input Parameter:
1710: . ts - the `TS` context obtained from `TSCreate()`

1712:   Level: advanced

1714: .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1715: @*/
1716: PetscErrorCode TSForwardReset(TS ts)
1717: {
1718:   TS quadts = ts->quadraturets;

1720:   PetscFunctionBegin;
1722:   PetscTryTypeMethod(ts, forwardreset);
1723:   PetscCall(MatDestroy(&ts->mat_sensip));
1724:   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1725:   PetscCall(VecDestroy(&ts->vec_sensip_col));
1726:   ts->forward_solve      = PETSC_FALSE;
1727:   ts->forwardsetupcalled = PETSC_FALSE;
1728:   PetscFunctionReturn(PETSC_SUCCESS);
1729: }

/*@
  TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

  Input Parameters:
+ ts        - the `TS` context obtained from `TSCreate()`
. numfwdint - number of integrals
- vp        - the vectors containing the gradients for each integral w.r.t. parameters

  Level: deprecated

.seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec vp[])
{
  PetscFunctionBegin;
  /* NOTE(review): the message cites TSSetCostIntegrand() on both sides of the mismatch;
     the second mention presumably refers to this call's numfwdint -- confirm and reword upstream */
  PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()");
  if (!ts->numcost) ts->numcost = numfwdint;

  /* only the array pointer is stored (no reference is taken); the caller keeps ownership of vp */
  ts->vecs_integral_sensip = vp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1754: /*@
1755:   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

1757:   Input Parameter:
1758: . ts - the `TS` context obtained from `TSCreate()`

1760:   Output Parameters:
1761: + numfwdint - number of integrals
1762: - vp        - the vectors containing the gradients for each integral w.r.t. parameters

1764:   Level: deprecated

1766: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1767: @*/
1768: PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec *vp[])
1769: {
1770:   PetscFunctionBegin;
1772:   PetscAssertPointer(vp, 3);
1773:   if (numfwdint) *numfwdint = ts->numcost;
1774:   if (vp) *vp = ts->vecs_integral_sensip;
1775:   PetscFunctionReturn(PETSC_SUCCESS);
1776: }

1778: /*@
1779:   TSForwardStep - Compute the forward sensitivity for one time step.

1781:   Collective

1783:   Input Parameter:
1784: . ts - time stepping context

1786:   Level: advanced

1788:   Notes:
1789:   This function cannot be called until `TSStep()` has been completed.

1791: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1792: @*/
1793: PetscErrorCode TSForwardStep(TS ts)
1794: {
1795:   PetscFunctionBegin;
1797:   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1798:   PetscUseTypeMethod(ts, forwardstep);
1799:   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1800:   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1801:   PetscFunctionReturn(PETSC_SUCCESS);
1802: }

1804: /*@
1805:   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution  w.r.t. the problem parameters and initial values.

1807:   Logically Collective

1809:   Input Parameters:
1810: + ts   - the `TS` context obtained from `TSCreate()`
1811: . nump - number of parameters
1812: - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

1814:   Level: beginner

1816:   Notes:
1817:   Use `PETSC_DETERMINE` to use the number of columns of `Smat` for `nump`

1819:   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1820:   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1821:   You must call this function before `TSSolve()`.
1822:   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.

1824: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1825: @*/
1826: PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1827: {
1828:   PetscFunctionBegin;
1831:   ts->forward_solve = PETSC_TRUE;
1832:   if (nump == PETSC_DEFAULT || nump == PETSC_DETERMINE) PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1833:   else ts->num_parameters = nump;
1834:   PetscCall(PetscObjectReference((PetscObject)Smat));
1835:   PetscCall(MatDestroy(&ts->mat_sensip));
1836:   ts->mat_sensip = Smat;
1837:   PetscFunctionReturn(PETSC_SUCCESS);
1838: }

1840: /*@
1841:   TSForwardGetSensitivities - Returns the trajectory sensitivities

1843:   Not Collective, but Smat returned is parallel if ts is parallel

1845:   Output Parameters:
1846: + ts   - the `TS` context obtained from `TSCreate()`
1847: . nump - number of parameters
1848: - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

1850:   Level: intermediate

1852: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1853: @*/
1854: PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1855: {
1856:   PetscFunctionBegin;
1858:   if (nump) *nump = ts->num_parameters;
1859:   if (Smat) *Smat = ts->mat_sensip;
1860:   PetscFunctionReturn(PETSC_SUCCESS);
1861: }

/*@
  TSForwardCostIntegral - Evaluate the cost integral in the forward run.

  Collective

  Input Parameter:
. ts - time stepping context

  Level: advanced

  Note:
  This function cannot be called until `TSStep()` has been completed.

.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
@*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  /* dispatch to the TS implementation; errors if the type provides no forwardintegral method */
  PetscUseTypeMethod(ts, forwardintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

  Collective

  Input Parameters:
+ ts   - the `TS` context obtained from `TSCreate()`
- didp - parametric sensitivities of the initial condition

  Level: intermediate

  Notes:
  `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way.
  This function is used to set initial values for tangent linear variables.

.seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
@*/
PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
{
  PetscFunctionBegin;
  /* no-op when sensitivities were already set; PETSC_DETERMINE takes the parameter count from the columns of didp */
  if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DETERMINE, didp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1912: /*@
1913:   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

1915:   Input Parameter:
1916: . ts - the `TS` context obtained from `TSCreate()`

1918:   Output Parameters:
1919: + ns - number of stages
1920: - S  - tangent linear sensitivities at the intermediate stages

1922:   Level: advanced

1924: .seealso: `TS`
1925: @*/
1926: PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1927: {
1928:   PetscFunctionBegin;

1931:   if (!ts->ops->getstages) *S = NULL;
1932:   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1933:   PetscFunctionReturn(PETSC_SUCCESS);
1934: }

1936: /*@
1937:   TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time

1939:   Input Parameters:
1940: + ts  - the `TS` context obtained from `TSCreate()`
1941: - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run

1943:   Output Parameter:
1944: . quadts - the child `TS` context

1946:   Level: intermediate

1948: .seealso: [](ch_ts), `TSGetQuadratureTS()`
1949: @*/
1950: PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1951: {
1952:   char prefix[128];

1954:   PetscFunctionBegin;
1956:   PetscAssertPointer(quadts, 3);
1957:   PetscCall(TSDestroy(&ts->quadraturets));
1958:   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1959:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1960:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1961:   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1962:   *quadts = ts->quadraturets;

1964:   if (ts->numcost) {
1965:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1966:   } else {
1967:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1968:   }
1969:   ts->costintegralfwd = fwd;
1970:   PetscFunctionReturn(PETSC_SUCCESS);
1971: }

1973: /*@
1974:   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

1976:   Input Parameter:
1977: . ts - the `TS` context obtained from `TSCreate()`

1979:   Output Parameters:
1980: + fwd    - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1981: - quadts - the child `TS` context

1983:   Level: intermediate

1985: .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1986: @*/
1987: PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1988: {
1989:   PetscFunctionBegin;
1991:   if (fwd) *fwd = ts->costintegralfwd;
1992:   if (quadts) *quadts = ts->quadraturets;
1993:   PetscFunctionReturn(PETSC_SUCCESS);
1994: }

/*@
  TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

  Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
- x  - state vector

  Output Parameters:
+ J    - Jacobian matrix
- Jpre - matrix used to compute the preconditioner for `J` (may be same as `J`)

  Level: developer

  Note:
  Uses finite differencing when `TS` Jacobian is not available.

.seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
@*/
PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
{
  SNES snes                                          = ts->snes;
  PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

  PetscFunctionBegin;
  /*
    Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
    because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
    explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
    coloring is used.
  */
  PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
  if (jac == SNESComputeJacobianDefaultColor) {
    Vec f;
    /* make x the current SNES solution so the FD coloring differences around the right state */
    PetscCall(SNESSetSolution(snes, x));
    PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
    /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
    PetscCall(SNESComputeFunction(snes, x, f));
  }
  PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
  PetscFunctionReturn(PETSC_SUCCESS);
}