Actual source code: tssen.c
1: #include <petsc/private/tsimpl.h>
2: #include <petscdraw.h>
4: PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;
6: /* #define TSADJOINT_STAGE */
8: /* ------------------------ Sensitivity Context ---------------------------*/
10: /*@C
11: TSSetRHSJacobianP - Sets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.
13: Logically Collective
15: Input Parameters:
16: + ts - `TS` context obtained from `TSCreate()`
17: . Amat - JacobianP matrix
18: . func - function
19: - ctx - [optional] user-defined function context
21: Level: intermediate
23: Note:
24:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`
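  Example usage, a minimal sketch: it assumes a model whose right-hand side depends on a single
  parameter p[0] through a term p[0]*u[0], so that dG_0/dp_0 = u[0]; the dense n x 1 matrix Jacp,
  the callback name MyRHSJacobianP, and the context user are hypothetical placeholders.
.vb
  static PetscErrorCode MyRHSJacobianP(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
  {
    const PetscScalar *u;
    PetscScalar        v;
    PetscInt           row = 0, col = 0;

    PetscFunctionBeginUser;
    PetscCall(VecGetArrayRead(U, &u));
    v = u[0]; /* dG_0/dp_0 for this hypothetical model */
    PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
    PetscCall(VecRestoreArrayRead(U, &u));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSSetRHSJacobianP(ts, Jacp, MyRHSJacobianP, &user));
.ve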
26: .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()`
27: @*/
28: PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, void *ctx)
29: {
30: PetscFunctionBegin;
34: ts->rhsjacobianp = func;
35: ts->rhsjacobianpctx = ctx;
36: if (Amat) {
37: PetscCall(PetscObjectReference((PetscObject)Amat));
38: PetscCall(MatDestroy(&ts->Jacprhs));
39: ts->Jacprhs = Amat;
40: }
41: PetscFunctionReturn(PETSC_SUCCESS);
42: }
44: /*@C
45:   TSGetRHSJacobianP - Gets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.
47: Logically Collective
49: Input Parameter:
50: . ts - `TS` context obtained from `TSCreate()`
52: Output Parameters:
53: + Amat - JacobianP matrix
54: . func - function
55: - ctx - [optional] user-defined function context
57: Level: intermediate
59: Note:
60:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`
62: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn`
63: @*/
64: PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, void **ctx)
65: {
66: PetscFunctionBegin;
67: if (func) *func = ts->rhsjacobianp;
68: if (ctx) *ctx = ts->rhsjacobianpctx;
69: if (Amat) *Amat = ts->Jacprhs;
70: PetscFunctionReturn(PETSC_SUCCESS);
71: }
73: /*@C
74: TSComputeRHSJacobianP - Runs the user-defined JacobianP function.
76: Collective
78: Input Parameters:
79: + ts - The `TS` context obtained from `TSCreate()`
80: . t - the time
81: - U - the solution at which to compute the Jacobian
83: Output Parameter:
84: . Amat - the computed Jacobian
86: Level: developer
88: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
89: @*/
90: PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
91: {
92: PetscFunctionBegin;
93: if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
97: if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
98: else {
99: PetscBool assembled;
100: PetscCall(MatZeroEntries(Amat));
101: PetscCall(MatAssembled(Amat, &assembled));
102: if (!assembled) {
103: PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
104: PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
105: }
106: }
107: PetscFunctionReturn(PETSC_SUCCESS);
108: }
110: /*@C
111: TSSetIJacobianP - Sets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.
113: Logically Collective
115: Input Parameters:
116: + ts - `TS` context obtained from `TSCreate()`
117: . Amat - JacobianP matrix
118: . func - function
119: - ctx - [optional] user-defined function context
121: Calling sequence of `func`:
122: + ts - the `TS` context
123: . t - current timestep
124: . U - input vector (current ODE solution)
125: . Udot - time derivative of state vector
126: . shift - shift to apply, see the note in `TSSetIJacobian()`
127: . A - output matrix
128: - ctx - [optional] user-defined function context
130: Level: intermediate
132: Note:
133:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`
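  Example usage, a minimal sketch: it assumes an implicit form F(Udot,U,p,t) = Udot - p[0]*u with a
  single state and a single parameter, so that dF_0/dp_0 = -u[0]; the matrix Jacp, the callback name
  MyIJacobianP, and the context user are hypothetical placeholders.
.vb
  static PetscErrorCode MyIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx)
  {
    const PetscScalar *u;
    PetscScalar        v;
    PetscInt           row = 0, col = 0;

    PetscFunctionBeginUser;
    PetscCall(VecGetArrayRead(U, &u));
    v = -u[0]; /* dF_0/dp_0 for this hypothetical model */
    PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
    PetscCall(VecRestoreArrayRead(U, &u));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSSetIJacobianP(ts, Jacp, MyIJacobianP, &user));
.ve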
135: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136: @*/
137: PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void *ctx)
138: {
139: PetscFunctionBegin;
143: ts->ijacobianp = func;
144: ts->ijacobianpctx = ctx;
145: if (Amat) {
146: PetscCall(PetscObjectReference((PetscObject)Amat));
147: PetscCall(MatDestroy(&ts->Jacp));
148: ts->Jacp = Amat;
149: }
150: PetscFunctionReturn(PETSC_SUCCESS);
151: }
153: /*@C
154:   TSGetIJacobianP - Gets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.
156: Logically Collective
158: Input Parameter:
159: . ts - `TS` context obtained from `TSCreate()`
161: Output Parameters:
162: + Amat - JacobianP matrix
163: . func - the function that computes the JacobianP
164: - ctx - [optional] user-defined function context
166: Calling sequence of `func`:
167: + ts - the `TS` context
168: . t - current timestep
169: . U - input vector (current ODE solution)
170: . Udot - time derivative of state vector
171: . shift - shift to apply, see the note in `TSSetIJacobian()`
172: . A - output matrix
173: - ctx - [optional] user-defined function context
175: Level: intermediate
177: Note:
178:   `Amat` has the same number of rows and the same row parallel layout as `u`; it has the same number of columns and parallel layout as `p`
180: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSSetIJacobianP()`, `TSGetRHSJacobianP()`
181: @*/
182: PetscErrorCode TSGetIJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void **ctx)
183: {
184: PetscFunctionBegin;
187: if (func) *func = ts->ijacobianp;
188: if (ctx) *ctx = ts->ijacobianpctx;
189: if (Amat) *Amat = ts->Jacp;
190: PetscFunctionReturn(PETSC_SUCCESS);
191: }
193: /*@
194: TSComputeIJacobianP - Runs the user-defined IJacobianP function.
196: Collective
198: Input Parameters:
199: + ts - the `TS` context
200: . t - current timestep
201: . U - state vector
202: . Udot - time derivative of state vector
203: . shift - shift to apply, see note below
204: - imex - flag indicates if the method is IMEX so that the `RHSJacobianP` should be kept separate
206: Output Parameter:
207: . Amat - Jacobian matrix
209: Level: developer
211: .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
212: @*/
213: PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
214: {
215: PetscFunctionBegin;
216: if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
221: PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
222: if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
223: if (imex) {
224: if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
225: PetscBool assembled;
226: PetscCall(MatZeroEntries(Amat));
227: PetscCall(MatAssembled(Amat, &assembled));
228: if (!assembled) {
229: PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
230: PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
231: }
232: }
233: } else {
234: if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
235: if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
236: PetscCall(MatScale(Amat, -1));
237: } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
238: MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
239: if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
240: PetscCall(MatZeroEntries(Amat));
241: }
242: PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
243: }
244: }
245: PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
246: PetscFunctionReturn(PETSC_SUCCESS);
247: }
249: /*@C
250: TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions
252: Logically Collective
254: Input Parameters:
255: + ts - the `TS` context obtained from `TSCreate()`
256: . numcost - number of gradients to be computed, this is the number of cost functions
257: . costintegral - vector that stores the integral values
258: . rf - routine for evaluating the integrand function
259: . drduf - function that computes the gradients of r with respect to u
260: . drdpf - function that computes the gradients of r with respect to p; can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
261: . fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
262: - ctx - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)
264: Calling sequence of `rf`:
265: + ts - the integrator
266: . t - the time
267: . U - the solution
268: . F - the computed value of the function
269: - ctx - the user context
271: Calling sequence of `drduf`:
272: + ts - the integrator
273: . t - the time
274: . U - the solution
275: . dRdU - the computed gradients of r with respect to u
276: - ctx - the user context
278: Calling sequence of `drdpf`:
279: + ts - the integrator
280: . t - the time
281: . U - the solution
282: . dRdP - the computed gradients of r with respect to p
283: - ctx - the user context
285: Level: deprecated
287: Notes:
288:   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions.
290: Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead
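  For reference, a minimal sketch of the quadrature-based replacement for the integrand and its
  derivatives; the callbacks CostIntegrand, DRDUJacobian, and DRDPJacobian, the matrices DRDU and
  DRDP (sized for the number of cost functions), and the context user are hypothetical placeholders
  for problem-specific code.
.vb
  TS quadts;

  PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));              /* PETSC_TRUE: evaluate the integral in the forward run */
  PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user));       /* the integrand r(u,p,t) */
  PetscCall(TSSetRHSJacobian(quadts, DRDU, DRDU, DRDUJacobian, &user));  /* derivative of the integrand w.r.t. u */
  PetscCall(TSSetRHSJacobianP(quadts, DRDP, DRDPJacobian, &user));       /* derivative of the integrand w.r.t. p */
.ve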
292: .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
293: `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
294: @*/
295: PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, void *ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx), PetscBool fwd, void *ctx)
296: {
297: PetscFunctionBegin;
300: PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
301: if (!ts->numcost) ts->numcost = numcost;
303: if (costintegral) {
304: PetscCall(PetscObjectReference((PetscObject)costintegral));
305: PetscCall(VecDestroy(&ts->vec_costintegral));
306: ts->vec_costintegral = costintegral;
307: } else {
308: if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
309: PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
310: } else {
311: PetscCall(VecSet(ts->vec_costintegral, 0.0));
312: }
313: }
314: if (!ts->vec_costintegrand) {
315: PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
316: } else {
317: PetscCall(VecSet(ts->vec_costintegrand, 0.0));
318: }
319: ts->costintegralfwd = fwd; /* Evaluate the cost integral in forward run if fwd is true */
320: ts->costintegrand = rf;
321: ts->costintegrandctx = ctx;
322: ts->drdufunction = drduf;
323: ts->drdpfunction = drdpf;
324: PetscFunctionReturn(PETSC_SUCCESS);
325: }
327: /*@
328: TSGetCostIntegral - Returns the values of the integral term in the cost functions.
329: It is valid to call the routine after a backward run.
331: Not Collective
333: Input Parameter:
334: . ts - the `TS` context obtained from `TSCreate()`
336: Output Parameter:
337: . v - the vector containing the integrals for each cost function
339: Level: intermediate
341: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
342: @*/
343: PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
344: {
345: TS quadts;
347: PetscFunctionBegin;
349: PetscAssertPointer(v, 2);
350: PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
351: *v = quadts->vec_sol;
352: PetscFunctionReturn(PETSC_SUCCESS);
353: }
355: /*@
356: TSComputeCostIntegrand - Evaluates the integral function in the cost functions.
358: Input Parameters:
359: + ts - the `TS` context
360: . t - current time
361: - U - state vector, i.e. current solution
363: Output Parameter:
364: . Q - vector of size numcost to hold the outputs
366: Level: deprecated
368: Note:
369: Most users should not need to explicitly call this routine, as it
370: is used internally within the sensitivity analysis context.
372: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
373: @*/
374: PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
375: {
376: PetscFunctionBegin;
381: PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
382: if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
383: else PetscCall(VecZeroEntries(Q));
384: PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
385: PetscFunctionReturn(PETSC_SUCCESS);
386: }
388: // PetscClangLinter pragma disable: -fdoc-*
389: /*@
390: TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
392: Level: deprecated
394: @*/
395: PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
396: {
397: PetscFunctionBegin;
398: if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);
402: PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
403: PetscFunctionReturn(PETSC_SUCCESS);
404: }
406: // PetscClangLinter pragma disable: -fdoc-*
407: /*@
408: TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
410: Level: deprecated
412: @*/
413: PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
414: {
415: PetscFunctionBegin;
416: if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);
420: PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
421: PetscFunctionReturn(PETSC_SUCCESS);
422: }
424: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
425: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
426: /*@C
427: TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.
429: Logically Collective
431: Input Parameters:
432: + ts - `TS` context obtained from `TSCreate()`
433: . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
434: . ihessianproductfunc1 - vector-Hessian-vector product function for F_UU
435: . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
436: . ihessianproductfunc2 - vector-Hessian-vector product function for F_UP
437: . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
438: . ihessianproductfunc3 - vector-Hessian-vector product function for F_PU
439: . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
440: - ihessianproductfunc4 - vector-Hessian-vector product function for F_PP
442: Calling sequence of `ihessianproductfunc1`:
443: + ts - the `TS` context
444: . t - current timestep
445: . U - input vector (current ODE solution)
446: . Vl - an array of input vectors to be left-multiplied with the Hessian
447: . Vr - input vector to be right-multiplied with the Hessian
448: . VHV - an array of output vectors for vector-Hessian-vector product
449: - ctx - [optional] user-defined function context
451: Level: intermediate
453: Notes:
454:   All other functions have the same calling sequence as `ihessianproductfunc1`, so their
455:   descriptions are omitted for brevity.
457: The first Hessian function and the working array are required.
458: As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
459: $ Vl_n^T*F_UP*Vr
460:   where the vectors Vl_n (the n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
461: Each entry of F_UP corresponds to the derivative
462: $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
463: The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
464: $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
465: If the cost function is a scalar, there will be only one vector in Vl and VHV.
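  A minimal sketch of the second callback for a hypothetical model in which F contains the term
  p[0]*u[i], so that F_UP[i][j][0] = delta_ij and all other entries vanish, with a single cost
  function; the callback name MyIHessianProductUP is a placeholder. It would be registered as the
  second callback of `TSSetIHessianProduct()` together with the required F_UU callback and work arrays.
.vb
  static PetscErrorCode MyIHessianProductUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx)
  {
    const PetscScalar *vr;

    PetscFunctionBeginUser;
    PetscCall(VecGetArrayRead(Vr, &vr));
    PetscCall(VecCopy(Vl[0], VHV[0]));  /* VHV_0[j] = sum_i sum_k Vl_0[i] * delta_ij * delta_k0 * Vr[k] = Vl_0[j] * Vr[0] */
    PetscCall(VecScale(VHV[0], vr[0]));
    PetscCall(VecRestoreArrayRead(Vr, &vr));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
.ve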
467: .seealso: [](ch_ts), `TS`
468: @*/
469: PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
470: {
471: PetscFunctionBegin;
473: PetscAssertPointer(ihp1, 2);
475: ts->ihessianproductctx = ctx;
476: if (ihp1) ts->vecs_fuu = ihp1;
477: if (ihp2) ts->vecs_fup = ihp2;
478: if (ihp3) ts->vecs_fpu = ihp3;
479: if (ihp4) ts->vecs_fpp = ihp4;
480: ts->ihessianproduct_fuu = ihessianproductfunc1;
481: ts->ihessianproduct_fup = ihessianproductfunc2;
482: ts->ihessianproduct_fpu = ihessianproductfunc3;
483: ts->ihessianproduct_fpp = ihessianproductfunc4;
484: PetscFunctionReturn(PETSC_SUCCESS);
485: }
487: /*@
488: TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.
490: Collective
492: Input Parameters:
493: + ts - The `TS` context obtained from `TSCreate()`
494: . t - the time
495: . U - the solution at which to compute the Hessian product
496: . Vl - the array of input vectors to be multiplied with the Hessian from the left
497: - Vr - the input vector to be multiplied with the Hessian from the right
499: Output Parameter:
500: . VHV - the array of output vectors that store the Hessian product
502: Level: developer
504: Note:
505: `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
506: so most users would not generally call this routine themselves.
508: .seealso: [](ch_ts), `TSSetIHessianProduct()`
509: @*/
510: PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
511: {
512: PetscFunctionBegin;
513: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
517: if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
519: /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
520: if (ts->rhshessianproduct_guu) {
521: PetscInt nadj;
522: PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
523: for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
524: }
525: PetscFunctionReturn(PETSC_SUCCESS);
526: }
528: /*@
529: TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.
531: Collective
533: Input Parameters:
534: + ts - The `TS` context obtained from `TSCreate()`
535: . t - the time
536: . U - the solution at which to compute the Hessian product
537: . Vl - the array of input vectors to be multiplied with the Hessian from the left
538: - Vr - the input vector to be multiplied with the Hessian from the right
540: Output Parameter:
541: . VHV - the array of output vectors that store the Hessian product
543: Level: developer
545: Note:
546: `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
547: so most users would not generally call this routine themselves.
549: .seealso: [](ch_ts), `TSSetIHessianProduct()`
550: @*/
551: PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
552: {
553: PetscFunctionBegin;
554: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
558: if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
560: /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
561: if (ts->rhshessianproduct_gup) {
562: PetscInt nadj;
563: PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
564: for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
565: }
566: PetscFunctionReturn(PETSC_SUCCESS);
567: }
569: /*@
570: TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.
572: Collective
574: Input Parameters:
575: + ts - The `TS` context obtained from `TSCreate()`
576: . t - the time
577: . U - the solution at which to compute the Hessian product
578: . Vl - the array of input vectors to be multiplied with the Hessian from the left
579: - Vr - the input vector to be multiplied with the Hessian from the right
581: Output Parameter:
582: . VHV - the array of output vectors that store the Hessian product
584: Level: developer
586: Note:
587: `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
588: so most users would not generally call this routine themselves.
590: .seealso: [](ch_ts), `TSSetIHessianProduct()`
591: @*/
592: PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
593: {
594: PetscFunctionBegin;
595: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
599: if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
601: /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
602: if (ts->rhshessianproduct_gpu) {
603: PetscInt nadj;
604: PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
605: for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
606: }
607: PetscFunctionReturn(PETSC_SUCCESS);
608: }
610: /*@C
611: TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.
613: Collective
615: Input Parameters:
616: + ts - The `TS` context obtained from `TSCreate()`
617: . t - the time
618: . U - the solution at which to compute the Hessian product
619: . Vl - the array of input vectors to be multiplied with the Hessian from the left
620: - Vr - the input vector to be multiplied with the Hessian from the right
622: Output Parameter:
623: . VHV - the array of output vectors that store the Hessian product
625: Level: developer
627: Note:
628: `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
629: so most users would not generally call this routine themselves.
631: .seealso: [](ch_ts), `TSSetIHessianProduct()`
632: @*/
633: PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
634: {
635: PetscFunctionBegin;
636: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
640:   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 4 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
642: /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
643: if (ts->rhshessianproduct_gpp) {
644: PetscInt nadj;
645: PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
646: for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
647: }
648: PetscFunctionReturn(PETSC_SUCCESS);
649: }
651: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
652: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
653: /*@C
654: TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
655: product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state
656: variable.
658: Logically Collective
660: Input Parameters:
661: + ts - `TS` context obtained from `TSCreate()`
662: . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
663: . rhshessianproductfunc1 - vector-Hessian-vector product function for G_UU
664: . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
665: . rhshessianproductfunc2 - vector-Hessian-vector product function for G_UP
666: . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
667: . rhshessianproductfunc3 - vector-Hessian-vector product function for G_PU
668: . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
669: . rhshessianproductfunc4 - vector-Hessian-vector product function for G_PP
670: - ctx - [optional] user-defined function context
672: Calling sequence of `rhshessianproductfunc1`:
673: + ts - the `TS` context
674: . t - current timestep
675: . U - input vector (current ODE solution)
676: . Vl - an array of input vectors to be left-multiplied with the Hessian
677: . Vr - input vector to be right-multiplied with the Hessian
678: . VHV - an array of output vectors for vector-Hessian-vector product
679: - ctx - [optional] user-defined function context
681: Level: intermediate
683: Notes:
684:   All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
685:   descriptions are omitted for brevity.
687: The first Hessian function and the working array are required.
689: As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
690: $ Vl_n^T*G_UP*Vr
691:   where the vectors Vl_n (the n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
692: Each entry of G_UP corresponds to the derivative
693: $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
694:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
695: $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
696: If the cost function is a scalar, there will be only one vector in Vl and VHV.
698: .seealso: `TS`, `TSAdjoint`
699: @*/
700: PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec rhshp1[], PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec rhshp2[], PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec rhshp3[], PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec rhshp4[], PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
701: {
702: PetscFunctionBegin;
704: PetscAssertPointer(rhshp1, 2);
706: ts->rhshessianproductctx = ctx;
707: if (rhshp1) ts->vecs_guu = rhshp1;
708: if (rhshp2) ts->vecs_gup = rhshp2;
709: if (rhshp3) ts->vecs_gpu = rhshp3;
710: if (rhshp4) ts->vecs_gpp = rhshp4;
711: ts->rhshessianproduct_guu = rhshessianproductfunc1;
712: ts->rhshessianproduct_gup = rhshessianproductfunc2;
713: ts->rhshessianproduct_gpu = rhshessianproductfunc3;
714: ts->rhshessianproduct_gpp = rhshessianproductfunc4;
715: PetscFunctionReturn(PETSC_SUCCESS);
716: }
718: /*@
719: TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.
721: Collective
723: Input Parameters:
724: + ts - The `TS` context obtained from `TSCreate()`
725: . t - the time
726: . U - the solution at which to compute the Hessian product
727: . Vl - the array of input vectors to be multiplied with the Hessian from the left
728: - Vr - the input vector to be multiplied with the Hessian from the right
730: Output Parameter:
731: . VHV - the array of output vectors that store the Hessian product
733: Level: developer
735: Note:
736: `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
737: so most users would not generally call this routine themselves.
739: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
740: @*/
741: PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
742: {
743: PetscFunctionBegin;
744: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
748: PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
749: PetscFunctionReturn(PETSC_SUCCESS);
750: }
752: /*@
753: TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.
755: Collective
757: Input Parameters:
758: + ts - The `TS` context obtained from `TSCreate()`
759: . t - the time
760: . U - the solution at which to compute the Hessian product
761: . Vl - the array of input vectors to be multiplied with the Hessian from the left
762: - Vr - the input vector to be multiplied with the Hessian from the right
764: Output Parameter:
765: . VHV - the array of output vectors that store the Hessian product
767: Level: developer
769: Note:
770: `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
771: so most users would not generally call this routine themselves.
773: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
774: @*/
775: PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
776: {
777: PetscFunctionBegin;
778: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
782: PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
783: PetscFunctionReturn(PETSC_SUCCESS);
784: }
786: /*@
787: TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.
789: Collective
791: Input Parameters:
792: + ts - The `TS` context obtained from `TSCreate()`
793: . t - the time
794: . U - the solution at which to compute the Hessian product
795: . Vl - the array of input vectors to be multiplied with the Hessian from the left
796: - Vr - the input vector to be multiplied with the Hessian from the right
798: Output Parameter:
799: . VHV - the array of output vectors that store the Hessian product
801: Level: developer
803: Note:
804: `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
805: so most users would not generally call this routine themselves.
807: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
808: @*/
809: PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
810: {
811: PetscFunctionBegin;
812: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
816: PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
817: PetscFunctionReturn(PETSC_SUCCESS);
818: }
820: /*@
821: TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.
823: Collective
825: Input Parameters:
826: + ts - The `TS` context obtained from `TSCreate()`
827: . t - the time
828: . U - the solution at which to compute the Hessian product
829: . Vl - the array of input vectors to be multiplied with the Hessian from the left
830: - Vr - the input vector to be multiplied with the Hessian from the right
832: Output Parameter:
833: . VHV - the array of output vectors that store the Hessian product
835: Level: developer
837: Note:
838: `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
839: so most users would not generally call this routine themselves.
841: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
842: @*/
843: PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
844: {
845: PetscFunctionBegin;
846: if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
850: PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
851: PetscFunctionReturn(PETSC_SUCCESS);
852: }
854: /* --------------------------- Adjoint sensitivity ---------------------------*/
856: /*@
857: TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
858: for use by the `TS` adjoint routines.
860: Logically Collective
862: Input Parameters:
863: + ts - the `TS` context obtained from `TSCreate()`
864: . numcost - number of gradients to be computed, this is the number of cost functions
865: . lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
866: - mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
868: Level: beginner
870: Notes:
871:   The entries in these vectors must be correctly initialized with the values lambda_i = df/dy|_finaltime, mu_i = df/dp|_finaltime
873:   After `TSAdjointSolve()` is called, lambda and mu contain the computed sensitivities
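  Example usage, a minimal sketch for a single cost function; the solution vector u, the number of
  parameters np, and the final-time derivative values shown (for a cost f = y_0(T)) are problem
  dependent and hypothetical here.
.vb
  Vec lambda[1], mu[1];

  PetscCall(VecDuplicate(u, &lambda[0]));                   /* same layout as the ODE solution */
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, np, &mu[0]));     /* np = number of parameters */
  PetscCall(VecZeroEntries(lambda[0]));
  PetscCall(VecSetValue(lambda[0], 0, 1.0, INSERT_VALUES)); /* df/dy at the final time for f = y_0(T) */
  PetscCall(VecAssemblyBegin(lambda[0]));
  PetscCall(VecAssemblyEnd(lambda[0]));
  PetscCall(VecZeroEntries(mu[0]));                         /* df/dp at the final time */
  PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
  PetscCall(TSAdjointSolve(ts));                            /* on return lambda[0] and mu[0] hold the sensitivities */
.ve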
875: .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
876: @*/
877: PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec lambda[], Vec mu[])
878: {
879: PetscFunctionBegin;
881: PetscAssertPointer(lambda, 3);
882: ts->vecs_sensi = lambda;
883: ts->vecs_sensip = mu;
884:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostGradients()) is inconsistent with the one set by TSSetCostIntegrand()");
885: ts->numcost = numcost;
886: PetscFunctionReturn(PETSC_SUCCESS);
887: }
889: /*@
890:   TSGetCostGradients - Returns the gradients computed by `TSAdjointSolve()`
892: Not Collective, but the vectors returned are parallel if `TS` is parallel
894: Input Parameter:
895: . ts - the `TS` context obtained from `TSCreate()`
897: Output Parameters:
898: + numcost - size of returned arrays
899: . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
900: - mu - vectors containing the gradients of the cost functions with respect to the problem parameters
902: Level: intermediate
904: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
905: @*/
906: PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec *lambda[], Vec *mu[])
907: {
908: PetscFunctionBegin;
910: if (numcost) *numcost = ts->numcost;
911: if (lambda) *lambda = ts->vecs_sensi;
912: if (mu) *mu = ts->vecs_sensip;
913: PetscFunctionReturn(PETSC_SUCCESS);
914: }
916: /*@
917: TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
918: for use by the `TS` adjoint routines.
920: Logically Collective
922: Input Parameters:
923: + ts - the `TS` context obtained from `TSCreate()`
924: . numcost - number of cost functions
925: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
926: . mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
927: - dir - the direction vector that is multiplied with the Hessian of the cost functions
929: Level: beginner
931: Notes:
932:   The Hessian of the cost function is completely different from the Hessian of the ODE/DAE system
934: For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.
936:   After `TSAdjointSolve()` is called, lambda2 and mu2 contain the computed second-order adjoint sensitivities, which can be used to produce Hessian-vector products (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.
938: Passing `NULL` for `lambda2` disables the second-order calculation.
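  A minimal sketch of a second-order adjoint setup for one cost function; the vectors u (solution
  layout) and p (parameter layout) and the choice of direction are hypothetical, and the usual
  first-order setup (`TSSetCostGradients()`, the parameter Jacobian, trajectory saving) is assumed
  to already be in place.
.vb
  Vec lambda2[1], mu2[1], dir;

  PetscCall(VecDuplicate(u, &lambda2[0]));
  PetscCall(VecDuplicate(p, &mu2[0]));
  PetscCall(VecDuplicate(p, &dir));
  PetscCall(VecSet(dir, 1.0));                       /* direction, normally supplied by an optimizer */
  PetscCall(TSSetCostHessianProducts(ts, 1, lambda2, mu2, dir));
  PetscCall(TSAdjointSetForward(ts, NULL));          /* turn on the tangent linear solver */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSAdjointSolve(ts));                     /* lambda2[0] and mu2[0] now hold Hessian-vector products */
  PetscCall(TSAdjointResetForward(ts));
.ve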
940: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
941: @*/
942: PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec lambda2[], Vec mu2[], Vec dir)
943: {
944: PetscFunctionBegin;
946:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostHessianProducts()) is inconsistent with the one set by TSSetCostIntegrand()");
947: ts->numcost = numcost;
948: ts->vecs_sensi2 = lambda2;
949: ts->vecs_sensi2p = mu2;
950: ts->vec_dir = dir;
951: PetscFunctionReturn(PETSC_SUCCESS);
952: }
954: /*@
955:   TSGetCostHessianProducts - Returns the Hessian-vector products and the direction vector set with `TSSetCostHessianProducts()`
957: Not Collective, but vectors returned are parallel if `TS` is parallel
959: Input Parameter:
960: . ts - the `TS` context obtained from `TSCreate()`
962: Output Parameters:
963: + numcost - number of cost functions
964: . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
965: . mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
966: - dir - the direction vector that is multiplied with the Hessian of the cost functions
968: Level: intermediate
970: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
971: @*/
972: PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec *lambda2[], Vec *mu2[], Vec *dir)
973: {
974: PetscFunctionBegin;
976: if (numcost) *numcost = ts->numcost;
977: if (lambda2) *lambda2 = ts->vecs_sensi2;
978: if (mu2) *mu2 = ts->vecs_sensi2p;
979: if (dir) *dir = ts->vec_dir;
980: PetscFunctionReturn(PETSC_SUCCESS);
981: }
983: /*@
984:   TSAdjointSetForward - Triggers the tangent linear solver and initializes the forward sensitivities
986: Logically Collective
988: Input Parameters:
989: + ts - the `TS` context obtained from `TSCreate()`
990: - didp - the derivative of initial values w.r.t. parameters
992: Level: intermediate
994: Notes:
995:   When computing sensitivities w.r.t. the initial condition, set didp to `NULL` so that the solver treats it mathematically as an identity matrix.
996:   `TSAdjoint` does not reset the tangent linear solver automatically; call `TSAdjointResetForward()` to reset the tangent linear solver.
998: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
999: @*/
1000: PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
1001: {
1002: Mat A;
1003: Vec sp;
1004: PetscScalar *xarr;
1005: PetscInt lsize;
1007: PetscFunctionBegin;
1008: ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
1009: PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
1010: PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
1011: /* create a single-column dense matrix */
1012: PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
1013: PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));
1015: PetscCall(VecDuplicate(ts->vec_sol, &sp));
1016: PetscCall(MatDenseGetColumn(A, 0, &xarr));
1017: PetscCall(VecPlaceArray(sp, xarr));
1018: if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
1019: if (didp) {
1020: PetscCall(MatMult(didp, ts->vec_dir, sp));
1021: PetscCall(VecScale(sp, 2.));
1022: } else {
1023: PetscCall(VecZeroEntries(sp));
1024: }
1025: } else { /* tangent linear variable initialized as dir */
1026: PetscCall(VecCopy(ts->vec_dir, sp));
1027: }
1028: PetscCall(VecResetArray(sp));
1029: PetscCall(MatDenseRestoreColumn(A, &xarr));
1030: PetscCall(VecDestroy(&sp));
1032: PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */
1034: PetscCall(MatDestroy(&A));
1035: PetscFunctionReturn(PETSC_SUCCESS);
1036: }
1038: /*@
1039:   TSAdjointResetForward - Resets the tangent linear solver and destroys the tangent linear context
1041: Logically Collective
1043: Input Parameter:
1044: . ts - the `TS` context obtained from `TSCreate()`
1046: Level: intermediate
1048: .seealso: [](ch_ts), `TSAdjointSetForward()`
1049: @*/
1050: PetscErrorCode TSAdjointResetForward(TS ts)
1051: {
1052: PetscFunctionBegin;
1053: ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
1054: PetscCall(TSForwardReset(ts));
1055: PetscFunctionReturn(PETSC_SUCCESS);
1056: }
1058: /*@
1059: TSAdjointSetUp - Sets up the internal data structures for the later use
1060: of an adjoint solver
1062: Collective
1064: Input Parameter:
1065: . ts - the `TS` context obtained from `TSCreate()`
1067: Level: advanced
1069: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1070: @*/
1071: PetscErrorCode TSAdjointSetUp(TS ts)
1072: {
1073: TSTrajectory tj;
1074: PetscBool match;
1076: PetscFunctionBegin;
1078: if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1079: PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
1080: PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
1081: PetscCall(TSGetTrajectory(ts, &tj));
1082: PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
1083: if (match) {
1084: PetscBool solution_only;
1085: PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
1086: PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
1087: }
1088: PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */
1090: if (ts->quadraturets) { /* if there is integral in the cost function */
1091: PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
1092: if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
1093: }
1095: PetscTryTypeMethod(ts, adjointsetup);
1096: ts->adjointsetupcalled = PETSC_TRUE;
1097: PetscFunctionReturn(PETSC_SUCCESS);
1098: }
1100: /*@
1101: TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.
1103: Collective
1105: Input Parameter:
1106: . ts - the `TS` context obtained from `TSCreate()`
1108: Level: beginner
1110: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1111: @*/
1112: PetscErrorCode TSAdjointReset(TS ts)
1113: {
1114: PetscFunctionBegin;
1116: PetscTryTypeMethod(ts, adjointreset);
1117: if (ts->quadraturets) { /* if there is integral in the cost function */
1118: PetscCall(VecDestroy(&ts->vec_drdu_col));
1119: if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1120: }
1121: ts->vecs_sensi = NULL;
1122: ts->vecs_sensip = NULL;
1123: ts->vecs_sensi2 = NULL;
1124: ts->vecs_sensi2p = NULL;
1125: ts->vec_dir = NULL;
1126: ts->adjointsetupcalled = PETSC_FALSE;
1127: PetscFunctionReturn(PETSC_SUCCESS);
1128: }
1130: /*@
1131: TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time
1133: Logically Collective
1135: Input Parameters:
1136: + ts - the `TS` context obtained from `TSCreate()`
1137: - steps - number of steps to use
1139: Level: intermediate
1141: Notes:
1142:   Normally one does not call this; `TSAdjointSolve()` then integrates back to the original time step. One can call this
1143:   to integrate back over fewer steps than were taken in the forward run
1145: .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1146: @*/
1147: PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
1148: {
1149: PetscFunctionBegin;
1152: PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
1153: PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
1154: ts->adjoint_max_steps = steps;
1155: PetscFunctionReturn(PETSC_SUCCESS);
1156: }
1158: // PetscClangLinter pragma disable: -fdoc-*
1159: /*@C
1160: TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`
1162: Level: deprecated
1163: @*/
1164: PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
1165: {
1166: PetscFunctionBegin;
1170: ts->rhsjacobianp = func;
1171: ts->rhsjacobianpctx = ctx;
1172: if (Amat) {
1173: PetscCall(PetscObjectReference((PetscObject)Amat));
1174: PetscCall(MatDestroy(&ts->Jacp));
1175: ts->Jacp = Amat;
1176: }
1177: PetscFunctionReturn(PETSC_SUCCESS);
1178: }
1180: // PetscClangLinter pragma disable: -fdoc-*
1181: /*@C
1182: TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`
1184: Level: deprecated
1185: @*/
1186: PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
1187: {
1188: PetscFunctionBegin;
1193: PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
1194: PetscFunctionReturn(PETSC_SUCCESS);
1195: }
1197: // PetscClangLinter pragma disable: -fdoc-*
1198: /*@
1199: TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
1201: Level: deprecated
1202: @*/
1203: PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
1204: {
1205: PetscFunctionBegin;
1209: PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
1210: PetscFunctionReturn(PETSC_SUCCESS);
1211: }
1213: // PetscClangLinter pragma disable: -fdoc-*
1214: /*@
1215: TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
1217: Level: deprecated
1218: @*/
1219: PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
1220: {
1221: PetscFunctionBegin;
1225: PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
1226: PetscFunctionReturn(PETSC_SUCCESS);
1227: }
1229: // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1230: /*@C
1231: TSAdjointMonitorSensi - monitors the first lambda sensitivity
1233: Level: intermediate
1235: .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1236: @*/
1237: static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1238: {
1239: PetscViewer viewer = vf->viewer;
1241: PetscFunctionBegin;
1243: PetscCall(PetscViewerPushFormat(viewer, vf->format));
1244: PetscCall(VecView(lambda[0], viewer));
1245: PetscCall(PetscViewerPopFormat(viewer));
1246: PetscFunctionReturn(PETSC_SUCCESS);
1247: }
1249: /*@C
1250: TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user
1252: Collective
1254: Input Parameters:
1255: + ts - `TS` object you wish to monitor
1256: . name - the monitor type one is seeking
1257: . help - message indicating what monitoring is done
1258: . manual - manual page for the monitor
1259: . monitor - the monitor function, its context must be a `PetscViewerAndFormat`
1260: - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects
1262: Level: developer
1264: .seealso: [](ch_ts), `PetscOptionsCreateViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1265: `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1266: `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
1267: `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1268: `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1269: `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1270: `PetscOptionsFList()`, `PetscOptionsEList()`, `PetscViewerAndFormat`
1271: @*/
1272: PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
1273: {
1274: PetscViewer viewer;
1275: PetscViewerFormat format;
1276: PetscBool flg;
1278: PetscFunctionBegin;
1279: PetscCall(PetscOptionsCreateViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
1280: if (flg) {
1281: PetscViewerAndFormat *vf;
1282: PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
1283: PetscCall(PetscViewerDestroy(&viewer));
1284: if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
1285: PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
1286: }
1287: PetscFunctionReturn(PETSC_SUCCESS);
1288: }
1290: /*@C
1291: TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1292: timestep to display the iteration's progress.
1294: Logically Collective
1296: Input Parameters:
1297: + ts - the `TS` context obtained from `TSCreate()`
1298: . adjointmonitor - monitoring routine
1299: . adjointmctx - [optional] user-defined context for private data for the monitor routine
1300: (use `NULL` if no context is desired)
1301: - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for its calling sequence
1303: Calling sequence of `adjointmonitor`:
1304: + ts - the `TS` context
1305: . steps - iteration number (after the final time step the monitor routine is called with
1306: a step of -1, this is at the final time which may have been interpolated to)
1307: . time - current time
1308: . u - current iterate
1309: . numcost - number of cost functions
1310: . lambda - sensitivities to initial conditions
1311: . mu - sensitivities to parameters
1312: - adjointmctx - [optional] adjoint monitoring context
1314: Level: intermediate
1316: Note:
1317: This routine adds an additional monitor to the list of monitors that
1318: already has been loaded.
1320: Fortran Notes:
1321: Only a single monitor function can be set for each `TS` object
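  Example usage, a minimal sketch of a monitor that prints the norm of the first sensitivity; the
  name MyAdjointMonitor is a hypothetical placeholder and no monitor context is used.
.vb
  static PetscErrorCode MyAdjointMonitor(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *mctx)
  {
    PetscReal norm;

    PetscFunctionBeginUser;
    PetscCall(VecNorm(lambda[0], NORM_2, &norm));
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ts), "  adjoint step %" PetscInt_FMT " time %g ||lambda_0|| %g\n", steps, (double)time, (double)norm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TSAdjointMonitorSet(ts, MyAdjointMonitor, NULL, NULL));
.ve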
1323: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`, `PetscCtxDestroyFn`
1324: @*/
1325: PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx), void *adjointmctx, PetscCtxDestroyFn *adjointmdestroy)
1326: {
1327: PetscFunctionBegin;
1329:   for (PetscInt i = 0; i < ts->numberadjointmonitors; i++) {
1330: PetscBool identical;
1332: PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1333: if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1334: }
1335: PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1336: ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor;
1337: ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy;
1338: ts->adjointmonitorcontext[ts->numberadjointmonitors++] = adjointmctx;
1339: PetscFunctionReturn(PETSC_SUCCESS);
1340: }
1342: /*@C
1343: TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.
1345: Logically Collective
1347: Input Parameter:
1348: . ts - the `TS` context obtained from `TSCreate()`
1350: Notes:
1351: There is no way to remove a single, specific monitor.
1353: Level: intermediate
1355: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1356: @*/
1357: PetscErrorCode TSAdjointMonitorCancel(TS ts)
1358: {
1359: PetscInt i;
1361: PetscFunctionBegin;
1363: for (i = 0; i < ts->numberadjointmonitors; i++) {
1364: if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1365: }
1366: ts->numberadjointmonitors = 0;
1367: PetscFunctionReturn(PETSC_SUCCESS);
1368: }
1370: /*@C
1371: TSAdjointMonitorDefault - the default monitor of adjoint computations
1373: Input Parameters:
1374: + ts - the `TS` context
1375: . step - iteration number (after the final time step the monitor routine is called with a
1376: step of -1, this is at the final time which may have been interpolated to)
1377: . time - current time
1378: . v - current iterate
1379: . numcost - number of cost functions
1380: . lambda - sensitivities to initial conditions
1381: . mu - sensitivities to parameters
1382: - vf - the viewer and format
1384: Level: intermediate
1386: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1387: @*/
1388: PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec lambda[], Vec mu[], PetscViewerAndFormat *vf)
1389: {
1390: PetscViewer viewer = vf->viewer;
1392: PetscFunctionBegin;
1393: (void)v;
1394: (void)numcost;
1395: (void)lambda;
1396: (void)mu;
1398: PetscCall(PetscViewerPushFormat(viewer, vf->format));
1399: PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1400: PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
1401: PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1402: PetscCall(PetscViewerPopFormat(viewer));
1403: PetscFunctionReturn(PETSC_SUCCESS);
1404: }
1406: /*@C
1407: TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1408: `VecView()` for the sensitivities to initial states at each timestep
1410: Collective
1412: Input Parameters:
1413: + ts - the `TS` context
1414: . step - current time-step
1415: . ptime - current time
1416: . u - current state
1417: . numcost - number of cost functions
1418: . lambda - sensitivities to initial conditions
1419: . mu - sensitivities to parameters
1420: - dummy - either a viewer or `NULL`
1422: Level: intermediate
1424: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1425: @*/
1426: PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[], void *dummy)
1427: {
1428: TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1429: PetscDraw draw;
1430: PetscReal xl, yl, xr, yr, h;
1431: char time[32];
1433: PetscFunctionBegin;
1434: if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);
1436: PetscCall(VecView(lambda[0], ictx->viewer));
1437: PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1438: PetscCall(PetscSNPrintf(time, 32, "Timestep %" PetscInt_FMT " Time %g", step, (double)ptime));
1439: PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1440: h = yl + .95 * (yr - yl);
1441: PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1442: PetscCall(PetscDrawFlush(draw));
1443: PetscFunctionReturn(PETSC_SUCCESS);
1444: }
1446: /*@C
1447: TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from options database.
1449: Collective
1451: Input Parameters:
1452: + ts - the `TS` context
1453: - PetscOptionsObject - the options context
1455: Options Database Keys:
1456: + -ts_adjoint_solve <yes,no> - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`)
1457: . -ts_adjoint_monitor - print information at each adjoint time step
. -ts_adjoint_monitor_sensi - view the sensitivity vectors at each adjoint time step
1458: - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function w.r.t. initial conditions (lambda[0]) graphically
1460: Level: developer
1462: Note:
1463: This is not normally called directly by users
1465: .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1466: @*/
1467: PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems PetscOptionsObject)
1468: {
1469: PetscBool tflg, opt;
1471: PetscFunctionBegin;
1473: PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
1474: tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
1475: PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
1476: if (opt) {
1477: PetscCall(TSSetSaveTrajectory(ts));
1478: ts->adjoint_solve = tflg;
1479: }
1480: PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
1481: PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
1482: opt = PETSC_FALSE;
1483: PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
1484: if (opt) {
1485: TSMonitorDrawCtx ctx;
1486: PetscInt howoften = 1;
1488: PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
1489: PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
1490: PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscCtxDestroyFn *)TSMonitorDrawCtxDestroy));
1491: }
1492: PetscFunctionReturn(PETSC_SUCCESS);
1493: }
1495: /*@
1496: TSAdjointStep - Steps one time step backward in the adjoint run
1498: Collective
1500: Input Parameter:
1501: . ts - the `TS` context obtained from `TSCreate()`
1503: Level: intermediate
1505: .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1506: @*/
1507: PetscErrorCode TSAdjointStep(TS ts)
1508: {
1509: DM dm;
1511: PetscFunctionBegin;
1513: PetscCall(TSGetDM(ts, &dm));
1514: PetscCall(TSAdjointSetUp(ts));
1515: ts->steps--; /* must decrease the step index before the adjoint step is taken. */
1517: ts->reason = TS_CONVERGED_ITERATING;
1518: ts->ptime_prev = ts->ptime;
1519: PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
1520: PetscUseTypeMethod(ts, adjointstep);
1521: PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
1522: ts->adjoint_steps++;
1524: if (ts->reason < 0) {
1525: PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
1526: } else if (!ts->reason) {
1527: if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1528: }
1529: PetscFunctionReturn(PETSC_SUCCESS);
1530: }
1532: /*@
1533: TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE
1535: Collective
1538: Input Parameter:
1539: . ts - the `TS` context obtained from `TSCreate()`
1541: Options Database Key:
1542: . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values
1544: Level: intermediate
1546: Notes:
1547: This must be called after a call to `TSSolve()` that solves the forward problem
1549: By default this will integrate back to the initial time; one can use `TSAdjointSetSteps()` to step back only to a later time
1551: .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1552: @*/
1553: PetscErrorCode TSAdjointSolve(TS ts)
1554: {
1555: static PetscBool cite = PETSC_FALSE;
1556: #if defined(TSADJOINT_STAGE)
1557: PetscLogStage adjoint_stage;
1558: #endif
1560: PetscFunctionBegin;
1562: PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
1563: " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
1564: " author = {Zhang, Hong and Constantinescu, Emil M. and Smith, Barry F.},\n"
1565: " journal = {SIAM Journal on Scientific Computing},\n"
1566: " volume = {44},\n"
1567: " number = {1},\n"
1568: " pages = {C1-C24},\n"
1569: " doi = {10.1137/21M140078X},\n"
1570: " year = {2022}\n}\n",
1571: &cite));
1572: #if defined(TSADJOINT_STAGE)
1573: PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
1574: PetscCall(PetscLogStagePush(adjoint_stage));
1575: #endif
1576: PetscCall(TSAdjointSetUp(ts));
1578: /* reset time step and iteration counters */
1579: ts->adjoint_steps = 0;
1580: ts->ksp_its = 0;
1581: ts->snes_its = 0;
1582: ts->num_snes_failures = 0;
1583: ts->reject = 0;
1584: ts->reason = TS_CONVERGED_ITERATING;
1586: if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
1587: if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1589: while (!ts->reason) {
1590: PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1591: PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1592: PetscCall(TSAdjointEventHandler(ts));
1593: PetscCall(TSAdjointStep(ts));
1594: if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
1595: }
1596: if (!ts->steps) {
1597: PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1598: PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1599: }
1600: ts->solvetime = ts->ptime;
1601: PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
1602: PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
1603: ts->adjoint_max_steps = 0;
1604: #if defined(TSADJOINT_STAGE)
1605: PetscCall(PetscLogStagePop());
1606: #endif
1607: PetscFunctionReturn(PETSC_SUCCESS);
1608: }
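/*
  Illustrative driver sketch (assumptions: `ts` is a fully configured TS, `u` holds the
  initial condition, and `lambda`/`mu` are user-created vectors with the layout of the
  state and of the parameters respectively):

    PetscCall(TSSetSaveTrajectory(ts));   // the adjoint needs the forward trajectory
    PetscCall(TSSolve(ts, u));            // forward solve
    // seed the terminal conditions, e.g. lambda = dPhi/dy and mu = dPhi/dp at the final time
    PetscCall(TSSetCostGradients(ts, 1, &lambda, &mu));
    PetscCall(TSAdjointSolve(ts));        // integrate the adjoint back to the initial time
    // on return lambda holds dPhi/dy0 and mu holds dPhi/dp

  The same effect can be obtained with -ts_adjoint_solve, provided the cost gradients are
  set before TSSolve() is called.
*/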
1610: /*@C
1611: TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`
1613: Collective
1615: Input Parameters:
1616: + ts - time stepping context obtained from `TSCreate()`
1617: . step - step number that has just completed
1618: . ptime - model time of the state
1619: . u - state at the current model time
1620: . numcost - number of cost functions (dimension of lambda or mu)
1621: . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1622: - mu - vectors containing the gradients of the cost functions with respect to the problem parameters
1624: Level: developer
1626: Note:
1627: `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1628: Users would almost never call this routine directly.
1630: .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1631: @*/
1632: PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[])
1633: {
1634: PetscInt i, n = ts->numberadjointmonitors;
1636: PetscFunctionBegin;
1639: PetscCall(VecLockReadPush(u));
1640: for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1641: PetscCall(VecLockReadPop(u));
1642: PetscFunctionReturn(PETSC_SUCCESS);
1643: }
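/*
  Illustrative sketch of a user-defined adjoint monitor with the calling sequence invoked
  above; MyAdjointMonitor is a hypothetical name, registered with TSAdjointMonitorSet().

    static PetscErrorCode MyAdjointMonitor(TS ts, PetscInt step, PetscReal time, Vec u, PetscInt numcost, Vec lambda[], Vec mu[], void *ctx)
    {
      PetscReal norm;

      PetscFunctionBeginUser;
      (void)ts; (void)u; (void)numcost; (void)mu; (void)ctx;
      PetscCall(VecNorm(lambda[0], NORM_2, &norm)); // sensitivity to the initial conditions for the first cost function
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "adjoint step %" PetscInt_FMT " time %g |lambda[0]| %g\n", step, (double)time, (double)norm));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

  Registration: PetscCall(TSAdjointMonitorSet(ts, MyAdjointMonitor, NULL, NULL));
*/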
1645: /*@
1646: TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.
1648: Collective
1650: Input Parameter:
1651: . ts - time stepping context
1653: Level: advanced
1655: Note:
1656: This function cannot be called until `TSAdjointStep()` has been completed.
1658: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
1659: @*/
1660: PetscErrorCode TSAdjointCostIntegral(TS ts)
1661: {
1662: PetscFunctionBegin;
1664: PetscUseTypeMethod(ts, adjointintegral);
1665: PetscFunctionReturn(PETSC_SUCCESS);
1666: }
1668: /* ------------------ Forward (tangent linear) sensitivity ------------------*/
1670: /*@
1671: TSForwardSetUp - Sets up the internal data structures for the later use
1672: of forward sensitivity analysis
1674: Collective
1676: Input Parameter:
1677: . ts - the `TS` context obtained from `TSCreate()`
1679: Level: advanced
1681: .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1682: @*/
1683: PetscErrorCode TSForwardSetUp(TS ts)
1684: {
1685: PetscFunctionBegin;
1687: if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1688: PetscTryTypeMethod(ts, forwardsetup);
1689: PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1690: ts->forwardsetupcalled = PETSC_TRUE;
1691: PetscFunctionReturn(PETSC_SUCCESS);
1692: }
1694: /*@
1695: TSForwardReset - Reset the internal data structures used by forward sensitivity analysis
1697: Collective
1699: Input Parameter:
1700: . ts - the `TS` context obtained from `TSCreate()`
1702: Level: advanced
1704: .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1705: @*/
1706: PetscErrorCode TSForwardReset(TS ts)
1707: {
1708: TS quadts = ts->quadraturets;
1710: PetscFunctionBegin;
1712: PetscTryTypeMethod(ts, forwardreset);
1713: PetscCall(MatDestroy(&ts->mat_sensip));
1714: if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1715: PetscCall(VecDestroy(&ts->vec_sensip_col));
1716: ts->forward_solve = PETSC_FALSE;
1717: ts->forwardsetupcalled = PETSC_FALSE;
1718: PetscFunctionReturn(PETSC_SUCCESS);
1719: }
1721: /*@
1722: TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.
1724: Input Parameters:
1725: + ts - the `TS` context obtained from `TSCreate()`
1726: . numfwdint - number of integrals
1727: - vp - the vectors containing the gradients for each integral w.r.t. parameters
1729: Level: deprecated
1731: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1732: @*/
1733: PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec vp[])
1734: {
1735: PetscFunctionBegin;
1737: PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of integrals (2nd parameter of TSForwardSetIntegralGradients()) is inconsistent with the number of cost functions set by TSSetCostIntegrand()");
1738: if (!ts->numcost) ts->numcost = numfwdint;
1740: ts->vecs_integral_sensip = vp;
1741: PetscFunctionReturn(PETSC_SUCCESS);
1742: }
1744: /*@
1745: TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.
1747: Input Parameter:
1748: . ts - the `TS` context obtained from `TSCreate()`
1750: Output Parameters:
1751: + numfwdint - number of integrals
1752: - vp - the vectors containing the gradients for each integral w.r.t. parameters
1754: Level: deprecated
1756: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1757: @*/
1758: PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec *vp[])
1759: {
1760: PetscFunctionBegin;
1762: PetscAssertPointer(vp, 3);
1763: if (numfwdint) *numfwdint = ts->numcost;
1764: if (vp) *vp = ts->vecs_integral_sensip;
1765: PetscFunctionReturn(PETSC_SUCCESS);
1766: }
1768: /*@
1769: TSForwardStep - Compute the forward sensitivity for one time step.
1771: Collective
1773: Input Parameter:
1774: . ts - time stepping context
1776: Level: advanced
1778: Note:
1779: This function cannot be called until `TSStep()` has been completed.
1781: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1782: @*/
1783: PetscErrorCode TSForwardStep(TS ts)
1784: {
1785: PetscFunctionBegin;
1787: PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1788: PetscUseTypeMethod(ts, forwardstep);
1789: PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1790: PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSForwardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1791: PetscFunctionReturn(PETSC_SUCCESS);
1792: }
1794: /*@
1795: TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of the solution w.r.t. the problem parameters and initial values.
1797: Logically Collective
1799: Input Parameters:
1800: + ts - the `TS` context obtained from `TSCreate()`
1801: . nump - number of parameters
1802: - Smat - sensitivities with respect to the parameters; the number of columns is the same as the number of parameters
1804: Level: beginner
1806: Notes:
1807: Use `PETSC_DETERMINE` to use the number of columns of `Smat` for `nump`
1809: Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1810: This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1811: You must call this function before `TSSolve()`.
1812: The entries in the sensitivity matrix must be correctly initialized with the values $S = dy/dp|_{t_0}$ at the starting time.
1814: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1815: @*/
1816: PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1817: {
1818: PetscFunctionBegin;
1821: ts->forward_solve = PETSC_TRUE;
1822: if (nump == PETSC_DEFAULT || nump == PETSC_DETERMINE) PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1823: else ts->num_parameters = nump;
1824: PetscCall(PetscObjectReference((PetscObject)Smat));
1825: PetscCall(MatDestroy(&ts->mat_sensip));
1826: ts->mat_sensip = Smat;
1827: PetscFunctionReturn(PETSC_SUCCESS);
1828: }
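/*
  Illustrative setup sketch (assumptions: `ts` and its solution vector `u` exist and the
  problem has `np` parameters). The sensitivity matrix has the row layout of the state and
  one column per parameter.

    Mat      Sp;
    PetscInt nloc;

    PetscCall(VecGetLocalSize(u, &nloc));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), nloc, PETSC_DECIDE, PETSC_DETERMINE, np, NULL, &Sp));
    PetscCall(MatZeroEntries(Sp)); // dy/dp at the start time; zero when the initial condition does not depend on p
    PetscCall(MatAssemblyBegin(Sp, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(Sp, MAT_FINAL_ASSEMBLY));
    PetscCall(TSForwardSetSensitivities(ts, np, Sp)); // must precede TSSolve()
*/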
1830: /*@
1831: TSForwardGetSensitivities - Returns the trajectory sensitivities
1833: Not Collective, but the `Smat` returned is parallel if `ts` is parallel
1835: Input Parameter:
1836: . ts - the `TS` context obtained from `TSCreate()`
Output Parameters:
1837: + nump - number of parameters
1838: - Smat - sensitivities with respect to the parameters; the number of columns is the same as the number of parameters
1840: Level: intermediate
1842: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1843: @*/
1844: PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1845: {
1846: PetscFunctionBegin;
1848: if (nump) *nump = ts->num_parameters;
1849: if (Smat) *Smat = ts->mat_sensip;
1850: PetscFunctionReturn(PETSC_SUCCESS);
1851: }
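/*
  Illustrative sketch: once TSSolve() has run with forward sensitivities enabled, the
  trajectory sensitivities at the final time can be retrieved and inspected, e.g.

    Mat      S;
    PetscInt np;

    PetscCall(TSForwardGetSensitivities(ts, &np, &S));
    PetscCall(MatView(S, PETSC_VIEWER_STDOUT_WORLD)); // column j holds dy/dp_j at the final time
*/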
1853: /*@
1854: TSForwardCostIntegral - Evaluate the cost integral in the forward run.
1856: Collective
1858: Input Parameter:
1859: . ts - time stepping context
1861: Level: advanced
1863: Note:
1864: This function cannot be called until `TSStep()` has been completed.
1866: .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
1867: @*/
1868: PetscErrorCode TSForwardCostIntegral(TS ts)
1869: {
1870: PetscFunctionBegin;
1872: PetscUseTypeMethod(ts, forwardintegral);
1873: PetscFunctionReturn(PETSC_SUCCESS);
1874: }
1876: /*@
1877: TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities
1879: Collective
1881: Input Parameters:
1882: + ts - the `TS` context obtained from `TSCreate()`
1883: - didp - parametric sensitivities of the initial condition
1885: Level: intermediate
1887: Notes:
1888: `TSSolve()` allows users to pass the initial solution directly to `TS`, but the tangent linear variables cannot be initialized in this way.
1889: This function is used to set the initial values for the tangent linear variables.
1891: .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
1892: @*/
1893: PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
1894: {
1895: PetscFunctionBegin;
1898: if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DETERMINE, didp));
1899: PetscFunctionReturn(PETSC_SUCCESS);
1900: }
1902: /*@
1903: TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages
1905: Input Parameter:
1906: . ts - the `TS` context obtained from `TSCreate()`
1908: Output Parameters:
1909: + ns - number of stages
1910: - S - tangent linear sensitivities at the intermediate stages
1912: Level: advanced
1914: .seealso: `TS`
1915: @*/
1916: PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1917: {
1918: PetscFunctionBegin;
1921: if (!ts->ops->forwardgetstages) *S = NULL;
1922: else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1923: PetscFunctionReturn(PETSC_SUCCESS);
1924: }
1926: /*@
1927: TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time
1929: Input Parameters:
1930: + ts - the `TS` context obtained from `TSCreate()`
1931: - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1933: Output Parameter:
1934: . quadts - the child `TS` context
1936: Level: intermediate
1938: .seealso: [](ch_ts), `TSGetQuadratureTS()`
1939: @*/
1940: PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1941: {
1942: char prefix[128];
1944: PetscFunctionBegin;
1946: PetscAssertPointer(quadts, 3);
1947: PetscCall(TSDestroy(&ts->quadraturets));
1948: PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1949: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1950: PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1951: PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1952: *quadts = ts->quadraturets;
1954: if (ts->numcost) {
1955: PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1956: } else {
1957: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1958: }
1959: ts->costintegralfwd = fwd;
1960: PetscFunctionReturn(PETSC_SUCCESS);
1961: }
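/*
  Illustrative sketch (CostIntegrand is a hypothetical user callback with the
  TSRHSFunctionFn calling sequence that evaluates the integrand r(u,p,t); `ts` and a user
  context `user` are assumed to exist). The integrand is supplied as the right-hand side
  of the quadrature TS, which accumulates the integral during the run selected by `fwd`.

    TS quadts;

    PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));
    PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user));

  For sensitivities of the integral, the Jacobians of the integrand can be attached to the
  quadrature TS as well, e.g. with TSSetRHSJacobian() and TSSetRHSJacobianP().
*/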
1963: /*@
1964: TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time
1966: Input Parameter:
1967: . ts - the `TS` context obtained from `TSCreate()`
1969: Output Parameters:
1970: + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1971: - quadts - the child `TS` context
1973: Level: intermediate
1975: .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1976: @*/
1977: PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1978: {
1979: PetscFunctionBegin;
1981: if (fwd) *fwd = ts->costintegralfwd;
1982: if (quadts) *quadts = ts->quadraturets;
1983: PetscFunctionReturn(PETSC_SUCCESS);
1984: }
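/*
  Illustrative sketch: after the run, the accumulated integral values live in the solution
  vector of the quadrature TS, one entry per cost function (see also TSGetCostIntegral()).

    TS  quadts;
    Vec q;

    PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
    PetscCall(TSGetSolution(quadts, &q));
    PetscCall(VecView(q, PETSC_VIEWER_STDOUT_WORLD));
*/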
1986: /*@
1987: TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`
1989: Collective
1991: Input Parameters:
1992: + ts - the `TS` context obtained from `TSCreate()`
1993: - x - state vector
1995: Output Parameters:
1996: + J - Jacobian matrix
1997: - Jpre - matrix used to compute the preconditioner for `J` (may be the same as `J`)
1999: Level: developer
2001: Note:
2002: Uses finite differencing when `TS` Jacobian is not available.
2004: .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
2005: @*/
2006: PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
2007: {
2008: SNES snes = ts->snes;
2009: PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;
2011: PetscFunctionBegin;
2012: /*
2013: Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
2014: because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
2015: explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
2016: coloring is used.
2017: */
2018: PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
2019: if (jac == SNESComputeJacobianDefaultColor) {
2020: Vec f;
2021: PetscCall(SNESSetSolution(snes, x));
2022: PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
2023: /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
2024: PetscCall(SNESComputeFunction(snes, x, f));
2025: }
2026: PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
2027: PetscFunctionReturn(PETSC_SUCCESS);
2028: }