Actual source code: tssen.c

  1: #include <petsc/private/tsimpl.h>
  2: #include <petscdraw.h>

  4: PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;

  6: /* #define TSADJOINT_STAGE */

  8: /* ------------------------ Sensitivity Context ---------------------------*/

 10: /*@C
 11:   TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

 13:   Logically Collective

 15:   Input Parameters:
 16: + ts - `TS` context obtained from `TSCreate()`
 17: . Amat - JacobianP matrix
 18: . func - function
 19: - ctx - [optional] user-defined function context

 21:   Calling sequence of `func`:
 22: $ PetscErrorCode func(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
 23: +   t - current timestep
 24: .   U - input vector (current ODE solution)
 25: .   A - output matrix
 26: -   ctx - [optional] user-defined function context

 28:   Level: intermediate

 30:   Note:
 31:     Amat has the same number of rows and the same row parallel layout as u, and the same number of columns and parallel layout as p

 33: .seealso: [](ch_ts), `TS`, `TSGetRHSJacobianP()`
 34: @*/
 35: PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
 36: {
 37:   PetscFunctionBegin;

 41:   ts->rhsjacobianp    = func;
 42:   ts->rhsjacobianpctx = ctx;
 43:   if (Amat) {
 44:     PetscCall(PetscObjectReference((PetscObject)Amat));
 45:     PetscCall(MatDestroy(&ts->Jacprhs));
 46:     ts->Jacprhs = Amat;
 47:   }
 48:   PetscFunctionReturn(PETSC_SUCCESS);
 49: }
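
/* An illustrative sketch of a user JacobianP callback for the scalar ODE u' = p*u, for which
   dG/dp = u.  The names MyRHSJacobianP, Jacp and user, and the 1x1 sizes, are hypothetical and
   used only for this example. */
static PetscErrorCode MyRHSJacobianP(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
{
  const PetscScalar *u;

  PetscFunctionBeginUser;
  PetscCall(VecGetArrayRead(U, &u));
  PetscCall(MatSetValue(A, 0, 0, u[0], INSERT_VALUES)); /* dG/dp evaluated at (t, U) */
  PetscCall(VecRestoreArrayRead(U, &u));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Registration, done once before the forward solve:
     PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, &Jacp));
     PetscCall(TSSetRHSJacobianP(ts, Jacp, MyRHSJacobianP, &user));                               */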

 51: /*@C
 52:   TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

 54:   Logically Collective

 56:   Input Parameter:
 57: . ts - `TS` context obtained from `TSCreate()`

 59:   Output Parameters:
 60: + Amat - JacobianP matrix
 61: . func - function
 62: - ctx - [optional] user-defined function context

 64:   Calling sequence of `func`:
 65: $ PetscErrorCode func(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
 66: +   t - current timestep
 67: .   U - input vector (current ODE solution)
 68: .   A - output matrix
 69: -   ctx - [optional] user-defined function context

 71:   Level: intermediate

 73:   Note:
 74:     Amat has the same number of rows and the same row parallel layout as u, and the same number of columns and parallel layout as p

 76: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSGetRHSJacobianP()`
 77: @*/
 78: PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS, PetscReal, Vec, Mat, void *), void **ctx)
 79: {
 80:   PetscFunctionBegin;
 81:   if (func) *func = ts->rhsjacobianp;
 82:   if (ctx) *ctx = ts->rhsjacobianpctx;
 83:   if (Amat) *Amat = ts->Jacprhs;
 84:   PetscFunctionReturn(PETSC_SUCCESS);
 85: }

 87: /*@C
 88:   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.

 90:   Collective

 92:   Input Parameters:
 93: + ts   - The `TS` context obtained from `TSCreate()`
 94: . t - the time
 95: - U - the solution at which to compute the Jacobian

 97:   Output Parameter:
 98: . Amat - the computed Jacobian

100:   Level: developer

102: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
103: @*/
104: PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
105: {
106:   PetscFunctionBegin;
107:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

111:   if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
112:   else {
113:     PetscBool assembled;
114:     PetscCall(MatZeroEntries(Amat));
115:     PetscCall(MatAssembled(Amat, &assembled));
116:     if (!assembled) {
117:       PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
118:       PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
119:     }
120:   }
121:   PetscFunctionReturn(PETSC_SUCCESS);
122: }

124: /*@C
125:   TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix.

127:   Logically Collective

129:   Input Parameters:
130: + ts - `TS` context obtained from `TSCreate()`
131: . Amat - JacobianP matrix
132: . func - function
133: - ctx - [optional] user-defined function context

135:   Calling sequence of `func`:
136: $ PetscErrorCode func(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx)
137: +   t - current timestep
138: .   U - input vector (current ODE solution)
139: .   Udot - time derivative of state vector
140: .   shift - shift to apply; see `TSSetIJacobian()` for the meaning of the shift
141: .   A - output matrix
142: -   ctx - [optional] user-defined function context

144:   Level: intermediate

146:   Note:
147:     Amat has the same number of rows and the same row parallel layout as u, and the same number of columns and parallel layout as p

149: .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
150: @*/
151: PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Vec, PetscReal, Mat, void *), void *ctx)
152: {
153:   PetscFunctionBegin;

157:   ts->ijacobianp    = func;
158:   ts->ijacobianpctx = ctx;
159:   if (Amat) {
160:     PetscCall(PetscObjectReference((PetscObject)Amat));
161:     PetscCall(MatDestroy(&ts->Jacp));
162:     ts->Jacp = Amat;
163:   }
164:   PetscFunctionReturn(PETSC_SUCCESS);
165: }
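
/* An illustrative sketch of an IJacobianP callback for the implicit form F(U, Udot, t) = Udot - p*U = 0
   of the same scalar ODE, for which dF/dp = -u (the shift does not enter here).  MyIJacobianP and the
   1x1 layout are hypothetical and used only for this example. */
static PetscErrorCode MyIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx)
{
  const PetscScalar *u;

  PetscFunctionBeginUser;
  PetscCall(VecGetArrayRead(U, &u));
  PetscCall(MatSetValue(A, 0, 0, -u[0], INSERT_VALUES)); /* dF/dp evaluated at (t, U, Udot) */
  PetscCall(VecRestoreArrayRead(U, &u));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}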

167: /*@C
168:   TSComputeIJacobianP - Runs the user-defined IJacobianP function.

170:   Collective

172:   Input Parameters:
173: + ts - the `TS` context
174: . t - current timestep
175: . U - state vector
176: . Udot - time derivative of state vector
177: . shift - shift to apply; see `TSSetIJacobian()` for the meaning of the shift
178: - imex - flag indicating whether the method is IMEX, in which case the RHSJacobianP is kept separate

180:   Output Parameter:
181: . A - Jacobian matrix

183:   Level: developer

185: .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
186: @*/
187: PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
188: {
189:   PetscFunctionBegin;
190:   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);

195:   PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
196:   if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
197:   if (imex) {
198:     if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
199:       PetscBool assembled;
200:       PetscCall(MatZeroEntries(Amat));
201:       PetscCall(MatAssembled(Amat, &assembled));
202:       if (!assembled) {
203:         PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
204:         PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
205:       }
206:     }
207:   } else {
208:     if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
209:     if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
210:       PetscCall(MatScale(Amat, -1));
211:     } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
212:       MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
213:       if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
214:         PetscCall(MatZeroEntries(Amat));
215:       }
216:       PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
217:     }
218:   }
219:   PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
220:   PetscFunctionReturn(PETSC_SUCCESS);
221: }
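
/* For orientation, a summary of what the branches above assemble: writing the problem as
   F(U, Udot, t) = G(U, P, t), the non-IMEX path combines the two callbacks so that on exit
   Amat = F_P - G_P; if only a RHS function is used (Udot = G(t, U)) the result is Amat = -G_P,
   and in the IMEX case Amat holds F_P alone. */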

223: /*@C
224:     TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

226:     Logically Collective

228:     Input Parameters:
229: +   ts - the `TS` context obtained from `TSCreate()`
230: .   numcost - number of gradients to be computed, this is the number of cost functions
231: .   costintegral - vector that stores the integral values
232: .   rf - routine for evaluating the integrand function
233: .   drduf - function that computes the gradients of the r's with respect to u
234: .   drdpf - function that computes the gradients of the r's with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
235: .   fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
236: -   ctx - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)

238:     Calling sequence of `rf`:
239: $   PetscErrorCode rf(TS ts, PetscReal t, Vec U, Vec F, void *ctx)

241:     Calling sequence of `drduf`:
242: $   PetscErrorCode drduf(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx)

244:     Calling sequence of `drdpf`:
245: $   PetscErrorCode drdpf(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx)

247:     Level: deprecated

249:     Note:
250:     For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions

252: .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`
253: @*/
254: PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS, PetscReal, Vec, Vec, void *), PetscErrorCode (*drduf)(TS, PetscReal, Vec, Vec *, void *), PetscErrorCode (*drdpf)(TS, PetscReal, Vec, Vec *, void *), PetscBool fwd, void *ctx)
255: {
256:   PetscFunctionBegin;
259:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
260:   if (!ts->numcost) ts->numcost = numcost;

262:   if (costintegral) {
263:     PetscCall(PetscObjectReference((PetscObject)costintegral));
264:     PetscCall(VecDestroy(&ts->vec_costintegral));
265:     ts->vec_costintegral = costintegral;
266:   } else {
267:     if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
268:       PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
269:     } else {
270:       PetscCall(VecSet(ts->vec_costintegral, 0.0));
271:     }
272:   }
273:   if (!ts->vec_costintegrand) {
274:     PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
275:   } else {
276:     PetscCall(VecSet(ts->vec_costintegrand, 0.0));
277:   }
278:   ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
279:   ts->costintegrand    = rf;
280:   ts->costintegrandctx = ctx;
281:   ts->drdufunction     = drduf;
282:   ts->drdpfunction     = drdpf;
283:   PetscFunctionReturn(PETSC_SUCCESS);
284: }

286: /*@C
287:    TSGetCostIntegral - Returns the values of the integral term in the cost functions.
288:    It is valid to call the routine after a backward run.

290:    Not Collective

292:    Input Parameter:
293: .  ts - the `TS` context obtained from `TSCreate()`

295:    Output Parameter:
296: .  v - the vector containing the integrals for each cost function

298:    Level: intermediate

300: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
301: @*/
302: PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
303: {
304:   TS quadts;

306:   PetscFunctionBegin;
309:   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
310:   *v = quadts->vec_sol;
311:   PetscFunctionReturn(PETSC_SUCCESS);
312: }
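
/* An illustrative sketch: inspecting the integral part of the cost after a forward or adjoint run.
   The returned vector is owned by the quadrature TS, so the caller must not destroy it.
     Vec q;
     PetscCall(TSGetCostIntegral(ts, &q));
     PetscCall(VecView(q, PETSC_VIEWER_STDOUT_WORLD));                                             */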

314: /*@C
315:    TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

317:    Input Parameters:
318: +  ts - the `TS` context
319: .  t - current time
320: -  U - state vector, i.e. current solution

322:    Output Parameter:
323: .  Q - vector of size numcost to hold the outputs

325:    Level: deprecated

327:    Note:
328:    Most users should not need to explicitly call this routine, as it
329:    is used internally within the sensitivity analysis context.

331: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
332: @*/
333: PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
334: {
335:   PetscFunctionBegin;

340:   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
341:   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
342:   else PetscCall(VecZeroEntries(Q));
343:   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
344:   PetscFunctionReturn(PETSC_SUCCESS);
345: }

347: /*@C
348:   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

350:   Level: deprecated

352: @*/
353: PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
354: {
355:   PetscFunctionBegin;
356:   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);

360:   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
361:   PetscFunctionReturn(PETSC_SUCCESS);
362: }

364: /*@C
365:   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

367:   Level: deprecated

369: @*/
370: PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
371: {
372:   PetscFunctionBegin;
373:   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);

377:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
378:   PetscFunctionReturn(PETSC_SUCCESS);
379: }

381: /*@C
382:   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.

384:   Logically Collective

386:   Input Parameters:
387: + ts - `TS` context obtained from `TSCreate()`
388: . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
389: . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
390: . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
391: . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
392: . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
393: . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
394: . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
395: - hessianproductfunc4 - vector-Hessian-vector product function for F_PP

397:   Calling sequence of `ihessianproductfunc`:
398: $ PetscErrorCode ihessianproductfunc(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx);
399: +   t - current timestep
400: .   U - input vector (current ODE solution)
401: .   Vl - an array of input vectors to be left-multiplied with the Hessian
402: .   Vr - input vector to be right-multiplied with the Hessian
403: .   VHV - an array of output vectors for vector-Hessian-vector product
404: -   ctx - [optional] user-defined function context

406:   Level: intermediate

408:   Notes:
409:   The first Hessian function and the working array are required.
410:   As an example of how to implement the callback functions, consider the second one, which calculates the vector-Hessian-vector product
411:   $ Vl_n^T*F_UP*Vr
412:   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
413:   Each entry of F_UP corresponds to the derivative
414:   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
415:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
416:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
417:   If the cost function is a scalar, there will be only one vector in Vl and VHV.

419: .seealso: [](ch_ts), `TS`
420: @*/
421: PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
422: {
423:   PetscFunctionBegin;

427:   ts->ihessianproductctx = ctx;
428:   if (ihp1) ts->vecs_fuu = ihp1;
429:   if (ihp2) ts->vecs_fup = ihp2;
430:   if (ihp3) ts->vecs_fpu = ihp3;
431:   if (ihp4) ts->vecs_fpp = ihp4;
432:   ts->ihessianproduct_fuu = ihessianproductfunc1;
433:   ts->ihessianproduct_fup = ihessianproductfunc2;
434:   ts->ihessianproduct_fpu = ihessianproductfunc3;
435:   ts->ihessianproduct_fpp = ihessianproductfunc4;
436:   PetscFunctionReturn(PETSC_SUCCESS);
437: }
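
/* An illustrative sketch of the F_UP callback described in the notes above: for the scalar implicit
   form F(u, u', t) = u' - p*u, the only second derivative mixing U and P is d^2F/dU dP = -1, so the
   product reduces to VHV_n[0] = -Vl_n[0]*Vr[0].  MyIHessianProductUP is a hypothetical name; a real
   problem would loop over the state and parameter indices instead. */
static PetscErrorCode MyIHessianProductUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx)
{
  const PetscScalar *vl, *vr;

  PetscFunctionBeginUser;
  PetscCall(VecGetArrayRead(Vl[0], &vl));
  PetscCall(VecGetArrayRead(Vr, &vr));
  PetscCall(VecSet(VHV[0], -vl[0] * vr[0]));
  PetscCall(VecRestoreArrayRead(Vr, &vr));
  PetscCall(VecRestoreArrayRead(Vl[0], &vl));
  PetscFunctionReturn(PETSC_SUCCESS);
}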

439: /*@C
440:   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

442:   Collective

444:   Input Parameters:
445: + ts   - The `TS` context obtained from `TSCreate()`
446: . t - the time
447: . U - the solution at which to compute the Hessian product
448: . Vl - the array of input vectors to be multiplied with the Hessian from the left
449: - Vr - the input vector to be multiplied with the Hessian from the right

451:   Output Parameter:
452: . VHV - the array of output vectors that store the Hessian product

454:   Level: developer

456:   Note:
457:   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
458:   so most users would not generally call this routine themselves.

460: .seealso: [](ch_ts), `TSSetIHessianProduct()`
461: @*/
462: PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
463: {
464:   PetscFunctionBegin;
465:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

469:   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

471:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
472:   if (ts->rhshessianproduct_guu) {
473:     PetscInt nadj;
474:     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
475:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
476:   }
477:   PetscFunctionReturn(PETSC_SUCCESS);
478: }

480: /*@C
481:   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

483:   Collective

485:   Input Parameters:
486: + ts   - The `TS` context obtained from `TSCreate()`
487: . t - the time
488: . U - the solution at which to compute the Hessian product
489: . Vl - the array of input vectors to be multiplied with the Hessian from the left
490: - Vr - the input vector to be multiplied with the Hessian from the right

492:   Output Parameter:
493: . VHV - the array of output vectors that store the Hessian product

495:   Level: developer

497:   Note:
498:   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
499:   so most users would not generally call this routine themselves.

501: .seealso: [](ch_ts), `TSSetIHessianProduct()`
502: @*/
503: PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
504: {
505:   PetscFunctionBegin;
506:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

510:   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

512:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
513:   if (ts->rhshessianproduct_gup) {
514:     PetscInt nadj;
515:     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
516:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
517:   }
518:   PetscFunctionReturn(PETSC_SUCCESS);
519: }

521: /*@C
522:   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

524:   Collective

526:   Input Parameters:
527: + ts   - The `TS` context obtained from `TSCreate()`
528: . t - the time
529: . U - the solution at which to compute the Hessian product
530: . Vl - the array of input vectors to be multiplied with the Hessian from the left
531: - Vr - the input vector to be multiplied with the Hessian from the right

533:   Output Parameter:
534: . VHV - the array of output vectors that store the Hessian product

536:   Level: developer

538:   Note:
539:   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
540:   so most users would not generally call this routine themselves.

542: .seealso: [](ch_ts), `TSSetIHessianProduct()`
543: @*/
544: PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
545: {
546:   PetscFunctionBegin;
547:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

551:   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

553:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
554:   if (ts->rhshessianproduct_gpu) {
555:     PetscInt nadj;
556:     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
557:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
558:   }
559:   PetscFunctionReturn(PETSC_SUCCESS);
560: }

562: /*@C
563:   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

565:   Collective

567:   Input Parameters:
568: + ts   - The `TS` context obtained from `TSCreate()`
569: . t - the time
570: . U - the solution at which to compute the Hessian product
571: . Vl - the array of input vectors to be multiplied with the Hessian from the left
572: - Vr - the input vector to be multiplied with the Hessian from the right

574:   Output Parameter:
575: . VHV - the array of output vectors that store the Hessian product

577:   Level: developer

579:   Note:
580:   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
581:   so most users would not generally call this routine themselves.

583: .seealso: [](ch_ts), `TSSetIHessianProduct()`
584: @*/
585: PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
586: {
587:   PetscFunctionBegin;
588:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

592:   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 4 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

594:   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
595:   if (ts->rhshessianproduct_gpp) {
596:     PetscInt nadj;
597:     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
598:     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
599:   }
600:   PetscFunctionReturn(PETSC_SUCCESS);
601: }

603: /*@C
604:   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable.

606:   Logically Collective

608:   Input Parameters:
609: + ts - `TS` context obtained from `TSCreate()`
610: . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
611: . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
612: . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
613: . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
614: . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
615: . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
616: . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
617: - hessianproductfunc4 - vector-Hessian-vector product function for G_PP

619:   Calling sequence of `rhshessianproductfunc`:
620: $ PetscErrorCode rhshessianproductfunc(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx);
621: +   t - current timestep
622: .   U - input vector (current ODE solution)
623: .   Vl - an array of input vectors to be left-multiplied with the Hessian
624: .   Vr - input vector to be right-multiplied with the Hessian
625: .   VHV - an array of output vectors for vector-Hessian-vector product
626: -   ctx - [optional] user-defined function context

628:   Level: intermediate

630:   Notes:
631:   The first Hessian function and the working array are required.
632:   As an example of how to implement the callback functions, consider the second one, which calculates the vector-Hessian-vector product
633:   $ Vl_n^T*G_UP*Vr
634:   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
635:   Each entry of G_UP corresponds to the derivative
636:   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
637:   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
638:   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
639:   If the cost function is a scalar, there will be only one vector in Vl and VHV.

641: .seealso: `TS`, `TSAdjoint`
642: @*/
643: PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
644: {
645:   PetscFunctionBegin;

649:   ts->rhshessianproductctx = ctx;
650:   if (rhshp1) ts->vecs_guu = rhshp1;
651:   if (rhshp2) ts->vecs_gup = rhshp2;
652:   if (rhshp3) ts->vecs_gpu = rhshp3;
653:   if (rhshp4) ts->vecs_gpp = rhshp4;
654:   ts->rhshessianproduct_guu = rhshessianproductfunc1;
655:   ts->rhshessianproduct_gup = rhshessianproductfunc2;
656:   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
657:   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
658:   PetscFunctionReturn(PETSC_SUCCESS);
659: }

661: /*@C
662:   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.

664:   Collective

666:   Input Parameters:
667: + ts   - The `TS` context obtained from `TSCreate()`
668: . t - the time
669: . U - the solution at which to compute the Hessian product
670: . Vl - the array of input vectors to be multiplied with the Hessian from the left
671: - Vr - the input vector to be multiplied with the Hessian from the right

673:   Output Parameter:
674: . VHV - the array of output vectors that store the Hessian product

676:   Level: developer

678:   Note:
679:   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
680:   so most users would not generally call this routine themselves.

682: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
683: @*/
684: PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
685: {
686:   PetscFunctionBegin;
687:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

691:   PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
692:   PetscFunctionReturn(PETSC_SUCCESS);
693: }

695: /*@C
696:   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.

698:   Collective

700:   Input Parameters:
701: + ts   - The `TS` context obtained from `TSCreate()`
702: . t - the time
703: . U - the solution at which to compute the Hessian product
704: . Vl - the array of input vectors to be multiplied with the Hessian from the left
705: - Vr - the input vector to be multiplied with the Hessian from the right

707:   Output Parameter:
708: . VHV - the array of output vectors that store the Hessian product

710:   Level: developer

712:   Note:
713:   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
714:   so most users would not generally call this routine themselves.

716: .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
717: @*/
718: PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
719: {
720:   PetscFunctionBegin;
721:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

725:   PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
726:   PetscFunctionReturn(PETSC_SUCCESS);
727: }

729: /*@C
730:   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.

732:   Collective

734:   Input Parameters:
735: + ts   - The `TS` context obtained from `TSCreate()`
736: . t - the time
737: . U - the solution at which to compute the Hessian product
738: . Vl - the array of input vectors to be multiplied with the Hessian from the left
739: - Vr - the input vector to be multiplied with the Hessian from the right

741:   Output Parameter:
742: . VHV - the array of output vectors that store the Hessian product

744:   Level: developer

746:   Note:
747:   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
748:   so most users would not generally call this routine themselves.

750: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
751: @*/
752: PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
753: {
754:   PetscFunctionBegin;
755:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

759:   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
760:   PetscFunctionReturn(PETSC_SUCCESS);
761: }

763: /*@C
764:   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.

766:   Collective

768:   Input Parameters:
769: + ts   - The `TS` context obtained from `TSCreate()`
770: . t - the time
771: . U - the solution at which to compute the Hessian product
772: . Vl - the array of input vectors to be multiplied with the Hessian from the left
773: - Vr - the input vector to be multiplied with the Hessian from the right

775:   Output Parameter:
776: . VHV - the array of output vectors that store the Hessian product

778:   Level: developer

780:   Note:
781:   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
782:   so most users would not generally call this routine themselves.

784: .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
785: @*/
786: PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
787: {
788:   PetscFunctionBegin;
789:   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);

793:   PetscCallBack("TS callback RHSHessianProduct 4 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
794:   PetscFunctionReturn(PETSC_SUCCESS);
795: }

797: /* --------------------------- Adjoint sensitivity ---------------------------*/

799: /*@
800:    TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
801:       for use by the `TS` adjoint routines.

803:    Logically Collective

805:    Input Parameters:
806: +  ts - the `TS` context obtained from `TSCreate()`
807: .  numcost - number of gradients to be computed, this is the number of cost functions
808: .  lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
809: -  mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

811:    Level: beginner

813:    Notes:
814:    The entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime and mu_i = df/dp|finaltime

816:    After `TSAdjointSolve()` is called, lambda and mu contain the computed sensitivities

818: .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
819: @*/
820: PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu)
821: {
822:   PetscFunctionBegin;
825:   ts->vecs_sensi  = lambda;
826:   ts->vecs_sensip = mu;
827:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostGradients()) is inconsistent with the one set by TSSetCostIntegrand()");
828:   ts->numcost = numcost;
829:   PetscFunctionReturn(PETSC_SUCCESS);
830: }
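
/* An illustrative first-order adjoint workflow.  It assumes a single cost function
   Psi(U(T)) = sum_i U_i(T), `nparam` problem parameters, and that the parameter Jacobian has
   already been registered with TSSetRHSJacobianP() or TSSetIJacobianP(); the function and
   variable names are hypothetical. */
static PetscErrorCode RunFirstOrderAdjoint(TS ts, Vec U, PetscInt nparam)
{
  Vec lambda[1], mu[1];

  PetscFunctionBeginUser;
  PetscCall(TSSetSaveTrajectory(ts));         /* the adjoint needs the stored forward trajectory */
  PetscCall(TSSolve(ts, U));                  /* forward run */
  PetscCall(VecDuplicate(U, &lambda[0]));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nparam, &mu[0]));
  PetscCall(VecSet(lambda[0], 1.0));          /* dPsi/dU at the final time for Psi = sum_i U_i */
  PetscCall(VecSet(mu[0], 0.0));              /* dPsi/dP at the final time */
  PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
  PetscCall(TSAdjointSolve(ts));              /* lambda[0] and mu[0] now hold the sensitivities */
  PetscCall(VecDestroy(&lambda[0]));
  PetscCall(VecDestroy(&mu[0]));
  PetscFunctionReturn(PETSC_SUCCESS);
}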

832: /*@
833:    TSGetCostGradients - Returns the gradients computed by `TSAdjointSolve()`

835:    Not Collective, but the vectors returned are parallel if `TS` is parallel

837:    Input Parameter:
838: .  ts - the `TS` context obtained from `TSCreate()`

840:    Output Parameters:
841: +  numcost - size of returned arrays
842: .  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
843: -  mu - vectors containing the gradients of the cost functions with respect to the problem parameters

845:    Level: intermediate

847: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
848: @*/
849: PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu)
850: {
851:   PetscFunctionBegin;
853:   if (numcost) *numcost = ts->numcost;
854:   if (lambda) *lambda = ts->vecs_sensi;
855:   if (mu) *mu = ts->vecs_sensip;
856:   PetscFunctionReturn(PETSC_SUCCESS);
857: }

859: /*@
860:    TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
861:    for use by the `TS` adjoint routines.

863:    Logically Collective

865:    Input Parameters:
866: +  ts - the `TS` context obtained from `TSCreate()`
867: .  numcost - number of cost functions
868: .  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
869: .  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
870: -  dir - the direction vector that is multiplied with the Hessian of the cost functions

872:    Level: beginner

874:    Notes:
875:    The Hessian of the cost function is completely different from the Hessian of the ODE/DAE system

877:    For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

879:    After `TSAdjointSolve()` is called, lambda2 and mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector products (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.

881:    Passing `NULL` for `lambda2` disables the second-order calculation.

883: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
884: @*/
885: PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir)
886: {
887:   PetscFunctionBegin;
889:   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostHessianProducts()) is inconsistent with the one set by TSSetCostIntegrand()");
890:   ts->numcost      = numcost;
891:   ts->vecs_sensi2  = lambda2;
892:   ts->vecs_sensi2p = mu2;
893:   ts->vec_dir      = dir;
894:   PetscFunctionReturn(PETSC_SUCCESS);
895: }

897: /*@
898:    TSGetCostHessianProducts - Returns the Hessian-vector products (second-order adjoint sensitivities) computed by `TSAdjointSolve()`

900:    Not Collective, but vectors returned are parallel if `TS` is parallel

902:    Input Parameter:
903: .  ts - the `TS` context obtained from `TSCreate()`

905:    Output Parameters:
906: +  numcost - number of cost functions
907: .  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
908: .  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
909: -  dir - the direction vector that is multiplied with the Hessian of the cost functions

911:    Level: intermediate

913: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
914: @*/
915: PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir)
916: {
917:   PetscFunctionBegin;
919:   if (numcost) *numcost = ts->numcost;
920:   if (lambda2) *lambda2 = ts->vecs_sensi2;
921:   if (mu2) *mu2 = ts->vecs_sensi2p;
922:   if (dir) *dir = ts->vec_dir;
923:   PetscFunctionReturn(PETSC_SUCCESS);
924: }

926: /*@
927:   TSAdjointSetForward - Triggers the tangent linear solver and initializes the forward sensitivities

929:   Logically Collective

931:   Input Parameters:
932: +  ts - the `TS` context obtained from `TSCreate()`
933: -  didp - the derivative of initial values w.r.t. parameters

935:   Level: intermediate

937:   Notes:
938:   When computing sensitivities w.r.t. the initial condition, set didp to `NULL` so that the solver will treat it mathematically as an identity matrix.
939:   `TSAdjoint` does not reset the tangent linear solver automatically; `TSAdjointResetForward()` should be called to reset it.

941: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
942: @*/
943: PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
944: {
945:   Mat          A;
946:   Vec          sp;
947:   PetscScalar *xarr;
948:   PetscInt     lsize;

950:   PetscFunctionBegin;
951:   ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
952:   PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
953:   PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
954:   /* create a single-column dense matrix */
955:   PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
956:   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

958:   PetscCall(VecDuplicate(ts->vec_sol, &sp));
959:   PetscCall(MatDenseGetColumn(A, 0, &xarr));
960:   PetscCall(VecPlaceArray(sp, xarr));
961:   if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
962:     if (didp) {
963:       PetscCall(MatMult(didp, ts->vec_dir, sp));
964:       PetscCall(VecScale(sp, 2.));
965:     } else {
966:       PetscCall(VecZeroEntries(sp));
967:     }
968:   } else { /* tangent linear variable initialized as dir */
969:     PetscCall(VecCopy(ts->vec_dir, sp));
970:   }
971:   PetscCall(VecResetArray(sp));
972:   PetscCall(MatDenseRestoreColumn(A, &xarr));
973:   PetscCall(VecDestroy(&sp));

975:   PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

977:   PetscCall(MatDestroy(&A));
978:   PetscFunctionReturn(PETSC_SUCCESS);
979: }
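
/* An illustrative second-order adjoint sketch, following the notes of TSSetCostHessianProducts()
   and TSAdjointSetForward(): register first- and second-order work vectors and a direction
   vector, activate the tangent linear solver before the forward run, then solve backward.
   It assumes one cost function, `nparam` parameters, and that the required Jacobian and
   Hessian-product callbacks have already been set; all names here are hypothetical. */
static PetscErrorCode RunSecondOrderAdjoint(TS ts, Vec U, PetscInt nparam)
{
  Vec lambda[1], mu[1], lambda2[1], mu2[1], dir;

  PetscFunctionBeginUser;
  PetscCall(VecDuplicate(U, &lambda[0]));
  PetscCall(VecDuplicate(U, &lambda2[0]));
  PetscCall(VecDuplicate(U, &dir));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nparam, &mu[0]));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nparam, &mu2[0]));
  PetscCall(VecSet(dir, 1.0));                /* direction multiplied with the Hessian, e.g. supplied by an optimizer */
  PetscCall(TSSetCostGradients(ts, 1, lambda, mu));
  PetscCall(TSSetCostHessianProducts(ts, 1, lambda2, mu2, dir));
  PetscCall(TSSetSolution(ts, U));
  PetscCall(TSAdjointSetForward(ts, NULL));   /* NULL: sensitivities w.r.t. the initial conditions */
  PetscCall(TSSetSaveTrajectory(ts));
  PetscCall(TSSolve(ts, U));
  /* ... seed lambda, mu, lambda2, mu2 with the cost derivatives at the final time here ... */
  PetscCall(TSAdjointSolve(ts));              /* lambda2[0] and mu2[0] hold the Hessian-vector products */
  PetscCall(TSAdjointResetForward(ts));
  PetscCall(VecDestroy(&lambda[0]));
  PetscCall(VecDestroy(&mu[0]));
  PetscCall(VecDestroy(&lambda2[0]));
  PetscCall(VecDestroy(&mu2[0]));
  PetscCall(VecDestroy(&dir));
  PetscFunctionReturn(PETSC_SUCCESS);
}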

981: /*@
982:   TSAdjointResetForward - Resets the tangent linear solver and destroys the tangent linear context

984:   Logically Collective

986:   Input Parameter:
987: .  ts - the `TS` context obtained from `TSCreate()`

989:   Level: intermediate

991: .seealso: [](ch_ts), `TSAdjointSetForward()`
992: @*/
993: PetscErrorCode TSAdjointResetForward(TS ts)
994: {
995:   PetscFunctionBegin;
996:   ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
997:   PetscCall(TSForwardReset(ts));
998:   PetscFunctionReturn(PETSC_SUCCESS);
999: }

1001: /*@
1002:    TSAdjointSetUp - Sets up the internal data structures for the later use
1003:    of an adjoint solver

1005:    Collective

1007:    Input Parameter:
1008: .  ts - the `TS` context obtained from `TSCreate()`

1010:    Level: advanced

1012: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1013: @*/
1014: PetscErrorCode TSAdjointSetUp(TS ts)
1015: {
1016:   TSTrajectory tj;
1017:   PetscBool    match;

1019:   PetscFunctionBegin;
1021:   if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1022:   PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
1023:   PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
1024:   PetscCall(TSGetTrajectory(ts, &tj));
1025:   PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
1026:   if (match) {
1027:     PetscBool solution_only;
1028:     PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
1029:     PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
1030:   }
1031:   PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */

1033:   if (ts->quadraturets) { /* if there is integral in the cost function */
1034:     PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
1035:     if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
1036:   }

1038:   PetscTryTypeMethod(ts, adjointsetup);
1039:   ts->adjointsetupcalled = PETSC_TRUE;
1040:   PetscFunctionReturn(PETSC_SUCCESS);
1041: }

1043: /*@
1044:   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.

1046:    Collective

1048:    Input Parameter:
1049: .  ts - the `TS` context obtained from `TSCreate()`

1051:    Level: beginner

1053: .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1054: @*/
1055: PetscErrorCode TSAdjointReset(TS ts)
1056: {
1057:   PetscFunctionBegin;
1059:   PetscTryTypeMethod(ts, adjointreset);
1060:   if (ts->quadraturets) { /* if there is integral in the cost function */
1061:     PetscCall(VecDestroy(&ts->vec_drdu_col));
1062:     if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1063:   }
1064:   ts->vecs_sensi         = NULL;
1065:   ts->vecs_sensip        = NULL;
1066:   ts->vecs_sensi2        = NULL;
1067:   ts->vecs_sensi2p       = NULL;
1068:   ts->vec_dir            = NULL;
1069:   ts->adjointsetupcalled = PETSC_FALSE;
1070:   PetscFunctionReturn(PETSC_SUCCESS);
1071: }

1073: /*@
1074:    TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

1076:    Logically Collective

1078:    Input Parameters:
1079: +  ts - the `TS` context obtained from `TSCreate()`
1080: -  steps - number of steps to use

1082:    Level: intermediate

1084:    Notes:
1085:     Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. One can call this
1086:           so as to integrate backward over fewer steps than were taken in the forward run

1088: .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1089: @*/
1090: PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
1091: {
1092:   PetscFunctionBegin;
1095:   PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
1096:   PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
1097:   ts->adjoint_max_steps = steps;
1098:   PetscFunctionReturn(PETSC_SUCCESS);
1099: }

1101: /*@C
1102:   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`

1104:   Level: deprecated

1106: @*/
1107: PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
1108: {
1109:   PetscFunctionBegin;

1113:   ts->rhsjacobianp    = func;
1114:   ts->rhsjacobianpctx = ctx;
1115:   if (Amat) {
1116:     PetscCall(PetscObjectReference((PetscObject)Amat));
1117:     PetscCall(MatDestroy(&ts->Jacp));
1118:     ts->Jacp = Amat;
1119:   }
1120:   PetscFunctionReturn(PETSC_SUCCESS);
1121: }

1123: /*@C
1124:   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`

1126:   Level: deprecated

1128: @*/
1129: PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
1130: {
1131:   PetscFunctionBegin;

1136:   PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
1137:   PetscFunctionReturn(PETSC_SUCCESS);
1138: }

1140: /*@
1141:   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

1143:   Level: deprecated

1145: @*/
1146: PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
1147: {
1148:   PetscFunctionBegin;

1152:   PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
1153:   PetscFunctionReturn(PETSC_SUCCESS);
1154: }

1156: /*@
1157:   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

1159:   Level: deprecated

1161: @*/
1162: PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
1163: {
1164:   PetscFunctionBegin;

1168:   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
1169:   PetscFunctionReturn(PETSC_SUCCESS);
1170: }

1172: /*@C
1173:    TSAdjointMonitorSensi - monitors the first lambda sensitivity

1175:    Level: intermediate

1177: .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1178: @*/
1179: PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1180: {
1181:   PetscViewer viewer = vf->viewer;

1183:   PetscFunctionBegin;
1185:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1186:   PetscCall(VecView(lambda[0], viewer));
1187:   PetscCall(PetscViewerPopFormat(viewer));
1188:   PetscFunctionReturn(PETSC_SUCCESS);
1189: }

1191: /*@C
1192:    TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user

1194:    Collective

1196:    Input Parameters:
1197: +  ts - `TS` object you wish to monitor
1198: .  name - the monitor type one is seeking
1199: .  help - message indicating what monitoring is done
1200: .  manual - manual page for the monitor
1201: .  monitor - the monitor function
1202: -  monitorsetup - a function that is called once, ONLY if the user selected this monitor, and that may set additional features of the `TS` or `PetscViewer` objects

1204:    Level: developer

1206: .seealso: [](ch_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1207:           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1208:           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, `PetscOptionsBool()`,
1209:           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1210:           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1211:           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1212:           `PetscOptionsFList()`, `PetscOptionsEList()`
1213: @*/
1214: PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
1215: {
1216:   PetscViewer       viewer;
1217:   PetscViewerFormat format;
1218:   PetscBool         flg;

1220:   PetscFunctionBegin;
1221:   PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
1222:   if (flg) {
1223:     PetscViewerAndFormat *vf;
1224:     PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
1225:     PetscCall(PetscObjectDereference((PetscObject)viewer));
1226:     if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
1227:     PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy));
1228:   }
1229:   PetscFunctionReturn(PETSC_SUCCESS);
1230: }

1232: /*@C
1233:    TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1234:    timestep to display the iteration's progress.

1236:    Logically Collective

1238:    Input Parameters:
1239: +  ts - the `TS` context obtained from `TSCreate()`
1240: .  adjointmonitor - monitoring routine
1241: .  adjointmctx - [optional] user-defined context for private data for the
1242:              monitor routine (use `NULL` if no context is desired)
1243: -  adjointmonitordestroy - [optional] routine that frees monitor context
1244:           (may be `NULL`)

1246:    Calling sequence of `adjointmonitor`:
1247: $    PetscErrorCode adjointmonitor(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx)
1248: +    ts - the `TS` context
1249: .    steps - iteration number (after the final time step the monitor routine is called with a step of -1; this is at the final time, which may have
1250:                                been interpolated to)
1251: .    time - current time
1252: .    u - current iterate
1253: .    numcost - number of cost functions
1254: .    lambda - sensitivities to initial conditions
1255: .    mu - sensitivities to parameters
1256: -    adjointmctx - [optional] adjoint monitoring context

1258:    Level: intermediate

1260:    Note:
1261:    This routine adds an additional monitor to the list of monitors that
1262:    already has been loaded.

1264:    Fortran Note:
1265:    Only a single monitor function can be set for each `TS` object

1267: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`
1268: @*/
1269: PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **))
1270: {
1271:   PetscInt  i;
1272:   PetscBool identical;

1274:   PetscFunctionBegin;
1276:   for (i = 0; i < ts->numbermonitors; i++) {
1277:     PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1278:     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1279:   }
1280:   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1281:   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1282:   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1283:   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx;
1284:   PetscFunctionReturn(PETSC_SUCCESS);
1285: }
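
/* An illustrative sketch of a minimal user-defined adjoint monitor that prints the norm of the
   first adjoint variable at every backward step.  MyAdjointMonitor is a hypothetical name;
   registration is shown in the trailing comment. */
static PetscErrorCode MyAdjointMonitor(TS ts, PetscInt step, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *mctx)
{
  PetscReal norm;

  PetscFunctionBeginUser;
  PetscCall(VecNorm(lambda[0], NORM_2, &norm));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ts), "adjoint step %" PetscInt_FMT ": time %g, ||lambda[0]|| = %g\n", step, (double)time, (double)norm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Registration, before TSAdjointSolve():
     PetscCall(TSAdjointMonitorSet(ts, MyAdjointMonitor, NULL, NULL));                             */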

1287: /*@C
1288:    TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.

1290:    Logically Collective

1292:    Input Parameter:
1293: .  ts - the `TS` context obtained from `TSCreate()`

1295:    Notes:
1296:    There is no way to remove a single, specific monitor.

1298:    Level: intermediate

1300: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1301: @*/
1302: PetscErrorCode TSAdjointMonitorCancel(TS ts)
1303: {
1304:   PetscInt i;

1306:   PetscFunctionBegin;
1308:   for (i = 0; i < ts->numberadjointmonitors; i++) {
1309:     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1310:   }
1311:   ts->numberadjointmonitors = 0;
1312:   PetscFunctionReturn(PETSC_SUCCESS);
1313: }

1315: /*@C
1316:    TSAdjointMonitorDefault - the default monitor of adjoint computations

1318:    Level: intermediate

1320: .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1321: @*/
1322: PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1323: {
1324:   PetscViewer viewer = vf->viewer;

1326:   PetscFunctionBegin;
1328:   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1329:   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1330:   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)ptime, ts->steprollback ? " (r)\n" : "\n"));
1331:   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1332:   PetscCall(PetscViewerPopFormat(viewer));
1333:   PetscFunctionReturn(PETSC_SUCCESS);
1334: }

1336: /*@C
1337:    TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1338:    `VecView()` for the sensitivities to initial states at each timestep

1340:    Collective

1342:    Input Parameters:
1343: +  ts - the `TS` context
1344: .  step - current time-step
1345: .  ptime - current time
1346: .  u - current state
1347: .  numcost - number of cost functions
1348: .  lambda - sensitivities to initial conditions
1349: .  mu - sensitivities to parameters
1350: -  dummy - either a viewer or `NULL`

1352:    Level: intermediate

1354: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1355: @*/
1356: PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy)
1357: {
1358:   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1359:   PetscDraw        draw;
1360:   PetscReal        xl, yl, xr, yr, h;
1361:   char             time[32];

1363:   PetscFunctionBegin;
1364:   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);

1366:   PetscCall(VecView(lambda[0], ictx->viewer));
1367:   PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1368:   PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime));
1369:   PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1370:   h = yl + .95 * (yr - yl);
1371:   PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1372:   PetscCall(PetscDrawFlush(draw));
1373:   PetscFunctionReturn(PETSC_SUCCESS);
1374: }

1376: /*
1377:    TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from user options.

1379:    Collective

1381:    Input Parameter:
1382: .  ts - the `TS` context

1384:    Options Database Keys:
1385: +  -ts_adjoint_solve <yes,no> - After solving the ODE/DAE, solve the adjoint problem (requires -ts_save_trajectory)
1386: .  -ts_adjoint_monitor - print information at each adjoint time step
1387: -  -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically

1389:    Level: developer

1391:    Note:
1392:     This is not normally called directly by users

1394: .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1395: */
1396: PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject)
1397: {
1398:   PetscBool tflg, opt;

1400:   PetscFunctionBegin;
1402:   PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
1403:   tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
1404:   PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
1405:   if (opt) {
1406:     PetscCall(TSSetSaveTrajectory(ts));
1407:     ts->adjoint_solve = tflg;
1408:   }
1409:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
1410:   PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
1411:   opt = PETSC_FALSE;
1412:   PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
1413:   if (opt) {
1414:     TSMonitorDrawCtx ctx;
1415:     PetscInt         howoften = 1;

1417:     PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
1418:     PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
1419:     PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void **))TSMonitorDrawCtxDestroy));
1420:   }
1421:   PetscFunctionReturn(PETSC_SUCCESS);
1422: }
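/*
   Editor's note, a hedged sketch: a typical command line exercising the options handled
   above, assuming an application executable `./app` that calls TSSolve(); the executable
   name is illustrative.

     ./app -ts_save_trajectory -ts_adjoint_solve -ts_adjoint_monitor -ts_adjoint_monitor_draw_sensi
*/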

1424: /*@
1425:    TSAdjointStep - Steps one time step backward in the adjoint run

1427:    Collective

1429:    Input Parameter:
1430: .  ts - the `TS` context obtained from `TSCreate()`

1432:    Level: intermediate

1434: .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1435: @*/
1436: PetscErrorCode TSAdjointStep(TS ts)
1437: {
1438:   DM dm;

1440:   PetscFunctionBegin;
1442:   PetscCall(TSGetDM(ts, &dm));
1443:   PetscCall(TSAdjointSetUp(ts));
1444:   ts->steps--; /* must decrease the step index before the adjoint step is taken. */

1446:   ts->reason     = TS_CONVERGED_ITERATING;
1447:   ts->ptime_prev = ts->ptime;
1448:   PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
1449:   PetscUseTypeMethod(ts, adjointstep);
1450:   PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
1451:   ts->adjoint_steps++;

1453:   if (ts->reason < 0) {
1454:     PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
1455:   } else if (!ts->reason) {
1456:     if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1457:   }
1458:   PetscFunctionReturn(PETSC_SUCCESS);
1459: }

1461: /*@
1462:    TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

1464:    Collective

1466:    Input Parameter:
1467: .  ts - the `TS` context obtained from `TSCreate()`

1469:    Options Database Key:
1470: . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values

1472:    Level: intermediate

1474:    Notes:
1475:    This must be called after a call to `TSSolve()` that solves the forward problem

1477:    By default this will integrate back to the initial time; one can use `TSAdjointSetSteps()` to stop the backward integration at a later time. An illustrative usage sketch follows the function body below.

1479: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1480: @*/
1481: PetscErrorCode TSAdjointSolve(TS ts)
1482: {
1483:   static PetscBool cite = PETSC_FALSE;
1484: #if defined(TSADJOINT_STAGE)
1485:   PetscLogStage adjoint_stage;
1486: #endif

1488:   PetscFunctionBegin;
1490:   PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
1491:                                    "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
1492:                                    "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
1493:                                    "  journal       = {SIAM Journal on Scientific Computing},\n"
1494:                                    "  volume        = {44},\n"
1495:                                    "  number        = {1},\n"
1496:                                    "  pages         = {C1-C24},\n"
1497:                                    "  doi           = {10.1137/21M140078X},\n"
1498:                                    "  year          = {2022}\n}\n",
1499:                                    &cite));
1500: #if defined(TSADJOINT_STAGE)
1501:   PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
1502:   PetscCall(PetscLogStagePush(adjoint_stage));
1503: #endif
1504:   PetscCall(TSAdjointSetUp(ts));

1506:   /* reset time step and iteration counters */
1507:   ts->adjoint_steps     = 0;
1508:   ts->ksp_its           = 0;
1509:   ts->snes_its          = 0;
1510:   ts->num_snes_failures = 0;
1511:   ts->reject            = 0;
1512:   ts->reason            = TS_CONVERGED_ITERATING;

1514:   if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
1515:   if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

1517:   while (!ts->reason) {
1518:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1519:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1520:     PetscCall(TSAdjointEventHandler(ts));
1521:     PetscCall(TSAdjointStep(ts));
1522:     if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
1523:   }
1524:   if (!ts->steps) {
1525:     PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
1526:     PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
1527:   }
1528:   ts->solvetime = ts->ptime;
1529:   PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
1530:   PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
1531:   ts->adjoint_max_steps = 0;
1532: #if defined(TSADJOINT_STAGE)
1533:   PetscCall(PetscLogStagePop());
1534: #endif
1535:   PetscFunctionReturn(PETSC_SUCCESS);
1536: }
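/*
   Editor's note, a hedged sketch of the usual calling sequence around TSAdjointSolve(),
   assuming a single cost function equal to the first solution component at the final time;
   `ts` and the solution vector `U` are assumed to exist already, and `lambda` is illustrative.

     Vec lambda[1];
     PetscCall(TSSetSaveTrajectory(ts));                          // the adjoint run replays the forward trajectory
     PetscCall(TSSolve(ts, U));                                   // forward solve
     PetscCall(VecDuplicate(U, &lambda[0]));
     PetscCall(VecZeroEntries(lambda[0]));
     PetscCall(VecSetValue(lambda[0], 0, 1.0, INSERT_VALUES));    // terminal condition dPhi/dy at the final time
     PetscCall(VecAssemblyBegin(lambda[0]));
     PetscCall(VecAssemblyEnd(lambda[0]));
     PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));          // no parameter gradients requested (mu = NULL)
     PetscCall(TSAdjointSolve(ts));                               // lambda[0] now holds dPhi/dy at the initial time
     PetscCall(VecDestroy(&lambda[0]));
*/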

1538: /*@C
1539:    TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`

1541:    Collective

1543:    Input Parameters:
1544: +  ts - time stepping context obtained from `TSCreate()`
1545: .  step - step number that has just completed
1546: .  ptime - model time of the state
1547: .  u - state at the current model time
1548: .  numcost - number of cost functions (dimension of lambda or mu)
1549: .  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1550: -  mu - vectors containing the gradients of the cost functions with respect to the problem parameters

1552:    Level: developer

1554:    Note:
1555:    `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1556:    Users would almost never call this routine directly.

1558: .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1559: @*/
1560: PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu)
1561: {
1562:   PetscInt i, n = ts->numberadjointmonitors;

1564:   PetscFunctionBegin;
1567:   PetscCall(VecLockReadPush(u));
1568:   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1569:   PetscCall(VecLockReadPop(u));
1570:   PetscFunctionReturn(PETSC_SUCCESS);
1571: }

1573: /*@
1574:    TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

1576:    Collective

1578:    Input Parameter:
1579: .  ts - time stepping context

1581:    Level: advanced

1583:    Note:
1584:    This function cannot be called until `TSAdjointStep()` has been completed.

1586: .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
1587: @*/
1588: PetscErrorCode TSAdjointCostIntegral(TS ts)
1589: {
1590:   PetscFunctionBegin;
1592:   PetscUseTypeMethod(ts, adjointintegral);
1593:   PetscFunctionReturn(PETSC_SUCCESS);
1594: }

1596: /* ------------------ Forward (tangent linear) sensitivity  ------------------*/

1598: /*@
1599:   TSForwardSetUp - Sets up the internal data structures for the later use
1600:   of forward sensitivity analysis

1602:   Collective

1604:   Input Parameter:
1605: . ts - the `TS` context obtained from `TSCreate()`

1607:   Level: advanced

1609: .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1610: @*/
1611: PetscErrorCode TSForwardSetUp(TS ts)
1612: {
1613:   PetscFunctionBegin;
1615:   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1616:   PetscTryTypeMethod(ts, forwardsetup);
1617:   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1618:   ts->forwardsetupcalled = PETSC_TRUE;
1619:   PetscFunctionReturn(PETSC_SUCCESS);
1620: }

1622: /*@
1623:   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

1625:   Collective

1627:   Input Parameter:
1628: . ts - the `TS` context obtained from `TSCreate()`

1630:   Level: advanced

1632: .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1633: @*/
1634: PetscErrorCode TSForwardReset(TS ts)
1635: {
1636:   TS quadts = ts->quadraturets;

1638:   PetscFunctionBegin;
1640:   PetscTryTypeMethod(ts, forwardreset);
1641:   PetscCall(MatDestroy(&ts->mat_sensip));
1642:   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1643:   PetscCall(VecDestroy(&ts->vec_sensip_col));
1644:   ts->forward_solve      = PETSC_FALSE;
1645:   ts->forwardsetupcalled = PETSC_FALSE;
1646:   PetscFunctionReturn(PETSC_SUCCESS);
1647: }

1649: /*@
1650:   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

1652:   Input Parameters:
1653: + ts - the `TS` context obtained from `TSCreate()`
1654: . numfwdint - number of integrals
1655: - vp - the vectors containing the gradients for each integral w.r.t. parameters

1657:   Level: deprecated

1659: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1660: @*/
1661: PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp)
1662: {
1663:   PetscFunctionBegin;
1665:   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSForwardSetIntegralGradients()) is inconsistent with the one set by TSSetCostIntegrand()");
1666:   if (!ts->numcost) ts->numcost = numfwdint;

1668:   ts->vecs_integral_sensip = vp;
1669:   PetscFunctionReturn(PETSC_SUCCESS);
1670: }

1672: /*@
1673:   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

1675:   Input Parameter:
1676: . ts - the `TS` context obtained from `TSCreate()`

1678:   Output Parameters:
1679: + numfwdint - number of integrals
1680: - vp - the vectors containing the gradients for each integral w.r.t. parameters

1681:   Level: deprecated

1683: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1684: @*/
1685: PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp)
1686: {
1687:   PetscFunctionBegin;
1690:   if (numfwdint) *numfwdint = ts->numcost;
1691:   if (vp) *vp = ts->vecs_integral_sensip;
1692:   PetscFunctionReturn(PETSC_SUCCESS);
1693: }

1695: /*@
1696:   TSForwardStep - Compute the forward sensitivity for one time step.

1698:   Collective

1700:   Input Parameter:
1701: . ts - time stepping context

1703:   Level: advanced

1705:   Note:
1706:   This function cannot be called until `TSStep()` has been completed.

1708: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1709: @*/
1710: PetscErrorCode TSForwardStep(TS ts)
1711: {
1712:   PetscFunctionBegin;
1714:   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1715:   PetscUseTypeMethod(ts, forwardstep);
1716:   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1717:   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSForwardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1718:   PetscFunctionReturn(PETSC_SUCCESS);
1719: }

1721: /*@
1722:   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of the solution w.r.t. the problem parameters and initial values.

1724:   Logically Collective

1726:   Input Parameters:
1727: + ts - the `TS` context obtained from `TSCreate()`
1728: . nump - number of parameters
1729: - Smat - sensitivities with respect to the parameters; the matrix has as many columns as there are parameters

1731:   Level: beginner

1733:   Notes:
1734:   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1735:   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1736:   You must call this function before `TSSolve()`.
1737:   The entries of the sensitivity matrix must be initialized to S = dy/dp evaluated at the starting time (an illustrative sketch follows this function).

1739: .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1740: @*/
1741: PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1742: {
1743:   PetscFunctionBegin;
1746:   ts->forward_solve = PETSC_TRUE;
1747:   if (nump == PETSC_DEFAULT) {
1748:     PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1749:   } else ts->num_parameters = nump;
1750:   PetscCall(PetscObjectReference((PetscObject)Smat));
1751:   PetscCall(MatDestroy(&ts->mat_sensip));
1752:   ts->mat_sensip = Smat;
1753:   PetscFunctionReturn(PETSC_SUCCESS);
1754: }
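/*
   Editor's note, a hedged sketch of the initialization described in the Notes above,
   assuming n states, p parameters, an initial condition that does not depend on the
   parameters, and that TSSetRHSJacobianP() has already been called so TSSolve() can
   propagate the sensitivities; `n`, `p`, `ts`, and `U` are illustrative.

     Mat Smat;
     PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, n, p, NULL, &Smat));
     PetscCall(MatZeroEntries(Smat));                    // S = dy/dp = 0 at the starting time
     PetscCall(TSForwardSetSensitivities(ts, p, Smat));  // must precede TSSolve()
     PetscCall(TSSolve(ts, U));                          // Smat now holds dy/dp at the final time
     PetscCall(MatDestroy(&Smat));
*/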

1756: /*@
1757:   TSForwardGetSensitivities - Returns the trajectory sensitivities

1759:   Not Collective, but Smat returned is parallel if ts is parallel

1761:   Input Parameter:
1762: . ts - the `TS` context obtained from `TSCreate()`

1763:   Output Parameters:
1764: + nump - number of parameters
1765: - Smat - sensitivities with respect to the parameters; the matrix has as many columns as there are parameters

1766:   Level: intermediate

1768: .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1769: @*/
1770: PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1771: {
1772:   PetscFunctionBegin;
1774:   if (nump) *nump = ts->num_parameters;
1775:   if (Smat) *Smat = ts->mat_sensip;
1776:   PetscFunctionReturn(PETSC_SUCCESS);
1777: }
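/*
   Editor's note, a hedged sketch: retrieving and viewing the sensitivities after a forward
   run in which TSForwardSetSensitivities() was enabled; `ts` is assumed to exist.

     Mat      S;
     PetscInt np;
     PetscCall(TSForwardGetSensitivities(ts, &np, &S));
     PetscCall(MatView(S, PETSC_VIEWER_STDOUT_WORLD));
*/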

1779: /*@
1780:    TSForwardCostIntegral - Evaluate the cost integral in the forward run.

1782:    Collective

1784:    Input Parameter:
1785: .  ts - time stepping context

1787:    Level: advanced

1789:    Note:
1790:    This function cannot be called until `TSStep()` has been completed.

1792: .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
1793: @*/
1794: PetscErrorCode TSForwardCostIntegral(TS ts)
1795: {
1796:   PetscFunctionBegin;
1798:   PetscUseTypeMethod(ts, forwardintegral);
1799:   PetscFunctionReturn(PETSC_SUCCESS);
1800: }

1802: /*@
1803:   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

1805:   Collective

1807:   Input Parameters:
1808: + ts - the `TS` context obtained from `TSCreate()`
1809: - didp - parametric sensitivities of the initial condition

1811:   Level: intermediate

1813:   Notes:
1814:   `TSSolve()` allows users to pass the initial solution directly to `TS`; however, the tangent linear variables cannot be initialized in this way.
1815:   This function is used to set the initial values of the tangent linear variables.

1817: .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
1818: @*/
1819: PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
1820: {
1821:   PetscFunctionBegin;
1824:   if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp));
1825:   PetscFunctionReturn(PETSC_SUCCESS);
1826: }

1828: /*@
1829:    TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

1831:    Input Parameter:
1832: .  ts - the `TS` context obtained from `TSCreate()`

1834:    Output Parameters:
1835: +  ns - number of stages
1836: -  S - tangent linear sensitivities at the intermediate stages

1838:    Level: advanced

1840: .seealso: `TS`
1841: @*/
1842: PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1843: {
1844:   PetscFunctionBegin;

1847:   if (!ts->ops->forwardgetstages) *S = NULL;
1848:   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1849:   PetscFunctionReturn(PETSC_SUCCESS);
1850: }

1852: /*@
1853:    TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time

1855:    Input Parameters:
1856: +  ts - the `TS` context obtained from `TSCreate()`
1857: -  fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run

1859:    Output Parameter:
1860: .  quadts - the child `TS` context

1862:    Level: intermediate

1864: .seealso: [](ch_ts), `TSGetQuadratureTS()`
1865: @*/
1866: PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1867: {
1868:   char prefix[128];

1870:   PetscFunctionBegin;
1873:   PetscCall(TSDestroy(&ts->quadraturets));
1874:   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1875:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1876:   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1877:   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1878:   *quadts = ts->quadraturets;

1880:   if (ts->numcost) {
1881:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1882:   } else {
1883:     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1884:   }
1885:   ts->costintegralfwd = fwd;
1886:   PetscFunctionReturn(PETSC_SUCCESS);
1887: }
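/*
   Editor's note, a hedged sketch: creating a quadrature sub-TS that accumulates one cost
   integral during the forward run. `CostIntegrand` and `user` are hypothetical user-provided
   callback and context, not part of PETSc; computing sensitivities of the integral would
   additionally require Jacobians of the integrand (e.g. via TSSetRHSJacobianP() on quadts).

     TS quadts;
     PetscCall(TSCreateQuadratureTS(ts, PETSC_TRUE, &quadts));         // evaluate in the forward run
     PetscCall(TSSetRHSFunction(quadts, NULL, CostIntegrand, &user));  // integrand r(U, p, t), one entry per cost
*/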

1889: /*@
1890:    TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

1892:    Input Parameter:
1893: .  ts - the `TS` context obtained from `TSCreate()`

1895:    Output Parameters:
1896: +  fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1897: -  quadts - the child `TS` context

1899:    Level: intermediate

1901: .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1902: @*/
1903: PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1904: {
1905:   PetscFunctionBegin;
1907:   if (fwd) *fwd = ts->costintegralfwd;
1908:   if (quadts) *quadts = ts->quadraturets;
1909:   PetscFunctionReturn(PETSC_SUCCESS);
1910: }

1912: /*@
1913:    TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

1915:    Collective

1917:    Input Parameters:
1918: +  ts - the `TS` context obtained from `TSCreate()`
1919: -  x - state vector

1921:    Output Parameters:
1922: +  J - Jacobian matrix
1923: -  Jpre - preconditioning matrix for J (may be same as J)

1925:    Level: developer

1927:    Note:
1928:    Uses finite differencing when `TS` Jacobian is not available.

1930: .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
1931: @*/
1932: PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
1933: {
1934:   SNES snes                                          = ts->snes;
1935:   PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

1937:   PetscFunctionBegin;
1938:   /*
1939:     Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
1940:     because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
1941:     explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
1942:     coloring is used.
1943:   */
1944:   PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
1945:   if (jac == SNESComputeJacobianDefaultColor) {
1946:     Vec f;
1947:     PetscCall(SNESSetSolution(snes, x));
1948:     PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
1949:     /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
1950:     PetscCall(SNESComputeFunction(snes, x, f));
1951:   }
1952:   PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
1953:   PetscFunctionReturn(PETSC_SUCCESS);
1954: }