Actual source code: taosolver_hj.c
1: #include <petsc/private/taoimpl.h>
3: /*@C
4: TaoSetHessian - Sets the function to compute the Hessian as well as the location to store the matrix.
6: Logically Collective
8: Input Parameters:
9: + tao - the `Tao` context
10: . H - Matrix used for the Hessian
11: . Hpre - Matrix that will be used to construct the preconditioner, can be same as `H`
12: . func - Hessian evaluation routine
13: - ctx - [optional] user-defined context for private data for the
14: Hessian evaluation routine (may be `NULL`)
16: Calling sequence of `func`:
17: $ PetscErrorCode func(Tao tao, Vec x, Mat H, Mat Hpre, void *ctx);
18: + tao - the Tao context
19: . x - input vector
20: . H - Hessian matrix
21: . Hpre - matrix used to construct the preconditioner, usually the same as `H`
22: - ctx - [optional] user-defined Hessian context
24: Level: beginner
26: .seealso: [](ch_tao), `Tao`, `TaoType`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetObjectiveAndGradient()`, `TaoGetHessian()`
27: @*/
28: PetscErrorCode TaoSetHessian(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
29: {
30: PetscFunctionBegin;
32: if (H) {
34: PetscCheckSameComm(tao, 1, H, 2);
35: }
36: if (Hpre) {
38: PetscCheckSameComm(tao, 1, Hpre, 3);
39: }
40: if (ctx) tao->user_hessP = ctx;
41: if (func) tao->ops->computehessian = func;
42: if (H) {
43: PetscCall(PetscObjectReference((PetscObject)H));
44: PetscCall(MatDestroy(&tao->hessian));
45: tao->hessian = H;
46: }
47: if (Hpre) {
48: PetscCall(PetscObjectReference((PetscObject)Hpre));
49: PetscCall(MatDestroy(&tao->hessian_pre));
50: tao->hessian_pre = Hpre;
51: }
52: PetscFunctionReturn(PETSC_SUCCESS);
53: }
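/* Editor's note: the block below is an illustrative sketch, not part of taosolver_hj.c.
   It shows one way a user-supplied Hessian callback matching the calling sequence of
   TaoSetHessian() might look and how it could be registered. The names TaoEx_FormHessian()
   and TaoEx_RegisterHessian(), and the assumption that the objective is f(x) = sum_i x_i^2
   (so the Hessian is 2*I), are hypothetical. */
static PetscErrorCode TaoEx_FormHessian(Tao tao, Vec x, Mat H, Mat Hpre, void *ctx)
{
  PetscInt i, rstart, rend;

  PetscFunctionBegin;
  /* for f(x) = sum_i x_i^2 the Hessian is constant: 2 on the diagonal */
  PetscCall(MatGetOwnershipRange(H, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(H, i, i, 2.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(H, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(H, MAT_FINAL_ASSEMBLY));
  if (Hpre != H) {
    /* when a separate preconditioning matrix is used, fill and assemble it here as well */
    PetscCall(MatAssemblyBegin(Hpre, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(Hpre, MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoEx_RegisterHessian(Tao tao, Mat H)
{
  PetscFunctionBegin;
  /* use the same matrix for H and Hpre and pass no user context */
  PetscCall(TaoSetHessian(tao, H, H, TaoEx_FormHessian, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}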
55: /*@C
56: TaoGetHessian - Gets the function to compute the Hessian as well as the location to store the matrix.
58: Not Collective
60: Input Parameter:
61: . tao - the `Tao` context
63: Output Parameters:
64: + H - Matrix used for the Hessian
65: . Hpre - Matrix that will be used to construct the preconditioner, can be the same as `H`
66: . func - Hessian evaluation routine
67: - ctx - user-defined context for private data for the Hessian evaluation routine
69: Calling sequence of `func`:
70: $ PetscErrorCode func(Tao tao, Vec x, Mat H, Mat Hpre, void *ctx);
71: + tao - the Tao context
72: . x - input vector
73: . H - Hessian matrix
74: . Hpre - matrix used to construct the preconditioner, usually the same as `H`
75: - ctx - [optional] user-defined Hessian context
77: Level: beginner
79: .seealso: [](ch_tao), `Tao`, `TaoType`, `TaoGetObjective()`, `TaoGetGradient()`, `TaoGetObjectiveAndGradient()`, `TaoSetHessian()`
80: @*/
81: PetscErrorCode TaoGetHessian(Tao tao, Mat *H, Mat *Hpre, PetscErrorCode (**func)(Tao, Vec, Mat, Mat, void *), void **ctx)
82: {
83: PetscFunctionBegin;
85: if (H) *H = tao->hessian;
86: if (Hpre) *Hpre = tao->hessian_pre;
87: if (ctx) *ctx = tao->user_hessP;
88: if (func) *func = tao->ops->computehessian;
89: PetscFunctionReturn(PETSC_SUCCESS);
90: }
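/* Editor's note: illustrative sketch only, not part of taosolver_hj.c. It shows how the
   matrices, callback, and context registered with TaoSetHessian() can be queried back;
   TaoEx_QueryHessian() is a hypothetical helper name. */
static PetscErrorCode TaoEx_QueryHessian(Tao tao)
{
  Mat   H, Hpre;
  void *ctx;
  PetscErrorCode (*hess)(Tao, Vec, Mat, Mat, void *);

  PetscFunctionBegin;
  PetscCall(TaoGetHessian(tao, &H, &Hpre, &hess, &ctx));
  if (!hess) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)tao), "No Hessian callback has been set\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}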
92: PetscErrorCode TaoTestHessian(Tao tao)
93: {
94: Mat A, B, C, D, hessian;
95: Vec x = tao->solution;
96: PetscReal nrm, gnorm;
97: PetscReal threshold = 1.e-5;
98: PetscInt m, n, M, N;
99: PetscBool complete_print = PETSC_FALSE, test = PETSC_FALSE, flg;
100: PetscViewer viewer, mviewer;
101: MPI_Comm comm;
102: PetscInt tabs;
103: static PetscBool directionsprinted = PETSC_FALSE;
104: PetscViewerFormat format;
106: PetscFunctionBegin;
107: PetscObjectOptionsBegin((PetscObject)tao);
108: PetscCall(PetscOptionsName("-tao_test_hessian", "Compare hand-coded and finite difference Hessians", "None", &test));
109: PetscCall(PetscOptionsReal("-tao_test_hessian", "Threshold for element difference between hand-coded and finite difference being meaningful", "None", threshold, &threshold, NULL));
110: PetscCall(PetscOptionsViewer("-tao_test_hessian_view", "View difference between hand-coded and finite difference Hessians element entries", "None", &mviewer, &format, &complete_print));
111: PetscOptionsEnd();
112: if (!test) PetscFunctionReturn(PETSC_SUCCESS);
114: PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
115: PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
116: PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
117: PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
118: PetscCall(PetscViewerASCIIPrintf(viewer, " ---------- Testing Hessian -------------\n"));
119: if (!complete_print && !directionsprinted) {
120: PetscCall(PetscViewerASCIIPrintf(viewer, " Run with -tao_test_hessian_view and optionally -tao_test_hessian <threshold> to show difference\n"));
121: PetscCall(PetscViewerASCIIPrintf(viewer, " of hand-coded and finite difference Hessian entries greater than <threshold>.\n"));
122: }
123: if (!directionsprinted) {
124: PetscCall(PetscViewerASCIIPrintf(viewer, " Testing hand-coded Hessian, if (for double precision runs) ||H - Hfd||_F/||H||_F is\n"));
125: PetscCall(PetscViewerASCIIPrintf(viewer, " O(1.e-8), the hand-coded Hessian is probably correct.\n"));
126: directionsprinted = PETSC_TRUE;
127: }
128: if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));
130: PetscCall(PetscObjectTypeCompare((PetscObject)tao->hessian, MATMFFD, &flg));
131: if (!flg) hessian = tao->hessian;
132: else hessian = tao->hessian_pre;
134: while (hessian) {
135: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)hessian, &flg, MATSEQAIJ, MATMPIAIJ, MATSEQDENSE, MATMPIDENSE, MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ, ""));
136: if (flg) {
137: A = hessian;
138: PetscCall(PetscObjectReference((PetscObject)A));
139: } else {
140: PetscCall(MatComputeOperator(hessian, MATAIJ, &A));
141: }
143: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
144: PetscCall(MatGetSize(A, &M, &N));
145: PetscCall(MatGetLocalSize(A, &m, &n));
146: PetscCall(MatSetSizes(B, m, n, M, N));
147: PetscCall(MatSetType(B, ((PetscObject)A)->type_name));
148: PetscCall(MatSetUp(B));
149: PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
151: PetscCall(TaoDefaultComputeHessian(tao, x, B, B, NULL));
153: PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &D));
154: PetscCall(MatAYPX(D, -1.0, A, DIFFERENT_NONZERO_PATTERN));
155: PetscCall(MatNorm(D, NORM_FROBENIUS, &nrm));
156: PetscCall(MatNorm(A, NORM_FROBENIUS, &gnorm));
157: PetscCall(MatDestroy(&D));
158: if (!gnorm) gnorm = 1; /* just in case */
159: PetscCall(PetscViewerASCIIPrintf(viewer, " ||H - Hfd||_F/||H||_F = %g, ||H - Hfd||_F = %g\n", (double)(nrm / gnorm), (double)nrm));
161: if (complete_print) {
162: PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded Hessian ----------\n"));
163: PetscCall(MatView(A, mviewer));
164: PetscCall(PetscViewerASCIIPrintf(viewer, " Finite difference Hessian ----------\n"));
165: PetscCall(MatView(B, mviewer));
166: }
168: if (complete_print) {
169: PetscInt Istart, Iend, *ccols, bncols, cncols, j, row;
170: PetscScalar *cvals;
171: const PetscInt *bcols;
172: const PetscScalar *bvals;
174: PetscCall(MatAYPX(B, -1.0, A, DIFFERENT_NONZERO_PATTERN));
175: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
176: PetscCall(MatSetSizes(C, m, n, M, N));
177: PetscCall(MatSetType(C, ((PetscObject)A)->type_name));
178: PetscCall(MatSetUp(C));
179: PetscCall(MatSetOption(C, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
180: PetscCall(MatGetOwnershipRange(B, &Istart, &Iend));
182: for (row = Istart; row < Iend; row++) {
183: PetscCall(MatGetRow(B, row, &bncols, &bcols, &bvals));
184: PetscCall(PetscMalloc2(bncols, &ccols, bncols, &cvals));
185: for (j = 0, cncols = 0; j < bncols; j++) {
186: if (PetscAbsScalar(bvals[j]) > threshold) {
187: ccols[cncols] = bcols[j];
188: cvals[cncols] = bvals[j];
189: cncols += 1;
190: }
191: }
192: if (cncols) PetscCall(MatSetValues(C, 1, &row, cncols, ccols, cvals, INSERT_VALUES));
193: PetscCall(MatRestoreRow(B, row, &bncols, &bcols, &bvals));
194: PetscCall(PetscFree2(ccols, cvals));
195: }
196: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
197: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
198: PetscCall(PetscViewerASCIIPrintf(viewer, " Finite-difference minus hand-coded Hessian with tolerance %g ----------\n", (double)threshold));
199: PetscCall(MatView(C, mviewer));
200: PetscCall(MatDestroy(&C));
201: }
202: PetscCall(MatDestroy(&A));
203: PetscCall(MatDestroy(&B));
205: if (hessian != tao->hessian_pre) {
206: hessian = tao->hessian_pre;
207: PetscCall(PetscViewerASCIIPrintf(viewer, " ---------- Testing Hessian for preconditioner -------------\n"));
208: } else hessian = NULL;
209: }
210: if (complete_print) {
211: PetscCall(PetscViewerPopFormat(mviewer));
212: PetscCall(PetscViewerDestroy(&mviewer));
213: }
214: PetscCall(PetscViewerASCIISetTab(viewer, tabs));
215: PetscFunctionReturn(PETSC_SUCCESS);
216: }
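/* Editor's note: TaoTestHessian() is driven entirely by the options database, so the usual
   way to exercise it is from the command line. Assuming an application executable named
   ./myapp (a placeholder), the options registered above would be used as, for example:

     ./myapp -tao_test_hessian
     ./myapp -tao_test_hessian 1.e-7 -tao_test_hessian_view

   The first form prints ||H - Hfd||_F/||H||_F; the second additionally views the entries of
   the hand-coded, finite difference, and difference matrices whose magnitude exceeds the
   given threshold. */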
218: /*@C
219: TaoComputeHessian - Computes the Hessian matrix that has been
220: set with `TaoSetHessian()`.
222: Collective
224: Input Parameters:
225: + tao - the Tao solver context
226: - X - input vector
228: Output Parameters:
229: + H - Hessian matrix
230: - Hpre - Preconditioning matrix
232: Options Database Keys:
233: + -tao_test_hessian - compare the user-provided Hessian with one computed via finite differences to check for errors
234: . -tao_test_hessian <numerical value> - display entries in the difference between the user-provided Hessian and finite difference Hessian that are greater than a certain value to help users detect errors
235: - -tao_test_hessian_view - display the user-provided Hessian, the finite difference Hessian and the difference between them to help users detect the location of errors in the user-provided Hessian
237: Level: developer
239: Note:
240: Most users should not need to call this routine explicitly; it is called
241: internally by the optimization algorithms.
247: Developer Note:
248: The Hessian test mechanism follows `SNESTestJacobian()`.
250: .seealso: [](ch_tao), `Tao`, `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetHessian()`
251: @*/
252: PetscErrorCode TaoComputeHessian(Tao tao, Vec X, Mat H, Mat Hpre)
253: {
254: PetscFunctionBegin;
257: PetscCheckSameComm(tao, 1, X, 2);
258: PetscCheck(tao->ops->computehessian, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetHessian() not called");
260: ++tao->nhess;
261: PetscCall(VecLockReadPush(X));
262: PetscCall(PetscLogEventBegin(TAO_HessianEval, tao, X, H, Hpre));
263: PetscCallBack("Tao callback Hessian", (*tao->ops->computehessian)(tao, X, H, Hpre, tao->user_hessP));
264: PetscCall(PetscLogEventEnd(TAO_HessianEval, tao, X, H, Hpre));
265: PetscCall(VecLockReadPop(X));
267: PetscCall(TaoTestHessian(tao));
268: PetscFunctionReturn(PETSC_SUCCESS);
269: }
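/* Editor's note: illustrative sketch only, not part of taosolver_hj.c. It shows the typical
   developer-level use of TaoComputeHessian() inside a Newton-type TaoSolve() implementation:
   evaluate the Hessian at the current iterate and hand it to the solver's KSP. The helper
   name TaoEx_NewtonHessianSetup() is hypothetical. */
static PetscErrorCode TaoEx_NewtonHessianSetup(Tao tao, Vec X)
{
  KSP ksp;

  PetscFunctionBegin;
  PetscCall(TaoComputeHessian(tao, X, tao->hessian, tao->hessian_pre));
  PetscCall(TaoGetKSP(tao, &ksp));
  PetscCall(KSPSetOperators(ksp, tao->hessian, tao->hessian_pre));
  /* a KSPSolve() with the (negative) gradient as right-hand side would follow here */
  PetscFunctionReturn(PETSC_SUCCESS);
}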
271: /*@C
272: TaoComputeJacobian - Computes the Jacobian matrix that has been
273: set with `TaoSetJacobianRoutine()`.
275: Collective
277: Input Parameters:
278: + tao - the Tao solver context
279: - X - input vector
281: Output Parameters:
282: + J - Jacobian matrix
283: - Jpre - Preconditioning matrix
285: Level: developer
287: Note:
288: Most users should not need to call this routine explicitly; it is called
289: internally by the minimization solvers.
295: .seealso: [](ch_tao), `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetJacobianRoutine()`
296: @*/
297: PetscErrorCode TaoComputeJacobian(Tao tao, Vec X, Mat J, Mat Jpre)
298: {
299: PetscFunctionBegin;
302: PetscCheckSameComm(tao, 1, X, 2);
303: ++tao->njac;
304: PetscCall(VecLockReadPush(X));
305: PetscCall(PetscLogEventBegin(TAO_JacobianEval, tao, X, J, Jpre));
306: PetscCallBack("Tao callback Jacobian", (*tao->ops->computejacobian)(tao, X, J, Jpre, tao->user_jacP));
307: PetscCall(PetscLogEventEnd(TAO_JacobianEval, tao, X, J, Jpre));
308: PetscCall(VecLockReadPop(X));
309: PetscFunctionReturn(PETSC_SUCCESS);
310: }
312: /*@C
313: TaoComputeResidualJacobian - Computes the least-squares residual Jacobian matrix that has been
314: set with `TaoSetJacobianResidual()`.
316: Collective
318: Input Parameters:
319: + tao - the Tao solver context
320: - X - input vector
322: Output Parameters:
323: + J - Jacobian matrix
324: - Jpre - Preconditioning matrix
326: Level: developer
328: Note:
329: Most users should not need to call this routine explicitly; it is called
330: internally by the least-squares solvers.
336: .seealso: [](ch_tao), `Tao`, `TaoComputeResidual()`, `TaoSetJacobianResidual()`
337: @*/
338: PetscErrorCode TaoComputeResidualJacobian(Tao tao, Vec X, Mat J, Mat Jpre)
339: {
340: PetscFunctionBegin;
343: PetscCheckSameComm(tao, 1, X, 2);
344: ++tao->njac;
345: PetscCall(VecLockReadPush(X));
346: PetscCall(PetscLogEventBegin(TAO_JacobianEval, tao, X, J, Jpre));
347: PetscCallBack("Tao callback least-squares residual Jacobian", (*tao->ops->computeresidualjacobian)(tao, X, J, Jpre, tao->user_lsjacP));
348: PetscCall(PetscLogEventEnd(TAO_JacobianEval, tao, X, J, Jpre));
349: PetscCall(VecLockReadPop(X));
350: PetscFunctionReturn(PETSC_SUCCESS);
351: }
353: /*@C
354: TaoComputeJacobianState - Computes the Jacobian matrix that has been
355: set with `TaoSetJacobianStateRoutine()`.
357: Collective
359: Input Parameters:
360: + tao - the `Tao` solver context
361: - X - input vector
363: Output Parameters:
364: + J - Jacobian matrix
365: . Jpre - matrix used to construct the preconditioner, often the same as `J`
366: - Jinv - matrix used to apply the inverse of the state Jacobian (may be `NULL`); see `TaoSetJacobianStateRoutine()`
368: Level: developer
370: Note:
371: Most users should not need to explicitly call this routine, as it
372: is used internally within the optimization algorithms.
374: .seealso: [](ch_tao), `Tao`, `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetJacobianStateRoutine()`, `TaoComputeJacobianDesign()`, `TaoSetStateDesignIS()`
375: @*/
376: PetscErrorCode TaoComputeJacobianState(Tao tao, Vec X, Mat J, Mat Jpre, Mat Jinv)
377: {
378: PetscFunctionBegin;
381: PetscCheckSameComm(tao, 1, X, 2);
382: ++tao->njac_state;
383: PetscCall(VecLockReadPush(X));
384: PetscCall(PetscLogEventBegin(TAO_JacobianEval, tao, X, J, Jpre));
385: PetscCallBack("Tao callback Jacobian(state)", (*tao->ops->computejacobianstate)(tao, X, J, Jpre, Jinv, tao->user_jac_stateP));
386: PetscCall(PetscLogEventEnd(TAO_JacobianEval, tao, X, J, Jpre));
387: PetscCall(VecLockReadPop(X));
388: PetscFunctionReturn(PETSC_SUCCESS);
389: }
391: /*@C
392: TaoComputeJacobianDesign - Computes the Jacobian matrix that has been
393: set with `TaoSetJacobianDesignRoutine()`.
395: Collective
397: Input Parameters:
398: + tao - the Tao solver context
399: - X - input vector
401: Output Parameter:
402: . J - Jacobian matrix
404: Level: developer
406: Note:
407: Most users should not need to explicitly call this routine, as it
408: is used internally within the optimization algorithms.
410: .seealso: [](ch_tao), `Tao`, `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetJacobianDesignRoutine()`, `TaoComputeJacobianDesign()`, `TaoSetStateDesignIS()`
411: @*/
412: PetscErrorCode TaoComputeJacobianDesign(Tao tao, Vec X, Mat J)
413: {
414: PetscFunctionBegin;
417: PetscCheckSameComm(tao, 1, X, 2);
418: ++tao->njac_design;
419: PetscCall(VecLockReadPush(X));
420: PetscCall(PetscLogEventBegin(TAO_JacobianEval, tao, X, J, NULL));
421: PetscCallBack("Tao callback Jacobian(design)", (*tao->ops->computejacobiandesign)(tao, X, J, tao->user_jac_designP));
422: PetscCall(PetscLogEventEnd(TAO_JacobianEval, tao, X, J, NULL));
423: PetscCall(VecLockReadPop(X));
424: PetscFunctionReturn(PETSC_SUCCESS);
425: }
427: /*@C
428: TaoSetJacobianRoutine - Sets the function to compute the Jacobian as well as the location to store the matrix.
430: Logically Collective
432: Input Parameters:
433: + tao - the `Tao` context
434: . J - Matrix used for the Jacobian
435: . Jpre - Matrix that will be used to construct the preconditioner, can be same as `J`
436: . func - Jacobian evaluation routine
437: - ctx - [optional] user-defined context for private data for the
438: Jacobian evaluation routine (may be `NULL`)
440: Calling sequence of `func`:
441: $ PetscErrorCode func(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx);
442: + tao - the `Tao` context
443: . x - input vector
444: . J - Jacobian matrix
445: . Jpre - matrix used to construct the preconditioner, usually the same as `J`
446: - ctx - [optional] user-defined Jacobian context
448: Level: intermediate
450: .seealso: [](ch_tao), `Tao`, `TaoSetGradient()`, `TaoSetObjective()`
451: @*/
452: PetscErrorCode TaoSetJacobianRoutine(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
453: {
454: PetscFunctionBegin;
456: if (J) {
458: PetscCheckSameComm(tao, 1, J, 2);
459: }
460: if (Jpre) {
462: PetscCheckSameComm(tao, 1, Jpre, 3);
463: }
464: if (ctx) tao->user_jacP = ctx;
465: if (func) tao->ops->computejacobian = func;
466: if (J) {
467: PetscCall(PetscObjectReference((PetscObject)J));
468: PetscCall(MatDestroy(&tao->jacobian));
469: tao->jacobian = J;
470: }
471: if (Jpre) {
472: PetscCall(PetscObjectReference((PetscObject)Jpre));
473: PetscCall(MatDestroy(&tao->jacobian_pre));
474: tao->jacobian_pre = Jpre;
475: }
476: PetscFunctionReturn(PETSC_SUCCESS);
477: }
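/* Editor's note: illustrative sketch only, not part of taosolver_hj.c. Registration follows
   the same pattern as TaoSetHessian() above; the callback fills the Jacobian of the
   constraint function at x. TaoEx_FormJacobian() and the identity diagonal used here are
   placeholders. */
static PetscErrorCode TaoEx_FormJacobian(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx)
{
  PetscInt i, rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(J, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(J, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* registered with, e.g., PetscCall(TaoSetJacobianRoutine(tao, J, J, TaoEx_FormJacobian, NULL)); */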
479: /*@C
480: TaoSetJacobianResidualRoutine - Sets the function to compute the least-squares residual Jacobian as well as the
481: location to store the matrix.
483: Logically Collective
485: Input Parameters:
486: + tao - the `Tao` context
487: . J - Matrix used for the Jacobian
488: . Jpre - Matrix that will be used to construct the preconditioner, can be same as `J`
489: . func - Jacobian evaluation routine
490: - ctx - [optional] user-defined context for private data for the
491: Jacobian evaluation routine (may be `NULL`)
493: Calling sequence of `func`:
494: $ PetscErrorCode func(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx);
495: + tao - the `Tao` context
496: . x - input vector
497: . J - Jacobian matrix
498: . Jpre - matrix used to construct the preconditioner, usually the same as `J`
499: - ctx - [optional] user-defined Jacobian context
501: Level: intermediate
503: .seealso: [](ch_tao), `Tao`, `TaoSetGradient()`, `TaoSetObjective()`
504: @*/
505: PetscErrorCode TaoSetJacobianResidualRoutine(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
506: {
507: PetscFunctionBegin;
509: if (J) {
511: PetscCheckSameComm(tao, 1, J, 2);
512: }
513: if (Jpre) {
515: PetscCheckSameComm(tao, 1, Jpre, 3);
516: }
517: if (ctx) tao->user_lsjacP = ctx;
518: if (func) tao->ops->computeresidualjacobian = func;
519: if (J) {
520: PetscCall(PetscObjectReference((PetscObject)J));
521: PetscCall(MatDestroy(&tao->ls_jac));
522: tao->ls_jac = J;
523: }
524: if (Jpre) {
525: PetscCall(PetscObjectReference((PetscObject)Jpre));
526: PetscCall(MatDestroy(&tao->ls_jac_pre));
527: tao->ls_jac_pre = Jpre;
528: }
529: PetscFunctionReturn(PETSC_SUCCESS);
530: }
532: /*@C
533: TaoSetJacobianStateRoutine - Sets the function to compute the Jacobian
534: (and its inverse) of the constraint function with respect to the state variables.
535: Used only for PDE-constrained optimization.
537: Logically Collective
539: Input Parameters:
540: + tao - the `Tao` context
541: . J - Matrix used for the Jacobian
542: . Jpre - Matrix that will be used to construct the preconditioner, can be same as `J`. Only used if `Jinv` is `NULL`
543: . Jinv - [optional] Matrix used to apply the inverse of the state Jacobian. Use `NULL` to default to PETSc `KSP` solvers to apply the inverse.
544: . func - Jacobian evaluation routine
545: - ctx - [optional] user-defined context for private data for the
546: Jacobian evaluation routine (may be `NULL`)
548: Calling sequence of `func`:
549: $ PetscErrorCode func(Tao tao, Vec x, Mat J, Mat Jpre, Mat Jinv, void *ctx);
550: + tao - the `Tao` context
551: . x - input vector
552: . J - Jacobian matrix
553: . Jpre - matrix used to construct the preconditioner, usually the same as `J`
554: . Jinv - inverse of `J`
555: - ctx - [optional] user-defined Jacobian context
557: Level: intermediate
559: .seealso: [](ch_tao), `Tao`, `TaoComputeJacobianState()`, `TaoSetJacobianDesignRoutine()`, `TaoSetStateDesignIS()`
560: @*/
561: PetscErrorCode TaoSetJacobianStateRoutine(Tao tao, Mat J, Mat Jpre, Mat Jinv, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, Mat, void *), void *ctx)
562: {
563: PetscFunctionBegin;
565: if (J) {
567: PetscCheckSameComm(tao, 1, J, 2);
568: }
569: if (Jpre) {
571: PetscCheckSameComm(tao, 1, Jpre, 3);
572: }
573: if (Jinv) {
575: PetscCheckSameComm(tao, 1, Jinv, 4);
576: }
577: if (ctx) tao->user_jac_stateP = ctx;
578: if (func) tao->ops->computejacobianstate = func;
579: if (J) {
580: PetscCall(PetscObjectReference((PetscObject)J));
581: PetscCall(MatDestroy(&tao->jacobian_state));
582: tao->jacobian_state = J;
583: }
584: if (Jpre) {
585: PetscCall(PetscObjectReference((PetscObject)Jpre));
586: PetscCall(MatDestroy(&tao->jacobian_state_pre));
587: tao->jacobian_state_pre = Jpre;
588: }
589: if (Jinv) {
590: PetscCall(PetscObjectReference((PetscObject)Jinv));
591: PetscCall(MatDestroy(&tao->jacobian_state_inv));
592: tao->jacobian_state_inv = Jinv;
593: }
594: PetscFunctionReturn(PETSC_SUCCESS);
595: }
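/* Editor's note: illustrative sketch only, not part of taosolver_hj.c. The state-Jacobian
   callback takes an extra Jinv argument; passing Jinv = NULL to TaoSetJacobianStateRoutine()
   lets PETSc apply the inverse through a KSP instead. TaoEx_FormStateJacobian() and the
   registration line in the trailing comment are hypothetical. */
static PetscErrorCode TaoEx_FormStateJacobian(Tao tao, Vec x, Mat J, Mat Jpre, Mat Jinv, void *ctx)
{
  PetscFunctionBegin;
  /* fill J (and Jpre, if it is a separate matrix) with d(constraint)/d(state) at x here */
  PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* registered with, e.g., PetscCall(TaoSetJacobianStateRoutine(tao, Js, Js, NULL, TaoEx_FormStateJacobian, &user)); */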
597: /*@C
598: TaoSetJacobianDesignRoutine - Sets the function to compute the Jacobian of
599: the constraint function with respect to the design variables. Used only for
600: PDE-constrained optimization.
602: Logically Collective
604: Input Parameters:
605: + tao - the `Tao` context
606: . J - Matrix used for the Jacobian
607: . func - Jacobian evaluation routine
608: - ctx - [optional] user-defined context for private data for the
609: Jacobian evaluation routine (may be `NULL`)
611: Calling sequence of `func`:
612: $ PetscErrorCode func(Tao tao, Vec x, Mat J, void *ctx);
613: + tao - the `Tao` context
614: . x - input vector
615: . J - Jacobian matrix
616: - ctx - [optional] user-defined Jacobian context
618: Level: intermediate
620: .seealso: [](ch_tao), `Tao`, `TaoComputeJacobianDesign()`, `TaoSetJacobianStateRoutine()`, `TaoSetStateDesignIS()`
621: @*/
622: PetscErrorCode TaoSetJacobianDesignRoutine(Tao tao, Mat J, PetscErrorCode (*func)(Tao, Vec, Mat, void *), void *ctx)
623: {
624: PetscFunctionBegin;
626: if (J) {
628: PetscCheckSameComm(tao, 1, J, 2);
629: }
630: if (ctx) tao->user_jac_designP = ctx;
631: if (func) tao->ops->computejacobiandesign = func;
632: if (J) {
633: PetscCall(PetscObjectReference((PetscObject)J));
634: PetscCall(MatDestroy(&tao->jacobian_design));
635: tao->jacobian_design = J;
636: }
637: PetscFunctionReturn(PETSC_SUCCESS);
638: }
640: /*@
641: TaoSetStateDesignIS - Indicate to the `Tao` object which variables in the
642: solution vector are state variables and which are design. Only applies to
643: PDE-constrained optimization.
645: Logically Collective
647: Input Parameters:
648: + tao - The `Tao` context
649: . s_is - the index set corresponding to the state variables
650: - d_is - the index set corresponding to the design variables
652: Level: intermediate
654: .seealso: [](ch_tao), `Tao`, `TaoSetJacobianStateRoutine()`, `TaoSetJacobianDesignRoutine()`
655: @*/
656: PetscErrorCode TaoSetStateDesignIS(Tao tao, IS s_is, IS d_is)
657: {
658: PetscFunctionBegin;
659: PetscCall(PetscObjectReference((PetscObject)s_is));
660: PetscCall(ISDestroy(&tao->state_is));
661: tao->state_is = s_is;
662: PetscCall(PetscObjectReference((PetscObject)(d_is)));
663: PetscCall(ISDestroy(&tao->design_is));
664: tao->design_is = d_is;
665: PetscFunctionReturn(PETSC_SUCCESS);
666: }
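/* Editor's note: illustrative sketch only, not part of taosolver_hj.c. For a solution vector
   laid out as [state variables; design variables], with ns local state entries followed by
   nd local design entries starting at local offset "start" (all placeholder names), the two
   index sets could be built with ISCreateStride() and handed to TaoSetStateDesignIS(). */
static PetscErrorCode TaoEx_SplitStateDesign(Tao tao, PetscInt start, PetscInt ns, PetscInt nd)
{
  IS s_is, d_is;

  PetscFunctionBegin;
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)tao), ns, start, 1, &s_is));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)tao), nd, start + ns, 1, &d_is));
  PetscCall(TaoSetStateDesignIS(tao, s_is, d_is));
  /* TaoSetStateDesignIS() takes its own references, so the local ones can be released */
  PetscCall(ISDestroy(&s_is));
  PetscCall(ISDestroy(&d_is));
  PetscFunctionReturn(PETSC_SUCCESS);
}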
668: /*@C
669: TaoComputeJacobianEquality - Computes the Jacobian matrix that has been
670: set with `TaoSetJacobianEqualityRoutine()`.
672: Collective
674: Input Parameters:
675: + tao - the `Tao` solver context
676: - X - input vector
678: Output Parameters:
679: + J - Jacobian matrix
680: - Jpre - matrix used to construct the preconditioner, often the same as `J`
682: Level: developer
684: Note:
685: Most users should not need to explicitly call this routine, as it
686: is used internally within the optimization algorithms.
688: .seealso: [](ch_tao), `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetJacobianStateRoutine()`, `TaoComputeJacobianDesign()`, `TaoSetStateDesignIS()`
689: @*/
690: PetscErrorCode TaoComputeJacobianEquality(Tao tao, Vec X, Mat J, Mat Jpre)
691: {
692: PetscFunctionBegin;
695: PetscCheckSameComm(tao, 1, X, 2);
696: ++tao->njac_equality;
697: PetscCall(VecLockReadPush(X));
698: PetscCall(PetscLogEventBegin(TAO_JacobianEval, tao, X, J, Jpre));
699: PetscCallBack("Tao callback Jacobian(equality)", (*tao->ops->computejacobianequality)(tao, X, J, Jpre, tao->user_jac_equalityP));
700: PetscCall(PetscLogEventEnd(TAO_JacobianEval, tao, X, J, Jpre));
701: PetscCall(VecLockReadPop(X));
702: PetscFunctionReturn(PETSC_SUCCESS);
703: }
705: /*@C
706: TaoComputeJacobianInequality - Computes the Jacobian matrix that has been
707: set with `TaoSetJacobianInequalityRoutine()`.
709: Collective
711: Input Parameters:
712: + tao - the `Tao` solver context
713: - X - input vector
715: Output Parameters:
716: + J - Jacobian matrix
717: - Jpre - matrix used to construct the preconditioner
719: Level: developer
721: Note:
722: Most users should not need to explicitly call this routine, as it
723: is used internally within the minimization solvers.
725: .seealso: [](ch_tao), `Tao`, `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetJacobianStateRoutine()`, `TaoComputeJacobianDesign()`, `TaoSetStateDesignIS()`
726: @*/
727: PetscErrorCode TaoComputeJacobianInequality(Tao tao, Vec X, Mat J, Mat Jpre)
728: {
729: PetscFunctionBegin;
732: PetscCheckSameComm(tao, 1, X, 2);
733: ++tao->njac_inequality;
734: PetscCall(VecLockReadPush(X));
735: PetscCall(PetscLogEventBegin(TAO_JacobianEval, tao, X, J, Jpre));
736: PetscCallBack("Tao callback Jacobian (inequality)", (*tao->ops->computejacobianinequality)(tao, X, J, Jpre, tao->user_jac_inequalityP));
737: PetscCall(PetscLogEventEnd(TAO_JacobianEval, tao, X, J, Jpre));
738: PetscCall(VecLockReadPop(X));
739: PetscFunctionReturn(PETSC_SUCCESS);
740: }
742: /*@C
743: TaoSetJacobianEqualityRoutine - Sets the function to compute the Jacobian of
744: the equality constraints as well as the location to store the matrix.
747: Logically Collective
749: Input Parameters:
750: + tao - the `Tao` context
751: . J - Matrix used for the Jacobian
752: . Jpre - Matrix that will be used to construct the preconditioner, can be same as `J`.
753: . func - Jacobian evaluation routine
754: - ctx - [optional] user-defined context for private data for the
755: Jacobian evaluation routine (may be `NULL`)
757: Calling sequence of `func`:
758: $ PetscErrorCode func(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx)
759: + tao - the `Tao` context
760: . x - input vector
761: . J - Jacobian matrix
762: . Jpre - matrix used to construct the preconditioner, usually the same as `J`
763: - ctx - [optional] user-defined Jacobian context
765: Level: intermediate
767: .seealso: [](ch_tao), `Tao`, `TaoComputeJacobianEquality()`, `TaoSetJacobianDesignRoutine()`, `TaoSetEqualityDesignIS()`
768: @*/
769: PetscErrorCode TaoSetJacobianEqualityRoutine(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
770: {
771: PetscFunctionBegin;
773: if (J) {
775: PetscCheckSameComm(tao, 1, J, 2);
776: }
777: if (Jpre) {
779: PetscCheckSameComm(tao, 1, Jpre, 3);
780: }
781: if (ctx) tao->user_jac_equalityP = ctx;
782: if (func) tao->ops->computejacobianequality = func;
783: if (J) {
784: PetscCall(PetscObjectReference((PetscObject)J));
785: PetscCall(MatDestroy(&tao->jacobian_equality));
786: tao->jacobian_equality = J;
787: }
788: if (Jpre) {
789: PetscCall(PetscObjectReference((PetscObject)Jpre));
790: PetscCall(MatDestroy(&tao->jacobian_equality_pre));
791: tao->jacobian_equality_pre = Jpre;
792: }
793: PetscFunctionReturn(PETSC_SUCCESS);
794: }
796: /*@C
797: TaoSetJacobianInequalityRoutine - Sets the function to compute the Jacobian of
798: the inequality constraints as well as the location to store the matrix.
801: Logically Collective
803: Input Parameters:
804: + tao - the `Tao` context
805: . J - Matrix used for the Jacobian
806: . Jpre - Matrix that will be used to construct the preconditioner, can be same as `J`.
807: . func - Jacobian evaluation routine
808: - ctx - [optional] user-defined context for private data for the
809: Jacobian evaluation routine (may be `NULL`)
811: Calling sequence of `func`:
812: $ PetscErrorCode func(Tao tao, Vec x, Mat J, Mat Jpre, void *ctx);
813: + tao - the `Tao` context
814: . x - input vector
815: . J - Jacobian matrix
816: . Jpre - matrix used to construct the preconditioner, usually the same as `J`
817: - ctx - [optional] user-defined Jacobian context
819: Level: intermediate
821: .seealso: [](ch_tao), `Tao`, `TaoComputeJacobianInequality()`, `TaoSetJacobianDesignRoutine()`, `TaoSetInequalityDesignIS()`
822: @*/
823: PetscErrorCode TaoSetJacobianInequalityRoutine(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
824: {
825: PetscFunctionBegin;
827: if (J) {
829: PetscCheckSameComm(tao, 1, J, 2);
830: }
831: if (Jpre) {
833: PetscCheckSameComm(tao, 1, Jpre, 3);
834: }
835: if (ctx) tao->user_jac_inequalityP = ctx;
836: if (func) tao->ops->computejacobianinequality = func;
837: if (J) {
838: PetscCall(PetscObjectReference((PetscObject)J));
839: PetscCall(MatDestroy(&tao->jacobian_inequality));
840: tao->jacobian_inequality = J;
841: }
842: if (Jpre) {
843: PetscCall(PetscObjectReference((PetscObject)Jpre));
844: PetscCall(MatDestroy(&tao->jacobian_inequality_pre));
845: tao->jacobian_inequality_pre = Jpre;
846: }
847: PetscFunctionReturn(PETSC_SUCCESS);
848: }