Actual source code: taosolver_fg.c
#include <petsc/private/taoimpl.h>

/*@
  TaoSetSolution - Sets the vector holding the initial guess for the solve

  Logically Collective

  Input Parameters:
+ tao - the `Tao` context
- x0  - the initial guess

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
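
/* A minimal usage sketch (illustrative, not part of this file): setting an
   initial guess before TaoSolve(). The vector name x, its length n, and the
   zero starting point are assumptions for the example.

     Vec x;
     PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &x));
     PetscCall(VecSet(x, 0.0));             // start the solve from x = 0
     PetscCall(TaoSetSolution(tao, x));
     PetscCall(TaoSolve(tao));
     PetscCall(VecDestroy(&x));             // Tao keeps its own reference
*/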

PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1)
{
  Vec               g2, g3;
  PetscBool         complete_print = PETSC_FALSE, test = PETSC_FALSE;
  PetscReal         hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer, mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
  PetscOptionsEnd();
  if (!test) {
    if (complete_print) PetscCall(PetscViewerDestroy(&mviewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
  PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  ---------- Testing Gradient -------------\n"));
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));

  PetscCall(VecDuplicate(x, &g2));
  PetscCall(VecDuplicate(x, &g3));

  /* Compute the finite difference gradient; the hand-coded gradient is assumed to have already been computed by TaoComputeGradient() and placed in g1 */
  PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));

  PetscCall(VecNorm(g2, NORM_2, &fdnorm));
  PetscCall(VecNorm(g1, NORM_2, &hcnorm));
  PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
  PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
  PetscCall(VecDot(g1, g2, &dot));
  PetscCall(VecCopy(g1, g3));
  PetscCall(VecAXPY(g3, -1.0, g2));
  PetscCall(VecNorm(g3, NORM_2, &diffnorm));
  PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
  PetscCall(PetscViewerASCIIPrintf(viewer, "  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));

  if (complete_print) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded gradient ----------\n"));
    PetscCall(VecView(g1, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Finite difference gradient ----------\n"));
    PetscCall(VecView(g2, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded minus finite-difference gradient ----------\n"));
    PetscCall(VecView(g3, mviewer));
  }
  PetscCall(VecDestroy(&g2));
  PetscCall(VecDestroy(&g3));

  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  PetscCall(PetscViewerASCIISetTab(viewer, tabs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
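
/* A sketch of how this gradient test is typically triggered (illustrative):
   the test runs inside TaoComputeGradient() and TaoComputeObjectiveAndGradient()
   when the option below is set, so no extra code is needed. The executable
   name ./myprog is an assumption for the example.

     ./myprog -tao_test_gradient                         # print the norms above
     ./myprog -tao_test_gradient -tao_test_gradient_view # also view the vector entries
*/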

/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+ -tao_test_gradient      - compare the user-provided gradient with one computed via finite differences to check for errors
- -tao_test_gradient_view - display the user-provided gradient, the finite difference gradient, and the difference between them to help users detect the location of errors in the user-provided gradient

  Level: developer

  Note:
  `TaoComputeGradient()` is typically used within the implementation of the optimization method,
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal dummy;

  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 3);
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, &dummy, G, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetGradient() has not been called");
  PetscCall(VecLockReadPop(X));

  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}
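
/* A minimal sketch of the typical call site (illustrative): a solver
   implementation evaluating the gradient at its current iterate. The fields
   tao->solution and tao->gradient are those set by TaoSetSolution() and
   TaoSetGradient() above; gnorm is an assumed local PetscReal.

     PetscCall(TaoComputeGradient(tao, tao->solution, tao->gradient));
     PetscCall(VecNorm(tao->gradient, NORM_2, &gnorm));   // convergence check input
*/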

/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameter:
. f - Objective value at `X`

  Level: developer

  Note:
  `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec temp;

  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjective) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscInfo(tao, "Duplicating variable vector in order to call func/grad routine\n"));
    PetscCall(VecDuplicate(X, &temp));
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, temp, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCall(VecDestroy(&temp));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() has not been called");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameters:
+ f - Objective value at `X`
- g - Gradient vector at `X`

  Level: developer

  Note:
  `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `TaoComputeGradient()`, `TaoComputeObjective()`, `TaoSetObjectiveAndGradient()`
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 4);
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      PetscCall(TaoComputeObjective(tao, X, f));
      PetscCall(TaoDefaultComputeGradient(tao, X, G, NULL));
    } else PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, G, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() or TaoSetGradient() not set");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));

  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoSetObjective - Sets the function evaluation routine for minimization

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. func - the objective function
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
$ PetscErrorCode func(Tao tao, Vec x, PetscReal *f, void *ctx);
+ tao - the optimizer
. x   - input vector
. f   - function value
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
@*/
PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, void *), void *ctx)
{
  PetscFunctionBegin;
  if (ctx) tao->user_objP = ctx;
  if (func) tao->ops->computeobjective = func;
  PetscFunctionReturn(PETSC_SUCCESS);
}
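
/* A minimal sketch of registering an objective (illustrative): the routine
   name MyObjective and the sum-of-squares objective are assumptions for the
   example; the signature matches the calling sequence documented above.

     static PetscErrorCode MyObjective(Tao tao, Vec x, PetscReal *f, void *ctx)
     {
       PetscReal nrm;

       PetscFunctionBegin;
       PetscCall(VecNorm(x, NORM_2, &nrm));
       *f = nrm * nrm;                      // f(x) = ||x||^2
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(TaoSetObjective(tao, MyObjective, NULL));
*/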

/*@C
  TaoGetObjective - Gets the function evaluation routine for the function to be minimized

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameters:
+ func - the objective function
- ctx  - the user-defined context for private data for the function evaluation

  Calling sequence of `func`:
$ PetscErrorCode func(Tao tao, Vec x, PetscReal *f, void *ctx)
+ tao - the optimizer
. x   - input vector
. f   - function value
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal *, void *), void **ctx)
{
  PetscFunctionBegin;
  if (func) *func = tao->ops->computeobjective;
  if (ctx) *ctx = tao->user_objP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoSetResidualRoutine - Sets the residual evaluation routine for least-squares applications

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. res  - the vector to internally hold the residual values
. func - the residual evaluation routine
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
$ PetscErrorCode func(Tao tao, Vec x, Vec f, void *ctx);
+ tao - the optimizer
. x   - input vector
. f   - residual vector
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
@*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)res));
  if (tao->ls_res) PetscCall(VecDestroy(&tao->ls_res));
  tao->ls_res               = res;
  tao->user_lsresP          = ctx;
  tao->ops->computeresidual = func;

  PetscFunctionReturn(PETSC_SUCCESS);
}
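
/* A minimal sketch of registering a least-squares residual (illustrative):
   the routine name MyResidual, the observation vector data, and the residual
   vector res (assumed created elsewhere, e.g. with VecDuplicate()) are
   assumptions for the example; it computes r_i = x_i - data_i.

     static PetscErrorCode MyResidual(Tao tao, Vec x, Vec r, void *ctx)
     {
       Vec data = (Vec)ctx;                 // observations passed through the context

       PetscFunctionBegin;
       PetscCall(VecCopy(x, r));
       PetscCall(VecAXPY(r, -1.0, data));   // r = x - data
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(TaoSetResidualRoutine(tao, res, MyResidual, (void *)data));
*/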

/*@
  TaoSetResidualWeights - Sets weights for the residual values. A vector can be used if only diagonal terms
  are needed; otherwise the weights can be given as an array of (row, column, value) entries.
  If this function is not called, or if `sigma_v` and `vals` are both `NULL`, then the identity matrix will be used for weights.

  Collective

  Input Parameters:
+ tao     - the `Tao` context
. sigma_v - vector of weights (diagonal terms only)
. n       - the number of weights (if using off-diagonal)
. rows    - index list of rows for `vals`
. cols    - index list of columns for `vals`
- vals    - array of weights

  Level: intermediate

  Note:
  Either `sigma_v` or `vals` should be `NULL`

.seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
{
  PetscInt i;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)sigma_v));
  PetscCall(VecDestroy(&tao->res_weights_v));
  tao->res_weights_v = sigma_v;
  if (vals) {
    PetscCall(PetscFree(tao->res_weights_rows));
    PetscCall(PetscFree(tao->res_weights_cols));
    PetscCall(PetscFree(tao->res_weights_w));
    PetscCall(PetscMalloc1(n, &tao->res_weights_rows));
    PetscCall(PetscMalloc1(n, &tao->res_weights_cols));
    PetscCall(PetscMalloc1(n, &tao->res_weights_w));
    tao->res_weights_n = n;
    for (i = 0; i < n; i++) {
      tao->res_weights_rows[i] = rows[i];
      tao->res_weights_cols[i] = cols[i];
      tao->res_weights_w[i]    = vals[i];
    }
  } else {
    tao->res_weights_n    = 0;
    tao->res_weights_rows = NULL;
    tao->res_weights_cols = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
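
/* A minimal sketch of setting diagonal residual weights (illustrative): the
   vector name w, the residual vector res, and the weight value 2.0 are
   assumptions for the example.

     Vec w;
     PetscCall(VecDuplicate(res, &w));      // one weight per residual entry
     PetscCall(VecSet(w, 2.0));
     PetscCall(TaoSetResidualWeights(tao, w, 0, NULL, NULL, NULL));
     PetscCall(VecDestroy(&w));             // Tao keeps its own reference
*/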

/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameter:
. F - residual vector at `X`

  Level: advanced

  Note:
  `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, F, 3);
  if (tao->ops->computeresidual) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
  PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. g    - [optional] the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - [optional] user-defined context for private data for the gradient evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
$ PetscErrorCode func(Tao tao, Vec x, Vec g, void *ctx);
+ tao - the optimization solver
. x   - input vector
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
@*/
PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  if (g) {
    PetscCheckSameComm(tao, 1, g, 2);
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  if (func) tao->ops->computegradient = func;
  if (ctx) tao->user_gradP = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}
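
/* A minimal sketch of registering a gradient (illustrative): the routine
   name MyGradient is an assumption for the example and matches the quadratic
   objective f(x) = ||x||^2 sketched above, whose gradient is g = 2x.

     static PetscErrorCode MyGradient(Tao tao, Vec x, Vec g, void *ctx)
     {
       PetscFunctionBegin;
       PetscCall(VecCopy(x, g));
       PetscCall(VecScale(g, 2.0));         // g(x) = 2x
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(TaoSetGradient(tao, NULL, MyGradient, NULL));
*/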

/*@C
  TaoGetGradient - Gets the gradient evaluation routine for the function being optimized

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameters:
+ g    - the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - user-defined context for private data for the gradient evaluation routine

  Calling sequence of `func`:
$ PetscErrorCode func(Tao tao, Vec x, Vec g, void *ctx);
+ tao - the optimizer
. x   - input vector
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void *), void **ctx)
{
  PetscFunctionBegin;
  if (g) *g = tao->gradient;
  if (func) *func = tao->ops->computegradient;
  if (ctx) *ctx = tao->user_gradP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. g    - [optional] the vector to internally hold the gradient computation
. func - the objective and gradient evaluation routine
- ctx  - [optional] user-defined context for private data for the evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
$ PetscErrorCode func(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
+ tao - the optimization object
. x   - input vector
. f   - objective value (output)
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

  Note:
  For some optimization methods using a combined function can be more efficient.

.seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
@*/
PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  if (g) {
    PetscCheckSameComm(tao, 1, g, 2);
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  if (ctx) tao->user_objgradP = ctx;
  if (func) tao->ops->computeobjectiveandgradient = func;
  PetscFunctionReturn(PETSC_SUCCESS);
}
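
/* A minimal sketch of registering a combined routine (illustrative): the
   routine name MyObjGrad is an assumption for the example and reuses the
   quadratic f(x) = ||x||^2, g = 2x, sharing work between the two outputs,
   which is the efficiency gain the Note above refers to.

     static PetscErrorCode MyObjGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
     {
       PetscReal nrm;

       PetscFunctionBegin;
       PetscCall(VecNorm(x, NORM_2, &nrm));
       *f = nrm * nrm;                      // f(x) = ||x||^2
       PetscCall(VecCopy(x, g));
       PetscCall(VecScale(g, 2.0));         // g(x) = 2x
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(TaoSetObjectiveAndGradient(tao, NULL, MyObjGrad, NULL));
*/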

/*@C
  TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameters:
+ g    - the vector to internally hold the gradient computation
. func - the objective and gradient evaluation routine
- ctx  - user-defined context for private data for the evaluation routine

  Calling sequence of `func`:
$ PetscErrorCode func(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
+ tao - the optimizer
. x   - input vector
. f   - objective value (output)
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
@*/
PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal *, Vec, void *), void **ctx)
{
  PetscFunctionBegin;
  if (g) *g = tao->gradient;
  if (func) *func = tao->ops->computeobjectiveandgradient;
  if (ctx) *ctx = tao->user_objgradP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoIsObjectiveDefined - Checks to see if the user has
  declared an objective-only routine. Useful for determining when
  it is appropriate to call `TaoComputeObjective()` or
  `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. flg - `PETSC_TRUE` if the routine is set by the user, `PETSC_FALSE` otherwise

  Level: developer

.seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoIsGradientDefined - Checks to see if the user has
  declared a gradient-only routine. Useful for determining when
  it is appropriate to call `TaoComputeGradient()` or
  `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. flg - `PETSC_TRUE` if the routine is set by the user, `PETSC_FALSE` otherwise

  Level: developer

.seealso: [](ch_tao), `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoIsObjectiveAndGradientDefined - Checks to see if the user has
  declared a joint objective/gradient routine. Useful for determining when
  it is appropriate to call `TaoComputeObjective()` or
  `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. flg - `PETSC_TRUE` if the routine is set by the user, `PETSC_FALSE` otherwise

  Level: developer

.seealso: [](ch_tao), `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
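
/* A minimal sketch of how a solver implementation might use these queries
   (illustrative): preferring a combined evaluation when one is available and
   falling back to separate calls otherwise. The names x, f, and g are
   assumed locals of the solver.

     PetscBool hasobjgrad;

     PetscCall(TaoIsObjectiveAndGradientDefined(tao, &hasobjgrad));
     if (hasobjgrad) {
       PetscCall(TaoComputeObjectiveAndGradient(tao, x, &f, g));
     } else {
       PetscCall(TaoComputeObjective(tao, x, &f));
       PetscCall(TaoComputeGradient(tao, x, g));
     }
*/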