Actual source code: morethuente.c
1: #include <petsc/private/taolinesearchimpl.h>
2: #include <../src/tao/linesearch/impls/morethuente/morethuente.h>
4: /*
5: This algorithm is taken from More' and Thuente, "Line search algorithms
6: with guaranteed sufficient decrease", Argonne National Laboratory,
7: Technical Report MCS-P330-1092.
8: */
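/*
   A sketch of the acceptance tests enforced in TaoLineSearchApply_MT() below, written in
   terms of phi(stp) = f(x + stp*s) with phi'(0) = dginit < 0 and the tolerances ls->ftol,
   ls->gtol:

     phi(stp) <= phi(0) + ftol*stp*phi'(0)      sufficient decrease  (tested through ftest1)
     |phi'(stp)| <= gtol*|phi'(0)|              curvature condition  (tested through dg and dginit)
*/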
10: static PetscErrorCode Tao_mcstep(TaoLineSearch ls, PetscReal *stx, PetscReal *fx, PetscReal *dx, PetscReal *sty, PetscReal *fy, PetscReal *dy, PetscReal *stp, PetscReal *fp, PetscReal *dp);
12: static PetscErrorCode TaoLineSearchDestroy_MT(TaoLineSearch ls)
13: {
14: TaoLineSearch_MT *mt = (TaoLineSearch_MT *)(ls->data);
16: PetscFunctionBegin;
17: PetscCall(PetscObjectDereference((PetscObject)mt->x));
18: PetscCall(VecDestroy(&mt->work));
19: PetscCall(PetscFree(ls->data));
20: PetscFunctionReturn(PETSC_SUCCESS);
21: }
23: static PetscErrorCode TaoLineSearchSetFromOptions_MT(TaoLineSearch ls, PetscOptionItems *PetscOptionsObject)
24: {
25: PetscFunctionBegin;
26: PetscFunctionReturn(PETSC_SUCCESS);
27: }
29: static PetscErrorCode TaoLineSearchMonitor_MT(TaoLineSearch ls)
30: {
31: TaoLineSearch_MT *mt = (TaoLineSearch_MT *)ls->data;
33: PetscFunctionBegin;
34: PetscCall(PetscViewerASCIIPrintf(ls->viewer, "stx: %g, fx: %g, dgx: %g\n", (double)mt->stx, (double)mt->fx, (double)mt->dgx));
35: PetscCall(PetscViewerASCIIPrintf(ls->viewer, "sty: %g, fy: %g, dgy: %g\n", (double)mt->sty, (double)mt->fy, (double)mt->dgy));
36: PetscFunctionReturn(PETSC_SUCCESS);
37: }
39: static PetscErrorCode TaoLineSearchApply_MT(TaoLineSearch ls, Vec x, PetscReal *f, Vec g, Vec s)
40: {
41: TaoLineSearch_MT *mt = (TaoLineSearch_MT *)(ls->data);
42: PetscReal xtrapf = 4.0;
43: PetscReal finit, width, width1, dginit, fm, fxm, fym, dgm, dgxm, dgym;
44: PetscReal dgx, dgy, dg, dg2, fx, fy, stx, sty, dgtest;
45: PetscReal ftest1 = 0.0, ftest2 = 0.0;
46: PetscInt i, stage1, n1, n2, nn1, nn2;
47: PetscReal bstepmin1, bstepmin2, bstepmax, ostepmin, ostepmax;
48: PetscBool g_computed = PETSC_FALSE; /* to prevent extra gradient computation */
50: PetscFunctionBegin;
51: ls->reason = TAOLINESEARCH_CONTINUE_ITERATING;
52: PetscCall(TaoLineSearchMonitor(ls, 0, *f, 0.0));
53: /* Check work vector */
54: if (!mt->work) {
55: PetscCall(VecDuplicate(x, &mt->work));
56: mt->x = x;
57: PetscCall(PetscObjectReference((PetscObject)mt->x));
58: } else if (x != mt->x) {
59: PetscCall(VecDestroy(&mt->work));
60: PetscCall(VecDuplicate(x, &mt->work));
61: PetscCall(PetscObjectDereference((PetscObject)mt->x));
62: mt->x = x;
63: PetscCall(PetscObjectReference((PetscObject)mt->x));
64: }
66: ostepmax = ls->stepmax;
67: ostepmin = ls->stepmin;
69: if (ls->bounded) {
70: /* Compute step length needed to make all variables equal a bound */
71: /* Compute the smallest steplength that will make one nonbinding variable
72: equal the bound */
73: PetscCall(VecGetLocalSize(ls->upper, &n1));
74: PetscCall(VecGetLocalSize(mt->x, &n2));
75: PetscCall(VecGetSize(ls->upper, &nn1));
76: PetscCall(VecGetSize(mt->x, &nn2));
77: PetscCheck(n1 == n2 && nn1 == nn2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Variable vector not compatible with bounds vector");
78: PetscCall(VecScale(s, -1.0));
79: PetscCall(VecBoundGradientProjection(s, x, ls->lower, ls->upper, s));
80: PetscCall(VecScale(s, -1.0));
81: PetscCall(VecStepBoundInfo(x, s, ls->lower, ls->upper, &bstepmin1, &bstepmin2, &bstepmax));
82: ls->stepmax = PetscMin(bstepmax, ls->stepmax);
83: }
85: PetscCall(VecDot(g, s, &dginit));
86: if (PetscIsInfOrNanReal(dginit)) {
87: PetscCall(PetscInfo(ls, "Initial Line Search step * g is Inf or Nan (%g)\n", (double)dginit));
88: ls->reason = TAOLINESEARCH_FAILED_INFORNAN;
89: PetscFunctionReturn(PETSC_SUCCESS);
90: }
91: if (dginit >= 0.0) {
92: PetscCall(PetscInfo(ls, "Initial Line Search step * g is not descent direction (%g)\n", (double)dginit));
93: ls->reason = TAOLINESEARCH_FAILED_ASCENT;
94: PetscFunctionReturn(PETSC_SUCCESS);
95: }
97: /* Initialization */
98: mt->bracket = 0;
99: stage1 = 1;
100: finit = *f;
101: dgtest = ls->ftol * dginit;
102: width = ls->stepmax - ls->stepmin;
103: width1 = width * 2.0;
104: PetscCall(VecCopy(x, mt->work));
105: /* Variable dictionary:
106: stx, fx, dgx - the step, function, and derivative at the best step
107: sty, fy, dgy - the step, function, and derivative at the other endpoint
108: of the interval of uncertainty
109: step, f, dg - the step, function, and derivative at the current step */
111: stx = 0.0;
112: fx = finit;
113: dgx = dginit;
114: sty = 0.0;
115: fy = finit;
116: dgy = dginit;
118: ls->step = ls->initstep;
119: for (i = 0; i < ls->max_funcs; i++) {
120: /* Set min and max steps to correspond to the interval of uncertainty */
121: if (mt->bracket) {
122: ls->stepmin = PetscMin(stx, sty);
123: ls->stepmax = PetscMax(stx, sty);
124: } else {
125: ls->stepmin = stx;
126: ls->stepmax = ls->step + xtrapf * (ls->step - stx);
127: }
129: /* Force the step to be within the bounds */
130: ls->step = PetscMax(ls->step, ls->stepmin);
131: ls->step = PetscMin(ls->step, ls->stepmax);
133: /* If an unusual termination is to occur, then let step be the lowest
134: point obtained thus far */
135: if (stx != 0 && ((mt->bracket && (ls->step <= ls->stepmin || ls->step >= ls->stepmax)) || (mt->bracket && (ls->stepmax - ls->stepmin <= ls->rtol * ls->stepmax)) || (ls->nfeval + ls->nfgeval >= ls->max_funcs - 1) || mt->infoc == 0))
136: ls->step = stx;
138: PetscCall(VecWAXPY(mt->work, ls->step, s, x)); /* W = X + step*S */
140: if (ls->step == 0.0) {
141: PetscCall(PetscInfo(ls, "Step is zero.\n"));
142: ls->reason = TAOLINESEARCH_HALTED_LOWERBOUND;
143: break;
144: }
146: if (ls->bounded) PetscCall(VecMedian(ls->lower, mt->work, ls->upper, mt->work));
147: if (ls->usegts) {
148: PetscCall(TaoLineSearchComputeObjectiveAndGTS(ls, mt->work, f, &dg));
149: g_computed = PETSC_FALSE;
150: } else {
151: PetscCall(TaoLineSearchComputeObjectiveAndGradient(ls, mt->work, f, g));
152: g_computed = PETSC_TRUE;
153: if (ls->bounded) {
154: PetscCall(VecDot(g, x, &dg));
155: PetscCall(VecDot(g, mt->work, &dg2));
156: dg = (dg2 - dg) / ls->step;
157: } else {
158: PetscCall(VecDot(g, s, &dg));
159: }
160: }
162: /* update bracketing parameters in the MT context for printouts in monitor */
163: mt->stx = stx;
164: mt->fx = fx;
165: mt->dgx = dgx;
166: mt->sty = sty;
167: mt->fy = fy;
168: mt->dgy = dgy;
169: PetscCall(TaoLineSearchMonitor(ls, i + 1, *f, ls->step));
171: if (i == 0) ls->f_fullstep = *f;
173: if (PetscIsInfOrNanReal(*f) || PetscIsInfOrNanReal(dg)) {
174: /* The user-provided compute function generated Not-a-Number; assume a
175: domain violation and set the function value and directional
176: derivative to infinity. */
177: *f = PETSC_INFINITY;
178: dg = PETSC_INFINITY;
179: }
181: ftest1 = finit + ls->step * dgtest;
182: if (ls->bounded) ftest2 = finit + ls->step * dgtest * ls->ftol;
184: /* Convergence testing */
185: if ((*f - ftest1 <= PETSC_SMALL * PetscAbsReal(finit)) && (PetscAbsReal(dg) + ls->gtol * dginit <= 0.0)) {
186: PetscCall(PetscInfo(ls, "Line search success: Sufficient decrease and directional deriv conditions hold\n"));
187: ls->reason = TAOLINESEARCH_SUCCESS;
188: break;
189: }
191: /* Check Armijo if beyond the first breakpoint */
192: if (ls->bounded && *f <= ftest2 && ls->step >= bstepmin2) {
193: PetscCall(PetscInfo(ls, "Line search success: Sufficient decrease.\n"));
194: ls->reason = TAOLINESEARCH_SUCCESS;
195: break;
196: }
198: /* Checks for bad cases */
199: if ((mt->bracket && (ls->step <= ls->stepmin || ls->step >= ls->stepmax)) || !mt->infoc) {
200: PetscCall(PetscInfo(ls, "Rounding errors may prevent further progress. May not be a step satisfying\nsufficient decrease and curvature conditions. Tolerances may be too small.\n"));
201: ls->reason = TAOLINESEARCH_HALTED_OTHER;
202: break;
203: }
204: if (ls->step == ls->stepmax && *f <= ftest1 && dg <= dgtest) {
205: PetscCall(PetscInfo(ls, "Step is at the upper bound, stepmax (%g)\n", (double)ls->stepmax));
206: ls->reason = TAOLINESEARCH_HALTED_UPPERBOUND;
207: break;
208: }
209: if (ls->step == ls->stepmin && *f >= ftest1 && dg >= dgtest) {
210: PetscCall(PetscInfo(ls, "Step is at the lower bound, stepmin (%g)\n", (double)ls->stepmin));
211: ls->reason = TAOLINESEARCH_HALTED_LOWERBOUND;
212: break;
213: }
214: if (mt->bracket && (ls->stepmax - ls->stepmin <= ls->rtol * ls->stepmax)) {
215: PetscCall(PetscInfo(ls, "Relative width of interval of uncertainty is at most rtol (%g)\n", (double)ls->rtol));
216: ls->reason = TAOLINESEARCH_HALTED_RTOL;
217: break;
218: }
220: /* In the first stage, we seek a step for which the modified function
221: has a nonpositive value and nonnegative derivative */
222: if (stage1 && *f <= ftest1 && dg >= dginit * PetscMin(ls->ftol, ls->gtol)) stage1 = 0;
224: /* A modified function is used to predict the step only if we
225: have not obtained a step for which the modified function has a
226: nonpositive function value and nonnegative derivative, and if a
227: lower function value has been obtained but the decrease is not
228: sufficient */
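/*
   In other words (a sketch): writing phi(stp) = f(x + stp*s), the quantities fm, fxm, fym below
   are the modified function phi(.) - ftol*(.)*phi'(0) evaluated at step, stx and sty, and
   dgm, dgxm, dgym are its derivatives, so Tao_mcstep() is applied to this modified function
   rather than to phi itself.
*/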
230: if (stage1 && *f <= fx && *f > ftest1) {
231: fm = *f - ls->step * dgtest; /* Define modified function */
232: fxm = fx - stx * dgtest; /* and derivatives */
233: fym = fy - sty * dgtest;
234: dgm = dg - dgtest;
235: dgxm = dgx - dgtest;
236: dgym = dgy - dgtest;
238: /* if (dgxm * (ls->step - stx) >= 0.0) */
239: /* Update the interval of uncertainty and compute the new step */
240: PetscCall(Tao_mcstep(ls, &stx, &fxm, &dgxm, &sty, &fym, &dgym, &ls->step, &fm, &dgm));
242: fx = fxm + stx * dgtest; /* Reset the function and */
243: fy = fym + sty * dgtest; /* gradient values */
244: dgx = dgxm + dgtest;
245: dgy = dgym + dgtest;
246: } else {
247: /* Update the interval of uncertainty and compute the new step */
248: PetscCall(Tao_mcstep(ls, &stx, &fx, &dgx, &sty, &fy, &dgy, &ls->step, f, &dg));
249: }
251: /* Force a sufficient decrease in the interval of uncertainty */
252: if (mt->bracket) {
253: if (PetscAbsReal(sty - stx) >= 0.66 * width1) ls->step = stx + 0.5 * (sty - stx);
254: width1 = width;
255: width = PetscAbsReal(sty - stx);
256: }
257: }
258: if (ls->nfeval + ls->nfgeval > ls->max_funcs) {
259: PetscCall(PetscInfo(ls, "Number of line search function evals (%" PetscInt_FMT ") > maximum (%" PetscInt_FMT ")\n", ls->nfeval + ls->nfgeval, ls->max_funcs));
260: ls->reason = TAOLINESEARCH_HALTED_MAXFCN;
261: }
262: ls->stepmax = ostepmax;
263: ls->stepmin = ostepmin;
265: /* Finish computations */
266: PetscCall(PetscInfo(ls, "%" PetscInt_FMT " function evals in line search, step = %g\n", ls->nfeval + ls->nfgeval, (double)ls->step));
268: /* Set new solution vector and compute gradient if needed */
269: PetscCall(VecCopy(mt->work, x));
270: if (!g_computed) PetscCall(TaoLineSearchComputeGradient(ls, mt->work, g));
271: PetscFunctionReturn(PETSC_SUCCESS);
272: }
274: /*MC
275: TAOLINESEARCHMT - Line-search type with cubic interpolation that satisfies both the sufficient decrease and
276: curvature conditions. This method can take step lengths greater than 1.
278: The More-Thuente line search can be selected with "-tao_ls_type more-thuente".
280: References:
281: . * - Jorge J. Moré and David J. Thuente, "Line search algorithms with guaranteed sufficient decrease",
282: ACM Trans. Math. Software 20, no. 3 (1994): 286-307.
284: Level: developer
286: .seealso: `TaoLineSearchCreate()`, `TaoLineSearchSetType()`, `TaoLineSearchApply()`
288: .keywords: Tao, linesearch
289: M*/
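/*
   A minimal usage sketch (assumes a Tao solver "tao" has already been created; error handling
   via PetscCall()):

     TaoLineSearch ls;
     PetscCall(TaoGetLineSearch(tao, &ls));
     PetscCall(TaoLineSearchSetType(ls, TAOLINESEARCHMT));

   or, equivalently, pass -tao_ls_type more-thuente on the command line.
*/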
290: PETSC_EXTERN PetscErrorCode TaoLineSearchCreate_MT(TaoLineSearch ls)
291: {
292: TaoLineSearch_MT *ctx;
294: PetscFunctionBegin;
296: PetscCall(PetscNew(&ctx));
297: ctx->bracket = 0;
298: ctx->infoc = 1;
299: ls->data = (void *)ctx;
300: ls->initstep = 1.0;
301: ls->ops->setup = NULL;
302: ls->ops->reset = NULL;
303: ls->ops->apply = TaoLineSearchApply_MT;
304: ls->ops->destroy = TaoLineSearchDestroy_MT;
305: ls->ops->setfromoptions = TaoLineSearchSetFromOptions_MT;
306: ls->ops->monitor = TaoLineSearchMonitor_MT;
307: PetscFunctionReturn(PETSC_SUCCESS);
308: }
310: /*
311: The subroutine mcstep is taken from the work of Jorge Nocedal.
312: It is a variant of More' and Thuente's routine.
314: subroutine mcstep
316: The purpose of mcstep is to compute a safeguarded step for
317: a linesearch and to update an interval of uncertainty for
318: a minimizer of the function.
320: The parameter stx contains the step with the least function
321: value. The parameter stp contains the current step. It is
322: assumed that the derivative at stx is negative in the
323: direction of the step. If bracket is set true then a
324: minimizer has been bracketed in an interval of uncertainty
325: with endpoints stx and sty.
327: The subroutine statement is
329: subroutine mcstep(stx,fx,dx,sty,fy,dy,stp,fp,dp,bracket,
330: stpmin,stpmax,info)
332: where
334: stx, fx, and dx are variables which specify the step,
335: the function, and the derivative at the best step obtained
336: so far. The derivative must be negative in the direction
337: of the step, that is, dx and stp-stx must have opposite
338: signs. On output these parameters are updated appropriately.
340: sty, fy, and dy are variables which specify the step,
341: the function, and the derivative at the other endpoint of
342: the interval of uncertainty. On output these parameters are
343: updated appropriately.
345: stp, fp, and dp are variables which specify the step,
346: the function, and the derivative at the current step.
347: If bracket is set true then on input stp must be
348: between stx and sty. On output stp is set to the new step.
350: bracket is a logical variable which specifies if a minimizer
351: has been bracketed. If the minimizer has not been bracketed
352: then on input bracket must be set false. If the minimizer
353: is bracketed then on output bracket is set true.
355: stpmin and stpmax are input variables which specify lower
356: and upper bounds for the step.
358: info is an integer output variable set as follows:
359: if info = 1,2,3,4, then the step has been computed
360: according to one of the four cases below. Otherwise
361: info = 0, and this indicates improper input parameters.
363: Subprograms called
365: Fortran-supplied ... abs,max,min,sqrt
367: Argonne National Laboratory. MINPACK project. June 1983
368: Jorge J. More', David J. Thuente
370: */
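/*
   Reading aid for the four cases below (a sketch): in each case theta and gamma1 are the
   quantities appearing in the closed-form minimizer of the cubic that interpolates the current
   step (stp, fp, dp) and one endpoint of the interval ((stx, fx, dx) or (sty, fy, dy)); s is a
   scaling chosen to keep the square root from overflowing; stpc is the resulting cubic trial
   step, stpq a quadratic trial step, and stpf the safeguarded value finally returned in *stp.
*/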
372: static PetscErrorCode Tao_mcstep(TaoLineSearch ls, PetscReal *stx, PetscReal *fx, PetscReal *dx, PetscReal *sty, PetscReal *fy, PetscReal *dy, PetscReal *stp, PetscReal *fp, PetscReal *dp)
373: {
374: TaoLineSearch_MT *mtP = (TaoLineSearch_MT *)ls->data;
375: PetscReal gamma1, p, q, r, s, sgnd, stpc, stpf, stpq, theta;
376: PetscInt bound;
378: PetscFunctionBegin;
379: /* Check the input parameters for errors */
380: mtP->infoc = 0;
381: PetscCheck(!mtP->bracket || (*stp > PetscMin(*stx, *sty) && *stp < PetscMax(*stx, *sty)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "bad stp in bracket");
382: PetscCheck(*dx * (*stp - *stx) < 0.0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "dx * (stp-stx) >= 0.0");
383: PetscCheck(ls->stepmax >= ls->stepmin, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "stepmax < stepmin");
385: /* Determine if the derivatives have opposite sign */
386: sgnd = *dp * (*dx / PetscAbsReal(*dx));
388: if (*fp > *fx) {
389: /* Case 1: a higher function value.
390: The minimum is bracketed. If the cubic step is closer
391: to stx than the quadratic step, the cubic step is taken,
392: else the average of the cubic and quadratic steps is taken. */
394: mtP->infoc = 1;
395: bound = 1;
396: theta = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
397: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
398: s = PetscMax(s, PetscAbsReal(*dp));
399: gamma1 = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s));
400: if (*stp < *stx) gamma1 = -gamma1;
401: /* Can p be 0? Check */
402: p = (gamma1 - *dx) + theta;
403: q = ((gamma1 - *dx) + gamma1) + *dp;
404: r = p / q;
405: stpc = *stx + r * (*stp - *stx);
406: stpq = *stx + ((*dx / ((*fx - *fp) / (*stp - *stx) + *dx)) * 0.5) * (*stp - *stx);
408: if (PetscAbsReal(stpc - *stx) < PetscAbsReal(stpq - *stx)) stpf = stpc;
409: else stpf = stpc + 0.5 * (stpq - stpc);
410: mtP->bracket = 1;
411: } else if (sgnd < 0.0) {
412: /* Case 2: A lower function value and derivatives of
413: opposite sign. The minimum is bracketed. If the cubic
414: step is closer to stx than the quadratic (secant) step,
415: the cubic step is taken, else the quadratic step is taken. */
417: mtP->infoc = 2;
418: bound = 0;
419: theta = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
420: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
421: s = PetscMax(s, PetscAbsReal(*dp));
422: gamma1 = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s));
423: if (*stp > *stx) gamma1 = -gamma1;
424: p = (gamma1 - *dp) + theta;
425: q = ((gamma1 - *dp) + gamma1) + *dx;
426: r = p / q;
427: stpc = *stp + r * (*stx - *stp);
428: stpq = *stp + (*dp / (*dp - *dx)) * (*stx - *stp);
430: if (PetscAbsReal(stpc - *stp) > PetscAbsReal(stpq - *stp)) stpf = stpc;
431: else stpf = stpq;
432: mtP->bracket = 1;
433: } else if (PetscAbsReal(*dp) < PetscAbsReal(*dx)) {
434: /* Case 3: A lower function value, derivatives of the
435: same sign, and the magnitude of the derivative decreases.
436: The cubic step is only used if the cubic tends to infinity
437: in the direction of the step or if the minimum of the cubic
438: is beyond stp. Otherwise the cubic step is defined to be
439: either stepmin or stepmax. The quadratic (secant) step is also
440: computed and if the minimum is bracketed then the step
441: closest to stx is taken, else the step farthest away is taken. */
443: mtP->infoc = 3;
444: bound = 1;
445: theta = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
446: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
447: s = PetscMax(s, PetscAbsReal(*dp));
449: /* The case gamma1 = 0 only arises if the cubic does not tend
450: to infinity in the direction of the step. */
451: gamma1 = s * PetscSqrtScalar(PetscMax(0.0, PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s)));
452: if (*stp > *stx) gamma1 = -gamma1;
453: p = (gamma1 - *dp) + theta;
454: q = (gamma1 + (*dx - *dp)) + gamma1;
455: r = p / q;
456: if (r < 0.0 && gamma1 != 0.0) stpc = *stp + r * (*stx - *stp);
457: else if (*stp > *stx) stpc = ls->stepmax;
458: else stpc = ls->stepmin;
459: stpq = *stp + (*dp / (*dp - *dx)) * (*stx - *stp);
461: if (mtP->bracket) {
462: if (PetscAbsReal(*stp - stpc) < PetscAbsReal(*stp - stpq)) stpf = stpc;
463: else stpf = stpq;
464: } else {
465: if (PetscAbsReal(*stp - stpc) > PetscAbsReal(*stp - stpq)) stpf = stpc;
466: else stpf = stpq;
467: }
468: } else {
469: /* Case 4: A lower function value, derivatives of the
470: same sign, and the magnitude of the derivative does
471: not decrease. If the minimum is not bracketed, the step
472: is either stpmin or stpmax, else the cubic step is taken. */
474: mtP->infoc = 4;
475: bound = 0;
476: if (mtP->bracket) {
477: theta = 3 * (*fp - *fy) / (*sty - *stp) + *dy + *dp;
478: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dy));
479: s = PetscMax(s, PetscAbsReal(*dp));
480: gamma1 = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dy / s) * (*dp / s));
481: if (*stp > *sty) gamma1 = -gamma1;
482: p = (gamma1 - *dp) + theta;
483: q = ((gamma1 - *dp) + gamma1) + *dy;
484: r = p / q;
485: stpc = *stp + r * (*sty - *stp);
486: stpf = stpc;
487: } else if (*stp > *stx) {
488: stpf = ls->stepmax;
489: } else {
490: stpf = ls->stepmin;
491: }
492: }
494: /* Update the interval of uncertainty. This update does not
495: depend on the new step or the case analysis above. */
497: if (*fp > *fx) {
498: *sty = *stp;
499: *fy = *fp;
500: *dy = *dp;
501: } else {
502: if (sgnd < 0.0) {
503: *sty = *stx;
504: *fy = *fx;
505: *dy = *dx;
506: }
507: *stx = *stp;
508: *fx = *fp;
509: *dx = *dp;
510: }
512: /* Compute the new step and safeguard it. */
513: stpf = PetscMin(ls->stepmax, stpf);
514: stpf = PetscMax(ls->stepmin, stpf);
515: *stp = stpf;
516: if (mtP->bracket && bound) {
517: if (*sty > *stx) *stp = PetscMin(*stx + 0.66 * (*sty - *stx), *stp);
518: else *stp = PetscMax(*stx + 0.66 * (*sty - *stx), *stp);
519: }
520: PetscFunctionReturn(PETSC_SUCCESS);
521: }