Actual source code: morethuente.c
1: #include <petsc/private/taolinesearchimpl.h>
2: #include <../src/tao/linesearch/impls/morethuente/morethuente.h>
4: /*
5: This algorithm is taken from More' and Thuente, "Line search algorithms
6: with guaranteed sufficient decrease", Argonne National Laboratory,
7: Technical Report MCS-P330-1092.
8: */
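/*
   In terms of the parameters used below, the search seeks a step length along the direction s
   that satisfies (up to the small tolerance used in the convergence test) the strong Wolfe
   conditions

      f(x + step*s) <= f(x) + ftol * step * (g(x)' s)        (sufficient decrease)
      |g(x + step*s)' s| <= gtol * |g(x)' s|                  (curvature)

   where ftol = ls->ftol and gtol = ls->gtol are the tolerances tested in the main loop of
   TaoLineSearchApply_MT() below.
*/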
10: static PetscErrorCode Tao_mcstep(TaoLineSearch ls, PetscReal *stx, PetscReal *fx, PetscReal *dx, PetscReal *sty, PetscReal *fy, PetscReal *dy, PetscReal *stp, PetscReal *fp, PetscReal *dp);
12: static PetscErrorCode TaoLineSearchDestroy_MT(TaoLineSearch ls)
13: {
14: TaoLineSearch_MT *mt = (TaoLineSearch_MT *)ls->data;
16: PetscFunctionBegin;
17: PetscCall(PetscObjectDereference((PetscObject)mt->x));
18: PetscCall(VecDestroy(&mt->work));
19: PetscCall(PetscFree(ls->data));
20: PetscFunctionReturn(PETSC_SUCCESS);
21: }
23: static PetscErrorCode TaoLineSearchSetFromOptions_MT(TaoLineSearch ls, PetscOptionItems *PetscOptionsObject)
24: {
25: PetscFunctionBegin;
26: PetscFunctionReturn(PETSC_SUCCESS);
27: }
29: static PetscErrorCode TaoLineSearchMonitor_MT(TaoLineSearch ls)
30: {
31: TaoLineSearch_MT *mt = (TaoLineSearch_MT *)ls->data;
33: PetscFunctionBegin;
34: PetscCall(PetscViewerASCIIPrintf(ls->viewer, "stx: %g, fx: %g, dgx: %g\n", (double)mt->stx, (double)mt->fx, (double)mt->dgx));
35: PetscCall(PetscViewerASCIIPrintf(ls->viewer, "sty: %g, fy: %g, dgy: %g\n", (double)mt->sty, (double)mt->fy, (double)mt->dgy));
36: PetscFunctionReturn(PETSC_SUCCESS);
37: }
39: static PetscErrorCode TaoLineSearchApply_MT(TaoLineSearch ls, Vec x, PetscReal *f, Vec g, Vec s)
40: {
41: TaoLineSearch_MT *mt = (TaoLineSearch_MT *)ls->data;
42: PetscReal xtrapf = 4.0;
43: PetscReal finit, width, width1, dginit, fm, fxm, fym, dgm, dgxm, dgym;
44: PetscReal dgx, dgy, dg, dg2, fx, fy, stx, sty, dgtest;
45: PetscReal ftest1 = 0.0, ftest2 = 0.0;
46: PetscInt i, stage1, n1, n2, nn1, nn2;
47: PetscReal bstepmin1, bstepmin2, bstepmax, ostepmin, ostepmax;
48: PetscBool g_computed = PETSC_FALSE; /* to prevent extra gradient computation */
50: PetscFunctionBegin;
51: ls->reason = TAOLINESEARCH_CONTINUE_ITERATING;
52: PetscCall(TaoLineSearchMonitor(ls, 0, *f, 0.0));
53: /* Check work vector */
54: if (!mt->work) {
55: PetscCall(VecDuplicate(x, &mt->work));
56: mt->x = x;
57: PetscCall(PetscObjectReference((PetscObject)mt->x));
58: } else if (x != mt->x) {
59: PetscCall(VecDestroy(&mt->work));
60: PetscCall(VecDuplicate(x, &mt->work));
61: PetscCall(PetscObjectDereference((PetscObject)mt->x));
62: mt->x = x;
63: PetscCall(PetscObjectReference((PetscObject)mt->x));
64: }
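/* At this point mt->work is a scratch vector with the same layout as x; it is reused across
   calls, and a reference to x is kept so that a later call with a different solution vector
   can be detected and the work vector rebuilt. */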
66: ostepmax = ls->stepmax;
67: ostepmin = ls->stepmin;
69: if (ls->bounded) {
70: /* Compute step length needed to make all variables equal a bound */
71: /* Compute the smallest steplength that will make one nonbinding variable
72: equal the bound */
73: PetscCall(VecGetLocalSize(ls->upper, &n1));
74: PetscCall(VecGetLocalSize(mt->x, &n2));
75: PetscCall(VecGetSize(ls->upper, &nn1));
76: PetscCall(VecGetSize(mt->x, &nn2));
77: PetscCheck(n1 == n2 && nn1 == nn2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Variable vector not compatible with bounds vector");
78: PetscCall(VecScale(s, -1.0));
79: PetscCall(VecBoundGradientProjection(s, x, ls->lower, ls->upper, s));
80: PetscCall(VecScale(s, -1.0));
81: PetscCall(VecStepBoundInfo(x, s, ls->lower, ls->upper, &bstepmin1, &bstepmin2, &bstepmax));
82: ls->stepmax = PetscMin(bstepmax, ls->stepmax);
83: }
85: PetscCall(VecDot(g, s, &dginit));
86: if (PetscIsInfOrNanReal(dginit)) {
87: PetscCall(PetscInfo(ls, "Initial Line Search step * g is Inf or NaN (%g)\n", (double)dginit));
88: ls->reason = TAOLINESEARCH_FAILED_INFORNAN;
89: PetscFunctionReturn(PETSC_SUCCESS);
90: }
91: if (dginit >= 0.0) {
92: PetscCall(PetscInfo(ls, "Initial Line Search step * g is not a descent direction (%g)\n", (double)dginit));
93: ls->reason = TAOLINESEARCH_FAILED_ASCENT;
94: PetscFunctionReturn(PETSC_SUCCESS);
95: }
97: /* Initialization */
98: mt->bracket = 0;
99: stage1 = 1;
100: finit = *f;
101: dgtest = ls->ftol * dginit;
102: width = ls->stepmax - ls->stepmin;
103: width1 = width * 2.0;
104: PetscCall(VecCopy(x, mt->work));
105: /* Variable dictionary:
106: stx, fx, dgx - the step, function, and derivative at the best step
107: sty, fy, dgy - the step, function, and derivative at the other endpoint
108: of the interval of uncertainty
109: step, f, dg - the step, function, and derivative at the current step */
111: stx = 0.0;
112: fx = finit;
113: dgx = dginit;
114: sty = 0.0;
115: fy = finit;
116: dgy = dginit;
118: ls->step = ls->initstep;
119: for (i = 0; i < ls->max_funcs; i++) {
120: /* Set min and max steps to correspond to the interval of uncertainty */
121: if (mt->bracket) {
122: ls->stepmin = PetscMin(stx, sty);
123: ls->stepmax = PetscMax(stx, sty);
124: } else {
125: ls->stepmin = stx;
126: ls->stepmax = ls->step + xtrapf * (ls->step - stx);
127: }
129: /* Force the step to be within the bounds */
130: ls->step = PetscMax(ls->step, ls->stepmin);
131: ls->step = PetscMin(ls->step, ls->stepmax);
133: /* If an unusual termination is to occur, then let step be the best step
134: obtained thus far (stx) */
135: if (stx != 0 && ((mt->bracket && (ls->step <= ls->stepmin || ls->step >= ls->stepmax)) || (mt->bracket && (ls->stepmax - ls->stepmin <= ls->rtol * ls->stepmax)) || (ls->nfeval + ls->nfgeval >= ls->max_funcs - 1) || mt->infoc == 0))
136: ls->step = stx;
138: PetscCall(VecWAXPY(mt->work, ls->step, s, x)); /* W = X + step*S */
140: if (ls->step == 0.0) {
141: PetscCall(PetscInfo(ls, "Step size is zero.\n"));
142: ls->reason = TAOLINESEARCH_HALTED_LOWERBOUND;
143: break;
144: }
146: if (ls->bounded) PetscCall(VecMedian(ls->lower, mt->work, ls->upper, mt->work));
147: /* Make sure user code doesn't mess with the non-updated solution */
148: PetscCall(VecLockReadPush(x));
149: if (ls->usegts) {
150: PetscCall(TaoLineSearchComputeObjectiveAndGTS(ls, mt->work, f, &dg));
151: g_computed = PETSC_FALSE;
152: } else {
153: PetscCall(TaoLineSearchComputeObjectiveAndGradient(ls, mt->work, f, g));
154: g_computed = PETSC_TRUE;
155: if (ls->bounded) {
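/* The trial point mt->work may have been projected back into the bounds above, so measure
   the directional derivative along the step actually taken, g . (work - x) / step, rather
   than g . s */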
156: PetscCall(VecDot(g, x, &dg));
157: PetscCall(VecDot(g, mt->work, &dg2));
158: dg = (dg2 - dg) / ls->step;
159: } else {
160: PetscCall(VecDot(g, s, &dg));
161: }
162: }
163: PetscCall(VecLockReadPop(x));
165: /* update bracketing parameters in the MT context for printouts in monitor */
166: mt->stx = stx;
167: mt->fx = fx;
168: mt->dgx = dgx;
169: mt->sty = sty;
170: mt->fy = fy;
171: mt->dgy = dgy;
172: PetscCall(TaoLineSearchMonitor(ls, i + 1, *f, ls->step));
174: if (i == 0) ls->f_fullstep = *f;
176: if (PetscIsInfOrNanReal(*f) || PetscIsInfOrNanReal(dg)) {
177: /* User provided compute function generated Not-a-Number, assume
178: domain violation and set function value and directional
179: derivative to infinity. */
180: *f = PETSC_INFINITY;
181: dg = PETSC_INFINITY;
182: }
184: ftest1 = finit + ls->step * dgtest;
185: if (ls->bounded) ftest2 = finit + ls->step * dgtest * ls->ftol;
187: /* Convergence testing */
188: if ((*f - ftest1 <= PETSC_SMALL * PetscAbsReal(finit)) && (PetscAbsReal(dg) + ls->gtol * dginit <= 0.0)) {
189: PetscCall(PetscInfo(ls, "Line search success: Sufficient decrease and directional deriv conditions hold\n"));
190: ls->reason = TAOLINESEARCH_SUCCESS;
191: break;
192: }
194: /* Check Armijo if beyond the first breakpoint */
195: if (ls->bounded && *f <= ftest2 && ls->step >= bstepmin2) {
196: PetscCall(PetscInfo(ls, "Line search success: Sufficient decrease.\n"));
197: ls->reason = TAOLINESEARCH_SUCCESS;
198: break;
199: }
201: /* Checks for bad cases */
202: if ((mt->bracket && (ls->step <= ls->stepmin || ls->step >= ls->stepmax)) || !mt->infoc) {
203: PetscCall(PetscInfo(ls, "Rounding errors may prevent further progress. May not be a step satisfying\nsufficient decrease and curvature conditions. Tolerances may be too small.\n"));
204: ls->reason = TAOLINESEARCH_HALTED_OTHER;
205: break;
206: }
207: if (ls->step == ls->stepmax && *f <= ftest1 && dg <= dgtest) {
208: PetscCall(PetscInfo(ls, "Step is at the upper bound, stepmax (%g)\n", (double)ls->stepmax));
209: ls->reason = TAOLINESEARCH_HALTED_UPPERBOUND;
210: break;
211: }
212: if (ls->step == ls->stepmin && *f >= ftest1 && dg >= dgtest) {
213: PetscCall(PetscInfo(ls, "Step is at the lower bound, stepmin (%g)\n", (double)ls->stepmin));
214: ls->reason = TAOLINESEARCH_HALTED_LOWERBOUND;
215: break;
216: }
217: if (mt->bracket && (ls->stepmax - ls->stepmin <= ls->rtol * ls->stepmax)) {
218: PetscCall(PetscInfo(ls, "Relative width of interval of uncertainty is at most rtol (%g)\n", (double)ls->rtol));
219: ls->reason = TAOLINESEARCH_HALTED_RTOL;
220: break;
221: }
223: /* In the first stage, we seek a step for which the modified function
224: has a nonpositive value and nonnegative derivative */
225: if (stage1 && *f <= ftest1 && dg >= dginit * PetscMin(ls->ftol, ls->gtol)) stage1 = 0;
227: /* A modified function is used to predict the step only if we
228: have not obtained a step for which the modified function has a
229: nonpositive function value and nonnegative derivative, and if a
230: lower function value has been obtained but the decrease is not
231: sufficient */
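/* Concretely, the modified function is psi(step) = f(x + step*s) - step*dgtest with
   dgtest = ftol*dginit, so the sufficient decrease test f <= finit + step*dgtest is the
   same as psi(step) <= finit; fm, fxm, fym and dgm, dgxm, dgym below are its values and
   derivatives at the current step and at the two endpoints of the interval. */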
233: if (stage1 && *f <= fx && *f > ftest1) {
234: fm = *f - ls->step * dgtest; /* Define modified function */
235: fxm = fx - stx * dgtest; /* and derivatives */
236: fym = fy - sty * dgtest;
237: dgm = dg - dgtest;
238: dgxm = dgx - dgtest;
239: dgym = dgy - dgtest;
241: /* if (dgxm * (ls->step - stx) >= 0.0) */
242: /* Update the interval of uncertainty and compute the new step */
243: PetscCall(Tao_mcstep(ls, &stx, &fxm, &dgxm, &sty, &fym, &dgym, &ls->step, &fm, &dgm));
245: fx = fxm + stx * dgtest; /* Reset the function and */
246: fy = fym + sty * dgtest; /* gradient values */
247: dgx = dgxm + dgtest;
248: dgy = dgym + dgtest;
249: } else {
250: /* Update the interval of uncertainty and compute the new step */
251: PetscCall(Tao_mcstep(ls, &stx, &fx, &dgx, &sty, &fy, &dgy, &ls->step, f, &dg));
252: }
254: /* Force a sufficient decrease in the interval of uncertainty */
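/* If the bracket has not shrunk by at least a factor of 0.66 over the last two iterations,
   fall back to bisecting the current interval */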
255: if (mt->bracket) {
256: if (PetscAbsReal(sty - stx) >= 0.66 * width1) ls->step = stx + 0.5 * (sty - stx);
257: width1 = width;
258: width = PetscAbsReal(sty - stx);
259: }
260: }
261: if (ls->nfeval + ls->nfgeval > ls->max_funcs) {
262: PetscCall(PetscInfo(ls, "Number of line search function evals (%" PetscInt_FMT ") > maximum (%" PetscInt_FMT ")\n", ls->nfeval + ls->nfgeval, ls->max_funcs));
263: ls->reason = TAOLINESEARCH_HALTED_MAXFCN;
264: }
265: ls->stepmax = ostepmax;
266: ls->stepmin = ostepmin;
268: /* Finish computations */
269: PetscCall(PetscInfo(ls, "%" PetscInt_FMT " function evals in line search, step = %g\n", ls->nfeval + ls->nfgeval, (double)ls->step));
271: /* Set new solution vector and compute gradient if needed */
272: PetscCall(VecCopy(mt->work, x));
273: if (!g_computed) PetscCall(TaoLineSearchComputeGradient(ls, x, g));
274: PetscFunctionReturn(PETSC_SUCCESS);
275: }
277: /*MC
278: TAOLINESEARCHMT - More-Thuente line-search type with cubic interpolation that satisfies both the sufficient decrease and
279: curvature conditions. This method can take step lengths greater than 1 {cite}`more:92`.
281: Options Database Key:
282: . -tao_ls_type more-thuente - use this line search type
284: Level: developer
286: .seealso: `TaoLineSearchCreate()`, `TaoLineSearchSetType()`, `TaoLineSearchApply()`
287: M*/
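/*
   A minimal usage sketch (not part of this file; MyObjGrad and its quadratic objective are
   hypothetical stand-ins, and x, g, s are assumed to be existing Vecs): a solver creates the
   line search, selects this type, registers an objective/gradient callback, and applies the
   search along a descent direction s, with x, f, and g holding the current point, objective
   value, and gradient on entry and the accepted point on exit.

     static PetscErrorCode MyObjGrad(TaoLineSearch ls, Vec X, PetscReal *f, Vec G, void *ctx)
     {
       PetscFunctionBegin;
       PetscCall(VecCopy(X, G));   // gradient of 0.5*||X||^2 is X
       PetscCall(VecDot(X, X, f));
       *f *= 0.5;
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     TaoLineSearch                ls;
     TaoLineSearchConvergedReason reason;
     PetscReal                    f, step;

     PetscCall(TaoLineSearchCreate(PETSC_COMM_WORLD, &ls));
     PetscCall(TaoLineSearchSetType(ls, TAOLINESEARCHMT));
     PetscCall(TaoLineSearchSetObjectiveAndGradientRoutine(ls, MyObjGrad, NULL));
     PetscCall(TaoLineSearchSetFromOptions(ls));
     PetscCall(TaoLineSearchApply(ls, x, &f, g, s, &step, &reason));
     PetscCall(TaoLineSearchDestroy(&ls));
*/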
288: PETSC_EXTERN PetscErrorCode TaoLineSearchCreate_MT(TaoLineSearch ls)
289: {
290: TaoLineSearch_MT *ctx;
292: PetscFunctionBegin;
294: PetscCall(PetscNew(&ctx));
295: ctx->bracket = 0;
296: ctx->infoc = 1;
297: ls->data = (void *)ctx;
298: ls->initstep = 1.0;
299: ls->ops->setup = NULL;
300: ls->ops->reset = NULL;
301: ls->ops->apply = TaoLineSearchApply_MT;
302: ls->ops->destroy = TaoLineSearchDestroy_MT;
303: ls->ops->setfromoptions = TaoLineSearchSetFromOptions_MT;
304: ls->ops->monitor = TaoLineSearchMonitor_MT;
305: PetscFunctionReturn(PETSC_SUCCESS);
306: }
308: /*
309: The subroutine mcstep is taken from the work of Jorge Nocedal.
310: This is a variant of More' and Thuente's routine.
312: subroutine mcstep
314: the purpose of mcstep is to compute a safeguarded step for
315: a linesearch and to update an interval of uncertainty for
316: a minimizer of the function.
318: the parameter stx contains the step with the least function
319: value. the parameter stp contains the current step. it is
320: assumed that the derivative at stx is negative in the
321: direction of the step. if bracket is set true then a
322: minimizer has been bracketed in an interval of uncertainty
323: with endpoints stx and sty.
325: the subroutine statement is
327: subroutine mcstep(stx,fx,dx,sty,fy,dy,stp,fp,dp,bracket,
328: stpmin,stpmax,info)
330: where
332: stx, fx, and dx are variables which specify the step,
333: the function, and the derivative at the best step obtained
334: so far. The derivative must be negative in the direction
335: of the step, that is, dx and stp-stx must have opposite
336: signs. On output these parameters are updated appropriately.
338: sty, fy, and dy are variables which specify the step,
339: the function, and the derivative at the other endpoint of
340: the interval of uncertainty. On output these parameters are
341: updated appropriately.
343: stp, fp, and dp are variables which specify the step,
344: the function, and the derivative at the current step.
345: If bracket is set true then on input stp must be
346: between stx and sty. On output stp is set to the new step.
348: bracket is a logical variable which specifies if a minimizer
349: has been bracketed. If the minimizer has not been bracketed
350: then on input bracket must be set false. If the minimizer
351: is bracketed then on output bracket is set true.
353: stpmin and stpmax are input variables which specify lower
354: and upper bounds for the step.
356: info is an integer output variable set as follows:
357: if info = 1,2,3,4,5, then the step has been computed
358: according to one of the five cases below. otherwise
359: info = 0, and this indicates improper input parameters.
361: subprograms called
363: fortran-supplied ... abs,max,min,sqrt
365: argonne national laboratory. minpack project. june 1983
366: jorge j. more', david j. thuente
368: */
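/*
   In cases 1-3 below (and in case 4 when a bracket exists), theta and gamma1 are the
   ingredients of the minimizer of the cubic interpolating the function values and derivatives
   at the two steps involved, with the intermediate quantities divided by
   s = max(|theta|, |dx|, |dp|) so that the discriminant can be formed without overflow;
   stpq is the competing quadratic or secant step. The final safeguarding clips the step to
   [stepmin, stepmax] and, when a bound applies, keeps it no more than 0.66 of the way from
   stx toward sty.
*/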
370: static PetscErrorCode Tao_mcstep(TaoLineSearch ls, PetscReal *stx, PetscReal *fx, PetscReal *dx, PetscReal *sty, PetscReal *fy, PetscReal *dy, PetscReal *stp, PetscReal *fp, PetscReal *dp)
371: {
372: TaoLineSearch_MT *mtP = (TaoLineSearch_MT *)ls->data;
373: PetscReal gamma1, p, q, r, s, sgnd, stpc, stpf, stpq, theta;
374: PetscInt bound;
376: PetscFunctionBegin;
377: /* Check the input parameters for errors */
378: mtP->infoc = 0;
379: PetscCheck(!mtP->bracket || (*stp > PetscMin(*stx, *sty) && *stp < PetscMax(*stx, *sty)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "bad stp in bracket");
380: PetscCheck(*dx * (*stp - *stx) < 0.0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "dx * (stp-stx) >= 0.0");
381: PetscCheck(ls->stepmax >= ls->stepmin, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "stepmin > stepmax");
383: /* Determine if the derivatives have opposite sign */
384: sgnd = *dp * (*dx / PetscAbsReal(*dx));
386: if (*fp > *fx) {
387: /* Case 1: a higher function value.
388: The minimum is bracketed. If the cubic step is closer
389: to stx than the quadratic step, the cubic step is taken,
390: else the average of the cubic and quadratic steps is taken. */
392: mtP->infoc = 1;
393: bound = 1;
394: theta = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
395: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
396: s = PetscMax(s, PetscAbsReal(*dp));
397: gamma1 = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s));
398: if (*stp < *stx) gamma1 = -gamma1;
399: /* Can p be 0? Check */
400: p = (gamma1 - *dx) + theta;
401: q = ((gamma1 - *dx) + gamma1) + *dp;
402: r = p / q;
403: stpc = *stx + r * (*stp - *stx);
404: stpq = *stx + ((*dx / ((*fx - *fp) / (*stp - *stx) + *dx)) * 0.5) * (*stp - *stx);
406: if (PetscAbsReal(stpc - *stx) < PetscAbsReal(stpq - *stx)) stpf = stpc;
407: else stpf = stpc + 0.5 * (stpq - stpc);
408: mtP->bracket = 1;
409: } else if (sgnd < 0.0) {
410: /* Case 2: A lower function value and derivatives of
411: opposite sign. The minimum is bracketed. If the cubic
412: step is closer to stx than the quadratic (secant) step,
413: the cubic step is taken, else the quadratic step is taken. */
415: mtP->infoc = 2;
416: bound = 0;
417: theta = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
418: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
419: s = PetscMax(s, PetscAbsReal(*dp));
420: gamma1 = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s));
421: if (*stp > *stx) gamma1 = -gamma1;
422: p = (gamma1 - *dp) + theta;
423: q = ((gamma1 - *dp) + gamma1) + *dx;
424: r = p / q;
425: stpc = *stp + r * (*stx - *stp);
426: stpq = *stp + (*dp / (*dp - *dx)) * (*stx - *stp);
428: if (PetscAbsReal(stpc - *stp) > PetscAbsReal(stpq - *stp)) stpf = stpc;
429: else stpf = stpq;
430: mtP->bracket = 1;
431: } else if (PetscAbsReal(*dp) < PetscAbsReal(*dx)) {
432: /* Case 3: A lower function value, derivatives of the
433: same sign, and the magnitude of the derivative decreases.
434: The cubic step is only used if the cubic tends to infinity
435: in the direction of the step or if the minimum of the cubic
436: is beyond stp. Otherwise the cubic step is defined to be
437: either stepmin or stepmax. The quadratic (secant) step is also
438: computed and if the minimum is bracketed then the step
439: closest to stx is taken, else the step farthest away is taken. */
441: mtP->infoc = 3;
442: bound = 1;
443: theta = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
444: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
445: s = PetscMax(s, PetscAbsReal(*dp));
447: /* The case gamma1 = 0 only arises if the cubic does not tend
448: to infinity in the direction of the step. */
449: gamma1 = s * PetscSqrtScalar(PetscMax(0.0, PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s)));
450: if (*stp > *stx) gamma1 = -gamma1;
451: p = (gamma1 - *dp) + theta;
452: q = (gamma1 + (*dx - *dp)) + gamma1;
453: r = p / q;
454: if (r < 0.0 && gamma1 != 0.0) stpc = *stp + r * (*stx - *stp);
455: else if (*stp > *stx) stpc = ls->stepmax;
456: else stpc = ls->stepmin;
457: stpq = *stp + (*dp / (*dp - *dx)) * (*stx - *stp);
459: if (mtP->bracket) {
460: if (PetscAbsReal(*stp - stpc) < PetscAbsReal(*stp - stpq)) stpf = stpc;
461: else stpf = stpq;
462: } else {
463: if (PetscAbsReal(*stp - stpc) > PetscAbsReal(*stp - stpq)) stpf = stpc;
464: else stpf = stpq;
465: }
466: } else {
467: /* Case 4: A lower function value, derivatives of the
468: same sign, and the magnitude of the derivative does
469: not decrease. If the minimum is not bracketed, the step
470: is either stpmin or stpmax, else the cubic step is taken. */
472: mtP->infoc = 4;
473: bound = 0;
474: if (mtP->bracket) {
475: theta = 3 * (*fp - *fy) / (*sty - *stp) + *dy + *dp;
476: s = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dy));
477: s = PetscMax(s, PetscAbsReal(*dp));
478: gamma1 = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dy / s) * (*dp / s));
479: if (*stp > *sty) gamma1 = -gamma1;
480: p = (gamma1 - *dp) + theta;
481: q = ((gamma1 - *dp) + gamma1) + *dy;
482: r = p / q;
483: stpc = *stp + r * (*sty - *stp);
484: stpf = stpc;
485: } else if (*stp > *stx) {
486: stpf = ls->stepmax;
487: } else {
488: stpf = ls->stepmin;
489: }
490: }
492: /* Update the interval of uncertainty. This update does not
493: depend on the new step or the case analysis above. */
495: if (*fp > *fx) {
496: *sty = *stp;
497: *fy = *fp;
498: *dy = *dp;
499: } else {
500: if (sgnd < 0.0) {
501: *sty = *stx;
502: *fy = *fx;
503: *dy = *dx;
504: }
505: *stx = *stp;
506: *fx = *fp;
507: *dx = *dp;
508: }
510: /* Compute the new step and safeguard it. */
511: stpf = PetscMin(ls->stepmax, stpf);
512: stpf = PetscMax(ls->stepmin, stpf);
513: *stp = stpf;
514: if (mtP->bracket && bound) {
515: if (*sty > *stx) *stp = PetscMin(*stx + 0.66 * (*sty - *stx), *stp);
516: else *stp = PetscMax(*stx + 0.66 * (*sty - *stx), *stp);
517: }
518: PetscFunctionReturn(PETSC_SUCCESS);
519: }