Actual source code: bnk.c
1: #include <petsctaolinesearch.h>
2: #include <../src/tao/bound/impls/bnk/bnk.h>
3: #include <petscksp.h>
5: static const char *BNK_INIT[64] = {"constant", "direction", "interpolation"};
6: static const char *BNK_UPDATE[64] = {"step", "reduction", "interpolation"};
7: static const char *BNK_AS[64] = {"none", "bertsekas"};
9: /* Extracts from the full Hessian the part associated with the current bnk->inactive_idx and sets the PCLMVM preconditioner */
11: static PetscErrorCode TaoBNKComputeSubHessian(Tao tao)
12: {
13: TAO_BNK *bnk = (TAO_BNK *)tao->data;
15: PetscFunctionBegin;
16: PetscCall(MatDestroy(&bnk->Hpre_inactive));
17: PetscCall(MatDestroy(&bnk->H_inactive));
18: if (bnk->active_idx) {
19: PetscCall(MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive));
20: if (tao->hessian == tao->hessian_pre) {
21: PetscCall(PetscObjectReference((PetscObject)bnk->H_inactive));
22: bnk->Hpre_inactive = bnk->H_inactive;
23: } else {
24: PetscCall(MatCreateSubMatrix(tao->hessian_pre, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->Hpre_inactive));
25: }
26: if (bnk->bfgs_pre) PetscCall(PCLMVMSetIS(bnk->bfgs_pre, bnk->inactive_idx));
27: } else {
28: PetscCall(PetscObjectReference((PetscObject)tao->hessian));
29: bnk->H_inactive = tao->hessian;
30: PetscCall(PetscObjectReference((PetscObject)tao->hessian_pre));
31: bnk->Hpre_inactive = tao->hessian_pre;
32: if (bnk->bfgs_pre) PetscCall(PCLMVMClearIS(bnk->bfgs_pre));
33: }
34: PetscFunctionReturn(PETSC_SUCCESS);
35: }
37: /* Initializes the KSP solver, the BFGS preconditioner, and the initial trust radius estimation */
39: PetscErrorCode TaoBNKInitialize(Tao tao, PetscInt initType, PetscBool *needH)
40: {
41: TAO_BNK *bnk = (TAO_BNK *)tao->data;
42: PC pc;
43: PetscReal f_min, ftrial, prered, actred, kappa, sigma, resnorm;
44: PetscReal tau, tau_1, tau_2, tau_max, tau_min, max_radius;
45: PetscBool is_bfgs, is_jacobi, is_symmetric, sym_set;
46: PetscInt n, N, nDiff;
47: PetscInt i_max = 5;
48: PetscInt j_max = 1;
49: PetscInt i, j;
50: PetscBool kspTR;
52: PetscFunctionBegin;
53: /* Project the current point onto the feasible set */
54: PetscCall(TaoComputeVariableBounds(tao));
55: PetscCall(TaoSetVariableBounds(bnk->bncg, tao->XL, tao->XU));
56: if (tao->bounded) PetscCall(TaoLineSearchSetVariableBounds(tao->linesearch, tao->XL, tao->XU));
58: /* Project the initial point onto the feasible region */
59: PetscCall(TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution));
61: /* Check convergence criteria */
62: PetscCall(TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient));
63: PetscCall(TaoBNKEstimateActiveSet(tao, bnk->as_type));
64: PetscCall(VecCopy(bnk->unprojected_gradient, tao->gradient));
65: if (bnk->active_idx) PetscCall(VecISSet(tao->gradient, bnk->active_idx, 0.0));
66: PetscCall(TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm));
68: /* Test the initial point for convergence */
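  /* Note: VecFischer() evaluates the Fischer-Burmeister function phi(a, b) = a + b - sqrt(a^2 + b^2)
     for the bound-constraint complementarity conditions; its norm is used as the convergence residual
     so that iterates resting on active bounds can still pass the optimality test. */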
69: PetscCall(VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W));
70: PetscCall(VecNorm(bnk->W, NORM_2, &resnorm));
71: PetscCheck(!PetscIsInfOrNanReal(bnk->f) && !PetscIsInfOrNanReal(resnorm), PetscObjectComm((PetscObject)tao), PETSC_ERR_USER, "User provided compute function generated Inf or NaN");
72: PetscCall(TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its));
73: PetscCall(TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, 1.0));
74: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
75: if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(PETSC_SUCCESS);
77: /* Reset KSP stopping reason counters */
78: bnk->ksp_atol = 0;
79: bnk->ksp_rtol = 0;
80: bnk->ksp_dtol = 0;
81: bnk->ksp_ctol = 0;
82: bnk->ksp_negc = 0;
83: bnk->ksp_iter = 0;
84: bnk->ksp_othr = 0;
86: /* Reset accepted step type counters */
87: bnk->tot_cg_its = 0;
88: bnk->newt = 0;
89: bnk->bfgs = 0;
90: bnk->sgrad = 0;
91: bnk->grad = 0;
93: /* Initialize the Hessian perturbation */
94: bnk->pert = bnk->sval;
96: /* Reset initial steplength to zero (this helps BNCG reset its direction internally) */
97: PetscCall(VecSet(tao->stepdirection, 0.0));
99: /* Allocate the vectors needed for the BFGS approximation */
100: PetscCall(KSPGetPC(tao->ksp, &pc));
101: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCLMVM, &is_bfgs));
102: PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCJACOBI, &is_jacobi));
103: if (is_bfgs) {
104: bnk->bfgs_pre = pc;
105: PetscCall(PCLMVMGetMatLMVM(bnk->bfgs_pre, &bnk->M));
106: PetscCall(VecGetLocalSize(tao->solution, &n));
107: PetscCall(VecGetSize(tao->solution, &N));
108: PetscCall(MatSetSizes(bnk->M, n, n, N, N));
109: PetscCall(MatLMVMAllocate(bnk->M, tao->solution, bnk->unprojected_gradient));
110: PetscCall(MatIsSymmetricKnown(bnk->M, &sym_set, &is_symmetric));
111: PetscCheck(sym_set && is_symmetric, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_INCOMP, "LMVM matrix in the LMVM preconditioner must be symmetric.");
112: } else if (is_jacobi) PetscCall(PCJacobiSetUseAbs(pc, PETSC_TRUE));
114: /* Prepare the min/max vectors for safeguarding diagonal scales */
115: PetscCall(VecSet(bnk->Diag_min, bnk->dmin));
116: PetscCall(VecSet(bnk->Diag_max, bnk->dmax));
118: /* Initialize trust-region radius. The initialization is only performed
119: when we are using Nash, Steihaug-Toint or the Generalized Lanczos method. */
120: *needH = PETSC_TRUE;
121: PetscCall(PetscObjectHasFunction((PetscObject)tao->ksp, "KSPCGSetRadius_C", &kspTR));
122: if (kspTR) {
123: switch (initType) {
124: case BNK_INIT_CONSTANT:
125: /* Use the initial radius specified */
126: tao->trust = tao->trust0;
127: break;
129: case BNK_INIT_INTERPOLATION:
130: /* Use interpolation based on the initial Hessian */
131: max_radius = 0.0;
132: tao->trust = tao->trust0;
133: for (j = 0; j < j_max; ++j) {
134: f_min = bnk->f;
135: sigma = 0.0;
137: if (*needH) {
138: /* Compute the Hessian at the new step, and extract the inactive subsystem */
139: PetscCall((*bnk->computehessian)(tao));
140: PetscCall(TaoBNKEstimateActiveSet(tao, BNK_AS_NONE));
141: PetscCall(TaoBNKComputeSubHessian(tao));
142: *needH = PETSC_FALSE;
143: }
145: for (i = 0; i < i_max; ++i) {
146: /* Take a steepest descent step and snap it to bounds */
147: PetscCall(VecCopy(tao->solution, bnk->Xold));
148: PetscCall(VecAXPY(tao->solution, -tao->trust / bnk->gnorm, tao->gradient));
149: PetscCall(TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution));
150: /* Compute the step we actually accepted */
151: PetscCall(VecCopy(tao->solution, bnk->W));
152: PetscCall(VecAXPY(bnk->W, -1.0, bnk->Xold));
153: /* Compute the objective at the trial */
154: PetscCall(TaoComputeObjective(tao, tao->solution, &ftrial));
155: PetscCheck(!PetscIsInfOrNanReal(bnk->f), PetscObjectComm((PetscObject)tao), PETSC_ERR_USER, "User provided compute function generated Inf or NaN");
156: PetscCall(VecCopy(bnk->Xold, tao->solution));
157: if (PetscIsInfOrNanReal(ftrial)) {
158: tau = bnk->gamma1_i;
159: } else {
160: if (ftrial < f_min) {
161: f_min = ftrial;
162: sigma = -tao->trust / bnk->gnorm;
163: }
165: /* Compute the predicted and actual reduction */
166: if (bnk->active_idx) {
167: PetscCall(VecGetSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive));
168: PetscCall(VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work));
169: } else {
170: bnk->X_inactive = bnk->W;
171: bnk->inactive_work = bnk->Xwork;
172: }
173: PetscCall(MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work));
174: PetscCall(VecDot(bnk->X_inactive, bnk->inactive_work, &prered));
175: if (bnk->active_idx) {
176: PetscCall(VecRestoreSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive));
177: PetscCall(VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work));
178: }
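  /* The value below is the quadratic-model estimate of the decrease for the projected steepest-descent
     trial step, built from the curvature term w^T H w computed above; it is compared with the actual
     reduction to form the agreement ratio kappa. */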
179: prered = tao->trust * (bnk->gnorm - 0.5 * tao->trust * prered / (bnk->gnorm * bnk->gnorm));
180: actred = bnk->f - ftrial;
181: if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
182: kappa = 1.0;
183: } else {
184: kappa = actred / prered;
185: }
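  /* tau_1 and tau_2 are interpolation factors for rescaling the radius, derived from fitting a
     quadratic model of the objective along the trial step; they are safeguarded against the
     gamma*_i limits below depending on how well prered agreed with actred. */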
187: tau_1 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust + (1.0 - bnk->theta_i) * prered - actred);
188: tau_2 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust - (1.0 + bnk->theta_i) * prered + actred);
189: tau_min = PetscMin(tau_1, tau_2);
190: tau_max = PetscMax(tau_1, tau_2);
192: if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu1_i) {
193: /* Great agreement */
194: max_radius = PetscMax(max_radius, tao->trust);
196: if (tau_max < 1.0) {
197: tau = bnk->gamma3_i;
198: } else if (tau_max > bnk->gamma4_i) {
199: tau = bnk->gamma4_i;
200: } else {
201: tau = tau_max;
202: }
203: } else if (PetscAbsScalar(kappa - (PetscReal)1.0) <= bnk->mu2_i) {
204: /* Good agreement */
205: max_radius = PetscMax(max_radius, tao->trust);
207: if (tau_max < bnk->gamma2_i) {
208: tau = bnk->gamma2_i;
209: } else if (tau_max > bnk->gamma3_i) {
210: tau = bnk->gamma3_i;
211: } else {
212: tau = tau_max;
213: }
214: } else {
215: /* Not good agreement */
216: if (tau_min > 1.0) {
217: tau = bnk->gamma2_i;
218: } else if (tau_max < bnk->gamma1_i) {
219: tau = bnk->gamma1_i;
220: } else if ((tau_min < bnk->gamma1_i) && (tau_max >= 1.0)) {
221: tau = bnk->gamma1_i;
222: } else if ((tau_1 >= bnk->gamma1_i) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
223: tau = tau_1;
224: } else if ((tau_2 >= bnk->gamma1_i) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
225: tau = tau_2;
226: } else {
227: tau = tau_max;
228: }
229: }
230: }
231: tao->trust = tau * tao->trust;
232: }
234: if (f_min < bnk->f) {
235: /* We accidentally found a solution better than the initial, so accept it */
236: bnk->f = f_min;
237: PetscCall(VecCopy(tao->solution, bnk->Xold));
238: PetscCall(VecAXPY(tao->solution, sigma, tao->gradient));
239: PetscCall(TaoBoundSolution(tao->solution, tao->XL, tao->XU, 0.0, &nDiff, tao->solution));
240: PetscCall(VecCopy(tao->solution, tao->stepdirection));
241: PetscCall(VecAXPY(tao->stepdirection, -1.0, bnk->Xold));
242: PetscCall(TaoComputeGradient(tao, tao->solution, bnk->unprojected_gradient));
243: PetscCall(TaoBNKEstimateActiveSet(tao, bnk->as_type));
244: PetscCall(VecCopy(bnk->unprojected_gradient, tao->gradient));
245: if (bnk->active_idx) PetscCall(VecISSet(tao->gradient, bnk->active_idx, 0.0));
246: /* Compute gradient at the new iterate and flip switch to compute the Hessian later */
247: PetscCall(TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm));
248: *needH = PETSC_TRUE;
249: /* Test the new step for convergence */
250: PetscCall(VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W));
251: PetscCall(VecNorm(bnk->W, NORM_2, &resnorm));
252: PetscCheck(!PetscIsInfOrNanReal(resnorm), PetscObjectComm((PetscObject)tao), PETSC_ERR_USER, "User provided compute function generated Inf or NaN");
253: PetscCall(TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its));
254: PetscCall(TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, 1.0));
255: PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
256: if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(PETSC_SUCCESS);
257: /* Activate BNCG recycling early because we have a stepdirection computed */
258: PetscCall(TaoSetRecycleHistory(bnk->bncg, PETSC_TRUE));
259: }
260: }
261: tao->trust = PetscMax(tao->trust, max_radius);
263: /* Ensure that the trust radius is within the limits */
264: tao->trust = PetscMax(tao->trust, bnk->min_radius);
265: tao->trust = PetscMin(tao->trust, bnk->max_radius);
266: break;
268: default:
269: /* Norm of the first direction will initialize radius */
270: tao->trust = 0.0;
271: break;
272: }
273: }
274: PetscFunctionReturn(PETSC_SUCCESS);
275: }
277: /*------------------------------------------------------------*/
279: /* Computes the exact Hessian and extracts its subHessian */
281: PetscErrorCode TaoBNKComputeHessian(Tao tao)
282: {
283: TAO_BNK *bnk = (TAO_BNK *)tao->data;
285: PetscFunctionBegin;
286: /* Compute the Hessian */
287: PetscCall(TaoComputeHessian(tao, tao->solution, tao->hessian, tao->hessian_pre));
288: /* Add a correction to the BFGS preconditioner */
289: if (bnk->M) PetscCall(MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient));
290: /* Prepare the reduced sub-matrices for the inactive set */
291: PetscCall(TaoBNKComputeSubHessian(tao));
292: PetscFunctionReturn(PETSC_SUCCESS);
293: }
295: /*------------------------------------------------------------*/
297: /* Routine for estimating the active set */
299: PetscErrorCode TaoBNKEstimateActiveSet(Tao tao, PetscInt asType)
300: {
301: TAO_BNK *bnk = (TAO_BNK *)tao->data;
302: PetscBool hessComputed, diagExists, hadactive;
304: PetscFunctionBegin;
305: hadactive = bnk->active_idx ? PETSC_TRUE : PETSC_FALSE;
306: switch (asType) {
307: case BNK_AS_NONE:
308: PetscCall(ISDestroy(&bnk->inactive_idx));
309: PetscCall(VecWhichInactive(tao->XL, tao->solution, bnk->unprojected_gradient, tao->XU, PETSC_TRUE, &bnk->inactive_idx));
310: PetscCall(ISDestroy(&bnk->active_idx));
311: PetscCall(ISComplementVec(bnk->inactive_idx, tao->solution, &bnk->active_idx));
312: break;
314: case BNK_AS_BERTSEKAS:
315: /* Compute the trial step vector with which we will estimate the active set at the next iteration */
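    /* Roughly, the Bertsekas estimate flags a variable as active when it lies within bnk->as_tol of a
       bound and the trial step W pushes it (further) into that bound; the classification into lower,
       upper and fixed sets is done by TaoEstimateActiveBounds() below. */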
316: if (bnk->M) {
317: /* If the BFGS preconditioner matrix is available, we will construct a trial step with it */
318: PetscCall(MatSolve(bnk->M, bnk->unprojected_gradient, bnk->W));
319: } else {
320: hessComputed = diagExists = PETSC_FALSE;
321: if (tao->hessian) PetscCall(MatAssembled(tao->hessian, &hessComputed));
322: if (hessComputed) PetscCall(MatHasOperation(tao->hessian, MATOP_GET_DIAGONAL, &diagExists));
323: if (diagExists) {
324: /* No BFGS preconditioner is available, so apply the inverse of the safeguarded absolute diagonal of the Hessian to the gradient instead */
325: PetscCall(MatGetDiagonal(tao->hessian, bnk->Xwork));
326: PetscCall(VecAbs(bnk->Xwork));
327: PetscCall(VecMedian(bnk->Diag_min, bnk->Xwork, bnk->Diag_max, bnk->Xwork));
328: PetscCall(VecReciprocal(bnk->Xwork));
329: PetscCall(VecPointwiseMult(bnk->W, bnk->Xwork, bnk->unprojected_gradient));
330: } else {
331: /* If the Hessian or its diagonal does not exist, we will simply use gradient step */
332: PetscCall(VecCopy(bnk->unprojected_gradient, bnk->W));
333: }
334: }
335: PetscCall(VecScale(bnk->W, -1.0));
336: PetscCall(TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, bnk->unprojected_gradient, bnk->W, bnk->Xwork, bnk->as_step, &bnk->as_tol, &bnk->active_lower, &bnk->active_upper, &bnk->active_fixed, &bnk->active_idx, &bnk->inactive_idx));
337: break;
339: default:
340: break;
341: }
342: bnk->resetksp = (PetscBool)(bnk->active_idx || hadactive); /* inactive Hessian size may have changed, need to reset operators */
343: PetscFunctionReturn(PETSC_SUCCESS);
344: }
346: /*------------------------------------------------------------*/
348: /* Routine for bounding the step direction */
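/* For BNK_AS_NONE the step is simply zeroed on the estimated active set; for the Bertsekas variant,
   TaoBoundStep() overwrites the step on the active lower/upper/fixed sets so that those variables
   step exactly onto (or stay at) their bounds. */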
350: PetscErrorCode TaoBNKBoundStep(Tao tao, PetscInt asType, Vec step)
351: {
352: TAO_BNK *bnk = (TAO_BNK *)tao->data;
354: PetscFunctionBegin;
355: switch (asType) {
356: case BNK_AS_NONE:
357: if (bnk->active_idx) PetscCall(VecISSet(step, bnk->active_idx, 0.0));
358: break;
359: case BNK_AS_BERTSEKAS:
360: PetscCall(TaoBoundStep(tao->solution, tao->XL, tao->XU, bnk->active_lower, bnk->active_upper, bnk->active_fixed, 1.0, step));
361: break;
362: default:
363: break;
364: }
365: PetscFunctionReturn(PETSC_SUCCESS);
366: }
368: /*------------------------------------------------------------*/
370: /* Routine for taking a finite number of BNCG iterations to
371: accelerate Newton convergence.
373: In practice, this approach simply trades off Hessian evaluations
374: for more gradient evaluations.
375: */
377: PetscErrorCode TaoBNKTakeCGSteps(Tao tao, PetscBool *terminate)
378: {
379: TAO_BNK *bnk = (TAO_BNK *)tao->data;
381: PetscFunctionBegin;
382: *terminate = PETSC_FALSE;
383: if (bnk->max_cg_its > 0) {
384: /* Copy the current function value (important vectors are already shared) */
385: bnk->bncg_ctx->f = bnk->f;
386: /* Take some small finite number of BNCG iterations */
387: PetscCall(TaoSolve(bnk->bncg));
388: /* Add the number of gradient and function evaluations to the total */
389: tao->nfuncs += bnk->bncg->nfuncs;
390: tao->nfuncgrads += bnk->bncg->nfuncgrads;
391: tao->ngrads += bnk->bncg->ngrads;
392: tao->nhess += bnk->bncg->nhess;
393: bnk->tot_cg_its += bnk->bncg->niter;
394: /* Extract the BNCG function value out and save it into BNK */
395: bnk->f = bnk->bncg_ctx->f;
396: if (bnk->bncg->reason == TAO_CONVERGED_GATOL || bnk->bncg->reason == TAO_CONVERGED_GRTOL || bnk->bncg->reason == TAO_CONVERGED_GTTOL || bnk->bncg->reason == TAO_CONVERGED_MINF) {
397: *terminate = PETSC_TRUE;
398: } else {
399: PetscCall(TaoBNKEstimateActiveSet(tao, bnk->as_type));
400: }
401: }
402: PetscFunctionReturn(PETSC_SUCCESS);
403: }
405: /*------------------------------------------------------------*/
407: /* Routine for computing the Newton step. */
409: PetscErrorCode TaoBNKComputeStep(Tao tao, PetscBool shift, KSPConvergedReason *ksp_reason, PetscInt *step_type)
410: {
411: TAO_BNK *bnk = (TAO_BNK *)tao->data;
412: PetscInt bfgsUpdates = 0;
413: PetscInt kspits;
414: PetscBool is_lmvm;
415: PetscBool kspTR;
417: PetscFunctionBegin;
418: /* If there are no inactive variables left, save some computation and return an adjusted zero step
419: that has (l-x) and (u-x) for lower and upper bounded variables. */
420: if (!bnk->inactive_idx) {
421: PetscCall(VecSet(tao->stepdirection, 0.0));
422: PetscCall(TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
426: /* Shift the reduced Hessian matrix */
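  /* The perturbation bnk->pert > 0 shifts the reduced Hessian to H + pert*I so that the Newton system
     remains (nearly) positive definite when the true Hessian is indefinite; pert is adapted in
     TaoBNKSafeguardStep() and TaoBNKPerformLineSearch(). */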
427: if (shift && bnk->pert > 0) {
428: PetscCall(PetscObjectTypeCompare((PetscObject)tao->hessian, MATLMVM, &is_lmvm));
429: if (is_lmvm) {
430: PetscCall(MatShift(tao->hessian, bnk->pert));
431: } else {
432: PetscCall(MatShift(bnk->H_inactive, bnk->pert));
433: if (bnk->H_inactive != bnk->Hpre_inactive) PetscCall(MatShift(bnk->Hpre_inactive, bnk->pert));
434: }
435: }
437: /* Solve the Newton system of equations */
438: tao->ksp_its = 0;
439: PetscCall(VecSet(tao->stepdirection, 0.0));
440: if (bnk->resetksp) {
441: PetscCall(KSPReset(tao->ksp));
442: PetscCall(KSPResetFromOptions(tao->ksp));
443: bnk->resetksp = PETSC_FALSE;
444: }
445: PetscCall(KSPSetOperators(tao->ksp, bnk->H_inactive, bnk->Hpre_inactive));
446: PetscCall(VecCopy(bnk->unprojected_gradient, bnk->Gwork));
447: if (bnk->active_idx) {
448: PetscCall(VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive));
449: PetscCall(VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive));
450: } else {
451: bnk->G_inactive = bnk->unprojected_gradient;
452: bnk->X_inactive = tao->stepdirection;
453: }
454: PetscCall(KSPCGSetRadius(tao->ksp, tao->trust));
455: PetscCall(KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive));
456: PetscCall(KSPGetIterationNumber(tao->ksp, &kspits));
457: tao->ksp_its += kspits;
458: tao->ksp_tot_its += kspits;
459: PetscCall(PetscObjectHasFunction((PetscObject)tao->ksp, "KSPCGGetNormD_C", &kspTR));
460: if (kspTR) {
461: PetscCall(KSPCGGetNormD(tao->ksp, &bnk->dnorm));
463: if (0.0 == tao->trust) {
464: /* Radius was uninitialized; use the norm of the direction */
465: if (bnk->dnorm > 0.0) {
466: tao->trust = bnk->dnorm;
468: /* Modify the radius if it is too large or small */
469: tao->trust = PetscMax(tao->trust, bnk->min_radius);
470: tao->trust = PetscMin(tao->trust, bnk->max_radius);
471: } else {
472: /* The direction was bad; set radius to default value and re-solve
473: the trust-region subproblem to get a direction */
474: tao->trust = tao->trust0;
476: /* Modify the radius if it is too large or small */
477: tao->trust = PetscMax(tao->trust, bnk->min_radius);
478: tao->trust = PetscMin(tao->trust, bnk->max_radius);
480: PetscCall(KSPCGSetRadius(tao->ksp, tao->trust));
481: PetscCall(KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive));
482: PetscCall(KSPGetIterationNumber(tao->ksp, &kspits));
483: tao->ksp_its += kspits;
484: tao->ksp_tot_its += kspits;
485: PetscCall(KSPCGGetNormD(tao->ksp, &bnk->dnorm));
487: PetscCheck(bnk->dnorm != 0.0, PetscObjectComm((PetscObject)tao), PETSC_ERR_PLIB, "Initial direction zero");
488: }
489: }
490: }
491: /* Restore sub vectors back */
492: if (bnk->active_idx) {
493: PetscCall(VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive));
494: PetscCall(VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive));
495: }
496: /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
497: PetscCall(VecScale(tao->stepdirection, -1.0));
498: PetscCall(TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection));
500: /* Record convergence reasons */
501: PetscCall(KSPGetConvergedReason(tao->ksp, ksp_reason));
502: if (KSP_CONVERGED_ATOL == *ksp_reason) {
503: ++bnk->ksp_atol;
504: } else if (KSP_CONVERGED_RTOL == *ksp_reason) {
505: ++bnk->ksp_rtol;
506: } else if (KSP_CONVERGED_STEP_LENGTH == *ksp_reason) {
507: ++bnk->ksp_ctol;
508: } else if (KSP_CONVERGED_NEG_CURVE == *ksp_reason) {
509: ++bnk->ksp_negc;
510: } else if (KSP_DIVERGED_DTOL == *ksp_reason) {
511: ++bnk->ksp_dtol;
512: } else if (KSP_DIVERGED_ITS == *ksp_reason) {
513: ++bnk->ksp_iter;
514: } else {
515: ++bnk->ksp_othr;
516: }
518: /* Make sure the BFGS preconditioner is healthy */
519: if (bnk->M) {
520: PetscCall(MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates));
521: if ((KSP_DIVERGED_INDEFINITE_PC == *ksp_reason) && (bfgsUpdates > 0)) {
522: /* Preconditioner is numerically indefinite; reset the approximation. */
523: PetscCall(MatLMVMReset(bnk->M, PETSC_FALSE));
524: PetscCall(MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient));
525: }
526: }
527: *step_type = BNK_NEWTON;
528: PetscFunctionReturn(PETSC_SUCCESS);
529: }
531: /*------------------------------------------------------------*/
533: /* Routine for recomputing the predicted reduction for a given step vector */
535: PetscErrorCode TaoBNKRecomputePred(Tao tao, Vec S, PetscReal *prered)
536: {
537: TAO_BNK *bnk = (TAO_BNK *)tao->data;
539: PetscFunctionBegin;
540: /* Extract subvectors associated with the inactive set */
541: if (bnk->active_idx) {
542: PetscCall(VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive));
543: PetscCall(VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work));
544: PetscCall(VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive));
545: } else {
546: bnk->X_inactive = tao->stepdirection;
547: bnk->inactive_work = bnk->Xwork;
548: bnk->G_inactive = bnk->Gwork;
549: }
550: /* Recompute the predicted decrease based on the quadratic model */
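  /* What is computed below is prered = (g - 0.5*H*s)^T s restricted to the inactive variables,
     with g taken from the work vector Gwork and s from the step direction. */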
551: PetscCall(MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work));
552: PetscCall(VecAYPX(bnk->inactive_work, -0.5, bnk->G_inactive));
553: PetscCall(VecDot(bnk->inactive_work, bnk->X_inactive, prered));
554: /* Restore the sub vectors */
555: if (bnk->active_idx) {
556: PetscCall(VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive));
557: PetscCall(VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work));
558: PetscCall(VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive));
559: }
560: PetscFunctionReturn(PETSC_SUCCESS);
561: }
563: /*------------------------------------------------------------*/
565: /* Routine for ensuring that the Newton step is a descent direction.
567: The step direction falls back onto BFGS, scaled gradient and gradient steps
568: in the event that the Newton step fails the test.
569: */
571: PetscErrorCode TaoBNKSafeguardStep(Tao tao, KSPConvergedReason ksp_reason, PetscInt *stepType)
572: {
573: TAO_BNK *bnk = (TAO_BNK *)tao->data;
574: PetscReal gdx, e_min;
575: PetscInt bfgsUpdates;
577: PetscFunctionBegin;
578: switch (*stepType) {
579: case BNK_NEWTON:
580: PetscCall(VecDot(tao->stepdirection, tao->gradient, &gdx));
581: if ((gdx >= 0.0) || PetscIsInfOrNanReal(gdx)) {
582: /* Newton step is not descent or direction produced Inf or NaN
583: Update the perturbation for next time */
584: if (bnk->pert <= 0.0) {
585: PetscBool is_gltr;
587: /* Initialize the perturbation */
588: bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
589: PetscCall(PetscObjectTypeCompare((PetscObject)tao->ksp, KSPGLTR, &is_gltr));
590: if (is_gltr) {
591: PetscCall(KSPGLTRGetMinEig(tao->ksp, &e_min));
592: bnk->pert = PetscMax(bnk->pert, -e_min);
593: }
594: } else {
595: /* Increase the perturbation */
596: bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
597: }
599: if (!bnk->M) {
600: /* The BFGS matrix is not being maintained and updated;
601: we must use the gradient direction in this case */
602: PetscCall(VecCopy(tao->gradient, tao->stepdirection));
603: *stepType = BNK_GRADIENT;
604: } else {
605: /* Attempt to use the BFGS direction */
606: PetscCall(MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection));
608: /* Check for success (descent direction)
609: NOTE: Negative gdx here means not a descent direction because
610: the fall-back step is missing a negative sign. */
611: PetscCall(VecDot(tao->gradient, tao->stepdirection, &gdx));
612: if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
613: /* BFGS direction is not descent or direction produced not a number
614: We can assert bfgsUpdates > 1 in this case because
615: the first solve produces the scaled gradient direction,
616: which is guaranteed to be descent */
618: /* Use steepest descent direction (scaled) */
619: PetscCall(MatLMVMReset(bnk->M, PETSC_FALSE));
620: PetscCall(MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient));
621: PetscCall(MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection));
623: *stepType = BNK_SCALED_GRADIENT;
624: } else {
625: PetscCall(MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates));
626: if (1 == bfgsUpdates) {
627: /* The first BFGS direction is always the scaled gradient */
628: *stepType = BNK_SCALED_GRADIENT;
629: } else {
630: *stepType = BNK_BFGS;
631: }
632: }
633: }
634: /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
635: PetscCall(VecScale(tao->stepdirection, -1.0));
636: PetscCall(TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection));
637: } else {
638: /* Computed Newton step is descent */
639: switch (ksp_reason) {
640: case KSP_DIVERGED_NANORINF:
641: case KSP_DIVERGED_BREAKDOWN:
642: case KSP_DIVERGED_INDEFINITE_MAT:
643: case KSP_DIVERGED_INDEFINITE_PC:
644: case KSP_CONVERGED_NEG_CURVE:
645: /* Matrix or preconditioner is indefinite; increase perturbation */
646: if (bnk->pert <= 0.0) {
647: PetscBool is_gltr;
649: /* Initialize the perturbation */
650: bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
651: PetscCall(PetscObjectTypeCompare((PetscObject)tao->ksp, KSPGLTR, &is_gltr));
652: if (is_gltr) {
653: PetscCall(KSPGLTRGetMinEig(tao->ksp, &e_min));
654: bnk->pert = PetscMax(bnk->pert, -e_min);
655: }
656: } else {
657: /* Increase the perturbation */
658: bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
659: }
660: break;
662: default:
663: /* Newton step computation is good; decrease perturbation */
664: bnk->pert = PetscMin(bnk->psfac * bnk->pert, bnk->pmsfac * bnk->gnorm);
665: if (bnk->pert < bnk->pmin) bnk->pert = 0.0;
666: break;
667: }
668: *stepType = BNK_NEWTON;
669: }
670: break;
672: case BNK_BFGS:
673: /* Check for success (descent direction) */
674: PetscCall(VecDot(tao->stepdirection, tao->gradient, &gdx));
675: if (gdx >= 0 || PetscIsInfOrNanReal(gdx)) {
676: /* Step is not descent or solve was not successful
677: Use steepest descent direction (scaled) */
678: PetscCall(MatLMVMReset(bnk->M, PETSC_FALSE));
679: PetscCall(MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient));
680: PetscCall(MatSolve(bnk->M, tao->gradient, tao->stepdirection));
681: PetscCall(VecScale(tao->stepdirection, -1.0));
682: PetscCall(TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection));
683: *stepType = BNK_SCALED_GRADIENT;
684: } else {
685: *stepType = BNK_BFGS;
686: }
687: break;
689: case BNK_SCALED_GRADIENT:
690: break;
692: default:
693: break;
694: }
695: PetscFunctionReturn(PETSC_SUCCESS);
696: }
698: /*------------------------------------------------------------*/
700: /* Routine for performing a bound-projected Moré-Thuente line search.
702: Includes fallbacks to BFGS, scaled gradient, and unscaled gradient steps if the
703: Newton step does not produce a valid step length.
704: */
706: PetscErrorCode TaoBNKPerformLineSearch(Tao tao, PetscInt *stepType, PetscReal *steplen, TaoLineSearchConvergedReason *reason)
707: {
708: TAO_BNK *bnk = (TAO_BNK *)tao->data;
709: TaoLineSearchConvergedReason ls_reason;
710: PetscReal e_min, gdx;
711: PetscInt bfgsUpdates;
713: PetscFunctionBegin;
714: /* Perform the linesearch */
715: PetscCall(TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason));
716: PetscCall(TaoAddLineSearchCounts(tao));
718: while (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER && *stepType != BNK_SCALED_GRADIENT && *stepType != BNK_GRADIENT) {
719: /* Linesearch failed, revert solution */
720: bnk->f = bnk->fold;
721: PetscCall(VecCopy(bnk->Xold, tao->solution));
722: PetscCall(VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient));
724: switch (*stepType) {
725: case BNK_NEWTON:
726: /* Failed to obtain acceptable iterate with Newton step
727: Update the perturbation for next time */
728: if (bnk->pert <= 0.0) {
729: PetscBool is_gltr;
731: /* Initialize the perturbation */
732: bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
733: PetscCall(PetscObjectTypeCompare((PetscObject)tao->ksp, KSPGLTR, &is_gltr));
734: if (is_gltr) {
735: PetscCall(KSPGLTRGetMinEig(tao->ksp, &e_min));
736: bnk->pert = PetscMax(bnk->pert, -e_min);
737: }
738: } else {
739: /* Increase the perturbation */
740: bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
741: }
743: if (!bnk->M) {
744: /* The BFGS matrix is not being maintained and updated;
745: we must use the gradient direction in this case */
746: PetscCall(VecCopy(bnk->unprojected_gradient, tao->stepdirection));
747: *stepType = BNK_GRADIENT;
748: } else {
749: /* Attempt to use the BFGS direction */
750: PetscCall(MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection));
751: /* Check for success (descent direction)
752: NOTE: Negative gdx means not a descent direction because the step here is missing a negative sign. */
753: PetscCall(VecDot(tao->gradient, tao->stepdirection, &gdx));
754: if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
755: /* BFGS direction is not descent or direction produced not a number
756: We can assert bfgsUpdates > 1 in this case
757: Use steepest descent direction (scaled) */
758: PetscCall(MatLMVMReset(bnk->M, PETSC_FALSE));
759: PetscCall(MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient));
760: PetscCall(MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection));
762: bfgsUpdates = 1;
763: *stepType = BNK_SCALED_GRADIENT;
764: } else {
765: PetscCall(MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates));
766: if (1 == bfgsUpdates) {
767: /* The first BFGS direction is always the scaled gradient */
768: *stepType = BNK_SCALED_GRADIENT;
769: } else {
770: *stepType = BNK_BFGS;
771: }
772: }
773: }
774: break;
776: case BNK_BFGS:
777: /* Can only enter if pc_type == BNK_PC_BFGS
778: Failed to obtain acceptable iterate with BFGS step
779: Attempt to use the scaled gradient direction */
780: PetscCall(MatLMVMReset(bnk->M, PETSC_FALSE));
781: PetscCall(MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient));
782: PetscCall(MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection));
784: bfgsUpdates = 1;
785: *stepType = BNK_SCALED_GRADIENT;
786: break;
787: }
788: /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
789: PetscCall(VecScale(tao->stepdirection, -1.0));
790: PetscCall(TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection));
792: /* Perform one last line search with the fall-back step */
793: PetscCall(TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason));
794: PetscCall(TaoAddLineSearchCounts(tao));
795: }
796: *reason = ls_reason;
797: PetscFunctionReturn(PETSC_SUCCESS);
798: }
800: /*------------------------------------------------------------*/
802: /* Routine for updating the trust radius.
804: Function features three different update methods:
805: 1) Line-search step length based
806: 2) Predicted decrease on the CG quadratic model
807: 3) Interpolation
808: */
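/* These correspond to -tao_bnk_update_type step, reduction and interpolation, respectively (see
   BNK_UPDATE above); the interpolation scheme is handled by the default branch of the switch below. */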
810: PetscErrorCode TaoBNKUpdateTrustRadius(Tao tao, PetscReal prered, PetscReal actred, PetscInt updateType, PetscInt stepType, PetscBool *accept)
811: {
812: TAO_BNK *bnk = (TAO_BNK *)tao->data;
814: PetscReal step, kappa;
815: PetscReal gdx, tau_1, tau_2, tau_min, tau_max;
817: PetscFunctionBegin;
818: /* Update trust region radius */
819: *accept = PETSC_FALSE;
820: switch (updateType) {
821: case BNK_UPDATE_STEP:
822: *accept = PETSC_TRUE; /* always accept here because line search succeeded */
823: if (stepType == BNK_NEWTON) {
824: PetscCall(TaoLineSearchGetStepLength(tao->linesearch, &step));
825: if (step < bnk->nu1) {
826: /* Very bad step taken; reduce radius */
827: tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
828: } else if (step < bnk->nu2) {
829: /* Reasonably bad step taken; reduce radius */
830: tao->trust = bnk->omega2 * PetscMin(bnk->dnorm, tao->trust);
831: } else if (step < bnk->nu3) {
832: /* Reasonable step was taken; leave radius alone */
833: if (bnk->omega3 < 1.0) {
834: tao->trust = bnk->omega3 * PetscMin(bnk->dnorm, tao->trust);
835: } else if (bnk->omega3 > 1.0) {
836: tao->trust = PetscMax(bnk->omega3 * bnk->dnorm, tao->trust);
837: }
838: } else if (step < bnk->nu4) {
839: /* Full step taken; increase the radius */
840: tao->trust = PetscMax(bnk->omega4 * bnk->dnorm, tao->trust);
841: } else {
842: /* More than full step taken; increase the radius */
843: tao->trust = PetscMax(bnk->omega5 * bnk->dnorm, tao->trust);
844: }
845: } else {
846: /* Newton step was not good; reduce the radius */
847: tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
848: }
849: break;
851: case BNK_UPDATE_REDUCTION:
852: if (stepType == BNK_NEWTON) {
853: if ((prered < 0.0) || PetscIsInfOrNanReal(prered)) {
854: /* The predicted reduction has the wrong sign. This cannot
855: happen in infinite precision arithmetic. Step should
856: be rejected! */
857: tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
858: } else {
859: if (PetscIsInfOrNanReal(actred)) {
860: tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
861: } else {
862: if ((PetscAbsScalar(actred) <= PetscMax(1.0, PetscAbsScalar(bnk->f)) * bnk->epsilon) && (PetscAbsScalar(prered) <= PetscMax(1.0, PetscAbsScalar(bnk->f)) * bnk->epsilon)) {
863: kappa = 1.0;
864: } else {
865: kappa = actred / prered;
866: }
867: /* Accept or reject the step and update radius */
868: if (kappa < bnk->eta1) {
869: /* Reject the step */
870: tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
871: } else {
872: /* Accept the step */
873: *accept = PETSC_TRUE;
874: /* Update the trust region radius only if the computed step is at the trust radius boundary */
875: if (bnk->dnorm == tao->trust) {
876: if (kappa < bnk->eta2) {
877: /* Marginal bad step */
878: tao->trust = bnk->alpha2 * tao->trust;
879: } else if (kappa < bnk->eta3) {
880: /* Reasonable step */
881: tao->trust = bnk->alpha3 * tao->trust;
882: } else if (kappa < bnk->eta4) {
883: /* Good step */
884: tao->trust = bnk->alpha4 * tao->trust;
885: } else {
886: /* Very good step */
887: tao->trust = bnk->alpha5 * tao->trust;
888: }
889: }
890: }
891: }
892: }
893: } else {
894: /* Newton step was not good; reduce the radius */
895: tao->trust = bnk->alpha1 * PetscMin(bnk->dnorm, tao->trust);
896: }
897: break;
899: default:
900: if (stepType == BNK_NEWTON) {
901: if (prered < 0.0) {
902: /* The predicted reduction has the wrong sign. This cannot */
903: /* happen in infinite precision arithmetic. Step should */
904: /* be rejected! */
905: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
906: } else {
907: if (PetscIsInfOrNanReal(actred)) {
908: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
909: } else {
910: if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
911: kappa = 1.0;
912: } else {
913: kappa = actred / prered;
914: }
916: PetscCall(VecDot(tao->gradient, tao->stepdirection, &gdx));
917: tau_1 = bnk->theta * gdx / (bnk->theta * gdx - (1.0 - bnk->theta) * prered + actred);
918: tau_2 = bnk->theta * gdx / (bnk->theta * gdx + (1.0 + bnk->theta) * prered - actred);
919: tau_min = PetscMin(tau_1, tau_2);
920: tau_max = PetscMax(tau_1, tau_2);
922: if (kappa >= 1.0 - bnk->mu1) {
923: /* Great agreement */
924: *accept = PETSC_TRUE;
925: if (tau_max < 1.0) {
926: tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
927: } else if (tau_max > bnk->gamma4) {
928: tao->trust = PetscMax(tao->trust, bnk->gamma4 * bnk->dnorm);
929: } else {
930: tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
931: }
932: } else if (kappa >= 1.0 - bnk->mu2) {
933: /* Good agreement */
934: *accept = PETSC_TRUE;
935: if (tau_max < bnk->gamma2) {
936: tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
937: } else if (tau_max > bnk->gamma3) {
938: tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
939: } else if (tau_max < 1.0) {
940: tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
941: } else {
942: tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
943: }
944: } else {
945: /* Not good agreement */
946: if (tau_min > 1.0) {
947: tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
948: } else if (tau_max < bnk->gamma1) {
949: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
950: } else if ((tau_min < bnk->gamma1) && (tau_max >= 1.0)) {
951: tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
952: } else if ((tau_1 >= bnk->gamma1) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1) || (tau_2 >= 1.0))) {
953: tao->trust = tau_1 * PetscMin(tao->trust, bnk->dnorm);
954: } else if ((tau_2 >= bnk->gamma1) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1) || (tau_2 >= 1.0))) {
955: tao->trust = tau_2 * PetscMin(tao->trust, bnk->dnorm);
956: } else {
957: tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
958: }
959: }
960: }
961: }
962: } else {
963: /* Newton step was not good; reduce the radius */
964: tao->trust = bnk->gamma1 * PetscMin(bnk->dnorm, tao->trust);
965: }
966: break;
967: }
968: /* Make sure the radius does not violate min and max settings */
969: tao->trust = PetscMin(tao->trust, bnk->max_radius);
970: tao->trust = PetscMax(tao->trust, bnk->min_radius);
971: PetscFunctionReturn(PETSC_SUCCESS);
972: }
974: /* ---------------------------------------------------------- */
976: PetscErrorCode TaoBNKAddStepCounts(Tao tao, PetscInt stepType)
977: {
978: TAO_BNK *bnk = (TAO_BNK *)tao->data;
980: PetscFunctionBegin;
981: switch (stepType) {
982: case BNK_NEWTON:
983: ++bnk->newt;
984: break;
985: case BNK_BFGS:
986: ++bnk->bfgs;
987: break;
988: case BNK_SCALED_GRADIENT:
989: ++bnk->sgrad;
990: break;
991: case BNK_GRADIENT:
992: ++bnk->grad;
993: break;
994: default:
995: break;
996: }
997: PetscFunctionReturn(PETSC_SUCCESS);
998: }
1000: /* ---------------------------------------------------------- */
1002: PetscErrorCode TaoSetUp_BNK(Tao tao)
1003: {
1004: TAO_BNK *bnk = (TAO_BNK *)tao->data;
1005: PetscInt i;
1007: PetscFunctionBegin;
1008: if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
1009: if (!tao->stepdirection) PetscCall(VecDuplicate(tao->solution, &tao->stepdirection));
1010: if (!bnk->W) PetscCall(VecDuplicate(tao->solution, &bnk->W));
1011: if (!bnk->Xold) PetscCall(VecDuplicate(tao->solution, &bnk->Xold));
1012: if (!bnk->Gold) PetscCall(VecDuplicate(tao->solution, &bnk->Gold));
1013: if (!bnk->Xwork) PetscCall(VecDuplicate(tao->solution, &bnk->Xwork));
1014: if (!bnk->Gwork) PetscCall(VecDuplicate(tao->solution, &bnk->Gwork));
1015: if (!bnk->unprojected_gradient) PetscCall(VecDuplicate(tao->solution, &bnk->unprojected_gradient));
1016: if (!bnk->unprojected_gradient_old) PetscCall(VecDuplicate(tao->solution, &bnk->unprojected_gradient_old));
1017: if (!bnk->Diag_min) PetscCall(VecDuplicate(tao->solution, &bnk->Diag_min));
1018: if (!bnk->Diag_max) PetscCall(VecDuplicate(tao->solution, &bnk->Diag_max));
1019: if (bnk->max_cg_its > 0) {
1020: /* Ensure that the important common vectors are shared between BNK and embedded BNCG */
1021: bnk->bncg_ctx = (TAO_BNCG *)bnk->bncg->data;
1022: PetscCall(PetscObjectReference((PetscObject)bnk->unprojected_gradient_old));
1023: PetscCall(VecDestroy(&bnk->bncg_ctx->unprojected_gradient_old));
1024: bnk->bncg_ctx->unprojected_gradient_old = bnk->unprojected_gradient_old;
1025: PetscCall(PetscObjectReference((PetscObject)bnk->unprojected_gradient));
1026: PetscCall(VecDestroy(&bnk->bncg_ctx->unprojected_gradient));
1027: bnk->bncg_ctx->unprojected_gradient = bnk->unprojected_gradient;
1028: PetscCall(PetscObjectReference((PetscObject)bnk->Gold));
1029: PetscCall(VecDestroy(&bnk->bncg_ctx->G_old));
1030: bnk->bncg_ctx->G_old = bnk->Gold;
1031: PetscCall(PetscObjectReference((PetscObject)tao->gradient));
1032: PetscCall(VecDestroy(&bnk->bncg->gradient));
1033: bnk->bncg->gradient = tao->gradient;
1034: PetscCall(PetscObjectReference((PetscObject)tao->stepdirection));
1035: PetscCall(VecDestroy(&bnk->bncg->stepdirection));
1036: bnk->bncg->stepdirection = tao->stepdirection;
1037: PetscCall(TaoSetSolution(bnk->bncg, tao->solution));
1038: /* Copy over some settings from BNK into BNCG */
1039: PetscCall(TaoSetMaximumIterations(bnk->bncg, bnk->max_cg_its));
1040: PetscCall(TaoSetTolerances(bnk->bncg, tao->gatol, tao->grtol, tao->gttol));
1041: PetscCall(TaoSetFunctionLowerBound(bnk->bncg, tao->fmin));
1042: PetscCall(TaoSetConvergenceTest(bnk->bncg, tao->ops->convergencetest, tao->cnvP));
1043: PetscCall(TaoSetObjective(bnk->bncg, tao->ops->computeobjective, tao->user_objP));
1044: PetscCall(TaoSetGradient(bnk->bncg, NULL, tao->ops->computegradient, tao->user_gradP));
1045: PetscCall(TaoSetObjectiveAndGradient(bnk->bncg, NULL, tao->ops->computeobjectiveandgradient, tao->user_objgradP));
1046: PetscCall(PetscObjectCopyFortranFunctionPointers((PetscObject)tao, (PetscObject)bnk->bncg));
1047: for (i = 0; i < tao->numbermonitors; ++i) {
1048: PetscCall(TaoMonitorSet(bnk->bncg, tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i]));
1049: PetscCall(PetscObjectReference((PetscObject)tao->monitorcontext[i]));
1050: }
1051: }
1052: bnk->X_inactive = NULL;
1053: bnk->G_inactive = NULL;
1054: bnk->inactive_work = NULL;
1055: bnk->active_work = NULL;
1056: bnk->inactive_idx = NULL;
1057: bnk->active_idx = NULL;
1058: bnk->active_lower = NULL;
1059: bnk->active_upper = NULL;
1060: bnk->active_fixed = NULL;
1061: bnk->M = NULL;
1062: bnk->H_inactive = NULL;
1063: bnk->Hpre_inactive = NULL;
1064: PetscFunctionReturn(PETSC_SUCCESS);
1065: }
1067: /*------------------------------------------------------------*/
1069: PetscErrorCode TaoDestroy_BNK(Tao tao)
1070: {
1071: TAO_BNK *bnk = (TAO_BNK *)tao->data;
1073: PetscFunctionBegin;
1074: PetscCall(VecDestroy(&bnk->W));
1075: PetscCall(VecDestroy(&bnk->Xold));
1076: PetscCall(VecDestroy(&bnk->Gold));
1077: PetscCall(VecDestroy(&bnk->Xwork));
1078: PetscCall(VecDestroy(&bnk->Gwork));
1079: PetscCall(VecDestroy(&bnk->unprojected_gradient));
1080: PetscCall(VecDestroy(&bnk->unprojected_gradient_old));
1081: PetscCall(VecDestroy(&bnk->Diag_min));
1082: PetscCall(VecDestroy(&bnk->Diag_max));
1083: PetscCall(ISDestroy(&bnk->active_lower));
1084: PetscCall(ISDestroy(&bnk->active_upper));
1085: PetscCall(ISDestroy(&bnk->active_fixed));
1086: PetscCall(ISDestroy(&bnk->active_idx));
1087: PetscCall(ISDestroy(&bnk->inactive_idx));
1088: PetscCall(MatDestroy(&bnk->Hpre_inactive));
1089: PetscCall(MatDestroy(&bnk->H_inactive));
1090: PetscCall(TaoDestroy(&bnk->bncg));
1091: PetscCall(KSPDestroy(&tao->ksp));
1092: PetscCall(PetscFree(tao->data));
1093: PetscFunctionReturn(PETSC_SUCCESS);
1094: }
1096: /*------------------------------------------------------------*/
1098: PetscErrorCode TaoSetFromOptions_BNK(Tao tao, PetscOptionItems *PetscOptionsObject)
1099: {
1100: TAO_BNK *bnk = (TAO_BNK *)tao->data;
1102: PetscFunctionBegin;
1103: PetscOptionsHeadBegin(PetscOptionsObject, "Newton-Krylov method for bound constrained optimization");
1104: PetscCall(PetscOptionsEList("-tao_bnk_init_type", "radius initialization type", "", BNK_INIT, BNK_INIT_TYPES, BNK_INIT[bnk->init_type], &bnk->init_type, NULL));
1105: PetscCall(PetscOptionsEList("-tao_bnk_update_type", "radius update type", "", BNK_UPDATE, BNK_UPDATE_TYPES, BNK_UPDATE[bnk->update_type], &bnk->update_type, NULL));
1106: PetscCall(PetscOptionsEList("-tao_bnk_as_type", "active set estimation method", "", BNK_AS, BNK_AS_TYPES, BNK_AS[bnk->as_type], &bnk->as_type, NULL));
1107: PetscCall(PetscOptionsReal("-tao_bnk_sval", "(developer) Hessian perturbation starting value", "", bnk->sval, &bnk->sval, NULL));
1108: PetscCall(PetscOptionsReal("-tao_bnk_imin", "(developer) minimum initial Hessian perturbation", "", bnk->imin, &bnk->imin, NULL));
1109: PetscCall(PetscOptionsReal("-tao_bnk_imax", "(developer) maximum initial Hessian perturbation", "", bnk->imax, &bnk->imax, NULL));
1110: PetscCall(PetscOptionsReal("-tao_bnk_imfac", "(developer) initial merit factor for Hessian perturbation", "", bnk->imfac, &bnk->imfac, NULL));
1111: PetscCall(PetscOptionsReal("-tao_bnk_pmin", "(developer) minimum Hessian perturbation", "", bnk->pmin, &bnk->pmin, NULL));
1112: PetscCall(PetscOptionsReal("-tao_bnk_pmax", "(developer) maximum Hessian perturbation", "", bnk->pmax, &bnk->pmax, NULL));
1113: PetscCall(PetscOptionsReal("-tao_bnk_pgfac", "(developer) Hessian perturbation growth factor", "", bnk->pgfac, &bnk->pgfac, NULL));
1114: PetscCall(PetscOptionsReal("-tao_bnk_psfac", "(developer) Hessian perturbation shrink factor", "", bnk->psfac, &bnk->psfac, NULL));
1115: PetscCall(PetscOptionsReal("-tao_bnk_pmgfac", "(developer) merit growth factor for Hessian perturbation", "", bnk->pmgfac, &bnk->pmgfac, NULL));
1116: PetscCall(PetscOptionsReal("-tao_bnk_pmsfac", "(developer) merit shrink factor for Hessian perturbation", "", bnk->pmsfac, &bnk->pmsfac, NULL));
1117: PetscCall(PetscOptionsReal("-tao_bnk_eta1", "(developer) threshold for rejecting step (-tao_bnk_update_type reduction)", "", bnk->eta1, &bnk->eta1, NULL));
1118: PetscCall(PetscOptionsReal("-tao_bnk_eta2", "(developer) threshold for accepting marginal step (-tao_bnk_update_type reduction)", "", bnk->eta2, &bnk->eta2, NULL));
1119: PetscCall(PetscOptionsReal("-tao_bnk_eta3", "(developer) threshold for accepting reasonable step (-tao_bnk_update_type reduction)", "", bnk->eta3, &bnk->eta3, NULL));
1120: PetscCall(PetscOptionsReal("-tao_bnk_eta4", "(developer) threshold for accepting good step (-tao_bnk_update_type reduction)", "", bnk->eta4, &bnk->eta4, NULL));
1121: PetscCall(PetscOptionsReal("-tao_bnk_alpha1", "(developer) radius reduction factor for rejected step (-tao_bnk_update_type reduction)", "", bnk->alpha1, &bnk->alpha1, NULL));
1122: PetscCall(PetscOptionsReal("-tao_bnk_alpha2", "(developer) radius reduction factor for marginally accepted bad step (-tao_bnk_update_type reduction)", "", bnk->alpha2, &bnk->alpha2, NULL));
1123: PetscCall(PetscOptionsReal("-tao_bnk_alpha3", "(developer) radius increase factor for reasonable accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha3, &bnk->alpha3, NULL));
1124: PetscCall(PetscOptionsReal("-tao_bnk_alpha4", "(developer) radius increase factor for good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha4, &bnk->alpha4, NULL));
1125: PetscCall(PetscOptionsReal("-tao_bnk_alpha5", "(developer) radius increase factor for very good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha5, &bnk->alpha5, NULL));
1126: PetscCall(PetscOptionsReal("-tao_bnk_nu1", "(developer) threshold for small line-search step length (-tao_bnk_update_type step)", "", bnk->nu1, &bnk->nu1, NULL));
1127: PetscCall(PetscOptionsReal("-tao_bnk_nu2", "(developer) threshold for reasonable line-search step length (-tao_bnk_update_type step)", "", bnk->nu2, &bnk->nu2, NULL));
1128: PetscCall(PetscOptionsReal("-tao_bnk_nu3", "(developer) threshold for large line-search step length (-tao_bnk_update_type step)", "", bnk->nu3, &bnk->nu3, NULL));
1129: PetscCall(PetscOptionsReal("-tao_bnk_nu4", "(developer) threshold for very large line-search step length (-tao_bnk_update_type step)", "", bnk->nu4, &bnk->nu4, NULL));
1130: PetscCall(PetscOptionsReal("-tao_bnk_omega1", "(developer) radius reduction factor for very small line-search step length (-tao_bnk_update_type step)", "", bnk->omega1, &bnk->omega1, NULL));
1131: PetscCall(PetscOptionsReal("-tao_bnk_omega2", "(developer) radius reduction factor for small line-search step length (-tao_bnk_update_type step)", "", bnk->omega2, &bnk->omega2, NULL));
1132: PetscCall(PetscOptionsReal("-tao_bnk_omega3", "(developer) radius factor for decent line-search step length (-tao_bnk_update_type step)", "", bnk->omega3, &bnk->omega3, NULL));
1133: PetscCall(PetscOptionsReal("-tao_bnk_omega4", "(developer) radius increase factor for large line-search step length (-tao_bnk_update_type step)", "", bnk->omega4, &bnk->omega4, NULL));
1134: PetscCall(PetscOptionsReal("-tao_bnk_omega5", "(developer) radius increase factor for very large line-search step length (-tao_bnk_update_type step)", "", bnk->omega5, &bnk->omega5, NULL));
1135: PetscCall(PetscOptionsReal("-tao_bnk_mu1_i", "(developer) threshold for accepting very good step (-tao_bnk_init_type interpolation)", "", bnk->mu1_i, &bnk->mu1_i, NULL));
1136: PetscCall(PetscOptionsReal("-tao_bnk_mu2_i", "(developer) threshold for accepting good step (-tao_bnk_init_type interpolation)", "", bnk->mu2_i, &bnk->mu2_i, NULL));
1137: PetscCall(PetscOptionsReal("-tao_bnk_gamma1_i", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma1_i, &bnk->gamma1_i, NULL));
1138: PetscCall(PetscOptionsReal("-tao_bnk_gamma2_i", "(developer) radius reduction factor for rejected bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma2_i, &bnk->gamma2_i, NULL));
1139: PetscCall(PetscOptionsReal("-tao_bnk_gamma3_i", "(developer) radius increase factor for accepted good step (-tao_bnk_init_type interpolation)", "", bnk->gamma3_i, &bnk->gamma3_i, NULL));
1140: PetscCall(PetscOptionsReal("-tao_bnk_gamma4_i", "(developer) radius increase factor for accepted very good step (-tao_bnk_init_type interpolation)", "", bnk->gamma4_i, &bnk->gamma4_i, NULL));
1141: PetscCall(PetscOptionsReal("-tao_bnk_theta_i", "(developer) trust region interpolation factor (-tao_bnk_init_type interpolation)", "", bnk->theta_i, &bnk->theta_i, NULL));
1142: PetscCall(PetscOptionsReal("-tao_bnk_mu1", "(developer) threshold for accepting very good step (-tao_bnk_update_type interpolation)", "", bnk->mu1, &bnk->mu1, NULL));
1143: PetscCall(PetscOptionsReal("-tao_bnk_mu2", "(developer) threshold for accepting good step (-tao_bnk_update_type interpolation)", "", bnk->mu2, &bnk->mu2, NULL));
1144: PetscCall(PetscOptionsReal("-tao_bnk_gamma1", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma1, &bnk->gamma1, NULL));
1145: PetscCall(PetscOptionsReal("-tao_bnk_gamma2", "(developer) radius reduction factor for rejected bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma2, &bnk->gamma2, NULL));
1146: PetscCall(PetscOptionsReal("-tao_bnk_gamma3", "(developer) radius increase factor for accepted good step (-tao_bnk_update_type interpolation)", "", bnk->gamma3, &bnk->gamma3, NULL));
1147: PetscCall(PetscOptionsReal("-tao_bnk_gamma4", "(developer) radius increase factor for accepted very good step (-tao_bnk_update_type interpolation)", "", bnk->gamma4, &bnk->gamma4, NULL));
1148: PetscCall(PetscOptionsReal("-tao_bnk_theta", "(developer) trust region interpolation factor (-tao_bnk_update_type interpolation)", "", bnk->theta, &bnk->theta, NULL));
1149: PetscCall(PetscOptionsReal("-tao_bnk_min_radius", "(developer) lower bound on initial radius", "", bnk->min_radius, &bnk->min_radius, NULL));
1150: PetscCall(PetscOptionsReal("-tao_bnk_max_radius", "(developer) upper bound on radius", "", bnk->max_radius, &bnk->max_radius, NULL));
1151: PetscCall(PetscOptionsReal("-tao_bnk_epsilon", "(developer) tolerance used when computing actual and predicted reduction", "", bnk->epsilon, &bnk->epsilon, NULL));
1152: PetscCall(PetscOptionsReal("-tao_bnk_as_tol", "(developer) initial tolerance used when estimating actively bounded variables", "", bnk->as_tol, &bnk->as_tol, NULL));
1153: PetscCall(PetscOptionsReal("-tao_bnk_as_step", "(developer) step length used when estimating actively bounded variables", "", bnk->as_step, &bnk->as_step, NULL));
1154: PetscCall(PetscOptionsInt("-tao_bnk_max_cg_its", "number of BNCG iterations to take for each Newton step", "", bnk->max_cg_its, &bnk->max_cg_its, NULL));
1155: PetscOptionsHeadEnd();
1157: PetscCall(TaoSetOptionsPrefix(bnk->bncg, ((PetscObject)tao)->prefix));
1158: PetscCall(TaoAppendOptionsPrefix(bnk->bncg, "tao_bnk_cg_"));
1159: PetscCall(TaoSetFromOptions(bnk->bncg));
1161: PetscCall(KSPSetOptionsPrefix(tao->ksp, ((PetscObject)tao)->prefix));
1162: PetscCall(KSPAppendOptionsPrefix(tao->ksp, "tao_bnk_"));
1163: PetscCall(KSPSetFromOptions(tao->ksp));
1164: PetscFunctionReturn(PETSC_SUCCESS);
1165: }
1167: /*------------------------------------------------------------*/
1169: PetscErrorCode TaoView_BNK(Tao tao, PetscViewer viewer)
1170: {
1171: TAO_BNK *bnk = (TAO_BNK *)tao->data;
1172: PetscInt nrejects;
1173: PetscBool isascii;
1175: PetscFunctionBegin;
1176: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1177: if (isascii) {
1178: PetscCall(PetscViewerASCIIPushTab(viewer));
1179: PetscCall(TaoView(bnk->bncg, viewer));
1180: if (bnk->M) {
1181: PetscCall(MatLMVMGetRejectCount(bnk->M, &nrejects));
1182: PetscCall(PetscViewerASCIIPrintf(viewer, "Rejected BFGS updates: %" PetscInt_FMT "\n", nrejects));
1183: }
1184: PetscCall(PetscViewerASCIIPrintf(viewer, "CG steps: %" PetscInt_FMT "\n", bnk->tot_cg_its));
1185: PetscCall(PetscViewerASCIIPrintf(viewer, "Newton steps: %" PetscInt_FMT "\n", bnk->newt));
1186: if (bnk->M) PetscCall(PetscViewerASCIIPrintf(viewer, "BFGS steps: %" PetscInt_FMT "\n", bnk->bfgs));
1187: PetscCall(PetscViewerASCIIPrintf(viewer, "Scaled gradient steps: %" PetscInt_FMT "\n", bnk->sgrad));
1188: PetscCall(PetscViewerASCIIPrintf(viewer, "Gradient steps: %" PetscInt_FMT "\n", bnk->grad));
1189: PetscCall(PetscViewerASCIIPrintf(viewer, "KSP termination reasons:\n"));
1190: PetscCall(PetscViewerASCIIPrintf(viewer, " atol: %" PetscInt_FMT "\n", bnk->ksp_atol));
1191: PetscCall(PetscViewerASCIIPrintf(viewer, " rtol: %" PetscInt_FMT "\n", bnk->ksp_rtol));
1192: PetscCall(PetscViewerASCIIPrintf(viewer, " ctol: %" PetscInt_FMT "\n", bnk->ksp_ctol));
1193: PetscCall(PetscViewerASCIIPrintf(viewer, " negc: %" PetscInt_FMT "\n", bnk->ksp_negc));
1194: PetscCall(PetscViewerASCIIPrintf(viewer, " dtol: %" PetscInt_FMT "\n", bnk->ksp_dtol));
1195: PetscCall(PetscViewerASCIIPrintf(viewer, " iter: %" PetscInt_FMT "\n", bnk->ksp_iter));
1196: PetscCall(PetscViewerASCIIPrintf(viewer, " othr: %" PetscInt_FMT "\n", bnk->ksp_othr));
1197: PetscCall(PetscViewerASCIIPopTab(viewer));
1198: }
1199: PetscFunctionReturn(PETSC_SUCCESS);
1200: }
1202: /* ---------------------------------------------------------- */
1204: /*MC
1205: TAOBNK - Shared base type for bounded Newton-Krylov algorithms.
1206: At each iteration, the BNK methods solve the symmetric system of equations
1207: Hk dk = -gk
1208: restricted to the free variables to obtain the step direction dk. The step can be
1209: globalized through trust-region methods, a line search, or a heuristic mixture of
1210: both.
1212: Options Database Keys:
1213: + -tao_bnk_max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop
1214: . -tao_bnk_init_type - trust radius initialization method ("constant", "direction", "interpolation")
1215: . -tao_bnk_update_type - trust radius update method ("step", "reduction", "interpolation")
1216: . -tao_bnk_as_type - active-set estimation method ("none", "bertsekas")
1217: . -tao_bnk_as_tol - (developer) initial tolerance used in estimating bounded active variables (-as_type bertsekas)
1218: . -tao_bnk_as_step - (developer) trial step length used in estimating bounded active variables (-as_type bertsekas)
1219: . -tao_bnk_sval - (developer) Hessian perturbation starting value
1220: . -tao_bnk_imin - (developer) minimum initial Hessian perturbation
1221: . -tao_bnk_imax - (developer) maximum initial Hessian perturbation
1222: . -tao_bnk_pmin - (developer) minimum Hessian perturbation
1223: . -tao_bnk_pmax - (developer) maximum Hessian perturbation
1224: . -tao_bnk_pgfac - (developer) Hessian perturbation growth factor
1225: . -tao_bnk_psfac - (developer) Hessian perturbation shrink factor
1226: . -tao_bnk_imfac - (developer) initial merit factor for Hessian perturbation
1227: . -tao_bnk_pmgfac - (developer) merit growth factor for Hessian perturbation
1228: . -tao_bnk_pmsfac - (developer) merit shrink factor for Hessian perturbation
1229: . -tao_bnk_eta1 - (developer) threshold for rejecting step (-update_type reduction)
1230: . -tao_bnk_eta2 - (developer) threshold for accepting marginal step (-update_type reduction)
1231: . -tao_bnk_eta3 - (developer) threshold for accepting reasonable step (-update_type reduction)
1232: . -tao_bnk_eta4 - (developer) threshold for accepting good step (-update_type reduction)
1233: . -tao_bnk_alpha1 - (developer) radius reduction factor for rejected step (-update_type reduction)
1234: . -tao_bnk_alpha2 - (developer) radius reduction factor for marginally accepted bad step (-update_type reduction)
1235: . -tao_bnk_alpha3 - (developer) radius increase factor for reasonable accepted step (-update_type reduction)
1236: . -tao_bnk_alpha4 - (developer) radius increase factor for good accepted step (-update_type reduction)
1237: . -tao_bnk_alpha5 - (developer) radius increase factor for very good accepted step (-update_type reduction)
1238: . -tao_bnk_epsilon - (developer) tolerance for small pred/actual ratios that trigger automatic step acceptance (-update_type reduction)
1239: . -tao_bnk_mu1 - (developer) threshold for accepting very good step (-update_type interpolation)
1240: . -tao_bnk_mu2 - (developer) threshold for accepting good step (-update_type interpolation)
1241: . -tao_bnk_gamma1 - (developer) radius reduction factor for rejected very bad step (-update_type interpolation)
1242: . -tao_bnk_gamma2 - (developer) radius reduction factor for rejected bad step (-update_type interpolation)
1243: . -tao_bnk_gamma3 - (developer) radius increase factor for accepted good step (-update_type interpolation)
1244: . -tao_bnk_gamma4 - (developer) radius increase factor for accepted very good step (-update_type interpolation)
1245: . -tao_bnk_theta - (developer) trust region interpolation factor (-update_type interpolation)
1246: . -tao_bnk_nu1 - (developer) threshold for small line-search step length (-update_type step)
1247: . -tao_bnk_nu2 - (developer) threshold for reasonable line-search step length (-update_type step)
1248: . -tao_bnk_nu3 - (developer) threshold for large line-search step length (-update_type step)
1249: . -tao_bnk_nu4 - (developer) threshold for very large line-search step length (-update_type step)
1250: . -tao_bnk_omega1 - (developer) radius reduction factor for very small line-search step length (-update_type step)
1251: . -tao_bnk_omega2 - (developer) radius reduction factor for small line-search step length (-update_type step)
1252: . -tao_bnk_omega3 - (developer) radius factor for decent line-search step length (-update_type step)
1253: . -tao_bnk_omega4 - (developer) radius increase factor for large line-search step length (-update_type step)
1254: . -tao_bnk_omega5 - (developer) radius increase factor for very large line-search step length (-update_type step)
1255: . -tao_bnk_mu1_i - (developer) threshold for accepting very good step (-init_type interpolation)
1256: . -tao_bnk_mu2_i - (developer) threshold for accepting good step (-init_type interpolation)
1257: . -tao_bnk_gamma1_i - (developer) radius reduction factor for rejected very bad step (-init_type interpolation)
1258: . -tao_bnk_gamma2_i - (developer) radius reduction factor for rejected bad step (-init_type interpolation)
1259: . -tao_bnk_gamma3_i - (developer) radius increase factor for accepted good step (-init_type interpolation)
1260: . -tao_bnk_gamma4_i - (developer) radius increase factor for accepted very good step (-init_type interpolation)
1261: - -tao_bnk_theta_i - (developer) trust region interpolation factor (-init_type interpolation)
1263: Level: beginner
1264: M*/
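/* A minimal usage sketch (illustrative only, not part of bnk.c): TAOBNK is not selected directly;
   one of its concrete subtypes is used instead, e.g. TAOBNLS (line search), TAOBNTR (trust region),
   or TAOBNTL (trust region with line-search fallback). The FormFunctionGradient and FormHessian
   callbacks, the user context, and the SolveWithBNKSketch name below are hypothetical, and the
   calls assume the current Tao callback API. */

extern PetscErrorCode FormFunctionGradient(Tao, Vec, PetscReal *, Vec, void *);
extern PetscErrorCode FormHessian(Tao, Vec, Mat, Mat, void *);

static PetscErrorCode SolveWithBNKSketch(Vec x, Vec xl, Vec xu, Mat H, void *user)
{
  Tao tao;

  PetscFunctionBegin;
  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetType(tao, TAOBNTR));          /* trust-region BNK variant */
  PetscCall(TaoSetSolution(tao, x));            /* initial guess, overwritten with the solution */
  PetscCall(TaoSetVariableBounds(tao, xl, xu)); /* lower and upper bound vectors */
  PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, user));
  PetscCall(TaoSetHessian(tao, H, H, FormHessian, user));
  PetscCall(TaoSetFromOptions(tao));            /* picks up the -tao_bnk_* options listed above */
  PetscCall(TaoSolve(tao));
  PetscCall(TaoDestroy(&tao));
  PetscFunctionReturn(PETSC_SUCCESS);
}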
1266: PetscErrorCode TaoCreate_BNK(Tao tao)
1267: {
1268: TAO_BNK *bnk;
1269: PC pc;
1271: PetscFunctionBegin;
1272: PetscCall(PetscNew(&bnk));
1274: tao->ops->setup = TaoSetUp_BNK;
1275: tao->ops->view = TaoView_BNK;
1276: tao->ops->setfromoptions = TaoSetFromOptions_BNK;
1277: tao->ops->destroy = TaoDestroy_BNK;
1279: /* Override default settings (unless already changed) */
1280: PetscCall(TaoParametersInitialize(tao));
1281: PetscObjectParameterSetDefault(tao, max_it, 50);
1282: PetscObjectParameterSetDefault(tao, trust0, 100.0);
1284: tao->data = (void *)bnk;
1286: /* Hessian shifting parameters */
1287: bnk->computehessian = TaoBNKComputeHessian;
1288: bnk->computestep = TaoBNKComputeStep;
1290: bnk->sval = 0.0;
1291: bnk->imin = 1.0e-4;
1292: bnk->imax = 1.0e+2;
1293: bnk->imfac = 1.0e-1;
1295: bnk->pmin = 1.0e-12;
1296: bnk->pmax = 1.0e+2;
1297: bnk->pgfac = 1.0e+1;
1298: bnk->psfac = 4.0e-1;
1299: bnk->pmgfac = 1.0e-1;
1300: bnk->pmsfac = 1.0e-1;
1302: /* Default values for trust-region radius update based on steplength */
1303: bnk->nu1 = 0.25;
1304: bnk->nu2 = 0.50;
1305: bnk->nu3 = 1.00;
1306: bnk->nu4 = 1.25;
1308: bnk->omega1 = 0.25;
1309: bnk->omega2 = 0.50;
1310: bnk->omega3 = 1.00;
1311: bnk->omega4 = 2.00;
1312: bnk->omega5 = 4.00;
1314: /* Default values for trust-region radius update based on reduction */
1315: bnk->eta1 = 1.0e-4;
1316: bnk->eta2 = 0.25;
1317: bnk->eta3 = 0.50;
1318: bnk->eta4 = 0.90;
1320: bnk->alpha1 = 0.25;
1321: bnk->alpha2 = 0.50;
1322: bnk->alpha3 = 1.00;
1323: bnk->alpha4 = 2.00;
1324: bnk->alpha5 = 4.00;
1326: /* Default values for trust-region radius update based on interpolation */
1327: bnk->mu1 = 0.10;
1328: bnk->mu2 = 0.50;
1330: bnk->gamma1 = 0.25;
1331: bnk->gamma2 = 0.50;
1332: bnk->gamma3 = 2.00;
1333: bnk->gamma4 = 4.00;
1335: bnk->theta = 0.05;
1337: /* Default values for trust region initialization based on interpolation */
1338: bnk->mu1_i = 0.35;
1339: bnk->mu2_i = 0.50;
1341: bnk->gamma1_i = 0.0625;
1342: bnk->gamma2_i = 0.5;
1343: bnk->gamma3_i = 2.0;
1344: bnk->gamma4_i = 5.0;
1346: bnk->theta_i = 0.25;
1348: /* Remaining parameters */
1349: bnk->max_cg_its = 0;
1350: bnk->min_radius = 1.0e-10;
1351: bnk->max_radius = 1.0e10;
1352: bnk->epsilon = PetscPowReal(PETSC_MACHINE_EPSILON, 2.0 / 3.0);
1353: bnk->as_tol = 1.0e-3;
1354: bnk->as_step = 1.0e-3;
1355: bnk->dmin = 1.0e-6;
1356: bnk->dmax = 1.0e6;
1358: bnk->M = NULL;
1359: bnk->bfgs_pre = NULL;
1360: bnk->init_type = BNK_INIT_INTERPOLATION;
1361: bnk->update_type = BNK_UPDATE_REDUCTION;
1362: bnk->as_type = BNK_AS_BERTSEKAS;
1364: /* Create the embedded BNCG solver */
1365: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &bnk->bncg));
1366: PetscCall(PetscObjectIncrementTabLevel((PetscObject)bnk->bncg, (PetscObject)tao, 1));
1367: PetscCall(TaoSetType(bnk->bncg, TAOBNCG));
1369: /* Create the line search */
1370: PetscCall(TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch));
1371: PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1));
1372: PetscCall(TaoLineSearchSetType(tao->linesearch, TAOLINESEARCHMT));
1373: PetscCall(TaoLineSearchUseTaoRoutines(tao->linesearch, tao));
1375: /* Set linear solver to default for symmetric matrices */
1376: PetscCall(KSPCreate(((PetscObject)tao)->comm, &tao->ksp));
1377: PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1));
1378: PetscCall(KSPSetType(tao->ksp, KSPSTCG));
1379: PetscCall(KSPGetPC(tao->ksp, &pc));
1380: PetscCall(PCSetType(pc, PCLMVM));
1381: PetscFunctionReturn(PETSC_SUCCESS);
1382: }