Actual source code: bmrm.c

#include <../src/tao/unconstrained/impls/bmrm/bmrm.h>

static PetscErrorCode init_df_solver(TAO_DF *);
static PetscErrorCode ensure_df_space(PetscInt, TAO_DF *);
static PetscErrorCode destroy_df_solver(TAO_DF *);
static PetscReal      phi(PetscReal *, PetscInt, PetscReal, PetscReal *, PetscReal, PetscReal *, PetscReal *, PetscReal *);
static PetscInt       project(PetscInt, PetscReal *, PetscReal, PetscReal *, PetscReal *, PetscReal *, PetscReal *, PetscReal *, TAO_DF *);
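/* Inner QP solver for the BMRM master problem:

     minimize    0.5*x'*Q*x + f'*x
     subject to  a'*x = b,  l <= x <= u

   solved with a projected gradient method in the style of Dai-Fletcher
   (see the phi()/project() routines below for the projection onto the
   feasible set), combined with an adaptive nonmonotone line search and a
   Barzilai-Borwein-type step length. */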

static PetscErrorCode solve(TAO_DF *df)
{
  PetscInt    i, j, innerIter, it, it2, luv, info;
  PetscReal   gd, max, ak, bk, akold, bkold, lamnew, alpha, kktlam = 0.0, lam_ext;
  PetscReal   DELTAsv, ProdDELTAsv;
  PetscReal   c, *tempQ;
  PetscReal  *x = df->x, *a = df->a, b = df->b, *l = df->l, *u = df->u, tol = df->tol;
  PetscReal  *tempv = df->tempv, *y = df->y, *g = df->g, *d = df->d, *Qd = df->Qd;
  PetscReal  *xplus = df->xplus, *tplus = df->tplus, *sk = df->sk, *yk = df->yk;
  PetscReal **Q = df->Q, *f = df->f, *t = df->t;
  PetscInt    dim = df->dim, *ipt = df->ipt, *ipt2 = df->ipt2, *uv = df->uv;

  PetscCheck(dim >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Expected dim %" PetscInt_FMT " >= 0", dim);
  /* variables for the adaptive nonmonotone linesearch */
  PetscInt  L, llast;
  PetscReal fr, fbest, fv, fc, fv0;

  c = BMRM_INFTY;

  DELTAsv = EPS_SV;
  if (tol <= 1.0e-5 || dim <= 20) ProdDELTAsv = 0.0F;
  else ProdDELTAsv = EPS_SV;
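  /* DELTAsv is the tolerance used below to decide whether a component of x sits
     on a bound (for the KKT test); ProdDELTAsv is the threshold under which
     components are treated as zero when forming the sparse products with Q.
     For tight tolerances or small problems the sparsity shortcut is disabled. */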

  for (i = 0; i < dim; i++) tempv[i] = -x[i];

  lam_ext = 0.0;

  /* Project the initial solution */
  project(dim, a, b, tempv, l, u, x, &lam_ext, df);

  /* Compute gradient
     g = Q*x + f; */

  it = 0;
  for (i = 0; i < dim; i++) {
    if (PetscAbsReal(x[i]) > ProdDELTAsv) ipt[it++] = i;
  }

  PetscCall(PetscArrayzero(t, dim));
  for (i = 0; i < it; i++) {
    tempQ = Q[ipt[i]];
    for (j = 0; j < dim; j++) t[j] += (tempQ[j] * x[ipt[i]]);
  }
  for (i = 0; i < dim; i++) g[i] = t[i] + f[i];

  /* y = -(x_{k} - g_{k}) */
  for (i = 0; i < dim; i++) y[i] = g[i] - x[i];

  /* Project x_{k} - g_{k} */
  project(dim, a, b, y, l, u, tempv, &lam_ext, df);

  /* y = P(x_{k} - g_{k}) - x_{k} */
  max = ALPHA_MIN;
  for (i = 0; i < dim; i++) {
    y[i] = tempv[i] - x[i];
    if (PetscAbsReal(y[i]) > max) max = PetscAbsReal(y[i]);
  }

  if (max < tol * 1e-3) return PETSC_SUCCESS;

  alpha = 1.0 / max;

  /* fv0 = f(x_{0}). Recall t = Q x_{k}  */
  fv0 = 0.0;
  for (i = 0; i < dim; i++) fv0 += x[i] * (0.5 * t[i] + f[i]);

  /* adaptive nonmonotone linesearch */
  L     = 2;
  fr    = ALPHA_MAX;
  fbest = fv0;
  fc    = fv0;
  llast = 0;
  akold = bkold = 0.0;
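  /* Bookkeeping for the nonmonotone acceptance test used inside the main loop:
     fbest is the best objective value seen so far, fc the largest value since
     the last improvement, and after L consecutive non-improving steps fc is
     promoted to the reference value fr against which trial points are tested. */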

  /*     Main projected-gradient iteration begins     */
  for (innerIter = 1; innerIter <= df->maxPGMIter; innerIter++) {
    /* tempv = -(x_{k} - alpha*g_{k}) */
    for (i = 0; i < dim; i++) tempv[i] = alpha * g[i] - x[i];

    /* Project x_{k} - alpha*g_{k} */
    project(dim, a, b, tempv, l, u, y, &lam_ext, df);

    /* gd = \inner{d_{k}}{g_{k}}
        d = P(x_{k} - alpha*g_{k}) - x_{k}
    */
    gd = 0.0;
    for (i = 0; i < dim; i++) {
      d[i] = y[i] - x[i];
      gd += d[i] * g[i];
    }

    /* Gradient update: compute Qd = Q*d directly, or as Qd = Q*y - t (reusing
       t = Q*x), whichever index set is sparser; Qd is used below to update
       t = Q*x and hence g = Q*x + f. */

    it = it2 = 0;
    for (i = 0; i < dim; i++) {
      if (PetscAbsReal(d[i]) > (ProdDELTAsv * 1.0e-2)) ipt[it++] = i;
    }
    for (i = 0; i < dim; i++) {
      if (PetscAbsReal(y[i]) > ProdDELTAsv) ipt2[it2++] = i;
    }

    PetscCall(PetscArrayzero(Qd, dim));
    /* compute Qd = Q*d */
    if (it < it2) {
      for (i = 0; i < it; i++) {
        tempQ = Q[ipt[i]];
        for (j = 0; j < dim; j++) Qd[j] += (tempQ[j] * d[ipt[i]]);
      }
    } else { /* compute Qd = Q*y-t */
      for (i = 0; i < it2; i++) {
        tempQ = Q[ipt2[i]];
        for (j = 0; j < dim; j++) Qd[j] += (tempQ[j] * y[ipt2[i]]);
      }
      for (j = 0; j < dim; j++) Qd[j] -= t[j];
    }

    /* ak = inner{d_{k}}{d_{k}} */
    ak = 0.0;
    for (i = 0; i < dim; i++) ak += d[i] * d[i];

    bk = 0.0;
    for (i = 0; i < dim; i++) bk += d[i] * Qd[i];
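    /* Along the direction d the objective is the one-dimensional quadratic
       q(lam) = f(x) + lam*gd + 0.5*lam^2*bk, so when d is a descent direction
       and the curvature bk = d'*Q*d is significant, its exact minimizer is
       lam = -gd/bk; otherwise fall back to the full step lam = 1. */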

    if (bk > EPS * ak && gd < 0.0) lamnew = -gd / bk;
    else lamnew = 1.0;

    /* fv = f(x_{k} + d_{k}), the objective at the full step */
    fv = 0.0;
    for (i = 0; i < dim; i++) {
      xplus[i] = x[i] + d[i];
      tplus[i] = t[i] + Qd[i];
      fv += xplus[i] * (0.5 * tplus[i] + f[i]);
    }

    /* fr is the reference value for the nonmonotone acceptance test below (fv0 on
       the first iteration); if the full step does not improve on it, retreat to
       the step length lamnew computed above */
    if ((innerIter == 1 && fv >= fv0) || (innerIter > 1 && fv >= fr)) {
      fv = 0.0;
      for (i = 0; i < dim; i++) {
        xplus[i] = x[i] + lamnew * d[i];
        tplus[i] = t[i] + lamnew * Qd[i];
        fv += xplus[i] * (0.5 * tplus[i] + f[i]);
      }
    }

    for (i = 0; i < dim; i++) {
      sk[i] = xplus[i] - x[i];
      yk[i] = tplus[i] - t[i];
      x[i]  = xplus[i];
      t[i]  = tplus[i];
      g[i]  = t[i] + f[i];
    }

    /* update the line search control parameters */
    if (fv < fbest) {
      fbest = fv;
      fc    = fv;
      llast = 0;
    } else {
      fc = (fc > fv ? fc : fv);
      llast++;
      if (llast == L) {
        fr    = fc;
        fc    = fv;
        llast = 0;
      }
    }
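    /* Step-length update: with sk = x_{k+1} - x_{k} and yk = Q*sk, the new alpha
       is sk'sk / sk'yk (a Barzilai-Borwein-type choice), averaged with the
       previous pair when that pair carried curvature information, and
       safeguarded into [ALPHA_MIN, ALPHA_MAX]. */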

    ak = bk = 0.0;
    for (i = 0; i < dim; i++) {
      ak += sk[i] * sk[i];
      bk += sk[i] * yk[i];
    }

    if (bk <= EPS * ak) alpha = ALPHA_MAX;
    else {
      if (bkold < EPS * akold) alpha = ak / bk;
      else alpha = (akold + ak) / (bkold + bk);

      if (alpha > ALPHA_MAX) alpha = ALPHA_MAX;
      else if (alpha < ALPHA_MIN) alpha = ALPHA_MIN;
    }

    akold = ak;
    bkold = bk;

    /* stopping criterion based on KKT conditions */
    /* at optimal, gradient of lagrangian w.r.t. x is zero */
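    /* For this QP the Lagrangian stationarity condition reads
         g_i + kktlam*a_i - beta_i + eta_i = 0,
       with beta_i, eta_i >= 0 the multipliers of the lower/upper bounds, which
       vanish for components strictly between their bounds.  kktlam is therefore
       estimated below by averaging -a_i*g_i over the free components (in BMRM
       all a_i are set to 1), and the sign conditions on beta and eta are
       checked for the components sitting at a bound. */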

    bk = 0.0;
    for (i = 0; i < dim; i++) bk += x[i] * x[i];

    if (PetscSqrtReal(ak) < tol * 10 * PetscSqrtReal(bk)) {
      it     = 0;
      luv    = 0;
      kktlam = 0.0;
      for (i = 0; i < dim; i++) {
        /* x[i] lies strictly between its bounds, so the Lagrange multipliers for
           the box constraints vanish there; the multiplier for the linear
           constraint a'x = b is then estimated as below */
        if ((x[i] > DELTAsv) && (x[i] < c - DELTAsv)) {
          ipt[it++] = i;
          kktlam    = kktlam - a[i] * g[i];
        } else uv[luv++] = i;
      }

      if (it == 0 && PetscSqrtReal(ak) < tol * 0.5 * PetscSqrtReal(bk)) return PETSC_SUCCESS;
      else {
        kktlam = kktlam / it;
        info   = 1;
        for (i = 0; i < it; i++) {
          if (PetscAbsReal(a[ipt[i]] * g[ipt[i]] + kktlam) > tol) {
            info = 0;
            break;
          }
        }
        if (info == 1) {
          for (i = 0; i < luv; i++) {
            if (x[uv[i]] <= DELTAsv) {
              /* x[i] is at its lower bound, so the multiplier (say, beta) for the
                 lower bound may be nonzero; the gradient of the Lagrangian without
                 beta must therefore be >= 0 (up to the tolerance) */
              if (g[uv[i]] + kktlam * a[uv[i]] < -tol) {
                info = 0;
                break;
              }
            } else {
              /* x[i] is at its upper bound, so the multiplier (say, eta) for the
                 upper bound may be nonzero; the gradient of the Lagrangian without
                 eta must therefore be <= 0 (up to the tolerance) */
              if (g[uv[i]] + kktlam * a[uv[i]] > tol) {
                info = 0;
                break;
              }
            }
          }
        }

        if (info == 1) return PETSC_SUCCESS;
      }
    }
  }
  return PETSC_SUCCESS;
}

/* The main solver function

   f = Remp(W) is what the user provides from the application layer, so
   TaoComputeObjectiveAndGradient() returns the value of Remp(W) and a
   subgradient of Remp(W).

   The regularizer is assumed to be the L2 norm, lambda*0.5*W'W.
*/
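/* In bundle-method terms: the problem being minimized is
     J(w) = lambda/2*||w||^2 + Remp(w).
   At iteration t a subgradient a_t of Remp at w_{t-1} and the offset
     b_t = Remp(w_{t-1}) - <w_{t-1}, a_t>
   define the cutting plane <a_t, w> + b_t <= Remp(w).  The master problem
     min_w lambda/2*||w||^2 + max_i (<a_i, w> + b_i)
   is solved through its dual, the QP handled by solve() above with
   Q_ij = <a_i, a_j>/lambda and simplex constraints on x, after which
     w_t = -(1/lambda) * sum_i x_i a_i
   and the dual value J_t(w_t) = sum_i x_i b_i - lambda/2*||w_t||^2 serves as a
   lower bound on J. */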

static PetscErrorCode make_grad_node(Vec X, Vec_Chain **p)
{
  PetscFunctionBegin;
  PetscCall(PetscNew(p));
  PetscCall(VecDuplicate(X, &(*p)->V));
  PetscCall(VecCopy(X, (*p)->V));
  (*p)->next = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode destroy_grad_list(Vec_Chain *head)
{
  Vec_Chain *p = head->next, *q;

  PetscFunctionBegin;
  while (p) {
    q = p->next;
    PetscCall(VecDestroy(&p->V));
    PetscCall(PetscFree(p));
    p = q;
  }
  head->next = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoSolve_BMRM(Tao tao)
{
  TAO_DF    df;
  TAO_BMRM *bmrm = (TAO_BMRM *)tao->data;

  /* Values and pointers to parts of the optimization problem */
  PetscReal   f = 0.0;
  Vec         W = tao->solution;
  Vec         G = tao->gradient;
  PetscReal   lambda;
  PetscReal   bt;
  Vec_Chain   grad_list, *tail_glist, *pgrad;
  PetscInt    i;
  PetscMPIInt rank;
  /* Used in convergence criteria check */
  PetscReal reg;
  PetscReal jtwt = 0.0, max_jtwt, pre_epsilon, epsilon, jw, min_jw;
  PetscReal innerSolverTol;
  MPI_Comm  comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  lambda = bmrm->lambda;

  /* Check Stopping Condition */
  tao->step      = 1.0;
  max_jtwt       = -BMRM_INFTY;
  min_jw         = BMRM_INFTY;
  innerSolverTol = 1.0;
  epsilon        = 0.0;

  if (rank == 0) {
    PetscCall(init_df_solver(&df));
    grad_list.next = NULL;
    tail_glist     = &grad_list;
  }

  df.tol      = 1e-6;
  tao->reason = TAO_CONTINUE_ITERATING;

  /*-----------------Algorithm Begins------------------------*/
  /* make the scatter */
  PetscCall(VecScatterCreateToZero(W, &bmrm->scatter, &bmrm->local_w));
  PetscCall(VecAssemblyBegin(bmrm->local_w));
  PetscCall(VecAssemblyEnd(bmrm->local_w));

  /* NOTE: In application pass the sub-gradient of Remp(W) */
  PetscCall(TaoComputeObjectiveAndGradient(tao, W, &f, G));
  PetscCall(TaoLogConvergenceHistory(tao, f, 1.0, 0.0, tao->ksp_its));
  PetscCall(TaoMonitor(tao, tao->niter, f, 1.0, 0.0, tao->step));
  PetscUseTypeMethod(tao, convergencetest, tao->cnvP);

  while (tao->reason == TAO_CONTINUE_ITERATING) {
    /* Call general purpose update function */
    PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);

    /* compute bt = Remp(Wt-1) - <Wt-1, At> */
    PetscCall(VecDot(W, G, &bt));
    bt = f - bt;

    /* First gather the gradient to the rank-0 node */
    PetscCall(VecScatterBegin(bmrm->scatter, G, bmrm->local_w, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(bmrm->scatter, G, bmrm->local_w, INSERT_VALUES, SCATTER_FORWARD));

    /* Bring up the inner solver */
    if (rank == 0) {
      PetscCall(ensure_df_space(tao->niter + 1, &df));
      PetscCall(make_grad_node(bmrm->local_w, &pgrad));
      tail_glist->next = pgrad;
      tail_glist       = pgrad;

      df.a[tao->niter] = 1.0;
      df.f[tao->niter] = -bt;
      df.u[tao->niter] = 1.0;
      df.l[tao->niter] = 0.0;

      /* set up the Gram matrix Q of the cutting-plane gradients, Q[i][j] = <a_i, a_j>/lambda */
      pgrad = grad_list.next;
      for (i = 0; i <= tao->niter; i++) {
        PetscCheck(pgrad, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Assert that there are at least tao->niter+1 pgrad available");
        PetscCall(VecDot(pgrad->V, bmrm->local_w, &reg));
        df.Q[i][tao->niter] = df.Q[tao->niter][i] = reg / lambda;
        pgrad                                     = pgrad->next;
      }

      if (tao->niter > 0) {
        df.x[tao->niter] = 0.0;
        PetscCall(solve(&df));
      } else df.x[0] = 1.0;

      /* now computing Jt*(alpha_t) which should be = Jt(wt) to check convergence */
      jtwt = 0.0;
      PetscCall(VecSet(bmrm->local_w, 0.0));
      pgrad = grad_list.next;
      for (i = 0; i <= tao->niter; i++) {
        jtwt -= df.x[i] * df.f[i];
        PetscCall(VecAXPY(bmrm->local_w, -df.x[i] / lambda, pgrad->V));
        pgrad = pgrad->next;
      }

      PetscCall(VecNorm(bmrm->local_w, NORM_2, &reg));
      reg = 0.5 * lambda * reg * reg;
      jtwt -= reg;
    } /* end if rank == 0 */

    /* scatter the new W to all nodes */
    PetscCall(VecScatterBegin(bmrm->scatter, bmrm->local_w, W, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(bmrm->scatter, bmrm->local_w, W, INSERT_VALUES, SCATTER_REVERSE));

    PetscCall(TaoComputeObjectiveAndGradient(tao, W, &f, G));

    PetscCallMPI(MPI_Bcast(&jtwt, 1, MPIU_REAL, 0, comm));
    PetscCallMPI(MPI_Bcast(&reg, 1, MPIU_REAL, 0, comm));

    jw = reg + f; /* J(w) = regularizer + Remp(w) */
    if (jw < min_jw) min_jw = jw;
    if (jtwt > max_jtwt) max_jtwt = jtwt;

    pre_epsilon = epsilon;
    epsilon     = min_jw - jtwt;
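    /* epsilon is the gap between the best primal value J(w) seen so far and the
       current lower bound J_t(w_t) from the bundle model; it is the quantity
       reported to TaoMonitor() and drives the convergence test below */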

    if (rank == 0) {
      if (innerSolverTol > epsilon) innerSolverTol = epsilon;
      else if (innerSolverTol < 1e-7) innerSolverTol = 1e-7;

      /* if the annealing doesn't work well, lower the inner solver tolerance */
      if (pre_epsilon < epsilon) innerSolverTol *= 0.2;

      df.tol = innerSolverTol * 0.5;
    }

    tao->niter++;
    PetscCall(TaoLogConvergenceHistory(tao, min_jw, epsilon, 0.0, tao->ksp_its));
    PetscCall(TaoMonitor(tao, tao->niter, min_jw, epsilon, 0.0, tao->step));
    PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
  }

  /* free all the memory */
  if (rank == 0) {
    PetscCall(destroy_grad_list(&grad_list));
    PetscCall(destroy_df_solver(&df));
  }

  PetscCall(VecDestroy(&bmrm->local_w));
  PetscCall(VecScatterDestroy(&bmrm->scatter));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ---------------------------------------------------------- */

static PetscErrorCode TaoSetup_BMRM(Tao tao)
{
  PetscFunctionBegin;
  /* Allocate some arrays */
  if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*------------------------------------------------------------*/
static PetscErrorCode TaoDestroy_BMRM(Tao tao)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(tao->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoSetFromOptions_BMRM(Tao tao, PetscOptionItems *PetscOptionsObject)
{
  TAO_BMRM *bmrm = (TAO_BMRM *)tao->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "BMRM for regularized risk minimization");
  PetscCall(PetscOptionsReal("-tao_bmrm_lambda", "regulariser weight", "", bmrm->lambda, &bmrm->lambda, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*------------------------------------------------------------*/
static PetscErrorCode TaoView_BMRM(Tao tao, PetscViewer viewer)
{
  PetscBool isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscCall(PetscViewerASCIIPopTab(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*------------------------------------------------------------*/
/*MC
  TAOBMRM - bundle method for regularized risk minimization

  Options Database Keys:
. -tao_bmrm_lambda - regulariser weight

  Level: beginner
M*/

PETSC_EXTERN PetscErrorCode TaoCreate_BMRM(Tao tao)
{
  TAO_BMRM *bmrm;

  PetscFunctionBegin;
  tao->ops->setup          = TaoSetup_BMRM;
  tao->ops->solve          = TaoSolve_BMRM;
  tao->ops->view           = TaoView_BMRM;
  tao->ops->setfromoptions = TaoSetFromOptions_BMRM;
  tao->ops->destroy        = TaoDestroy_BMRM;

  PetscCall(PetscNew(&bmrm));
  bmrm->lambda = 1.0;
  tao->data    = (void *)bmrm;

  /* Override default settings (unless already changed) */
  PetscCall(TaoParametersInitialize(tao));
  PetscObjectParameterSetDefault(tao, max_it, 2000);
  PetscObjectParameterSetDefault(tao, max_funcs, 4000);
  PetscObjectParameterSetDefault(tao, gatol, 1.0e-12);
  PetscObjectParameterSetDefault(tao, grtol, 1.0e-12);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode init_df_solver(TAO_DF *df)
{
  PetscInt i, n = INCRE_DIM;

  PetscFunctionBegin;
  /* default values */
  df->maxProjIter = 200;
  df->maxPGMIter  = 300000;
  df->b           = 1.0;
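  /* with b = 1 and the a[i] all set to 1 by TaoSolve_BMRM(), the equality
     constraint a'x = b is the simplex constraint sum_i x_i = 1 */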

  /* memory space required by Dai-Fletcher */
  df->cur_num_cp = n;
  PetscCall(PetscMalloc1(n, &df->f));
  PetscCall(PetscMalloc1(n, &df->a));
  PetscCall(PetscMalloc1(n, &df->l));
  PetscCall(PetscMalloc1(n, &df->u));
  PetscCall(PetscMalloc1(n, &df->x));
  PetscCall(PetscMalloc1(n, &df->Q));

  for (i = 0; i < n; i++) PetscCall(PetscMalloc1(n, &df->Q[i]));

  PetscCall(PetscMalloc1(n, &df->g));
  PetscCall(PetscMalloc1(n, &df->y));
  PetscCall(PetscMalloc1(n, &df->tempv));
  PetscCall(PetscMalloc1(n, &df->d));
  PetscCall(PetscMalloc1(n, &df->Qd));
  PetscCall(PetscMalloc1(n, &df->t));
  PetscCall(PetscMalloc1(n, &df->xplus));
  PetscCall(PetscMalloc1(n, &df->tplus));
  PetscCall(PetscMalloc1(n, &df->sk));
  PetscCall(PetscMalloc1(n, &df->yk));

  PetscCall(PetscMalloc1(n, &df->ipt));
  PetscCall(PetscMalloc1(n, &df->ipt2));
  PetscCall(PetscMalloc1(n, &df->uv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ensure_df_space(PetscInt dim, TAO_DF *df)
{
  PetscReal *tmp, **tmp_Q;
  PetscInt   i, n, old_n;

  PetscFunctionBegin;
  df->dim = dim;
  if (dim <= df->cur_num_cp) PetscFunctionReturn(PETSC_SUCCESS);

  old_n = df->cur_num_cp;
  df->cur_num_cp += INCRE_DIM;
  n = df->cur_num_cp;

  /* memory space required by Dai-Fletcher */
  PetscCall(PetscMalloc1(n, &tmp));
  PetscCall(PetscArraycpy(tmp, df->f, old_n));
  PetscCall(PetscFree(df->f));
  df->f = tmp;

  PetscCall(PetscMalloc1(n, &tmp));
  PetscCall(PetscArraycpy(tmp, df->a, old_n));
  PetscCall(PetscFree(df->a));
  df->a = tmp;

  PetscCall(PetscMalloc1(n, &tmp));
  PetscCall(PetscArraycpy(tmp, df->l, old_n));
  PetscCall(PetscFree(df->l));
  df->l = tmp;

  PetscCall(PetscMalloc1(n, &tmp));
  PetscCall(PetscArraycpy(tmp, df->u, old_n));
  PetscCall(PetscFree(df->u));
  df->u = tmp;

  PetscCall(PetscMalloc1(n, &tmp));
  PetscCall(PetscArraycpy(tmp, df->x, old_n));
  PetscCall(PetscFree(df->x));
  df->x = tmp;

  PetscCall(PetscMalloc1(n, &tmp_Q));
  for (i = 0; i < n; i++) {
    PetscCall(PetscMalloc1(n, &tmp_Q[i]));
    if (i < old_n) {
      PetscCall(PetscArraycpy(tmp_Q[i], df->Q[i], old_n));
      PetscCall(PetscFree(df->Q[i]));
    }
  }

  PetscCall(PetscFree(df->Q));
  df->Q = tmp_Q;
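  /* the remaining arrays are per-iteration workspace whose contents need not be
     preserved, so they are simply freed and reallocated at the new size */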

  PetscCall(PetscFree(df->g));
  PetscCall(PetscMalloc1(n, &df->g));

  PetscCall(PetscFree(df->y));
  PetscCall(PetscMalloc1(n, &df->y));

  PetscCall(PetscFree(df->tempv));
  PetscCall(PetscMalloc1(n, &df->tempv));

  PetscCall(PetscFree(df->d));
  PetscCall(PetscMalloc1(n, &df->d));

  PetscCall(PetscFree(df->Qd));
  PetscCall(PetscMalloc1(n, &df->Qd));

  PetscCall(PetscFree(df->t));
  PetscCall(PetscMalloc1(n, &df->t));

  PetscCall(PetscFree(df->xplus));
  PetscCall(PetscMalloc1(n, &df->xplus));

  PetscCall(PetscFree(df->tplus));
  PetscCall(PetscMalloc1(n, &df->tplus));

  PetscCall(PetscFree(df->sk));
  PetscCall(PetscMalloc1(n, &df->sk));

  PetscCall(PetscFree(df->yk));
  PetscCall(PetscMalloc1(n, &df->yk));

  PetscCall(PetscFree(df->ipt));
  PetscCall(PetscMalloc1(n, &df->ipt));

  PetscCall(PetscFree(df->ipt2));
  PetscCall(PetscMalloc1(n, &df->ipt2));

  PetscCall(PetscFree(df->uv));
  PetscCall(PetscMalloc1(n, &df->uv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode destroy_df_solver(TAO_DF *df)
{
  PetscInt i;

  PetscFunctionBegin;
  PetscCall(PetscFree(df->f));
  PetscCall(PetscFree(df->a));
  PetscCall(PetscFree(df->l));
  PetscCall(PetscFree(df->u));
  PetscCall(PetscFree(df->x));

  for (i = 0; i < df->cur_num_cp; i++) PetscCall(PetscFree(df->Q[i]));
  PetscCall(PetscFree(df->Q));
  PetscCall(PetscFree(df->ipt));
  PetscCall(PetscFree(df->ipt2));
  PetscCall(PetscFree(df->uv));
  PetscCall(PetscFree(df->g));
  PetscCall(PetscFree(df->y));
  PetscCall(PetscFree(df->tempv));
  PetscCall(PetscFree(df->d));
  PetscCall(PetscFree(df->Qd));
  PetscCall(PetscFree(df->t));
  PetscCall(PetscFree(df->xplus));
  PetscCall(PetscFree(df->tplus));
  PetscCall(PetscFree(df->sk));
  PetscCall(PetscFree(df->yk));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Piecewise linear monotone target function for the Dai-Fletcher projector */
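/* phi(lambda) returns r(lambda) = a'*x(lambda) - b, where
   x_i(lambda) = median(l_i, -c_i + lambda*a_i, u_i); r is nondecreasing in
   lambda and its root gives the optimal multiplier of the equality constraint. */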
static PetscReal phi(PetscReal *x, PetscInt n, PetscReal lambda, PetscReal *a, PetscReal b, PetscReal *c, PetscReal *l, PetscReal *u)
{
  PetscReal r = 0.0;
  PetscInt  i;

  for (i = 0; i < n; i++) {
    x[i] = -c[i] + lambda * a[i];
    if (x[i] > u[i]) x[i] = u[i];
    else if (x[i] < l[i]) x[i] = l[i];
    r += a[i] * x[i];
  }
  return r - b;
}

/** Modified Dai-Fletcher QP projector solves the problem:
 *
 *      minimise  0.5*x'*x + c'*x
 *      subj to   a'*x = b
 *                l \leq x \leq u
 *
 *  \param c The negative of the point to be projected onto the feasible set
 *           (callers in solve() pass tempv = -point)
 */
static PetscInt project(PetscInt n, PetscReal *a, PetscReal b, PetscReal *c, PetscReal *l, PetscReal *u, PetscReal *x, PetscReal *lam_ext, TAO_DF *df)
{
  PetscReal lambda, lambdal, lambdau, dlambda, lambda_new;
  PetscReal r, rl, ru, s;
  PetscInt  innerIter;
  PetscBool nonNegativeSlack = PETSC_FALSE;

  *lam_ext  = 0;
  lambda    = 0;
  dlambda   = 0.5;
  innerIter = 1;

  /*  \phi(x;lambda) := 0.5*x'*x + c'*x - lambda*(a'*x-b)
   *
   *  Optimality conditions for \phi:
   *
   *  1. lambda   <= 0
   *  2. r        <= 0
   *  3. r*lambda == 0
   */
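  /* The projection is found by locating the root of the monotone piecewise
     linear function r(lambda) = phi(...): first bracket the root by expanding
     an interval [lambdal, lambdau], then refine it with a safeguarded secant
     iteration. */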

  /* Bracketing Phase */
  r = phi(x, n, lambda, a, b, c, l, u);

  if (nonNegativeSlack) {
    /* inequality constraint, i.e., with \xi >= 0 constraint */
    if (r < TOL_R) return PETSC_SUCCESS;
  } else {
    /* equality constraint, i.e., without \xi >= 0 constraint */
    if (PetscAbsReal(r) < TOL_R) return PETSC_SUCCESS;
  }

  if (r < 0.0) {
    lambdal = lambda;
    rl      = r;
    lambda  = lambda + dlambda;
    r       = phi(x, n, lambda, a, b, c, l, u);
    while (r < 0.0 && dlambda < BMRM_INFTY) {
      lambdal = lambda;
      s       = rl / r - 1.0;
      if (s < 0.1) s = 0.1;
      dlambda = dlambda + dlambda / s;
      lambda  = lambda + dlambda;
      rl      = r;
      r       = phi(x, n, lambda, a, b, c, l, u);
    }
    lambdau = lambda;
    ru      = r;
  } else {
    lambdau = lambda;
    ru      = r;
    lambda  = lambda - dlambda;
    r       = phi(x, n, lambda, a, b, c, l, u);
    while (r > 0.0 && dlambda > -BMRM_INFTY) {
      lambdau = lambda;
      s       = ru / r - 1.0;
      if (s < 0.1) s = 0.1;
      dlambda = dlambda + dlambda / s;
      lambda  = lambda - dlambda;
      ru      = r;
      r       = phi(x, n, lambda, a, b, c, l, u);
    }
    lambdal = lambda;
    rl      = r;
  }

  PetscCheckAbort(PetscAbsReal(dlambda) <= BMRM_INFTY, PETSC_COMM_SELF, PETSC_ERR_PLIB, "L2N2_DaiFletcherPGM detected Infeasible QP problem!");

  if (ru == 0) return innerIter;

  /* Secant Phase */
  s       = 1.0 - rl / ru;
  dlambda = dlambda / s;
  lambda  = lambdau - dlambda;
  r       = phi(x, n, lambda, a, b, c, l, u);

  while (PetscAbsReal(r) > TOL_R && dlambda > TOL_LAM * (1.0 + PetscAbsReal(lambda)) && innerIter < df->maxProjIter) {
    innerIter++;
    if (r > 0.0) {
      if (s <= 2.0) {
        lambdau = lambda;
        ru      = r;
        s       = 1.0 - rl / ru;
        dlambda = (lambdau - lambdal) / s;
        lambda  = lambdau - dlambda;
      } else {
        s = ru / r - 1.0;
        if (s < 0.1) s = 0.1;
        dlambda    = (lambdau - lambda) / s;
        lambda_new = 0.75 * lambdal + 0.25 * lambda;
        if (lambda_new < (lambda - dlambda)) lambda_new = lambda - dlambda;
        lambdau = lambda;
        ru      = r;
        lambda  = lambda_new;
        s       = (lambdau - lambdal) / (lambdau - lambda);
      }
    } else {
      if (s >= 2.0) {
        lambdal = lambda;
        rl      = r;
        s       = 1.0 - rl / ru;
        dlambda = (lambdau - lambdal) / s;
        lambda  = lambdau - dlambda;
      } else {
        s = rl / r - 1.0;
        if (s < 0.1) s = 0.1;
        dlambda    = (lambda - lambdal) / s;
        lambda_new = 0.75 * lambdau + 0.25 * lambda;
        if (lambda_new > (lambda + dlambda)) lambda_new = lambda + dlambda;
        lambdal = lambda;
        rl      = r;
        lambda  = lambda_new;
        s       = (lambdau - lambdal) / (lambdau - lambda);
      }
    }
    r = phi(x, n, lambda, a, b, c, l, u);
  }

  *lam_ext = lambda;
  if (innerIter >= df->maxProjIter) PetscCallAbort(PETSC_COMM_SELF, PetscInfo(NULL, "WARNING: DaiFletcher max iterations\n"));
  return innerIter;
}