Actual source code: admm.c

  1: #include <../src/tao/constrained/impls/admm/admm.h>
  2: #include <petsctao.h>
  3: #include <petsc/private/petscimpl.h>

  5: /* Updates terminating criteria
  6:  *
  7:  * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
  8:  *
  9:  * 2. Updates dual residual, d_k
 10:  *
 11:  * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty||   */

 13: static PetscBool  cited      = PETSC_FALSE;
 14: static const char citation[] = "@misc{xu2017adaptive,\n"
 15:                                "   title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
 16:                                "   author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
 17:                                "   year={2017},\n"
 18:                                "   eprint={1704.02712},\n"
 19:                                "   archivePrefix={arXiv},\n"
 20:                                "   primaryClass={cs.CV}\n"
 21:                                "}  \n";

 23: const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL};
 24: const char *const TaoADMMUpdateTypes[]      = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL};
 25: const char *const TaoALMMTypes[]            = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL};

 27: static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
 28: {
 29:   TAO_ADMM *am = (TAO_ADMM *)tao->data;
 30:   PetscReal Axnorm, Bznorm, ATynorm, temp;
 31:   Vec       tempJR, tempL;
 32:   Tao       mis;

 34:   PetscFunctionBegin;
 35:   mis    = am->subsolverX;
 36:   tempJR = am->workJacobianRight;
 37:   tempL  = am->workLeft;
 38:   /* ATy */
 39:   PetscCall(TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre));
 40:   PetscCall(MatMultTranspose(mis->jacobian_equality, am->y, tempJR));
 41:   PetscCall(VecNorm(tempJR, NORM_2, &ATynorm));
 42:   /* dualres = mu * ||AT(Bz-Bzold)||_2 */
 43:   PetscCall(VecWAXPY(tempJR, -1., am->Bzold, am->Bz));
 44:   PetscCall(MatMultTranspose(mis->jacobian_equality, tempJR, tempL));
 45:   PetscCall(VecNorm(tempL, NORM_2, &am->dualres));
 46:   am->dualres *= am->mu;

 48:   /* ||Ax||_2, ||Bz||_2 */
 49:   PetscCall(VecNorm(am->Ax, NORM_2, &Axnorm));
 50:   PetscCall(VecNorm(am->Bz, NORM_2, &Bznorm));

 52:   /* Set catol to be catol_admm * max{||Ax||,||Bz||,||c||},
 53:    * set gatol to be gatol_admm * ||A^Ty||,
 54:    * while cnorm is ||r_k||_2 and gnorm is ||d_k||_2 */
 55:   temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm));
 56:   PetscCall(TaoSetConstraintTolerances(tao, temp, PETSC_CURRENT));
 57:   PetscCall(TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_CURRENT, PETSC_CURRENT));
 58:   PetscFunctionReturn(PETSC_SUCCESS);
 59: }

 61: /* Penalty Update for Adaptive ADMM. */
 62: static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
 63: {
 64:   TAO_ADMM *am = (TAO_ADMM *)tao->data;
 65:   PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
 66:   PetscBool hflag, gflag;
 67:   Vec       tempJR, tempJR2;

 69:   PetscFunctionBegin;
 70:   tempJR  = am->workJacobianRight;
 71:   tempJR2 = am->workJacobianRight2;
 72:   hflag   = PETSC_FALSE;
 73:   gflag   = PETSC_FALSE;
 74:   a_k     = -1;
 75:   b_k     = -1;

 77:   PetscCall(VecWAXPY(tempJR, -1., am->Axold, am->Ax));
 78:   PetscCall(VecWAXPY(tempJR2, -1., am->yhatold, am->yhat));
 79:   PetscCall(VecNorm(tempJR, NORM_2, &Axdiff_norm));
 80:   PetscCall(VecNorm(tempJR2, NORM_2, &yhatdiff_norm));
 81:   PetscCall(VecDot(tempJR, tempJR2, &Axyhat));

 83:   PetscCall(VecWAXPY(tempJR, -1., am->Bz0, am->Bz));
 84:   PetscCall(VecWAXPY(tempJR2, -1., am->y, am->y0));
 85:   PetscCall(VecNorm(tempJR, NORM_2, &Bzdiff_norm));
 86:   PetscCall(VecNorm(tempJR2, NORM_2, &ydiff_norm));
 87:   PetscCall(VecDot(tempJR, tempJR2, &Bzy));

 89:   if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) {
 90:     hflag = PETSC_TRUE;
 91:     a_sd  = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */
 92:     a_mg  = Axyhat / PetscSqr(Axdiff_norm);   /* alphaMG */
 93:     a_k   = (a_mg / a_sd) > 0.5 ? a_mg : a_sd - 0.5 * a_mg;
 94:   }
 95:   if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) {
 96:     gflag = PETSC_TRUE;
 97:     b_sd  = PetscSqr(ydiff_norm) / Bzy;  /* betaSD */
 98:     b_mg  = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */
 99:     b_k   = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg;
100:   }
101:   am->muold = am->mu;
102:   if (gflag && hflag) {
103:     am->mu = PetscSqrtReal(a_k * b_k);
104:   } else if (hflag) {
105:     am->mu = a_k;
106:   } else if (gflag) {
107:     am->mu = b_k;
108:   }
109:   if (am->mu > am->muold) am->mu = am->muold;
110:   if (am->mu < am->mumin) am->mu = am->mumin;
111:   PetscFunctionReturn(PETSC_SUCCESS);
112: }
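
The update above can be read as a pair of spectral (curvature) estimates: with dAx = Ax - Axold and dyhat = yhat - yhatold, alphaSD = ||dyhat||^2 / <dAx, dyhat> and alphaMG = <dAx, dyhat> / ||dAx||^2, with alpha_k = alphaMG when 2*alphaMG > alphaSD and alpha_k = alphaSD - alphaMG/2 otherwise; beta_k is formed the same way from dBz = Bz - Bz0 and dy = y0 - y. Each estimate is used only when its correlation test <.,.> > orthval*||.||*||.|| + mueps passes, and the new mu is sqrt(alpha_k*beta_k) if both pass, alpha_k or beta_k if only one passes, and unchanged otherwise, after which it is clipped to lie in [mumin, muold].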

114: static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
115: {
116:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

118:   PetscFunctionBegin;
119:   am->regswitch = type;
120:   PetscFunctionReturn(PETSC_SUCCESS);
121: }

123: static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
124: {
125:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

127:   PetscFunctionBegin;
128:   *type = am->regswitch;
129:   PetscFunctionReturn(PETSC_SUCCESS);
130: }

132: static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
133: {
134:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

136:   PetscFunctionBegin;
137:   am->update = type;
138:   PetscFunctionReturn(PETSC_SUCCESS);
139: }

141: static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
142: {
143:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

145:   PetscFunctionBegin;
146:   *type = am->update;
147:   PetscFunctionReturn(PETSC_SUCCESS);
148: }

150: /* This routine updates the Jacobians with the new x,z vectors,
151:  * then updates the Ax and Bz vectors, and finally computes the updated residual vector */
152: static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
153: {
154:   TAO_ADMM *am = (TAO_ADMM *)tao->data;
155:   Tao       mis, reg;

157:   PetscFunctionBegin;
158:   mis = am->subsolverX;
159:   reg = am->subsolverZ;
160:   PetscCall(TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre));
161:   PetscCall(MatMult(mis->jacobian_equality, x, Ax));
162:   PetscCall(TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre));
163:   PetscCall(MatMult(reg->jacobian_equality, z, Bz));

165:   PetscCall(VecWAXPY(residual, 1., Bz, Ax));
166:   if (am->constraint != NULL) PetscCall(VecAXPY(residual, -1., am->constraint));
167:   PetscFunctionReturn(PETSC_SUCCESS);
168: }

170: /* Adds the augmented Lagrangian terms to the user-given routine.
171:  * For subsolverX, the routine needs to be ComputeObjectiveAndGradient.
172:  * Separate Objective and Gradient routines are not supported.  */
173: static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
174: {
175:   Tao       parent = (Tao)ptr;
176:   TAO_ADMM *am     = (TAO_ADMM *)parent->data;
177:   PetscReal temp, temp2;
178:   Vec       tempJR;

180:   PetscFunctionBegin;
181:   tempJR = am->workJacobianRight;
182:   PetscCall(ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
183:   PetscCall((*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP));

185:   am->last_misfit_val = *f;
186:   /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
187:   PetscCall(VecTDot(am->residual, am->y, &temp));
188:   PetscCall(VecTDot(am->residual, am->residual, &temp2));
189:   *f += temp + (am->mu / 2) * temp2;

191:   /* Gradient: add mu*A^T(Ax+Bz-c) + A^T*y */
192:   PetscCall(MatMultTranspose(tao->jacobian_equality, am->residual, tempJR));
193:   PetscCall(VecAXPY(g, am->mu, tempJR));
194:   PetscCall(MatMultTranspose(tao->jacobian_equality, am->y, tempJR));
195:   PetscCall(VecAXPY(g, 1., tempJR));
196:   PetscFunctionReturn(PETSC_SUCCESS);
197: }

199: /* Adds the augmented Lagrangian terms to the user-given routine.
200:  * For subsolverZ, the routine needs to be ComputeObjectiveAndGradient.
201:  * Separate Objective and Gradient routines are not supported.  */
202: static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
203: {
204:   Tao       parent = (Tao)ptr;
205:   TAO_ADMM *am     = (TAO_ADMM *)parent->data;
206:   PetscReal temp, temp2;
207:   Vec       tempJR;

209:   PetscFunctionBegin;
210:   tempJR = am->workJacobianRight;
211:   PetscCall(ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual));
212:   PetscCall((*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP));
213:   am->last_reg_val = *f;
214:   /* Objective: add y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
215:   PetscCall(VecTDot(am->residual, am->y, &temp));
216:   PetscCall(VecTDot(am->residual, am->residual, &temp2));
217:   *f += temp + (am->mu / 2) * temp2;

219:   /* Gradient: add mu*B^T(Ax+Bz-c) + B^T*y */
220:   PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR));
221:   PetscCall(VecAXPY(g, am->mu, tempJR));
222:   PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR));
223:   PetscCall(VecAXPY(g, 1., tempJR));
224:   PetscFunctionReturn(PETSC_SUCCESS);
225: }

227: /* Computes the epsilon-padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps) */
228: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
229: {
230:   TAO_ADMM *am = (TAO_ADMM *)tao->data;
231:   PetscInt  N;

233:   PetscFunctionBegin;
234:   PetscCall(VecGetSize(am->workLeft, &N));
235:   PetscCall(VecPointwiseMult(am->workLeft, x, x));
236:   PetscCall(VecShift(am->workLeft, am->l1epsilon * am->l1epsilon));
237:   PetscCall(VecSqrtAbs(am->workLeft));
238:   PetscCall(VecSum(am->workLeft, norm));
239:   *norm += N * am->l1epsilon;
240:   *norm *= am->lambda;
241:   PetscFunctionReturn(PETSC_SUCCESS);
242: }

244: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
245: {
246:   TAO_ADMM *am = (TAO_ADMM *)ptr;

248:   PetscFunctionBegin;
249:   switch (am->update) {
250:   case (TAO_ADMM_UPDATE_BASIC):
251:     break;
252:   case (TAO_ADMM_UPDATE_ADAPTIVE):
253:   case (TAO_ADMM_UPDATE_ADAPTIVE_RELAXED):
254:     if (H && (am->muold != am->mu)) {
255:       if (!Identity) {
256:         PetscCall(MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN));
257:       } else {
258:         PetscCall(MatShift(H, am->mu - am->muold));
259:       }
260:     }
261:     break;
262:   }
263:   PetscFunctionReturn(PETSC_SUCCESS);
264: }

266: /* Updates the Hessian - adds the second derivative of the augmented Lagrangian,
267:  * H \gets H + \rho*ATA.
268:   Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus this is a no-op.
269:   For ADAPTIVE and ADAPTIVE_RELAXED,
270:   H \gets H + (\rho-\rho_old)*ATA.
271:   Here, we assume that A is a linear constraint, i.e., it does not change.
272:   Thus, for both ADAPTIVE and RELAXED, the ATA matrix is pre-set (except for A=I (the NULL case)); see TaoSetUp_ADMM */
273: static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
274: {
275:   Tao       parent = (Tao)ptr;
276:   TAO_ADMM *am     = (TAO_ADMM *)parent->data;

278:   PetscFunctionBegin;
279:   if (am->Hxchange) {
280:     /* Case where Hessian gets updated with respect to x vector input. */
281:     PetscCall((*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP));
282:     PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
283:   } else if (am->Hxbool) {
284:     /* Hessian doesn't get updated. H(x) = c */
285:     /* Update Lagrangian only once per TAO call */
286:     PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
287:     am->Hxbool = PETSC_FALSE;
288:   }
289:   PetscFunctionReturn(PETSC_SUCCESS);
290: }

292: /* Same as SubHessianUpdate, except for B matrix instead of A matrix */
293: static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
294: {
295:   Tao       parent = (Tao)ptr;
296:   TAO_ADMM *am     = (TAO_ADMM *)parent->data;

298:   PetscFunctionBegin;
299:   if (am->Hzchange) {
300:     /* Case where Hessian gets updated with respect to the z vector input. */
301:     PetscCall((*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP));
302:     PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
303:   } else if (am->Hzbool) {
304:     /* Hessian doesn't get updated. H(z) = c */
305:     /* Update Lagrangian only once per TAO call */
306:     PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
307:     am->Hzbool = PETSC_FALSE;
308:   }
309:   PetscFunctionReturn(PETSC_SUCCESS);
310: }

312: /* Shell matrix routine for the A matrix.
313:  * This gets used when the user passes NULL for
314:  * TaoADMMSetMisfitConstraintJacobian(tao, NULL, NULL, ...).
315:  * Essentially sets A = I */
316: static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out)
317: {
318:   PetscFunctionBegin;
319:   PetscCall(VecCopy(in, out));
320:   PetscFunctionReturn(PETSC_SUCCESS);
321: }

323: /* Shell matrix routine for the B matrix.
324:  * This gets used when the user passes NULL for
325:  * TaoADMMSetRegularizerConstraintJacobian(tao, NULL, NULL, ...).
326:  * Sets B = -I */
327: static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out)
328: {
329:   PetscFunctionBegin;
330:   PetscCall(VecCopy(in, out));
331:   PetscCall(VecScale(out, -1.));
332:   PetscFunctionReturn(PETSC_SUCCESS);
333: }
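
With both defaults in place (A = I from JacobianIdentity, B = -I from JacobianIdentityB) and no constraint vector set, the coupling Ax + Bz = c reduces to x - z = 0, which is the default form referred to in TaoSetFromOptions_ADMM below.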

335: /* Solve min_{x,z} f(x) + g(z) s.t. Ax + Bz = c */
336: static PetscErrorCode TaoSolve_ADMM(Tao tao)
337: {
338:   TAO_ADMM *am = (TAO_ADMM *)tao->data;
339:   PetscInt  N;
340:   PetscReal reg_func;
341:   PetscBool is_reg_shell;
342:   Vec       tempL;

344:   PetscFunctionBegin;
345:   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
346:     PetscCheck(am->subsolverX->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetMisfitConstraintJacobian() first");
347:     PetscCheck(am->subsolverZ->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetRegularizerConstraintJacobian() first");
348:     if (am->constraint != NULL) PetscCall(VecNorm(am->constraint, NORM_2, &am->const_norm));
349:   }
350:   tempL = am->workLeft;
351:   PetscCall(VecGetSize(tempL, &N));

353:   if (am->Hx && am->ops->misfithess) PetscCall(TaoSetHessian(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao));

355:   if (!am->zJI) {
356:     /* Currently, B is assumed to be linear and constant, i.e., not getting updated */
357:     PetscCall(MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &am->BTB));
358:   }
359:   if (!am->xJI) {
360:     /* Currently, A is assumed to be linear and constant, i.e., not getting updated */
361:     PetscCall(MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &am->ATA));
362:   }

364:   is_reg_shell = PETSC_FALSE;

366:   PetscCall(PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell));

368:   if (!is_reg_shell) {
369:     switch (am->regswitch) {
370:     case (TAO_ADMM_REGULARIZER_USER):
371:       break;
372:     case (TAO_ADMM_REGULARIZER_SOFT_THRESH):
373:       /* Soft Threshold. */
374:       break;
375:     }
376:     if (am->ops->regobjgrad) PetscCall(TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao));
377:     if (am->Hz && am->ops->reghess) PetscCall(TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao));
378:   }

380:   switch (am->update) {
381:   case TAO_ADMM_UPDATE_BASIC:
382:     if (am->subsolverX->hessian) {
383:       /* In the basic case, the Hessian does not get updated w.r.t. the spectral penalty.
384:        * Here, when A is given (i.e., !am->xJI), add mu*ATA to the Hessian; otherwise (A = I) shift it by mu */
385:       if (!am->xJI) {
386:         PetscCall(MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN));
387:       } else {
388:         PetscCall(MatShift(am->subsolverX->hessian, am->mu));
389:       }
390:     }
391:     if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
392:       if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
393:         PetscCall(MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN));
394:       } else {
395:         PetscCall(MatShift(am->subsolverZ->hessian, am->mu));
396:       }
397:     }
398:     break;
399:   case TAO_ADMM_UPDATE_ADAPTIVE:
400:   case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
401:     break;
402:   }

404:   PetscCall(PetscCitationsRegister(citation, &cited));
405:   tao->reason = TAO_CONTINUE_ITERATING;

407:   while (tao->reason == TAO_CONTINUE_ITERATING) {
408:     PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
409:     PetscCall(VecCopy(am->Bz, am->Bzold));

411:     /* x update */
412:     PetscCall(TaoSolve(am->subsolverX));
413:     PetscCall(TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre));
414:     PetscCall(MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax));

416:     am->Hxbool = PETSC_TRUE;

418:     /* z update */
419:     switch (am->regswitch) {
420:     case TAO_ADMM_REGULARIZER_USER:
421:       PetscCall(TaoSolve(am->subsolverZ));
422:       break;
423:     case TAO_ADMM_REGULARIZER_SOFT_THRESH:
424:       /* L1 assumes the A,B Jacobians are identity n x n matrices */
425:       PetscCall(VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax));
426:       PetscCall(TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution));
427:       break;
428:     }
429:     am->Hzbool = PETSC_TRUE;
430:     /* Returns Ax + Bz - c with updated Ax,Bz vectors */
431:     PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
432:     /* Dual variable update, y = yold + mu*(Ax+Bz-c) */
433:     PetscCall(VecWAXPY(am->y, am->mu, am->residual, am->yold));

435:     /* stopping tolerance update */
436:     PetscCall(TaoADMMToleranceUpdate(tao));

438:     /* Updating Spectral Penalty */
439:     switch (am->update) {
440:     case TAO_ADMM_UPDATE_BASIC:
441:       am->muold = am->mu;
442:       break;
443:     case TAO_ADMM_UPDATE_ADAPTIVE:
444:     case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
445:       if (tao->niter == 0) {
446:         PetscCall(VecCopy(am->y, am->y0));
447:         PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
448:         if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
449:         PetscCall(VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold));
450:         PetscCall(VecCopy(am->Ax, am->Axold));
451:         PetscCall(VecCopy(am->Bz, am->Bz0));
452:         am->muold = am->mu;
453:       } else if (tao->niter % am->T == 1) {
454:         /* we computed Bzold in a previous iteration, and we computed Ax above */
455:         PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
456:         if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
457:         PetscCall(VecWAXPY(am->yhat, -am->mu, am->residual, am->yold));
458:         PetscCall(AdaptiveADMMPenaltyUpdate(tao));
459:         PetscCall(VecCopy(am->Ax, am->Axold));
460:         PetscCall(VecCopy(am->Bz, am->Bz0));
461:         PetscCall(VecCopy(am->yhat, am->yhatold));
462:         PetscCall(VecCopy(am->y, am->y0));
463:       } else {
464:         am->muold = am->mu;
465:       }
466:       break;
467:     default:
468:       break;
469:     }
470:     tao->niter++;

472:     /* Calculate original function values. The misfit part was saved in SubObjGradUpdate during the x solve */
473:     switch (am->regswitch) {
474:     case TAO_ADMM_REGULARIZER_USER:
475:       if (is_reg_shell) {
476:         PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
477:       } else {
478:         PetscCall((*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, &reg_func, tempL, am->regobjgradP));
479:       }
480:       break;
481:     case TAO_ADMM_REGULARIZER_SOFT_THRESH:
482:       PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
483:       break;
484:     }
485:     PetscCall(VecCopy(am->y, am->yold));
486:     PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
487:     PetscCall(VecNorm(am->residual, NORM_2, &am->resnorm));
488:     PetscCall(TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its));

490:     PetscCall(TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0));
491:     PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
492:   }
493:   /* Update vectors */
494:   PetscCall(VecCopy(am->subsolverX->solution, tao->solution));
495:   PetscCall(VecCopy(am->subsolverX->gradient, tao->gradient));
496:   PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL));
497:   PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL));
498:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
499:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
500:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
501:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
502:   PetscFunctionReturn(PETSC_SUCCESS);
503: }

505: static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems *PetscOptionsObject)
506: {
507:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

509:   PetscFunctionBegin;
510:   PetscOptionsHeadBegin(PetscOptionsObject, "ADMM solver for problems of the form f(x) + g(z), by default subject to x - z = 0. Norms 1 and 2 are supported. Different subsolver routines can be selected. ");
511:   PetscCall(PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL));
512:   PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL));
513:   PetscCall(PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL));
514:   PetscCall(PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL));
515:   PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL));
516:   PetscCall(PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL));
517:   PetscCall(PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL));
518:   PetscCall(PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL));
519:   PetscOptionsHeadEnd();
520:   PetscCall(TaoSetFromOptions(am->subsolverX));
521:   if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) PetscCall(TaoSetFromOptions(am->subsolverZ));
522:   PetscFunctionReturn(PETSC_SUCCESS);
523: }

525: static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer)
526: {
527:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

529:   PetscFunctionBegin;
530:   PetscCall(PetscViewerASCIIPushTab(viewer));
531:   PetscCall(TaoView(am->subsolverX, viewer));
532:   PetscCall(TaoView(am->subsolverZ, viewer));
533:   PetscCall(PetscViewerASCIIPopTab(viewer));
534:   PetscFunctionReturn(PETSC_SUCCESS);
535: }

537: static PetscErrorCode TaoSetUp_ADMM(Tao tao)
538: {
539:   TAO_ADMM *am = (TAO_ADMM *)tao->data;
540:   PetscInt  n, N, M;

542:   PetscFunctionBegin;
543:   PetscCall(VecGetLocalSize(tao->solution, &n));
544:   PetscCall(VecGetSize(tao->solution, &N));
545:   /* If a Jacobian is given as NULL, it is taken to be the identity matrix of the same size as the solution vector */
546:   if (!am->JB) {
547:     am->zJI = PETSC_TRUE;
548:     PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB));
549:     PetscCall(MatShellSetOperation(am->JB, MATOP_MULT, (void (*)(void))JacobianIdentityB));
550:     PetscCall(MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentityB));
551:     am->JBpre = am->JB;
552:   }
553:   if (!am->JA) {
554:     am->xJI = PETSC_TRUE;
555:     PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA));
556:     PetscCall(MatShellSetOperation(am->JA, MATOP_MULT, (void (*)(void))JacobianIdentity));
557:     PetscCall(MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentity));
558:     am->JApre = am->JA;
559:   }
560:   PetscCall(MatCreateVecs(am->JA, NULL, &am->Ax));
561:   if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
562:   PetscCall(TaoSetSolution(am->subsolverX, tao->solution));
563:   if (!am->z) {
564:     PetscCall(VecDuplicate(tao->solution, &am->z));
565:     PetscCall(VecSet(am->z, 0.0));
566:   }
567:   PetscCall(TaoSetSolution(am->subsolverZ, am->z));
568:   if (!am->workLeft) PetscCall(VecDuplicate(tao->solution, &am->workLeft));
569:   if (!am->Axold) PetscCall(VecDuplicate(am->Ax, &am->Axold));
570:   if (!am->workJacobianRight) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight));
571:   if (!am->workJacobianRight2) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight2));
572:   if (!am->Bz) PetscCall(VecDuplicate(am->Ax, &am->Bz));
573:   if (!am->Bzold) PetscCall(VecDuplicate(am->Ax, &am->Bzold));
574:   if (!am->Bz0) PetscCall(VecDuplicate(am->Ax, &am->Bz0));
575:   if (!am->y) {
576:     PetscCall(VecDuplicate(am->Ax, &am->y));
577:     PetscCall(VecSet(am->y, 0.0));
578:   }
579:   if (!am->yold) {
580:     PetscCall(VecDuplicate(am->Ax, &am->yold));
581:     PetscCall(VecSet(am->yold, 0.0));
582:   }
583:   if (!am->y0) {
584:     PetscCall(VecDuplicate(am->Ax, &am->y0));
585:     PetscCall(VecSet(am->y0, 0.0));
586:   }
587:   if (!am->yhat) {
588:     PetscCall(VecDuplicate(am->Ax, &am->yhat));
589:     PetscCall(VecSet(am->yhat, 0.0));
590:   }
591:   if (!am->yhatold) {
592:     PetscCall(VecDuplicate(am->Ax, &am->yhatold));
593:     PetscCall(VecSet(am->yhatold, 0.0));
594:   }
595:   if (!am->residual) {
596:     PetscCall(VecDuplicate(am->Ax, &am->residual));
597:     PetscCall(VecSet(am->residual, 0.0));
598:   }
599:   if (!am->constraint) {
600:     am->constraint = NULL;
601:   } else {
602:     PetscCall(VecGetSize(am->constraint, &M));
603:     PetscCheck(M == N, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Solution vector and constraint vector must be of same size!");
604:   }

606:   /* Save changed tao tolerance for adaptive tolerance */
607:   if (tao->gatol != tao->default_gatol) am->gatol_admm = tao->gatol;
608:   if (tao->catol != tao->default_catol) am->catol_admm = tao->catol;

610:   /* Update spectral and dual elements for the X subsolver */
611:   PetscCall(TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao));
612:   PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP));
613:   PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP));
614:   PetscFunctionReturn(PETSC_SUCCESS);
615: }

617: static PetscErrorCode TaoDestroy_ADMM(Tao tao)
618: {
619:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

621:   PetscFunctionBegin;
622:   PetscCall(VecDestroy(&am->z));
623:   PetscCall(VecDestroy(&am->Ax));
624:   PetscCall(VecDestroy(&am->Axold));
625:   PetscCall(VecDestroy(&am->Bz));
626:   PetscCall(VecDestroy(&am->Bzold));
627:   PetscCall(VecDestroy(&am->Bz0));
628:   PetscCall(VecDestroy(&am->residual));
629:   PetscCall(VecDestroy(&am->y));
630:   PetscCall(VecDestroy(&am->yold));
631:   PetscCall(VecDestroy(&am->y0));
632:   PetscCall(VecDestroy(&am->yhat));
633:   PetscCall(VecDestroy(&am->yhatold));
634:   PetscCall(VecDestroy(&am->workLeft));
635:   PetscCall(VecDestroy(&am->workJacobianRight));
636:   PetscCall(VecDestroy(&am->workJacobianRight2));

638:   PetscCall(MatDestroy(&am->JA));
639:   PetscCall(MatDestroy(&am->JB));
640:   if (!am->xJI) PetscCall(MatDestroy(&am->JApre));
641:   if (!am->zJI) PetscCall(MatDestroy(&am->JBpre));
642:   if (am->Hx) {
643:     PetscCall(MatDestroy(&am->Hx));
644:     PetscCall(MatDestroy(&am->Hxpre));
645:   }
646:   if (am->Hz) {
647:     PetscCall(MatDestroy(&am->Hz));
648:     PetscCall(MatDestroy(&am->Hzpre));
649:   }
650:   PetscCall(MatDestroy(&am->ATA));
651:   PetscCall(MatDestroy(&am->BTB));
652:   PetscCall(TaoDestroy(&am->subsolverX));
653:   PetscCall(TaoDestroy(&am->subsolverZ));
654:   am->parent = NULL;
655:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
656:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
657:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
658:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
659:   PetscCall(PetscFree(tao->data));
660:   PetscFunctionReturn(PETSC_SUCCESS);
661: }

663: /*MC
664:   TAOADMM - Alternating direction method of multipliers method for solving linearly constrained problems
665:             of the form $\min_{x,z} f(x) + g(z)$ s.t. $Ax+Bz=c$.
666:             This algorithm employs two sub `Tao` solvers, whose types can be specified
667:             by the user. The user needs to provide an ObjectiveAndGradient routine, and/or a Hessian routine, for both subsolvers.
668:             Each Hessian can be given a boolean flag determining whether it changes with respect to the input vector. This can be set via
669:             `TaoADMMSet{Misfit,Regularizer}HessianChangeStatus()`.
670:             The second subsolver supports `TAOSHELL`. Note that the L1 norm is used for the objective value with the `TAOSHELL` type.
671:             A regularizer type can be selected, and currently soft thresholding is implemented. For the spectral penalty update,
672:             basic and adaptive options are currently available.
673:             The constraint is Ax+Bz=c, where A and B can be set with `TaoADMMSet{Misfit,Regularizer}ConstraintJacobian()`
674:             and c can be set with `TaoADMMSetConstraintVectorRHS()`.
675:             The user can also provide the regularizer weight for the second subsolver. {cite}`xu2017adaptive`

677:   Options Database Keys:
678: + -tao_admm_regularizer_coefficient        - regularizer constant (default 1.e-6)
679: . -tao_admm_spectral_penalty               - Constant for Augmented Lagrangian term (default 1.)
680: . -tao_admm_relaxation_parameter           - relaxation parameter for Z update (default 1.)
681: . -tao_admm_tolerance_update_factor        - ADMM dynamic tolerance update factor (default 1.e-12)
682: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
683: . -tao_admm_minimum_spectral_penalty       - Set ADMM minimum spectral penalty (default 0)
684: . -tao_admm_dual_update                    - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
685: - -tao_admm_regularizer_type               - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")

687:   Level: beginner

689: .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`,
690:           `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`,
691:           `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`, `TaoADMMGetRegularizerCoefficient()`,
692:           `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`,
693:           `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`,
694:           `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`,
695:           `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`,
696:           `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()`
697: M*/
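
A minimal usage sketch for the default x - z = 0 soft-threshold configuration is shown below; UserMisfitObjGrad, user_ctx, and the solution vector x are application-supplied placeholders rather than symbols from this file, and depending on the misfit subsolver type (default TAONLS, option prefix misfit_) a misfit Hessian routine may also be required:

    Tao tao;

    PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
    PetscCall(TaoSetType(tao, TAOADMM));
    PetscCall(TaoSetSolution(tao, x)); /* x is a user-created solution Vec */
    PetscCall(TaoADMMSetMisfitObjectiveAndGradientRoutine(tao, UserMisfitObjGrad, &user_ctx));
    PetscCall(TaoADMMSetRegularizerType(tao, TAO_ADMM_REGULARIZER_SOFT_THRESH));
    PetscCall(TaoADMMSetRegularizerCoefficient(tao, 1.e-2)); /* illustrative lambda value */
    PetscCall(TaoSetFromOptions(tao));
    PetscCall(TaoSolve(tao));
    PetscCall(TaoDestroy(&tao));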

699: PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
700: {
701:   TAO_ADMM *am;

703:   PetscFunctionBegin;
704:   PetscCall(PetscNew(&am));

706:   tao->ops->destroy        = TaoDestroy_ADMM;
707:   tao->ops->setup          = TaoSetUp_ADMM;
708:   tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
709:   tao->ops->view           = TaoView_ADMM;
710:   tao->ops->solve          = TaoSolve_ADMM;

712:   PetscCall(TaoParametersInitialize(tao));

714:   tao->data           = (void *)am;
715:   am->l1epsilon       = 1e-6;
716:   am->lambda          = 1e-4;
717:   am->mu              = 1.;
718:   am->muold           = 0.;
719:   am->mueps           = PETSC_MACHINE_EPSILON;
720:   am->mumin           = 0.;
721:   am->orthval         = 0.2;
722:   am->T               = 2;
723:   am->parent          = tao;
724:   am->update          = TAO_ADMM_UPDATE_BASIC;
725:   am->regswitch       = TAO_ADMM_REGULARIZER_SOFT_THRESH;
726:   am->tol             = PETSC_SMALL;
727:   am->const_norm      = 0;
728:   am->resnorm         = 0;
729:   am->dualres         = 0;
730:   am->ops->regobjgrad = NULL;
731:   am->ops->reghess    = NULL;
732:   am->gamma           = 1;
733:   am->regobjgradP     = NULL;
734:   am->reghessP        = NULL;
735:   am->gatol_admm      = 1e-8;
736:   am->catol_admm      = 0;
737:   am->Hxchange        = PETSC_TRUE;
738:   am->Hzchange        = PETSC_TRUE;
739:   am->Hzbool          = PETSC_TRUE;
740:   am->Hxbool          = PETSC_TRUE;

742:   PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX));
743:   PetscCall(TaoSetOptionsPrefix(am->subsolverX, "misfit_"));
744:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1));
745:   PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ));
746:   PetscCall(TaoSetOptionsPrefix(am->subsolverZ, "reg_"));
747:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1));

749:   PetscCall(TaoSetType(am->subsolverX, TAONLS));
750:   PetscCall(TaoSetType(am->subsolverZ, TAONLS));
751:   PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
752:   PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
753:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM));
754:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM));
755:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM));
756:   PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM));
757:   PetscFunctionReturn(PETSC_SUCCESS);
758: }

760: /*@
761:   TaoADMMSetMisfitHessianChangeStatus - Set the boolean that determines whether the Hessian matrix of the misfit subsolver changes with respect to the input vector.

763:   Collective

765:   Input Parameters:
766: + tao - the Tao solver context.
767: - b   - the Hessian matrix change status boolean, `PETSC_FALSE`  when the Hessian matrix does not change, `PETSC_TRUE` otherwise.

769:   Level: advanced

771: .seealso: `TAOADMM`
772: @*/
773: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
774: {
775:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

777:   PetscFunctionBegin;
778:   am->Hxchange = b;
779:   PetscFunctionReturn(PETSC_SUCCESS);
780: }

782: /*@
783:   TaoADMMSetRegHessianChangeStatus - Set the boolean that determines whether the Hessian matrix of the regularization subsolver changes with respect to the input vector.

785:   Collective

787:   Input Parameters:
788: + tao - the `Tao` solver context
789: - b   - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.

791:   Level: advanced

793: .seealso: `TAOADMM`
794: @*/
795: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
796: {
797:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

799:   PetscFunctionBegin;
800:   am->Hzchange = b;
801:   PetscFunctionReturn(PETSC_SUCCESS);
802: }

804: /*@
805:   TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value

807:   Collective

809:   Input Parameters:
810: + tao - the `Tao` solver context
811: - mu  - spectral penalty

813:   Level: advanced

815: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM`
816: @*/
817: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
818: {
819:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

821:   PetscFunctionBegin;
822:   am->mu = mu;
823:   PetscFunctionReturn(PETSC_SUCCESS);
824: }

826: /*@
827:   TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value

829:   Collective

831:   Input Parameter:
832: . tao - the `Tao` solver context

834:   Output Parameter:
835: . mu - spectral penalty

837:   Level: advanced

839: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM`
840: @*/
841: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
842: {
843:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

845:   PetscFunctionBegin;
847:   PetscAssertPointer(mu, 2);
848:   *mu = am->mu;
849:   PetscFunctionReturn(PETSC_SUCCESS);
850: }

852: /*@
853:   TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside `TAOADMM`

855:   Collective

857:   Input Parameter:
858: . tao - the `Tao` solver context

860:   Output Parameter:
861: . misfit - the `Tao` subsolver context

863:   Level: advanced

865: .seealso: `TAOADMM`, `Tao`
866: @*/
867: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
868: {
869:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

871:   PetscFunctionBegin;
872:   *misfit = am->subsolverX;
873:   PetscFunctionReturn(PETSC_SUCCESS);
874: }

876: /*@
877:   TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside `TAOADMM`

879:   Collective

881:   Input Parameter:
882: . tao - the `Tao` solver context

884:   Output Parameter:
885: . reg - the `Tao` subsolver context

887:   Level: advanced

889: .seealso: `TAOADMM`, `Tao`
890: @*/
891: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
892: {
893:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

895:   PetscFunctionBegin;
896:   *reg = am->subsolverZ;
897:   PetscFunctionReturn(PETSC_SUCCESS);
898: }

900: /*@
901:   TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for `TAOADMM`

903:   Collective

905:   Input Parameters:
906: + tao - the `Tao` solver context
907: - c   - RHS vector

909:   Level: advanced

911: .seealso: `TAOADMM`
912: @*/
913: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
914: {
915:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

917:   PetscFunctionBegin;
918:   am->constraint = c;
919:   PetscFunctionReturn(PETSC_SUCCESS);
920: }

922: /*@
923:   TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty

925:   Collective

927:   Input Parameters:
928: + tao - the `Tao` solver context
929: - mu  - minimum spectral penalty value

931:   Level: advanced

933: .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM`
934: @*/
935: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
936: {
937:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

939:   PetscFunctionBegin;
940:   am->mumin = mu;
941:   PetscFunctionReturn(PETSC_SUCCESS);
942: }

944: /*@
945:   TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for the L1-norm regularization case

947:   Collective

949:   Input Parameters:
950: + tao    - the `Tao` solver context
951: - lambda - L1-norm regularizer coefficient

953:   Level: advanced

955: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
956: @*/
957: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
958: {
959:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

961:   PetscFunctionBegin;
962:   am->lambda = lambda;
963:   PetscFunctionReturn(PETSC_SUCCESS);
964: }

966: /*@
967:   TaoADMMGetRegularizerCoefficient - Get the regularization coefficient lambda for the L1-norm regularization case

969:   Collective

971:   Input Parameter:
972: . tao - the `Tao` solver context

974:   Output Parameter:
975: . lambda - L1-norm regularizer coefficient

977:   Level: advanced

979: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
980: @*/
981: PetscErrorCode TaoADMMGetRegularizerCoefficient(Tao tao, PetscReal *lambda)
982: {
983:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

985:   PetscFunctionBegin;
986:   *lambda = am->lambda;
987:   PetscFunctionReturn(PETSC_SUCCESS);
988: }

990: /*@C
991:   TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix A for the `TAOADMM` algorithm. Matrix A constrains the x variable.

993:   Collective

995:   Input Parameters:
996: + tao  - the `Tao` solver context
997: . J    - user-created misfit constraint Jacobian matrix
998: . Jpre - user-created misfit constraint Jacobian matrix for constructing the preconditioner, often this is `J`
999: . func - function pointer for the misfit constraint Jacobian update function
1000: - ctx  - user context for the misfit constraint Jacobian

1002:   Level: advanced

1004: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
1005: @*/
1006: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1007: {
1008:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1010:   PetscFunctionBegin;
1012:   if (J) {
1014:     PetscCheckSameComm(tao, 1, J, 2);
1015:   }
1016:   if (Jpre) {
1018:     PetscCheckSameComm(tao, 1, Jpre, 3);
1019:   }
1020:   if (ctx) am->misfitjacobianP = ctx;
1021:   if (func) am->ops->misfitjac = func;

1023:   if (J) {
1024:     PetscCall(PetscObjectReference((PetscObject)J));
1025:     PetscCall(MatDestroy(&am->JA));
1026:     am->JA = J;
1027:   }
1028:   if (Jpre) {
1029:     PetscCall(PetscObjectReference((PetscObject)Jpre));
1030:     PetscCall(MatDestroy(&am->JApre));
1031:     am->JApre = Jpre;
1032:   }
1033:   PetscFunctionReturn(PETSC_SUCCESS);
1034: }

1036: /*@C
1037:   TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for the `TAOADMM` algorithm. Matrix B constrains the z variable.

1039:   Collective

1041:   Input Parameters:
1042: + tao  - the `Tao` solver context
1043: . J    - user-created regularizer constraint Jacobian matrix
1044: . Jpre - user-created regularizer Jacobian constraint matrix for constructing the preconditioner, often this is `J`
1045: . func - function pointer for the regularizer constraint Jacobian update function
1046: - ctx  - user context for the regularizer constraint Jacobian

1048:   Level: advanced

1050: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
1051: @*/
1052: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1053: {
1054:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1056:   PetscFunctionBegin;
1058:   if (J) {
1060:     PetscCheckSameComm(tao, 1, J, 2);
1061:   }
1062:   if (Jpre) {
1064:     PetscCheckSameComm(tao, 1, Jpre, 3);
1065:   }
1066:   if (ctx) am->regjacobianP = ctx;
1067:   if (func) am->ops->regjac = func;

1069:   if (J) {
1070:     PetscCall(PetscObjectReference((PetscObject)J));
1071:     PetscCall(MatDestroy(&am->JB));
1072:     am->JB = J;
1073:   }
1074:   if (Jpre) {
1075:     PetscCall(PetscObjectReference((PetscObject)Jpre));
1076:     PetscCall(MatDestroy(&am->JBpre));
1077:     am->JBpre = Jpre;
1078:   }
1079:   PetscFunctionReturn(PETSC_SUCCESS);
1080: }
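
For a general linear constraint Ax + Bz = c with a user-defined regularizer, a sketch of the corresponding setup might look as follows; A, B, c, the Jacobian callbacks, UserRegObjGrad, and user_ctx are all application-supplied placeholders:

    PetscCall(TaoADMMSetRegularizerType(tao, TAO_ADMM_REGULARIZER_USER));
    PetscCall(TaoADMMSetMisfitConstraintJacobian(tao, A, A, UserMisfitJacobian, &user_ctx));
    PetscCall(TaoADMMSetRegularizerConstraintJacobian(tao, B, B, UserRegJacobian, &user_ctx));
    PetscCall(TaoADMMSetConstraintVectorRHS(tao, c));
    PetscCall(TaoADMMSetRegularizerObjectiveAndGradientRoutine(tao, UserRegObjGrad, &user_ctx));

Since A and B are assumed linear and constant in TaoSolve_ADMM, the Jacobian callbacks may simply leave the supplied matrices unchanged.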

1082: /*@C
1083:   TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function

1085:   Collective

1087:   Input Parameters:
1088: + tao  - the `Tao` context
1089: . func - function pointer for the misfit value and gradient evaluation
1090: - ctx  - user context for the misfit

1092:   Level: advanced

1094: .seealso: `TAOADMM`
1095: @*/
1096: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1097: {
1098:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1100:   PetscFunctionBegin;
1102:   am->misfitobjgradP     = ctx;
1103:   am->ops->misfitobjgrad = func;
1104:   PetscFunctionReturn(PETSC_SUCCESS);
1105: }

1107: /*@C
1108:   TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1109:   function into the algorithm, to be used for subsolverX.

1111:   Collective

1113:   Input Parameters:
1114: + tao  - the `Tao` context
1115: . H    - user-created matrix for the Hessian of the misfit term
1116: . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1117: . func - function pointer for the misfit Hessian evaluation
1118: - ctx  - user context for the misfit Hessian

1120:   Level: advanced

1122: .seealso: `TAOADMM`
1123: @*/
1124: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1125: {
1126:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1128:   PetscFunctionBegin;
1130:   if (H) {
1132:     PetscCheckSameComm(tao, 1, H, 2);
1133:   }
1134:   if (Hpre) {
1136:     PetscCheckSameComm(tao, 1, Hpre, 3);
1137:   }
1138:   if (ctx) am->misfithessP = ctx;
1139:   if (func) am->ops->misfithess = func;
1140:   if (H) {
1141:     PetscCall(PetscObjectReference((PetscObject)H));
1142:     PetscCall(MatDestroy(&am->Hx));
1143:     am->Hx = H;
1144:   }
1145:   if (Hpre) {
1146:     PetscCall(PetscObjectReference((PetscObject)Hpre));
1147:     PetscCall(MatDestroy(&am->Hxpre));
1148:     am->Hxpre = Hpre;
1149:   }
1150:   PetscFunctionReturn(PETSC_SUCCESS);
1151: }

1153: /*@C
1154:   TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function

1156:   Collective

1158:   Input Parameters:
1159: + tao  - the Tao context
1160: . func - function pointer for the regularizer value and gradient evaluation
1161: - ctx  - user context for the regularizer

1163:   Level: advanced

1165: .seealso: `TAOADMM`
1166: @*/
1167: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
1168: {
1169:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1171:   PetscFunctionBegin;
1173:   am->regobjgradP     = ctx;
1174:   am->ops->regobjgrad = func;
1175:   PetscFunctionReturn(PETSC_SUCCESS);
1176: }

1178: /*@C
1179:   TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1180:   function, to be used for subsolverZ.

1182:   Collective

1184:   Input Parameters:
1185: + tao  - the `Tao` context
1186: . H    - user-created matrix for the Hessian of the regularization term
1187: . Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
1188: . func - function pointer for the regularizer Hessian evaluation
1189: - ctx  - user context for the regularizer Hessian

1191:   Level: advanced

1193: .seealso: `TAOADMM`
1194: @*/
1195: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
1196: {
1197:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1199:   PetscFunctionBegin;
1201:   if (H) {
1203:     PetscCheckSameComm(tao, 1, H, 2);
1204:   }
1205:   if (Hpre) {
1207:     PetscCheckSameComm(tao, 1, Hpre, 3);
1208:   }
1209:   if (ctx) am->reghessP = ctx;
1210:   if (func) am->ops->reghess = func;
1211:   if (H) {
1212:     PetscCall(PetscObjectReference((PetscObject)H));
1213:     PetscCall(MatDestroy(&am->Hz));
1214:     am->Hz = H;
1215:   }
1216:   if (Hpre) {
1217:     PetscCall(PetscObjectReference((PetscObject)Hpre));
1218:     PetscCall(MatDestroy(&am->Hzpre));
1219:     am->Hzpre = Hpre;
1220:   }
1221:   PetscFunctionReturn(PETSC_SUCCESS);
1222: }

1224: /*@
1225:   TaoGetADMMParentTao - Gets a pointer to the parent `TAOADMM`, used by an inner subsolver.

1227:   Collective

1229:   Input Parameter:
1230: . tao - the `Tao` context

1232:   Output Parameter:
1233: . admm_tao - the parent `Tao` context

1235:   Level: advanced

1237: .seealso: `TAOADMM`
1238: @*/
1239: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1240: {
1241:   PetscFunctionBegin;
1243:   PetscCall(PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao));
1244:   PetscFunctionReturn(PETSC_SUCCESS);
1245: }
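
As an illustration only (UserShellSolve is a placeholder, e.g. a solve callback registered on the regularizer subsolver with TaoShellSetSolve()), an inner subsolver routine could recover the outer ADMM context like this:

    static PetscErrorCode UserShellSolve(Tao subsolver)
    {
      Tao       admm;
      PetscReal mu;
      Vec       y, z;

      PetscFunctionBegin;
      PetscCall(TaoGetADMMParentTao(subsolver, &admm)); /* the parent TAOADMM composed on the subsolver */
      PetscCall(TaoADMMGetSpectralPenalty(admm, &mu));
      PetscCall(TaoADMMGetDualVector(admm, &y));
      PetscCall(TaoGetSolution(subsolver, &z));
      /* ... perform a custom z-update using mu and y ... */
      PetscFunctionReturn(PETSC_SUCCESS);
    }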

1247: /*@
1248:   TaoADMMGetDualVector - Returns the dual vector associated with the current `TAOADMM` state

1250:   Not Collective

1252:   Input Parameter:
1253: . tao - the `Tao` context

1255:   Output Parameter:
1256: . Y - the dual vector

1258:   Level: intermediate

1260: .seealso: `TAOADMM`
1261: @*/
1262: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1263: {
1264:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1266:   PetscFunctionBegin;
1268:   *Y = am->y;
1269:   PetscFunctionReturn(PETSC_SUCCESS);
1270: }

1272: /*@
1273:   TaoADMMSetRegularizerType - Set the regularizer type for the `TAOADMM` routine

1275:   Not Collective

1277:   Input Parameters:
1278: + tao  - the `Tao` context
1279: - type - regularizer type

1281:   Options Database Key:
1282: . -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer

1284:   Level: intermediate

1286: .seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1287: @*/
1288: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1289: {
1290:   PetscFunctionBegin;
1293:   PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
1294:   PetscFunctionReturn(PETSC_SUCCESS);
1295: }

1297: /*@
1298:   TaoADMMGetRegularizerType - Gets the type of regularizer routine for `TAOADMM`

1300:   Not Collective

1302:   Input Parameter:
1303: . tao - the `Tao` context

1305:   Output Parameter:
1306: . type - the type of regularizer

1308:   Level: intermediate

1310: .seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1311: @*/
1312: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1313: {
1314:   PetscFunctionBegin;
1316:   PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
1317:   PetscFunctionReturn(PETSC_SUCCESS);
1318: }

1320: /*@
1321:   TaoADMMSetUpdateType - Set the spectral penalty update type for the `TAOADMM` routine

1323:   Not Collective

1325:   Input Parameters:
1326: + tao  - the `Tao` context
1327: - type - spectral parameter update type

1329:   Level: intermediate

1331: .seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1332: @*/
1333: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1334: {
1335:   PetscFunctionBegin;
1338:   PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
1339:   PetscFunctionReturn(PETSC_SUCCESS);
1340: }

1342: /*@
1343:   TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for `TAOADMM`

1345:   Not Collective

1347:   Input Parameter:
1348: . tao - the `Tao` context

1350:   Output Parameter:
1351: . type - the type of spectral penalty update routine

1353:   Level: intermediate

1355: .seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1356: @*/
1357: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1358: {
1359:   PetscFunctionBegin;
1361:   PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
1362:   PetscFunctionReturn(PETSC_SUCCESS);
1363: }