/* admm.c — TAOADMM: alternating direction method of multipliers implementation */

  1: #include <../src/tao/constrained/impls/admm/admm.h>
  2: #include <petsctao.h>
  3: #include <petsc/private/petscimpl.h>

/* Updates terminating criteria
 *
 * 1. ||r_k|| = ||Ax+Bz-c|| <= catol_admm * max{||Ax||,||Bz||,||c||}
 *
 * 2. Updates dual residual, d_k
 *
 * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| <= gatol_admm * ||A^Ty||   */

/* One-shot citation registration state and the BibTeX entry for the adaptive
   relaxed ADMM algorithm; registered via PetscCitationsRegister() in TaoSolve_ADMM(). */
static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@misc{xu2017adaptive,\n"
                               "   title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n"
                               "   author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n"
                               "   year={2017},\n"
                               "   eprint={1704.02712},\n"
                               "   archivePrefix={arXiv},\n"
                               "   primaryClass={cs.CV}\n"
                               "}  \n";

/* Human-readable names for the enum values, followed by the enum type name,
   option prefix, and a NULL sentinel — the layout PetscOptionsEnum() expects. */
const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL};
const char *const TaoADMMUpdateTypes[]      = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL};
const char *const TaoALMMTypes[]            = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL};

static PetscErrorCode TaoADMMToleranceUpdate(Tao tao)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  PetscReal Axnorm, Bznorm, ATynorm, temp;
  Vec       tempJR, tempL;
  Tao       mis;

  PetscFunctionBegin;
  mis    = am->subsolverX;
  tempJR = am->workJacobianRight;
  tempL  = am->workLeft;
  /* ATy: refresh the misfit Jacobian A, then compute ||A^T y||_2 for the gradient tolerance */
  PetscCall(TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre));
  PetscCall(MatMultTranspose(mis->jacobian_equality, am->y, tempJR));
  PetscCall(VecNorm(tempJR, NORM_2, &ATynorm));
  /* dualres = mu * ||AT(Bz-Bzold)||_2 */
  PetscCall(VecWAXPY(tempJR, -1., am->Bzold, am->Bz));
  PetscCall(MatMultTranspose(mis->jacobian_equality, tempJR, tempL));
  PetscCall(VecNorm(tempL, NORM_2, &am->dualres));
  am->dualres *= am->mu;

  /* ||Ax||_2, ||Bz||_2 */
  PetscCall(VecNorm(am->Ax, NORM_2, &Axnorm));
  PetscCall(VecNorm(am->Bz, NORM_2, &Bznorm));

  /* Set catol to be catol_admm *  max{||Ax||,||Bz||,||c||} *
   * Set gatol to be gatol_admm *  ||A^Ty|| *
   * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2.
   * const_norm == 0 indicates no constraint vector c was provided, so ||c|| is omitted from the max. */
  temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm));
  PetscCall(TaoSetConstraintTolerances(tao, temp, PETSC_CURRENT));
  PetscCall(TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_CURRENT, PETSC_CURRENT));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Penalty Update for Adaptive ADMM.
 *
 * Estimates spectral stepsizes from the curvature of the dual variables
 * (Xu et al. 2017): a_sd/b_sd are steepest-descent estimates, a_mg/b_mg are
 * minimum-gradient estimates, blended by the 0.5 ratio test. Each estimate is
 * used only if the corresponding correlation test against am->orthval passes
 * (hflag/gflag). The new mu is safeguarded to be non-increasing and >= mumin. */
static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k;
  PetscBool hflag, gflag;
  Vec       tempJR, tempJR2;

  PetscFunctionBegin;
  tempJR  = am->workJacobianRight;
  tempJR2 = am->workJacobianRight2;
  hflag   = PETSC_FALSE;
  gflag   = PETSC_FALSE;
  a_k     = -1;
  b_k     = -1;

  /* Differences of Ax and yhat since the last spectral update, and their inner product */
  PetscCall(VecWAXPY(tempJR, -1., am->Axold, am->Ax));
  PetscCall(VecWAXPY(tempJR2, -1., am->yhatold, am->yhat));
  PetscCall(VecNorm(tempJR, NORM_2, &Axdiff_norm));
  PetscCall(VecNorm(tempJR2, NORM_2, &yhatdiff_norm));
  PetscCall(VecDot(tempJR, tempJR2, &Axyhat));

  /* Differences of Bz and y since the last spectral update, and their inner product */
  PetscCall(VecWAXPY(tempJR, -1., am->Bz0, am->Bz));
  PetscCall(VecWAXPY(tempJR2, -1., am->y, am->y0));
  PetscCall(VecNorm(tempJR, NORM_2, &Bzdiff_norm));
  PetscCall(VecNorm(tempJR2, NORM_2, &ydiff_norm));
  PetscCall(VecDot(tempJR, tempJR2, &Bzy));

  /* Correlation safeguard: only trust the curvature estimate when the vectors are sufficiently aligned */
  if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) {
    hflag = PETSC_TRUE;
    a_sd  = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */
    a_mg  = Axyhat / PetscSqr(Axdiff_norm);   /* alphaMG */
    a_k   = (a_mg / a_sd) > 0.5 ? a_mg : a_sd - 0.5 * a_mg;
  }
  if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) {
    gflag = PETSC_TRUE;
    b_sd  = PetscSqr(ydiff_norm) / Bzy;  /* betaSD */
    b_mg  = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */
    b_k   = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg;
  }
  am->muold = am->mu;
  /* Combine estimates: geometric mean if both are reliable, otherwise whichever passed */
  if (gflag && hflag) {
    am->mu = PetscSqrtReal(a_k * b_k);
  } else if (hflag) {
    am->mu = a_k;
  } else if (gflag) {
    am->mu = b_k;
  }
  /* Safeguards: mu never increases, and never drops below mumin */
  if (am->mu > am->muold) am->mu = am->muold;
  if (am->mu < am->mumin) am->mu = am->mumin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

114: static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type)
115: {
116:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

118:   PetscFunctionBegin;
119:   am->regswitch = type;
120:   PetscFunctionReturn(PETSC_SUCCESS);
121: }

123: static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type)
124: {
125:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

127:   PetscFunctionBegin;
128:   *type = am->regswitch;
129:   PetscFunctionReturn(PETSC_SUCCESS);
130: }

132: static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type)
133: {
134:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

136:   PetscFunctionBegin;
137:   am->update = type;
138:   PetscFunctionReturn(PETSC_SUCCESS);
139: }

141: static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type)
142: {
143:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

145:   PetscFunctionBegin;
146:   *type = am->update;
147:   PetscFunctionReturn(PETSC_SUCCESS);
148: }

/* This routine updates Jacobians with new x,z vectors,
 * and then updates Ax and Bz vectors, then computes updated residual vector
 * residual = Ax + Bz - c  (the -c term is skipped when no constraint vector was set) */
static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  Tao       mis, reg;

  PetscFunctionBegin;
  mis = am->subsolverX;
  reg = am->subsolverZ;
  /* Refresh A at x and form Ax */
  PetscCall(TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre));
  PetscCall(MatMult(mis->jacobian_equality, x, Ax));
  /* Refresh B at z and form Bz */
  PetscCall(TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre));
  PetscCall(MatMult(reg->jacobian_equality, z, Bz));

  PetscCall(VecWAXPY(residual, 1., Bz, Ax));
  if (am->constraint != NULL) PetscCall(VecAXPY(residual, -1., am->constraint));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Updates Augmented Lagrangians to given routines *
 * For subsolverX, routine needs to be ComputeObjectiveAndGradient.
 * Separate Objective and Gradient routines are not supported.
 *
 * Wraps the user's misfit objective/gradient with the augmented Lagrangian terms:
 *   f(x) + y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||^2  and the matching gradient. */
static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr)
{
  Tao       parent = (Tao)ptr; /* outer ADMM Tao, passed as context to the subsolver */
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;
  PetscReal temp, temp2;
  Vec       tempJR;

  PetscFunctionBegin;
  tempJR = am->workJacobianRight;
  /* Refresh Ax, Bz, and residual = Ax+Bz-c at the trial x and current z */
  PetscCall(ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
  PetscCall((*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP));

  am->last_misfit_val = *f; /* cached for convergence monitoring in TaoSolve_ADMM */
  /* Objective  Add + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
  PetscCall(VecTDot(am->residual, am->y, &temp));
  PetscCall(VecTDot(am->residual, am->residual, &temp2));
  *f += temp + (am->mu / 2) * temp2;

  /* Gradient. Add + mu*AT(Ax+Bz-c) + AT*y */
  PetscCall(MatMultTranspose(tao->jacobian_equality, am->residual, tempJR));
  PetscCall(VecAXPY(g, am->mu, tempJR));
  PetscCall(MatMultTranspose(tao->jacobian_equality, am->y, tempJR));
  PetscCall(VecAXPY(g, 1., tempJR));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Updates Augmented Lagrangians to given routines
 * For subsolverZ, routine needs to be ComputeObjectiveAndGradient.
 * Separate Objective and Gradient routines are not supported.
 *
 * Wraps the user's regularizer objective/gradient with the augmented Lagrangian terms:
 *   g(z) + y^T(Ax+Bz-c) + mu/2*||Ax+Bz-c||^2  and the matching gradient. */
static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr)
{
  Tao       parent = (Tao)ptr; /* outer ADMM Tao, passed as context to the subsolver */
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;
  PetscReal temp, temp2;
  Vec       tempJR;

  PetscFunctionBegin;
  tempJR = am->workJacobianRight;
  /* Refresh Ax, Bz, and residual = Ax+Bz-c at the current x and trial z */
  PetscCall(ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual));
  PetscCall((*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP));
  am->last_reg_val = *f; /* cached for convergence monitoring in TaoSolve_ADMM */
  /* Objective  Add  + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */
  PetscCall(VecTDot(am->residual, am->y, &temp));
  PetscCall(VecTDot(am->residual, am->residual, &temp2));
  *f += temp + (am->mu / 2) * temp2;

  /* Gradient. Add + mu*BT(Ax+Bz-c) + BT*y */
  PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR));
  PetscCall(VecAXPY(g, am->mu, tempJR));
  PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR));
  PetscCall(VecAXPY(g, 1., tempJR));
  PetscFunctionReturn(PETSC_SUCCESS);
}

227: /* Computes epsilon padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps */
228: static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm)
229: {
230:   TAO_ADMM *am = (TAO_ADMM *)tao->data;
231:   PetscInt  N;

233:   PetscFunctionBegin;
234:   PetscCall(VecGetSize(am->workLeft, &N));
235:   PetscCall(VecPointwiseMult(am->workLeft, x, x));
236:   PetscCall(VecShift(am->workLeft, am->l1epsilon * am->l1epsilon));
237:   PetscCall(VecSqrtAbs(am->workLeft));
238:   PetscCall(VecSum(am->workLeft, norm));
239:   *norm += N * am->l1epsilon;
240:   *norm *= am->lambda;
241:   PetscFunctionReturn(PETSC_SUCCESS);
242: }

244: static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr)
245: {
246:   TAO_ADMM *am = (TAO_ADMM *)ptr;

248:   PetscFunctionBegin;
249:   switch (am->update) {
250:   case TAO_ADMM_UPDATE_BASIC:
251:     break;
252:   case TAO_ADMM_UPDATE_ADAPTIVE:
253:   case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
254:     if (H && (am->muold != am->mu)) {
255:       if (!Identity) {
256:         PetscCall(MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN));
257:       } else {
258:         PetscCall(MatShift(H, am->mu - am->muold));
259:       }
260:     }
261:     break;
262:   }
263:   PetscFunctionReturn(PETSC_SUCCESS);
264: }

/* Updates Hessian - adds second derivative of augmented Lagrangian
 * H \gets H + \rho*ATA
  Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus no-op
  For ADAPTIVE, ADAPTIVE_RELAXED,
  H \gets H + (\rho-\rhoold)*ATA
  Here, we assume that A is linear constraint i.e., does not change.
  Thus, for both ADAPTIVE, and RELAXED, ATA matrix is pre-set (except for A=I (null case)) see TaoSetUp_ADMM */
static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr)
{
  Tao       parent = (Tao)ptr; /* outer ADMM Tao, passed as context to the subsolver */
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;

  PetscFunctionBegin;
  if (am->Hxchange) {
    /* Case where Hessian gets updated with respect to x vector input. */
    PetscCall((*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP));
    PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
  } else if (am->Hxbool) {
    /* Hessian doesn't get updated. H(x) = c */
    /* Update Lagrangian only once per TAO call; Hxbool is re-armed each outer iteration in TaoSolve_ADMM */
    PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am));
    am->Hxbool = PETSC_FALSE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Same as SubHessianUpdate, except for B matrix instead of A matrix */
static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr)
{
  Tao       parent = (Tao)ptr; /* outer ADMM Tao, passed as context to the subsolver */
  TAO_ADMM *am     = (TAO_ADMM *)parent->data;

  PetscFunctionBegin;
  if (am->Hzchange) {
    /* Case where Hessian gets updated with respect to z vector input. */
    PetscCall((*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP));
    PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
  } else if (am->Hzbool) {
    /* Hessian doesn't get updated. H(z) = c */
    /* Update Lagrangian only once per TAO call; Hzbool is re-armed each outer iteration in TaoSolve_ADMM */
    PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am));
    am->Hzbool = PETSC_FALSE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

312: /* Shell Matrix routine for A matrix.
313:  * This gets used when user puts NULL for
314:  * TaoSetJacobianEqualityRoutine(tao, NULL,NULL, ...)
315:  * Essentially sets A=I*/
316: static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out)
317: {
318:   PetscFunctionBegin;
319:   PetscCall(VecCopy(in, out));
320:   PetscFunctionReturn(PETSC_SUCCESS);
321: }

323: /* Shell Matrix routine for B matrix.
324:  * This gets used when user puts NULL for
325:  * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...)
326:  * Sets B=-I */
327: static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out)
328: {
329:   PetscFunctionBegin;
330:   PetscCall(VecCopy(in, out));
331:   PetscCall(VecScale(out, -1.));
332:   PetscFunctionReturn(PETSC_SUCCESS);
333: }

/* Solve min f(x) + g(z) s.t. Ax + Bz = c
 *
 * Main ADMM iteration: alternately minimizes the augmented Lagrangian in x
 * (subsolverX) and z (subsolverZ, or a closed-form soft-threshold), then
 * performs the dual ascent y += mu*(Ax+Bz-c), updates the dynamic stopping
 * tolerances, and optionally adapts the spectral penalty mu. */
static PetscErrorCode TaoSolve_ADMM(Tao tao)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  PetscInt  N;
  PetscReal reg_func;
  PetscBool is_reg_shell;
  Vec       tempL;

  PetscFunctionBegin;
  if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) {
    /* User-driven regularizer requires both constraint Jacobians to have been set */
    PetscCheck(am->subsolverX->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetMisfitConstraintJacobian() first");
    PetscCheck(am->subsolverZ->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetRegularizerConstraintJacobian() first");
    if (am->constraint != NULL) PetscCall(VecNorm(am->constraint, NORM_2, &am->const_norm));
  }
  tempL = am->workLeft;
  PetscCall(VecGetSize(tempL, &N));

  if (am->Hx && am->ops->misfithess) PetscCall(TaoSetHessian(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao));

  if (!am->zJI) {
    /* Currently, B is assumed to be a linear system, i.e., not getting updated; precompute B^T B once */
    PetscCall(MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &am->BTB));
  }
  if (!am->xJI) {
    /* Currently, A is assumed to be a linear system, i.e., not getting updated; precompute A^T A once */
    PetscCall(MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &am->ATA));
  }

  is_reg_shell = PETSC_FALSE;

  PetscCall(PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell));

  if (!is_reg_shell) {
    switch (am->regswitch) {
    case TAO_ADMM_REGULARIZER_USER:
      break;
    case TAO_ADMM_REGULARIZER_SOFT_THRESH:
      /* Soft Threshold. */
      break;
    }
    /* Install the augmented-Lagrangian wrappers on the z subsolver */
    if (am->ops->regobjgrad) PetscCall(TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao));
    if (am->Hz && am->ops->reghess) PetscCall(TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao));
  }

  switch (am->update) {
  case TAO_ADMM_UPDATE_BASIC:
    if (am->subsolverX->hessian) {
      /* In basic case, Hessian does not get updated w.r.t. to spectral penalty
       * Here, when A is set, i.e., !am->xJI, add mu*ATA to Hessian; otherwise a diagonal shift suffices */
      if (!am->xJI) {
        PetscCall(MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN));
      } else {
        PetscCall(MatShift(am->subsolverX->hessian, am->mu));
      }
    }
    if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) {
      /* NOTE(review): the inner regswitch test is redundant — the enclosing if already guarantees REGULARIZER_USER */
      if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) {
        PetscCall(MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN));
      } else {
        PetscCall(MatShift(am->subsolverZ->hessian, am->mu));
      }
    }
    break;
  case TAO_ADMM_UPDATE_ADAPTIVE:
  case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
    break;
  }

  PetscCall(PetscCitationsRegister(citation, &cited));
  tao->reason = TAO_CONTINUE_ITERATING;

  while (tao->reason == TAO_CONTINUE_ITERATING) {
    PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);
    PetscCall(VecCopy(am->Bz, am->Bzold)); /* keep previous Bz for the dual residual */

    /* x update */
    PetscCall(TaoSolve(am->subsolverX));
    PetscCall(TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre));
    PetscCall(MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax));

    am->Hxbool = PETSC_TRUE; /* re-arm the once-per-outer-iteration Hessian penalty update */

    /* z update */
    switch (am->regswitch) {
    case TAO_ADMM_REGULARIZER_USER:
      PetscCall(TaoSolve(am->subsolverZ));
      break;
    case TAO_ADMM_REGULARIZER_SOFT_THRESH:
      /* L1 assumes A,B jacobians are identity nxn matrix; z = softthresh(Ax + y/mu, lambda/mu) */
      PetscCall(VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax));
      PetscCall(TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution));
      break;
    }
    am->Hzbool = PETSC_TRUE; /* re-arm the once-per-outer-iteration Hessian penalty update */
    /* Returns Ax + Bz - c with updated Ax,Bz vectors */
    PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
    /* Dual variable update, y = yold + mu*(Ax+Bz-c) */
    PetscCall(VecWAXPY(am->y, am->mu, am->residual, am->yold));

    /* stopping tolerance update */
    PetscCall(TaoADMMToleranceUpdate(tao));

    /* Updating Spectral Penalty */
    switch (am->update) {
    case TAO_ADMM_UPDATE_BASIC:
      am->muold = am->mu;
      break;
    case TAO_ADMM_UPDATE_ADAPTIVE:
    case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED:
      if (tao->niter == 0) {
        /* First iteration: seed the reference points (y0, yhatold, Axold, Bz0) for later curvature estimates */
        PetscCall(VecCopy(am->y, am->y0));
        PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
        if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
        PetscCall(VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold));
        PetscCall(VecCopy(am->Ax, am->Axold));
        PetscCall(VecCopy(am->Bz, am->Bz0));
        am->muold = am->mu;
      } else if (tao->niter % am->T == 1) {
        /* we have computed Bzold in a previous iteration, and we computed Ax above;
         * every T iterations, form yhat and run the adaptive penalty update */
        PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold));
        if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint));
        PetscCall(VecWAXPY(am->yhat, -am->mu, am->residual, am->yold));
        PetscCall(AdaptiveADMMPenaltyUpdate(tao));
        PetscCall(VecCopy(am->Ax, am->Axold));
        PetscCall(VecCopy(am->Bz, am->Bz0));
        PetscCall(VecCopy(am->yhat, am->yhatold));
        PetscCall(VecCopy(am->y, am->y0));
      } else {
        /* Off-cycle iteration: mu unchanged */
        am->muold = am->mu;
      }
      break;
    default:
      break;
    }
    tao->niter++;

    /* Calculate original function values. misfit part was done in TaoADMMToleranceUpdate */
    switch (am->regswitch) {
    case TAO_ADMM_REGULARIZER_USER:
      if (is_reg_shell) {
        PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
      } else {
        /* NOTE(review): the regularizer is evaluated at subsolverX->solution here, not subsolverZ->solution — confirm this is intentional */
        PetscCall((*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, &reg_func, tempL, am->regobjgradP));
      }
      break;
    case TAO_ADMM_REGULARIZER_SOFT_THRESH:
      PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, &reg_func));
      break;
    }
    PetscCall(VecCopy(am->y, am->yold));
    PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual));
    PetscCall(VecNorm(am->residual, NORM_2, &am->resnorm));
    PetscCall(TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its));

    PetscCall(TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0));
    PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
  }
  /* Update vectors and drop composed references/functions now that the solve is done */
  PetscCall(VecCopy(am->subsolverX->solution, tao->solution));
  PetscCall(VecCopy(am->subsolverX->gradient, tao->gradient));
  PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL));
  PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Processes -tao_admm_* options for the outer solver, then forwards option
 * processing to the subsolvers (the z subsolver is skipped for soft-threshold,
 * since its update is closed-form and no inner solver is used). */
static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems PetscOptionsObject)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "ADMM problem that solves f(x) in a form of f(x) + g(z) subject to x - z = 0. Norm 1 and 2 are supported. Different subsolver routines can be selected. ");
  PetscCall(PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL));
  PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL));
  PetscCall(PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL));
  PetscCall(PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL));
  PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL));
  PetscCall(PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL));
  PetscCall(PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL));
  PetscCall(PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL));
  PetscOptionsHeadEnd();
  PetscCall(TaoSetFromOptions(am->subsolverX));
  if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) PetscCall(TaoSetFromOptions(am->subsolverZ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

525: static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer)
526: {
527:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

529:   PetscFunctionBegin;
530:   PetscCall(PetscViewerASCIIPushTab(viewer));
531:   PetscCall(TaoView(am->subsolverX, viewer));
532:   PetscCall(TaoView(am->subsolverZ, viewer));
533:   PetscCall(PetscViewerASCIIPopTab(viewer));
534:   PetscFunctionReturn(PETSC_SUCCESS);
535: }

/* Allocates work vectors, installs identity shell Jacobians when the user gave
 * none, validates the constraint vector size, and wires the augmented-Lagrangian
 * objective/Jacobian callbacks into both subsolvers. */
static PetscErrorCode TaoSetUp_ADMM(Tao tao)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;
  PetscInt  n, N, M;

  PetscFunctionBegin;
  PetscCall(VecGetLocalSize(tao->solution, &n));
  PetscCall(VecGetSize(tao->solution, &N));
  /* If Jacobian is given as NULL, it means Jacobian is identity matrix with size of solution vector */
  if (!am->JB) {
    am->zJI = PETSC_TRUE; /* B is the (negated) identity shell */
    PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB));
    PetscCall(MatShellSetOperation(am->JB, MATOP_MULT, (PetscErrorCodeFn *)JacobianIdentityB));
    PetscCall(MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (PetscErrorCodeFn *)JacobianIdentityB));
    am->JBpre = am->JB; /* alias, no extra reference — see matching guard in TaoDestroy_ADMM */
  }
  if (!am->JA) {
    am->xJI = PETSC_TRUE; /* A is the identity shell */
    PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA));
    PetscCall(MatShellSetOperation(am->JA, MATOP_MULT, (PetscErrorCodeFn *)JacobianIdentity));
    PetscCall(MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (PetscErrorCodeFn *)JacobianIdentity));
    am->JApre = am->JA; /* alias, no extra reference — see matching guard in TaoDestroy_ADMM */
  }
  PetscCall(MatCreateVecs(am->JA, NULL, &am->Ax));
  if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
  PetscCall(TaoSetSolution(am->subsolverX, tao->solution));
  if (!am->z) PetscCall(VecDuplicate(tao->solution, &am->z));
  PetscCall(TaoSetSolution(am->subsolverZ, am->z));
  /* Lazily allocate the work vectors; all range-space vectors are sized like Ax */
  if (!am->workLeft) PetscCall(VecDuplicate(tao->solution, &am->workLeft));
  if (!am->Axold) PetscCall(VecDuplicate(am->Ax, &am->Axold));
  if (!am->workJacobianRight) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight));
  if (!am->workJacobianRight2) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight2));
  if (!am->Bz) PetscCall(VecDuplicate(am->Ax, &am->Bz));
  if (!am->Bzold) PetscCall(VecDuplicate(am->Ax, &am->Bzold));
  if (!am->Bz0) PetscCall(VecDuplicate(am->Ax, &am->Bz0));
  if (!am->y) PetscCall(VecDuplicate(am->Ax, &am->y));
  if (!am->yold) PetscCall(VecDuplicate(am->Ax, &am->yold));
  if (!am->y0) PetscCall(VecDuplicate(am->Ax, &am->y0));
  if (!am->yhat) PetscCall(VecDuplicate(am->Ax, &am->yhat));
  if (!am->yhatold) PetscCall(VecDuplicate(am->Ax, &am->yhatold));
  if (!am->residual) PetscCall(VecDuplicate(am->Ax, &am->residual));
  if (!am->constraint) {
    am->constraint = NULL; /* redundant self-assignment, kept for symmetry of the branches */
  } else {
    PetscCall(VecGetSize(am->constraint, &M));
    PetscCheck(M == N, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Solution vector and constraint vector must be of same size!");
  }

  /* Save changed tao tolerance for adaptive tolerance */
  if (tao->gatol != tao->default_gatol) am->gatol_admm = tao->gatol;
  if (tao->catol != tao->default_catol) am->catol_admm = tao->catol;

  /*Update spectral and dual elements to X subsolver */
  PetscCall(TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao));
  PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP));
  PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Releases all vectors, matrices, subsolvers, and composed functions owned by
 * the ADMM context, then frees the context itself. */
static PetscErrorCode TaoDestroy_ADMM(Tao tao)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscCall(VecDestroy(&am->z));
  PetscCall(VecDestroy(&am->Ax));
  PetscCall(VecDestroy(&am->Axold));
  PetscCall(VecDestroy(&am->Bz));
  PetscCall(VecDestroy(&am->Bzold));
  PetscCall(VecDestroy(&am->Bz0));
  PetscCall(VecDestroy(&am->residual));
  PetscCall(VecDestroy(&am->y));
  PetscCall(VecDestroy(&am->yold));
  PetscCall(VecDestroy(&am->y0));
  PetscCall(VecDestroy(&am->yhat));
  PetscCall(VecDestroy(&am->yhatold));
  PetscCall(VecDestroy(&am->workLeft));
  PetscCall(VecDestroy(&am->workJacobianRight));
  PetscCall(VecDestroy(&am->workJacobianRight2));

  PetscCall(MatDestroy(&am->JA));
  PetscCall(MatDestroy(&am->JB));
  /* When the identity shells were used (xJI/zJI), JApre/JBpre alias JA/JB with
     no extra reference (see TaoSetUp_ADMM), so destroying them again is skipped */
  if (!am->xJI) PetscCall(MatDestroy(&am->JApre));
  if (!am->zJI) PetscCall(MatDestroy(&am->JBpre));
  if (am->Hx) {
    PetscCall(MatDestroy(&am->Hx));
    PetscCall(MatDestroy(&am->Hxpre));
  }
  if (am->Hz) {
    PetscCall(MatDestroy(&am->Hz));
    PetscCall(MatDestroy(&am->Hzpre));
  }
  PetscCall(MatDestroy(&am->ATA));
  PetscCall(MatDestroy(&am->BTB));
  PetscCall(TaoDestroy(&am->subsolverX));
  PetscCall(TaoDestroy(&am->subsolverZ));
  am->parent = NULL; /* back-pointer only; never owned a reference */
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL));
  PetscCall(PetscFree(tao->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

642: /*MC
  TAOADMM - Alternating direction method of multipliers method for solving problems with
            linear constraints, of the form $ \min_x f(x) + g(z)$  s.t. $Ax+Bz=c$.
            This algorithm employs two sub Tao solvers, whose types can be specified
            by the user. Users need to provide an ObjectiveAndGradient routine, and/or a HessianRoutine for both subsolvers.
            Hessians can be given a boolean flag determining whether they change with respect to an input vector. This can be set via
            `TaoADMMSet{Misfit,Regularizer}HessianChangeStatus()`.
            The second subsolver supports `TAOSHELL`. It should be noted that the L1-norm is used for the objective value for the `TAOSHELL` type.
            There is an option to set the regularizer type, and currently soft-threshold is implemented. For the spectral penalty update,
            there are currently a basic option and an adaptive option.
            The constraint is set as Ax+Bz=c, and A and B can be set with `TaoADMMSet{Misfit,Regularizer}ConstraintJacobian()`.
            c can be set with `TaoADMMSetConstraintVectorRHS()`.
            The user can also provide a regularizer weight for the second subsolver. {cite}`xu2017adaptive`

656:   Options Database Keys:
657: + -tao_admm_regularizer_coefficient        - regularizer constant (default 1.e-6)
658: . -tao_admm_spectral_penalty               - Constant for Augmented Lagrangian term (default 1.)
659: . -tao_admm_relaxation_parameter           - relaxation parameter for Z update (default 1.)
660: . -tao_admm_tolerance_update_factor        - ADMM dynamic tolerance update factor (default 1.e-12)
661: . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2)
662: . -tao_admm_minimum_spectral_penalty       - Set ADMM minimum spectral penalty (default 0)
663: . -tao_admm_dual_update                    - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic")
664: - -tao_admm_regularizer_type               - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold")

666:   Level: beginner

668: .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`,
669:           `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`,
670:           `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`, `TaoADMMGetRegularizerCoefficient()`,
671:           `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`,
672:           `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`,
673:           `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`,
674:           `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`,
675:           `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()`
676: M*/

/* Creates the TAOADMM solver: allocates the TAO_ADMM context, installs the
   type-specific operations, sets default parameters, and creates the two
   subsolvers (misfit and regularizer) with their query/compose hooks. */
PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao)
{
  TAO_ADMM *am;

  PetscFunctionBegin;
  PetscCall(PetscNew(&am));

  /* Install the ADMM implementations of the Tao operations */
  tao->ops->destroy        = TaoDestroy_ADMM;
  tao->ops->setup          = TaoSetUp_ADMM;
  tao->ops->setfromoptions = TaoSetFromOptions_ADMM;
  tao->ops->view           = TaoView_ADMM;
  tao->ops->solve          = TaoSolve_ADMM;

  PetscCall(TaoParametersInitialize(tao));

  /* Default parameter values; see the options database keys in the man page
     above for the user-visible knobs that override these */
  tao->data           = (void *)am;
  am->l1epsilon       = 1e-6; /* regularizer coefficient default (-tao_admm_regularizer_coefficient) */
  am->lambda          = 1e-4;
  am->mu              = 1.; /* spectral penalty (-tao_admm_spectral_penalty) */
  am->muold           = 0.;
  am->mueps           = PETSC_MACHINE_EPSILON;
  am->mumin           = 0.; /* minimum spectral penalty (-tao_admm_minimum_spectral_penalty) */
  am->orthval         = 0.2; /* spectral penalty update curvature safeguard */
  am->T               = 2;
  am->parent          = tao;
  am->update          = TAO_ADMM_UPDATE_BASIC;
  am->regswitch       = TAO_ADMM_REGULARIZER_SOFT_THRESH;
  am->tol             = PETSC_SMALL;
  am->const_norm      = 0;
  am->resnorm         = 0;
  am->dualres         = 0;
  am->ops->regobjgrad = NULL;
  am->ops->reghess    = NULL;
  am->gamma           = 1; /* relaxation parameter for the z update */
  am->regobjgradP     = NULL;
  am->reghessP        = NULL;
  am->gatol_admm      = 1e-8;
  am->catol_admm      = 0;
  /* Assume Hessians change with the input vector until told otherwise via
     TaoADMMSet{Misfit,Reg}HessianChangeStatus() */
  am->Hxchange        = PETSC_TRUE;
  am->Hzchange        = PETSC_TRUE;
  am->Hzbool          = PETSC_TRUE;
  am->Hxbool          = PETSC_TRUE;

  /* Create the two subsolvers with distinct option prefixes so they can be
     configured independently from the command line */
  PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX));
  PetscCall(TaoSetOptionsPrefix(am->subsolverX, "misfit_"));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1));
  PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ));
  PetscCall(TaoSetOptionsPrefix(am->subsolverZ, "reg_"));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1));

  PetscCall(TaoSetType(am->subsolverX, TAONLS));
  PetscCall(TaoSetType(am->subsolverZ, TAONLS));
  /* Attach the parent to each subsolver so TaoGetADMMParentTao() can recover it */
  PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
  PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao));
  /* Register the type-specific implementations behind the public Set/Get APIs */
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM));
  PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM));
  PetscFunctionReturn(PETSC_SUCCESS);
}

739: /*@
740:   TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines  whether Hessian matrix of misfit subsolver changes with respect to input vector.

742:   Collective

744:   Input Parameters:
745: + tao - the Tao solver context.
746: - b   - the Hessian matrix change status boolean, `PETSC_FALSE`  when the Hessian matrix does not change, `PETSC_TRUE` otherwise.

748:   Level: advanced

750: .seealso: `TAOADMM`
751: @*/
752: PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b)
753: {
754:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

756:   PetscFunctionBegin;
757:   am->Hxchange = b;
758:   PetscFunctionReturn(PETSC_SUCCESS);
759: }

761: /*@
762:   TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether Hessian matrix of regularization subsolver changes with respect to input vector.

764:   Collective

766:   Input Parameters:
767: + tao - the `Tao` solver context
768: - b   - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise.

770:   Level: advanced

772: .seealso: `TAOADMM`
773: @*/
774: PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b)
775: {
776:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

778:   PetscFunctionBegin;
779:   am->Hzchange = b;
780:   PetscFunctionReturn(PETSC_SUCCESS);
781: }

783: /*@
784:   TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value

786:   Collective

788:   Input Parameters:
789: + tao - the `Tao` solver context
790: - mu  - spectral penalty

792:   Level: advanced

794: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM`
795: @*/
796: PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu)
797: {
798:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

800:   PetscFunctionBegin;
801:   am->mu = mu;
802:   PetscFunctionReturn(PETSC_SUCCESS);
803: }

805: /*@
806:   TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value

808:   Collective

810:   Input Parameter:
811: . tao - the `Tao` solver context

813:   Output Parameter:
814: . mu - spectral penalty

816:   Level: advanced

818: .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM`
819: @*/
820: PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu)
821: {
822:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

824:   PetscFunctionBegin;
826:   PetscAssertPointer(mu, 2);
827:   *mu = am->mu;
828:   PetscFunctionReturn(PETSC_SUCCESS);
829: }

831: /*@
832:   TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside `TAOADMM`

834:   Collective

836:   Input Parameter:
837: . tao - the `Tao` solver context

839:   Output Parameter:
840: . misfit - the `Tao` subsolver context

842:   Level: advanced

844: .seealso: `TAOADMM`, `Tao`
845: @*/
846: PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit)
847: {
848:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

850:   PetscFunctionBegin;
851:   *misfit = am->subsolverX;
852:   PetscFunctionReturn(PETSC_SUCCESS);
853: }

855: /*@
856:   TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside `TAOADMM`

858:   Collective

860:   Input Parameter:
861: . tao - the `Tao` solver context

863:   Output Parameter:
864: . reg - the `Tao` subsolver context

866:   Level: advanced

868: .seealso: `TAOADMM`, `Tao`
869: @*/
870: PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg)
871: {
872:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

874:   PetscFunctionBegin;
875:   *reg = am->subsolverZ;
876:   PetscFunctionReturn(PETSC_SUCCESS);
877: }

879: /*@
880:   TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for `TAOADMM`

882:   Collective

884:   Input Parameters:
885: + tao - the `Tao` solver context
886: - c   - RHS vector

888:   Level: advanced

890: .seealso: `TAOADMM`
891: @*/
892: PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c)
893: {
894:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

896:   PetscFunctionBegin;
897:   am->constraint = c;
898:   PetscFunctionReturn(PETSC_SUCCESS);
899: }

901: /*@
902:   TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty

904:   Collective

906:   Input Parameters:
907: + tao - the `Tao` solver context
908: - mu  - minimum spectral penalty value

910:   Level: advanced

912: .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM`
913: @*/
914: PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu)
915: {
916:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

918:   PetscFunctionBegin;
919:   am->mumin = mu;
920:   PetscFunctionReturn(PETSC_SUCCESS);
921: }

923: /*@
924:   TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case

926:   Collective

928:   Input Parameters:
929: + tao    - the `Tao` solver context
930: - lambda - L1-norm regularizer coefficient

932:   Level: advanced

934: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
935: @*/
936: PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda)
937: {
938:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

940:   PetscFunctionBegin;
941:   am->lambda = lambda;
942:   PetscFunctionReturn(PETSC_SUCCESS);
943: }

945: /*@
946:   TaoADMMGetRegularizerCoefficient - Get the regularization coefficient lambda for L1 norm regularization case

948:   Collective

950:   Input Parameter:
951: . tao - the `Tao` solver context

953:   Output Parameter:
954: . lambda - L1-norm regularizer coefficient

956:   Level: advanced

958: .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
959: @*/
960: PetscErrorCode TaoADMMGetRegularizerCoefficient(Tao tao, PetscReal *lambda)
961: {
962:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

964:   PetscFunctionBegin;
965:   *lambda = am->lambda;
966:   PetscFunctionReturn(PETSC_SUCCESS);
967: }

969: /*@C
970:   TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix B for the `TAOADMM` algorithm. Matrix B constrains the z variable.

972:   Collective

974:   Input Parameters:
975: + tao  - the Tao solver context
976: . J    - user-created misfit constraint Jacobian matrix
977: . Jpre - user-created misfit Jacobian constraint matrix for constructing the preconditioner, often this is `J`
978: . func - function pointer for the misfit constraint Jacobian update function
979: - ctx  - application context for the regularizer constraint Jacobian

981:   Calling sequence of func:
982: + tao  - the `Tao` context
983: . u    - in current input solution
984: . J    - the contribution to the misfit constraint Jacobian
985: . Jpre - the contribution to matrix from which to construct a preconditioner for the misfit constraint Jacobian
986: - ctx  - the optional application context

988:   Level: advanced

990: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM`
991: @*/
992: PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao tao, Vec u, Mat J, Mat Jpre, PetscCtx ctx), PetscCtx ctx)
993: {
994:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

996:   PetscFunctionBegin;
998:   if (J) {
1000:     PetscCheckSameComm(tao, 1, J, 2);
1001:   }
1002:   if (Jpre) {
1004:     PetscCheckSameComm(tao, 1, Jpre, 3);
1005:   }
1006:   if (ctx) am->misfitjacobianP = ctx;
1007:   if (func) am->ops->misfitjac = func;

1009:   if (J) {
1010:     PetscCall(PetscObjectReference((PetscObject)J));
1011:     PetscCall(MatDestroy(&am->JA));
1012:     am->JA = J;
1013:   }
1014:   if (Jpre) {
1015:     PetscCall(PetscObjectReference((PetscObject)Jpre));
1016:     PetscCall(MatDestroy(&am->JApre));
1017:     am->JApre = Jpre;
1018:   }
1019:   PetscFunctionReturn(PETSC_SUCCESS);
1020: }

1022: /*@C
1023:   TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for `TAOADMM` algorithm. Matrix B constraints z variable.

1025:   Collective

1027:   Input Parameters:
1028: + tao  - the `Tao` solver context
1029: . J    - user-created regularizer constraint Jacobian matrix
1030: . Jpre - user-created regularizer Jacobian constraint matrix for constructing the preconditioner, often this is `J`
1031: . func - function pointer for the regularizer constraint Jacobian update function
1032: - ctx  - application context for the regularizer constraint Jacobian

1034:   Calling sequence of func:
1035: + tao  - the `Tao` context
1036: . u    - in current input solution
1037: . J    - the contribution to the constraint Jacobian
1038: . Jpre - the contribution to matrix from which to construct a preconditioner for the constraint Jacobian
1039: - ctx  - the optional application context

1041:   Level: advanced

1043: .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
1044: @*/
1045: PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao tao, Vec u, Mat J, Mat Jpre, PetscCtx ctx), PetscCtx ctx)
1046: {
1047:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1049:   PetscFunctionBegin;
1051:   if (J) {
1053:     PetscCheckSameComm(tao, 1, J, 2);
1054:   }
1055:   if (Jpre) {
1057:     PetscCheckSameComm(tao, 1, Jpre, 3);
1058:   }
1059:   if (ctx) am->regjacobianP = ctx;
1060:   if (func) am->ops->regjac = func;

1062:   if (J) {
1063:     PetscCall(PetscObjectReference((PetscObject)J));
1064:     PetscCall(MatDestroy(&am->JB));
1065:     am->JB = J;
1066:   }
1067:   if (Jpre) {
1068:     PetscCall(PetscObjectReference((PetscObject)Jpre));
1069:     PetscCall(MatDestroy(&am->JBpre));
1070:     am->JBpre = Jpre;
1071:   }
1072:   PetscFunctionReturn(PETSC_SUCCESS);
1073: }

1075: /*@C
1076:   TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function

1078:   Collective

1080:   Input Parameters:
1081: + tao  - the `Tao` context
1082: . func - function pointer for the misfit value and gradient evaluation
1083: - ctx  - application context for the misfit

1085:   Calling sequence of func:
1086: + tao - the `Tao` context
1087: . u   - in current input solution
1088: . f   - the contribution to the objective function
1089: . g   - the contribution to the gradient
1090: - ctx - the optional application context

1092:   Level: advanced

1094: .seealso: `TAOADMM`
1095: @*/
1096: PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao tao, Vec u, PetscReal *f, Vec g, PetscCtx ctx), PetscCtx ctx)
1097: {
1098:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1100:   PetscFunctionBegin;
1102:   am->misfitobjgradP     = ctx;
1103:   am->ops->misfitobjgrad = func;
1104:   PetscFunctionReturn(PETSC_SUCCESS);
1105: }

1107: /*@C
1108:   TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
1109:   function into the algorithm, to be used for subsolverX.

1111:   Collective

1113:   Input Parameters:
1114: + tao  - the `Tao` context
1115: . H    - user-created matrix for the Hessian of the misfit term
1116: . Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
1117: . func - function pointer for the misfit Hessian evaluation
1118: - ctx  - application context for the misfit Hessian

1120:   Calling sequence of func:
1121: + tao  - the `Tao` context
1122: . u    - in current input solution
1123: . H    - output, the contribution to the Hessian matrix
1124: . Hpre - an optional contribution to an alternative matrix with which the preconditioner is to be constructed
1125: - ctx  - the optional application context

1127:   Level: advanced

1129: .seealso: `TAOADMM`
1130: @*/
1131: PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao tao, Vec u, Mat H, Mat Hpre, PetscCtx ctx), PetscCtx ctx)
1132: {
1133:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1135:   PetscFunctionBegin;
1137:   if (H) {
1139:     PetscCheckSameComm(tao, 1, H, 2);
1140:   }
1141:   if (Hpre) {
1143:     PetscCheckSameComm(tao, 1, Hpre, 3);
1144:   }
1145:   if (ctx) am->misfithessP = ctx;
1146:   if (func) am->ops->misfithess = func;
1147:   if (H) {
1148:     PetscCall(PetscObjectReference((PetscObject)H));
1149:     PetscCall(MatDestroy(&am->Hx));
1150:     am->Hx = H;
1151:   }
1152:   if (Hpre) {
1153:     PetscCall(PetscObjectReference((PetscObject)Hpre));
1154:     PetscCall(MatDestroy(&am->Hxpre));
1155:     am->Hxpre = Hpre;
1156:   }
1157:   PetscFunctionReturn(PETSC_SUCCESS);
1158: }

1160: /*@C
1161:   TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function

1163:   Collective

1165:   Input Parameters:
1166: + tao  - the Tao context
1167: . func - function pointer for the regularizer value and gradient evaluation
1168: - ctx  - application context for the regularizer

1170:   Calling sequence of func:
1171: + tao - the `Tao` context
1172: . u   - in current input solution
1173: . f   - the contribution to the objective function
1174: . g   - the contribution to the gradient
1175: - ctx - the optional application context

1177:   Level: advanced

1179: .seealso: `TAOADMM`
1180: @*/
1181: PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao tao, Vec u, PetscReal *f, Vec g, PetscCtx ctx), PetscCtx ctx)
1182: {
1183:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1185:   PetscFunctionBegin;
1187:   am->regobjgradP     = ctx;
1188:   am->ops->regobjgrad = func;
1189:   PetscFunctionReturn(PETSC_SUCCESS);
1190: }

1192: /*@C
1193:   TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
1194:   function, to be used for subsolverZ.

1196:   Collective

1198:   Input Parameters:
1199: + tao  - the `Tao` context
1200: . H    - user-created matrix for the Hessian of the regularization term
1201: . Hpre - user-created matrix for building the preconditioner of the Hessian of the regularization term
1202: . func - function pointer for the regularizer Hessian evaluation
1203: - ctx  - application context for the regularizer Hessian

1205:   Calling sequence of func:
1206: + tao  - the `Tao` context
1207: . u    - in current input solution
1208: . H    - output, the contribution to the Hessian matrix
1209: . Hpre - an optional contribution to an alternative matrix with which the preconditioner is to be constructed
1210: - ctx  - the optional application context

1212:   Level: advanced

1214: .seealso: `TAOADMM`
1215: @*/
1216: PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao tao, Vec u, Mat H, Mat Hpre, PetscCtx ctx), PetscCtx ctx)
1217: {
1218:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1220:   PetscFunctionBegin;
1222:   if (H) {
1224:     PetscCheckSameComm(tao, 1, H, 2);
1225:   }
1226:   if (Hpre) {
1228:     PetscCheckSameComm(tao, 1, Hpre, 3);
1229:   }
1230:   if (ctx) am->reghessP = ctx;
1231:   if (func) am->ops->reghess = func;
1232:   if (H) {
1233:     PetscCall(PetscObjectReference((PetscObject)H));
1234:     PetscCall(MatDestroy(&am->Hz));
1235:     am->Hz = H;
1236:   }
1237:   if (Hpre) {
1238:     PetscCall(PetscObjectReference((PetscObject)Hpre));
1239:     PetscCall(MatDestroy(&am->Hzpre));
1240:     am->Hzpre = Hpre;
1241:   }
1242:   PetscFunctionReturn(PETSC_SUCCESS);
1243: }

1245: /*@
1246:   TaoGetADMMParentTao - Gets pointer to parent `TAOADMM`, used by inner subsolver.

1248:   Collective

1250:   Input Parameter:
1251: . tao - the `Tao` context

1253:   Output Parameter:
1254: . admm_tao - the parent `Tao` context

1256:   Level: advanced

1258: .seealso: `TAOADMM`
1259: @*/
1260: PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
1261: {
1262:   PetscFunctionBegin;
1264:   PetscCall(PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao));
1265:   PetscFunctionReturn(PETSC_SUCCESS);
1266: }

1268: /*@
1269:   TaoADMMGetDualVector - Returns the dual vector associated with the current `TAOADMM` state

1271:   Not Collective

1273:   Input Parameter:
1274: . tao - the `Tao` context

1276:   Output Parameter:
1277: . Y - the current solution

1279:   Level: intermediate

1281: .seealso: `TAOADMM`
1282: @*/
1283: PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
1284: {
1285:   TAO_ADMM *am = (TAO_ADMM *)tao->data;

1287:   PetscFunctionBegin;
1289:   *Y = am->y;
1290:   PetscFunctionReturn(PETSC_SUCCESS);
1291: }

1293: /*@
1294:   TaoADMMSetRegularizerType - Set regularizer type for `TAOADMM` routine

1296:   Not Collective

1298:   Input Parameters:
1299: + tao  - the `Tao` context
1300: - type - regularizer type

1302:   Options Database Key:
1303: . -tao_admm_regularizer_type (regularizer_user|regularizer_soft_thresh) - select the regularizer

1305:   Level: intermediate

1307: .seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1308: @*/
1309: PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
1310: {
1311:   PetscFunctionBegin;
1314:   PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
1315:   PetscFunctionReturn(PETSC_SUCCESS);
1316: }

1318: /*@
1319:   TaoADMMGetRegularizerType - Gets the type of regularizer routine for `TAOADMM`

1321:   Not Collective

1323:   Input Parameter:
1324: . tao - the `Tao` context

1326:   Output Parameter:
1327: . type - the type of regularizer

1329:   Level: intermediate

1331: .seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
1332: @*/
1333: PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
1334: {
1335:   PetscFunctionBegin;
1337:   PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
1338:   PetscFunctionReturn(PETSC_SUCCESS);
1339: }

1341: /*@
1342:   TaoADMMSetUpdateType - Set update routine for `TAOADMM` routine

1344:   Not Collective

1346:   Input Parameters:
1347: + tao  - the `Tao` context
1348: - type - spectral parameter update type

1350:   Level: intermediate

1352: .seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1353: @*/
1354: PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
1355: {
1356:   PetscFunctionBegin;
1359:   PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
1360:   PetscFunctionReturn(PETSC_SUCCESS);
1361: }

1363: /*@
1364:   TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for `TAOADMM`

1366:   Not Collective

1368:   Input Parameter:
1369: . tao - the `Tao` context

1371:   Output Parameter:
1372: . type - the type of spectral penalty update routine

1374:   Level: intermediate

1376: .seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
1377: @*/
1378: PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
1379: {
1380:   PetscFunctionBegin;
1382:   PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
1383:   PetscFunctionReturn(PETSC_SUCCESS);
1384: }