Actual source code: brgn.c
1: #include <../src/tao/leastsquares/impls/brgn/brgn.h>
3: #define BRGN_REGULARIZATION_USER 0
4: #define BRGN_REGULARIZATION_L2PROX 1
5: #define BRGN_REGULARIZATION_L2PURE 2
6: #define BRGN_REGULARIZATION_L1DICT 3
7: #define BRGN_REGULARIZATION_LM 4
8: #define BRGN_REGULARIZATION_TYPES 5
10: static const char *BRGN_REGULARIZATION_TABLE[64] = {"user", "l2prox", "l2pure", "l1dict", "lm"};
12: static PetscErrorCode GNHessianProd(Mat H, Vec in, Vec out)
13: {
14: TAO_BRGN *gn;
16: PetscFunctionBegin;
17: PetscCall(MatShellGetContext(H, &gn));
18: PetscCall(MatMult(gn->subsolver->ls_jac, in, gn->r_work));
19: PetscCall(MatMultTranspose(gn->subsolver->ls_jac, gn->r_work, out));
20: switch (gn->reg_type) {
21: case BRGN_REGULARIZATION_USER:
22: PetscCall(MatMult(gn->Hreg, in, gn->x_work));
23: PetscCall(VecAXPY(out, gn->lambda, gn->x_work));
24: break;
25: case BRGN_REGULARIZATION_L2PURE:
26: PetscCall(VecAXPY(out, gn->lambda, in));
27: break;
28: case BRGN_REGULARIZATION_L2PROX:
29: PetscCall(VecAXPY(out, gn->lambda, in));
30: break;
31: case BRGN_REGULARIZATION_L1DICT:
32: /* out = out + lambda*D'*(diag.*(D*in)) */
33: if (gn->D) {
34: PetscCall(MatMult(gn->D, in, gn->y)); /* y = D*in */
35: } else {
36: PetscCall(VecCopy(in, gn->y));
37: }
38: PetscCall(VecPointwiseMult(gn->y_work, gn->diag, gn->y)); /* y_work = diag.*(D*in), where diag = epsilon^2 ./ sqrt(y.^2+epsilon^2).^3 and y = D*x */
39: if (gn->D) {
40: PetscCall(MatMultTranspose(gn->D, gn->y_work, gn->x_work)); /* x_work = D'*(diag.*(D*in)) */
41: } else {
42: PetscCall(VecCopy(gn->y_work, gn->x_work));
43: }
44: PetscCall(VecAXPY(out, gn->lambda, gn->x_work));
45: break;
46: case BRGN_REGULARIZATION_LM:
47: PetscCall(VecPointwiseMult(gn->x_work, gn->damping, in));
48: PetscCall(VecAXPY(out, 1, gn->x_work));
49: break;
50: }
51: PetscFunctionReturn(PETSC_SUCCESS);
52: }
53: static PetscErrorCode ComputeDamping(TAO_BRGN *gn)
54: {
55: const PetscScalar *diag_ary;
56: PetscScalar *damping_ary;
57: PetscInt i, n;
59: PetscFunctionBegin;
60: /* update damping */
61: PetscCall(VecGetArray(gn->damping, &damping_ary));
62: PetscCall(VecGetArrayRead(gn->diag, &diag_ary));
63: PetscCall(VecGetLocalSize(gn->damping, &n));
64: for (i = 0; i < n; i++) damping_ary[i] = PetscClipInterval(diag_ary[i], PETSC_SQRT_MACHINE_EPSILON, PetscSqrtReal(PETSC_MAX_REAL));
65: PetscCall(VecScale(gn->damping, gn->lambda));
66: PetscCall(VecRestoreArray(gn->damping, &damping_ary));
67: PetscCall(VecRestoreArrayRead(gn->diag, &diag_ary));
68: PetscFunctionReturn(PETSC_SUCCESS);
69: }
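/*@
TaoBRGNGetDampingVector - Get a reference to the damping vector used by the Levenberg-Marquardt ("lm") regularization

Not Collective

Input Parameter:
. tao - the `Tao` solver context

Output Parameter:
. d - the damping vector, lambda times the (clipped) diagonal of J^T J

Level: advanced

Note:
The damping vector is only available when the regularization type is "lm". It is owned by the solver and must not be destroyed by the caller.

.seealso: `Tao`, `TAOBRGN`
@*/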
71: PetscErrorCode TaoBRGNGetDampingVector(Tao tao, Vec *d)
72: {
73: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
75: PetscFunctionBegin;
76: PetscCheck(gn->reg_type == BRGN_REGULARIZATION_LM, PetscObjectComm((PetscObject)tao), PETSC_ERR_SUP, "Damping vector is only available if regularization type is lm.");
77: *d = gn->damping;
78: PetscFunctionReturn(PETSC_SUCCESS);
79: }
81: static PetscErrorCode GNObjectiveGradientEval(Tao tao, Vec X, PetscReal *fcn, Vec G, void *ptr)
82: {
83: TAO_BRGN *gn = (TAO_BRGN *)ptr;
84: PetscInt K; /* dimension of D*X */
85: PetscScalar yESum;
86: PetscReal f_reg;
88: PetscFunctionBegin;
89: /* compute objective *fcn*/
90: /* compute first term 0.5*||ls_res||_2^2 */
91: PetscCall(TaoComputeResidual(tao, X, tao->ls_res));
92: PetscCall(VecDot(tao->ls_res, tao->ls_res, fcn));
93: *fcn *= 0.5;
94: /* compute gradient G */
95: PetscCall(TaoComputeResidualJacobian(tao, X, tao->ls_jac, tao->ls_jac_pre));
96: PetscCall(MatMultTranspose(tao->ls_jac, tao->ls_res, G));
97: /* add the regularization contribution */
98: switch (gn->reg_type) {
99: case BRGN_REGULARIZATION_USER:
100: PetscCall((*gn->regularizerobjandgrad)(tao, X, &f_reg, gn->x_work, gn->reg_obj_ctx));
101: *fcn += gn->lambda * f_reg;
102: PetscCall(VecAXPY(G, gn->lambda, gn->x_work));
103: break;
104: case BRGN_REGULARIZATION_L2PURE:
105: /* compute f = f + lambda*0.5*xk'*xk */
106: PetscCall(VecDot(X, X, &f_reg));
107: *fcn += gn->lambda * 0.5 * f_reg;
108: /* compute G = G + lambda*xk */
109: PetscCall(VecAXPY(G, gn->lambda, X));
110: break;
111: case BRGN_REGULARIZATION_L2PROX:
112: /* compute f = f + lambda*0.5*(xk - xkm1)'*(xk - xkm1) */
113: PetscCall(VecAXPBYPCZ(gn->x_work, 1.0, -1.0, 0.0, X, gn->x_old));
114: PetscCall(VecDot(gn->x_work, gn->x_work, &f_reg));
115: *fcn += gn->lambda * 0.5 * f_reg;
116: /* compute G = G + lambda*(xk - xkm1) */
117: PetscCall(VecAXPBYPCZ(G, gn->lambda, -gn->lambda, 1.0, X, gn->x_old));
118: break;
119: case BRGN_REGULARIZATION_L1DICT:
120: /* compute f = f + lambda*sum(sqrt(y.^2+epsilon^2) - epsilon), where y = D*x*/
121: if (gn->D) {
122: PetscCall(MatMult(gn->D, X, gn->y)); /* y = D*x */
123: } else {
124: PetscCall(VecCopy(X, gn->y));
125: }
126: PetscCall(VecPointwiseMult(gn->y_work, gn->y, gn->y));
127: PetscCall(VecShift(gn->y_work, gn->epsilon * gn->epsilon));
128: PetscCall(VecSqrtAbs(gn->y_work)); /* gn->y_work = sqrt(y.^2+epsilon^2) */
129: PetscCall(VecSum(gn->y_work, &yESum));
130: PetscCall(VecGetSize(gn->y, &K));
131: *fcn += gn->lambda * (yESum - K * gn->epsilon);
132: /* compute G = G + lambda*D'*(y./sqrt(y.^2+epsilon^2)),where y = D*x */
133: PetscCall(VecPointwiseDivide(gn->y_work, gn->y, gn->y_work)); /* reuse y_work = y./sqrt(y.^2+epsilon^2) */
134: if (gn->D) {
135: PetscCall(MatMultTranspose(gn->D, gn->y_work, gn->x_work));
136: } else {
137: PetscCall(VecCopy(gn->y_work, gn->x_work));
138: }
139: PetscCall(VecAXPY(G, gn->lambda, gn->x_work));
140: break;
141: }
142: PetscFunctionReturn(PETSC_SUCCESS);
143: }
145: static PetscErrorCode GNComputeHessian(Tao tao, Vec X, Mat H, Mat Hpre, void *ptr)
146: {
147: TAO_BRGN *gn = (TAO_BRGN *)ptr;
148: PetscInt i, n, cstart, cend;
149: PetscScalar *cnorms, *diag_ary;
151: PetscFunctionBegin;
152: PetscCall(TaoComputeResidualJacobian(tao, X, tao->ls_jac, tao->ls_jac_pre));
153: if (gn->mat_explicit) PetscCall(MatTransposeMatMult(tao->ls_jac, tao->ls_jac, MAT_REUSE_MATRIX, PETSC_DETERMINE, &gn->H));
155: switch (gn->reg_type) {
156: case BRGN_REGULARIZATION_USER:
157: PetscCall((*gn->regularizerhessian)(tao, X, gn->Hreg, gn->reg_hess_ctx));
158: if (gn->mat_explicit) PetscCall(MatAXPY(gn->H, 1.0, gn->Hreg, DIFFERENT_NONZERO_PATTERN));
159: break;
160: case BRGN_REGULARIZATION_L2PURE:
161: if (gn->mat_explicit) PetscCall(MatShift(gn->H, gn->lambda));
162: break;
163: case BRGN_REGULARIZATION_L2PROX:
164: if (gn->mat_explicit) PetscCall(MatShift(gn->H, gn->lambda));
165: break;
166: case BRGN_REGULARIZATION_L1DICT:
167: /* calculate and store the diagonal matrix as a vector: diag = epsilon^2 ./ sqrt(y.^2+epsilon^2).^3, where y = D*x (y = x when no dictionary is set) */
168: if (gn->D) {
169: PetscCall(MatMult(gn->D, X, gn->y)); /* y = D*x */
170: } else {
171: PetscCall(VecCopy(X, gn->y));
172: }
173: PetscCall(VecPointwiseMult(gn->y_work, gn->y, gn->y));
174: PetscCall(VecShift(gn->y_work, gn->epsilon * gn->epsilon));
175: PetscCall(VecCopy(gn->y_work, gn->diag)); /* gn->diag = y.^2+epsilon^2 */
176: PetscCall(VecSqrtAbs(gn->y_work)); /* gn->y_work = sqrt(y.^2+epsilon^2) */
177: PetscCall(VecPointwiseMult(gn->diag, gn->y_work, gn->diag)); /* gn->diag = sqrt(y.^2+epsilon^2).^3 */
178: PetscCall(VecReciprocal(gn->diag));
179: PetscCall(VecScale(gn->diag, gn->epsilon * gn->epsilon));
180: if (gn->mat_explicit) PetscCall(MatDiagonalSet(gn->H, gn->diag, ADD_VALUES));
181: break;
182: case BRGN_REGULARIZATION_LM:
183: /* compute diagonal of J^T J */
184: PetscCall(MatGetSize(gn->parent->ls_jac, NULL, &n));
185: PetscCall(PetscMalloc1(n, &cnorms));
186: PetscCall(MatGetColumnNorms(gn->parent->ls_jac, NORM_2, cnorms));
187: PetscCall(MatGetOwnershipRangeColumn(gn->parent->ls_jac, &cstart, &cend));
188: PetscCall(VecGetArray(gn->diag, &diag_ary));
189: for (i = 0; i < cend - cstart; i++) diag_ary[i] = cnorms[cstart + i] * cnorms[cstart + i];
190: PetscCall(VecRestoreArray(gn->diag, &diag_ary));
191: PetscCall(PetscFree(cnorms));
192: PetscCall(ComputeDamping(gn));
193: if (gn->mat_explicit) PetscCall(MatDiagonalSet(gn->H, gn->damping, ADD_VALUES));
194: break;
195: }
196: PetscFunctionReturn(PETSC_SUCCESS);
197: }
199: static PetscErrorCode GNHookFunction(Tao tao, PetscInt iter, void *ctx)
200: {
201: TAO_BRGN *gn = (TAO_BRGN *)ctx;
203: PetscFunctionBegin;
204: /* Update basic tao information from the subsolver */
205: gn->parent->nfuncs = tao->nfuncs;
206: gn->parent->ngrads = tao->ngrads;
207: gn->parent->nfuncgrads = tao->nfuncgrads;
208: gn->parent->nhess = tao->nhess;
209: gn->parent->niter = tao->niter;
210: gn->parent->ksp_its = tao->ksp_its;
211: gn->parent->ksp_tot_its = tao->ksp_tot_its;
212: gn->parent->fc = tao->fc;
213: PetscCall(TaoGetConvergedReason(tao, &gn->parent->reason));
214: /* Update the solution vectors */
215: if (iter == 0) {
216: PetscCall(VecSet(gn->x_old, 0.0));
217: } else {
218: PetscCall(VecCopy(tao->solution, gn->x_old));
219: PetscCall(VecCopy(tao->solution, gn->parent->solution));
220: }
221: /* Update the gradient */
222: PetscCall(VecCopy(tao->gradient, gn->parent->gradient));
224: /* Update damping parameter for LM */
225: if (gn->reg_type == BRGN_REGULARIZATION_LM) {
226: if (iter > 0) {
227: if (gn->fc_old > tao->fc) {
228: gn->lambda = gn->lambda * gn->downhill_lambda_change;
229: } else {
230: /* uphill step */
231: gn->lambda = gn->lambda * gn->uphill_lambda_change;
232: }
233: }
234: gn->fc_old = tao->fc;
235: }
237: /* Call general purpose update function */
238: if (gn->parent->ops->update) PetscCall((*gn->parent->ops->update)(gn->parent, gn->parent->niter, gn->parent->user_update));
239: PetscFunctionReturn(PETSC_SUCCESS);
240: }
242: static PetscErrorCode TaoSolve_BRGN(Tao tao)
243: {
244: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
246: PetscFunctionBegin;
247: PetscCall(TaoSolve(gn->subsolver));
248: /* Update basic tao information from the subsolver */
249: tao->nfuncs = gn->subsolver->nfuncs;
250: tao->ngrads = gn->subsolver->ngrads;
251: tao->nfuncgrads = gn->subsolver->nfuncgrads;
252: tao->nhess = gn->subsolver->nhess;
253: tao->niter = gn->subsolver->niter;
254: tao->ksp_its = gn->subsolver->ksp_its;
255: tao->ksp_tot_its = gn->subsolver->ksp_tot_its;
256: PetscCall(TaoGetConvergedReason(gn->subsolver, &tao->reason));
257: /* Update vectors */
258: PetscCall(VecCopy(gn->subsolver->solution, tao->solution));
259: PetscCall(VecCopy(gn->subsolver->gradient, tao->gradient));
260: PetscFunctionReturn(PETSC_SUCCESS);
261: }
263: static PetscErrorCode TaoSetFromOptions_BRGN(Tao tao, PetscOptionItems *PetscOptionsObject)
264: {
265: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
266: TaoLineSearch ls;
268: PetscFunctionBegin;
269: PetscOptionsHeadBegin(PetscOptionsObject, "least-squares problems with regularizer: 0.5*||f(x)||^2 + lambda*g(x), where g(x) is 0.5*||x-x_old||^2, 0.5*||x||^2, a smoothed ||D*x||_1, or a user-defined function.");
270: PetscCall(PetscOptionsBool("-tao_brgn_mat_explicit", "switches the Hessian construction to be an explicit matrix rather than MATSHELL", "", gn->mat_explicit, &gn->mat_explicit, NULL));
271: PetscCall(PetscOptionsReal("-tao_brgn_regularizer_weight", "regularizer weight (default 1e-4)", "", gn->lambda, &gn->lambda, NULL));
272: PetscCall(PetscOptionsReal("-tao_brgn_l1_smooth_epsilon", "L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)", "", gn->epsilon, &gn->epsilon, NULL));
273: PetscCall(PetscOptionsReal("-tao_brgn_lm_downhill_lambda_change", "Factor by which to decrease the damping parameter on downhill steps", "", gn->downhill_lambda_change, &gn->downhill_lambda_change, NULL));
274: PetscCall(PetscOptionsReal("-tao_brgn_lm_uphill_lambda_change", "Factor by which to increase the damping parameter on uphill steps", "", gn->uphill_lambda_change, &gn->uphill_lambda_change, NULL));
275: PetscCall(PetscOptionsEList("-tao_brgn_regularization_type", "regularization type", "", BRGN_REGULARIZATION_TABLE, BRGN_REGULARIZATION_TYPES, BRGN_REGULARIZATION_TABLE[gn->reg_type], &gn->reg_type, NULL));
276: PetscOptionsHeadEnd();
277: /* set unit line search direction as the default when using the lm regularizer */
278: if (gn->reg_type == BRGN_REGULARIZATION_LM) {
279: PetscCall(TaoGetLineSearch(gn->subsolver, &ls));
280: PetscCall(TaoLineSearchSetType(ls, TAOLINESEARCHUNIT));
281: }
282: PetscCall(TaoSetFromOptions(gn->subsolver));
283: PetscFunctionReturn(PETSC_SUCCESS);
284: }
286: static PetscErrorCode TaoView_BRGN(Tao tao, PetscViewer viewer)
287: {
288: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
290: PetscFunctionBegin;
291: PetscCall(PetscViewerASCIIPushTab(viewer));
292: PetscCall(TaoView(gn->subsolver, viewer));
293: PetscCall(PetscViewerASCIIPopTab(viewer));
294: PetscFunctionReturn(PETSC_SUCCESS);
295: }
297: static PetscErrorCode TaoSetUp_BRGN(Tao tao)
298: {
299: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
300: PetscBool is_bnls, is_bntr, is_bntl;
301: PetscInt i, n, N, K; /* dict has size K*N */
303: PetscFunctionBegin;
304: PetscCheck(tao->ls_res, PetscObjectComm((PetscObject)tao), PETSC_ERR_ORDER, "TaoSetResidualRoutine() must be called before setup!");
305: PetscCall(PetscObjectTypeCompare((PetscObject)gn->subsolver, TAOBNLS, &is_bnls));
306: PetscCall(PetscObjectTypeCompare((PetscObject)gn->subsolver, TAOBNTR, &is_bntr));
307: PetscCall(PetscObjectTypeCompare((PetscObject)gn->subsolver, TAOBNTL, &is_bntl));
308: PetscCheck((!is_bnls && !is_bntr && !is_bntl) || tao->ls_jac, PetscObjectComm((PetscObject)tao), PETSC_ERR_ORDER, "TaoSetJacobianResidualRoutine() must be called before setup!");
309: if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient));
310: if (!gn->x_work) PetscCall(VecDuplicate(tao->solution, &gn->x_work));
311: if (!gn->r_work) PetscCall(VecDuplicate(tao->ls_res, &gn->r_work));
312: if (!gn->x_old) {
313: PetscCall(VecDuplicate(tao->solution, &gn->x_old));
314: PetscCall(VecSet(gn->x_old, 0.0));
315: }
317: if (BRGN_REGULARIZATION_L1DICT == gn->reg_type) {
318: if (!gn->y) {
319: if (gn->D) {
320: PetscCall(MatGetSize(gn->D, &K, &N)); /* Shell matrices still must have sizes defined. K = N for identity matrix, K=N-1 or N for gradient matrix */
321: PetscCall(MatCreateVecs(gn->D, NULL, &gn->y));
322: } else {
323: PetscCall(VecDuplicate(tao->solution, &gn->y)); /* If user does not setup dict matrix, use identity matrix, K=N */
324: }
325: PetscCall(VecSet(gn->y, 0.0));
326: }
327: if (!gn->y_work) PetscCall(VecDuplicate(gn->y, &gn->y_work));
328: if (!gn->diag) {
329: PetscCall(VecDuplicate(gn->y, &gn->diag));
330: PetscCall(VecSet(gn->diag, 0.0));
331: }
332: }
333: if (BRGN_REGULARIZATION_LM == gn->reg_type) {
334: if (!gn->diag) PetscCall(MatCreateVecs(tao->ls_jac, &gn->diag, NULL));
335: if (!gn->damping) PetscCall(MatCreateVecs(tao->ls_jac, &gn->damping, NULL));
336: }
338: if (!tao->setupcalled) {
339: /* Hessian setup */
340: if (gn->mat_explicit) {
341: PetscCall(TaoComputeResidualJacobian(tao, tao->solution, tao->ls_jac, tao->ls_jac_pre));
342: PetscCall(MatTransposeMatMult(tao->ls_jac, tao->ls_jac, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &gn->H));
343: } else {
344: PetscCall(VecGetLocalSize(tao->solution, &n));
345: PetscCall(VecGetSize(tao->solution, &N));
346: PetscCall(MatCreate(PetscObjectComm((PetscObject)tao), &gn->H));
347: PetscCall(MatSetSizes(gn->H, n, n, N, N));
348: PetscCall(MatSetType(gn->H, MATSHELL));
349: PetscCall(MatSetOption(gn->H, MAT_SYMMETRIC, PETSC_TRUE));
350: PetscCall(MatShellSetOperation(gn->H, MATOP_MULT, (void (*)(void))GNHessianProd));
351: PetscCall(MatShellSetContext(gn->H, gn));
352: }
353: PetscCall(MatSetUp(gn->H));
354: /* Subsolver setup, including the initial vector and dictionary D */
355: PetscCall(TaoSetUpdate(gn->subsolver, GNHookFunction, gn));
356: PetscCall(TaoSetSolution(gn->subsolver, tao->solution));
357: if (tao->bounded) PetscCall(TaoSetVariableBounds(gn->subsolver, tao->XL, tao->XU));
358: PetscCall(TaoSetResidualRoutine(gn->subsolver, tao->ls_res, tao->ops->computeresidual, tao->user_lsresP));
359: PetscCall(TaoSetJacobianResidualRoutine(gn->subsolver, tao->ls_jac, tao->ls_jac, tao->ops->computeresidualjacobian, tao->user_lsjacP));
360: PetscCall(TaoSetObjectiveAndGradient(gn->subsolver, NULL, GNObjectiveGradientEval, gn));
361: PetscCall(TaoSetHessian(gn->subsolver, gn->H, gn->H, GNComputeHessian, gn));
362: /* Propagate some options down */
363: PetscCall(TaoSetTolerances(gn->subsolver, tao->gatol, tao->grtol, tao->gttol));
364: PetscCall(TaoSetMaximumIterations(gn->subsolver, tao->max_it));
365: PetscCall(TaoSetMaximumFunctionEvaluations(gn->subsolver, tao->max_funcs));
366: for (i = 0; i < tao->numbermonitors; ++i) {
367: PetscCall(TaoMonitorSet(gn->subsolver, tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i]));
368: PetscCall(PetscObjectReference((PetscObject)tao->monitorcontext[i]));
369: }
370: PetscCall(TaoSetUp(gn->subsolver));
371: }
372: PetscFunctionReturn(PETSC_SUCCESS);
373: }
375: static PetscErrorCode TaoDestroy_BRGN(Tao tao)
376: {
377: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
379: PetscFunctionBegin;
380: if (tao->setupcalled) {
381: PetscCall(VecDestroy(&tao->gradient));
382: PetscCall(VecDestroy(&gn->x_work));
383: PetscCall(VecDestroy(&gn->r_work));
384: PetscCall(VecDestroy(&gn->x_old));
385: PetscCall(VecDestroy(&gn->diag));
386: PetscCall(VecDestroy(&gn->y));
387: PetscCall(VecDestroy(&gn->y_work));
388: }
389: PetscCall(VecDestroy(&gn->damping));
390: PetscCall(VecDestroy(&gn->diag));
391: PetscCall(MatDestroy(&gn->H));
392: PetscCall(MatDestroy(&gn->D));
393: PetscCall(MatDestroy(&gn->Hreg));
394: PetscCall(TaoDestroy(&gn->subsolver));
395: gn->parent = NULL;
396: PetscCall(PetscFree(tao->data));
397: PetscFunctionReturn(PETSC_SUCCESS);
398: }
400: /*MC
401: TAOBRGN - Bounded Regularized Gauss-Newton method for solving nonlinear least-squares
402: problems with bound constraints. This algorithm is a thin wrapper around a bound-constrained
403: Newton subsolver (`TAOBNLS` by default) that constructs the Gauss-Newton problem from the
404: user-provided least-squares residual and Jacobian. The algorithm offers an L2-norm regularizer ("l2pure"),
405: an L2-norm proximal point regularizer ("l2prox"), and an L1-norm dictionary regularizer ("l1dict"), where the
406: L1-norm ||x||_1 is approximated by sum_i(sqrt(x_i^2+epsilon^2)-epsilon) for a small positive epsilon.
407: Also offered is the "lm" regularizer, which damps the Gauss-Newton Hessian with a scaled diagonal of J^T J;
408: with it, `TAOBRGN` behaves as a Levenberg-Marquardt optimizer.
409: The user can also provide their own regularization function ("user"); a basic usage sketch is given below.
411: Options Database Keys:
412: + -tao_brgn_regularization_type - regularization type ("user", "l2prox", "l2pure", "l1dict", "lm") (default "l2prox")
413: . -tao_brgn_regularizer_weight - regularizer weight (default 1e-4)
414: - -tao_brgn_l1_smooth_epsilon - L1-norm smooth approximation parameter: ||x||_1 = sum(sqrt(x.^2+epsilon^2)-epsilon) (default 1e-6)
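Example Usage:
A minimal sketch of solving a regularized least-squares problem with `TAOBRGN`. The residual and Jacobian
callbacks `EvaluateResidual()` and `EvaluateJacobian()`, the vectors `x` and `res`, the matrix `J`, and the
application context `user` are placeholder names assumed to be provided by the application.
.vb
  Tao tao;

  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetType(tao, TAOBRGN));
  PetscCall(TaoSetSolution(tao, x));
  PetscCall(TaoSetResidualRoutine(tao, res, EvaluateResidual, &user));
  PetscCall(TaoSetJacobianResidualRoutine(tao, J, J, EvaluateJacobian, &user));
  PetscCall(TaoBRGNSetRegularizerWeight(tao, 1e-4));
  PetscCall(TaoSetFromOptions(tao)); /* e.g. -tao_brgn_regularization_type l1dict */
  PetscCall(TaoSolve(tao));
  PetscCall(TaoDestroy(&tao));
.ve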
416: Level: beginner
418: .seealso: `Tao`, `TaoBRGNGetSubsolver()`, `TaoBRGNSetRegularizerWeight()`, `TaoBRGNSetL1SmoothEpsilon()`, `TaoBRGNSetDictionaryMatrix()`,
419: `TaoBRGNSetRegularizerObjectiveAndGradientRoutine()`, `TaoBRGNSetRegularizerHessianRoutine()`
420: M*/
421: PETSC_EXTERN PetscErrorCode TaoCreate_BRGN(Tao tao)
422: {
423: TAO_BRGN *gn;
425: PetscFunctionBegin;
426: PetscCall(PetscNew(&gn));
428: tao->ops->destroy = TaoDestroy_BRGN;
429: tao->ops->setup = TaoSetUp_BRGN;
430: tao->ops->setfromoptions = TaoSetFromOptions_BRGN;
431: tao->ops->view = TaoView_BRGN;
432: tao->ops->solve = TaoSolve_BRGN;
434: PetscCall(TaoParametersInitialize(tao));
436: tao->data = gn;
437: gn->reg_type = BRGN_REGULARIZATION_L2PROX;
438: gn->lambda = 1e-4;
439: gn->epsilon = 1e-6;
440: gn->downhill_lambda_change = 1. / 5.;
441: gn->uphill_lambda_change = 1.5;
442: gn->parent = tao;
444: PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &gn->subsolver));
445: PetscCall(TaoSetType(gn->subsolver, TAOBNLS));
446: PetscCall(TaoSetOptionsPrefix(gn->subsolver, "tao_brgn_subsolver_"));
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@
451: TaoBRGNGetSubsolver - Get the pointer to the subsolver inside a `TAOBRGN`
453: Collective
455: Input Parameter:
456: . tao - the `Tao` solver context

457: Output Parameter:
. subsolver - the `Tao` sub-solver context
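Example Usage:
A sketch of tuning the linear solver used inside the subsolver; the same can also be done from the
command line through the "tao_brgn_subsolver_" options prefix.
.vb
  Tao subsolver;
  KSP ksp;

  PetscCall(TaoBRGNGetSubsolver(tao, &subsolver));
  PetscCall(TaoGetKSP(subsolver, &ksp));
  PetscCall(KSPSetType(ksp, KSPCG));
.ve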
459: Level: advanced
461: .seealso: `Tao`, `Mat`, `TAOBRGN`
462: @*/
463: PetscErrorCode TaoBRGNGetSubsolver(Tao tao, Tao *subsolver)
464: {
465: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
467: PetscFunctionBegin;
468: *subsolver = gn->subsolver;
469: PetscFunctionReturn(PETSC_SUCCESS);
470: }
472: /*@
473: TaoBRGNSetRegularizerWeight - Set the regularizer weight for the Gauss-Newton least-squares algorithm
475: Collective
477: Input Parameters:
478: + tao - the `Tao` solver context
479: - lambda - the regularizer weight
481: Level: beginner
483: .seealso: `Tao`, `Mat`, `TAOBRGN`
484: @*/
485: PetscErrorCode TaoBRGNSetRegularizerWeight(Tao tao, PetscReal lambda)
486: {
487: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
489: /* Initialize lambda here */
491: PetscFunctionBegin;
492: gn->lambda = lambda;
493: PetscFunctionReturn(PETSC_SUCCESS);
494: }
496: /*@
497: TaoBRGNSetL1SmoothEpsilon - Set the L1-norm smooth approximation parameter for L1-regularized least-squares algorithm
499: Collective
501: Input Parameters:
502: + tao - the `Tao` solver context
503: - epsilon - L1-norm smooth approximation parameter
505: Level: advanced
507: .seealso: `Tao`, `Mat`, `TAOBRGN`
508: @*/
509: PetscErrorCode TaoBRGNSetL1SmoothEpsilon(Tao tao, PetscReal epsilon)
510: {
511: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
513: /* Initialize epsilon here */
515: PetscFunctionBegin;
516: gn->epsilon = epsilon;
517: PetscFunctionReturn(PETSC_SUCCESS);
518: }
520: /*@
521: TaoBRGNSetDictionaryMatrix - Set the dictionary matrix D used by the L1 dictionary regularizer ("l1dict"), e.g. for compressed sensing least-squares problems

Collective
523: Input Parameters:
524: + tao - the `Tao` context
525: - dict - the user-specified dictionary matrix; a `NULL` dictionary is allowed and is treated as the identity matrix
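Example Usage:
A sketch of attaching an application-built dictionary (here a hypothetical K-by-N sparse matrix `D`, e.g. a
finite-difference gradient operator) and selecting the dictionary regularizer:
.vb
  PetscCall(TaoSetType(tao, TAOBRGN));
  PetscCall(TaoBRGNSetDictionaryMatrix(tao, D));
  /* select the dictionary regularizer, e.g. with -tao_brgn_regularization_type l1dict */
.ve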
527: Level: advanced
529: .seealso: `Tao`, `Mat`, `TAOBRGN`
530: @*/
531: PetscErrorCode TaoBRGNSetDictionaryMatrix(Tao tao, Mat dict)
532: {
533: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
535: PetscFunctionBegin;
537: if (dict) {
539: PetscCheckSameComm(tao, 1, dict, 2);
540: PetscCall(PetscObjectReference((PetscObject)dict));
541: }
542: PetscCall(MatDestroy(&gn->D));
543: gn->D = dict;
544: PetscFunctionReturn(PETSC_SUCCESS);
545: }
547: /*@C
548: TaoBRGNSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined callback that evaluates the
549: regularizer objective value and gradient, used with the "user" regularization type.
551: Input Parameters:
552: + tao - the Tao context
553: . func - function pointer for the regularizer value and gradient evaluation
554: - ctx - user context for the regularizer
556: Calling sequence:
557: + tao - the `Tao` context
558: . u - the location at which to compute the objective and gradient
559: . val - location to store objective function value
560: . g - location to store gradient
561: - ctx - user context for the regularizer
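Example Usage:
A sketch of a user-supplied quadratic regularizer g(x) = 0.5*||x||^2; the callback name `MyRegObjGrad` and
the context `user` are illustrative names, not part of this API.
.vb
  static PetscErrorCode MyRegObjGrad(Tao tao, Vec u, PetscReal *val, Vec g, void *ctx)
  {
    PetscScalar dot;

    PetscFunctionBeginUser;
    PetscCall(VecDot(u, u, &dot)); /* g(u) = 0.5*u'*u */
    *val = 0.5 * PetscRealPart(dot);
    PetscCall(VecCopy(u, g));      /* gradient of g is u */
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TaoBRGNSetRegularizerObjectiveAndGradientRoutine(tao, MyRegObjGrad, &user));
  /* activate it with -tao_brgn_regularization_type user */
.ve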
563: Level: advanced
565: .seealso: `Tao`, `Mat`, `TAOBRGN`
566: @*/
567: PetscErrorCode TaoBRGNSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao tao, Vec u, PetscReal *val, Vec g, void *ctx), void *ctx)
568: {
569: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
571: PetscFunctionBegin;
573: if (ctx) gn->reg_obj_ctx = ctx;
574: if (func) gn->regularizerobjandgrad = func;
575: PetscFunctionReturn(PETSC_SUCCESS);
576: }
578: /*@C
579: TaoBRGNSetRegularizerHessianRoutine - Sets the user-defined callback that evaluates the Hessian of the
580: regularizer, used with the "user" regularization type.
582: Input Parameters:
583: + tao - the `Tao` context
584: . Hreg - user-created matrix for the Hessian of the regularization term
585: . func - function pointer for the regularizer Hessian evaluation
586: - ctx - user context for the regularizer Hessian
588: Calling sequence:
589: + tao - the `Tao` context
590: . u - the location at which to compute the Hessian
591: . Hreg - user-created matrix for the Hessian of the regularization term
592: - ctx - user context for the regularizer Hessian
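Example Usage:
A sketch of a constant-identity Hessian, matching a quadratic regularizer g(x) = 0.5*||x||^2; the matrix
`Hreg` is assumed to be created and preallocated by the application, and `MyRegHessian` and `user` are
illustrative names.
.vb
  static PetscErrorCode MyRegHessian(Tao tao, Vec u, Mat Hreg, void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCall(MatZeroEntries(Hreg));
    PetscCall(MatShift(Hreg, 1.0)); /* Hessian of 0.5*||u||^2 is the identity */
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(TaoBRGNSetRegularizerHessianRoutine(tao, Hreg, MyRegHessian, &user));
.ve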
594: Level: advanced
596: .seealso: `Tao`, `Mat`, `TAOBRGN`
597: @*/
598: PetscErrorCode TaoBRGNSetRegularizerHessianRoutine(Tao tao, Mat Hreg, PetscErrorCode (*func)(Tao tao, Vec u, Mat Hreg, void *ctx), void *ctx)
599: {
600: TAO_BRGN *gn = (TAO_BRGN *)tao->data;
602: PetscFunctionBegin;
604: if (Hreg) {
606: PetscCheckSameComm(tao, 1, Hreg, 2);
607: } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONG, "NULL Hessian detected! User must provide valid Hessian for the regularizer.");
608: if (ctx) gn->reg_hess_ctx = ctx;
609: if (func) gn->regularizerhessian = func;
610: if (Hreg) {
611: PetscCall(PetscObjectReference((PetscObject)Hreg));
612: PetscCall(MatDestroy(&gn->Hreg));
613: gn->Hreg = Hreg;
614: }
615: PetscFunctionReturn(PETSC_SUCCESS);
616: }