Actual source code: brgn.h

/*
Context for the Bounded Regularized Gauss-Newton (BRGN) algorithm.
Extended with an L1 regularizer with a linear transformation matrix D:
0.5*||Ax-b||^2 + lambda*||D*x||_1
When D is the identity matrix, this is the classic lasso, also known as basis pursuit denoising in compressive sensing.
*/

#pragma once

#include <../src/tao/bound/impls/bnk/bnk.h>
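/*
  Illustrative sketch, not part of this header: it evaluates the objective from the
  comment above, 0.5*||Ax-b||^2 + lambda*||D*x||_1, for a small dense A (M x N) and
  D (K x N) stored row-major in plain C arrays. The function name and its parameters
  are hypothetical and only meant to make the formulation concrete.
*/
#include <math.h> /* fabs() */

static double brgn_demo_objective(int M, int N, int K, const double *A, const double *b, const double *D, const double *x, double lambda)
{
  double f = 0.0;

  for (int i = 0; i < M; i++) { /* data-misfit term 0.5*||Ax - b||^2 */
    double r = -b[i];
    for (int j = 0; j < N; j++) r += A[i * N + j] * x[j];
    f += 0.5 * r * r;
  }
  for (int i = 0; i < K; i++) { /* regularizer lambda*||D*x||_1 */
    double y = 0.0;
    for (int j = 0; j < N; j++) y += D[i * N + j] * x[j];
    f += lambda * fabs(y);
  }
  return f;
}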

typedef struct {
  PetscErrorCode (*regularizerobjandgrad)(Tao, Vec, PetscReal *, Vec, void *);
  PetscErrorCode (*regularizerhessian)(Tao, Vec, Mat, void *);
  void     *reg_obj_ctx;
  void     *reg_hess_ctx;
  Mat       H, Hreg, D;                             /* Hessian, Hessian of the regularization part, and dictionary matrix, of sizes N*N, N*N, and K*N respectively. (The M*N Jacobian is not used here.) */
  Vec       x_old, x_work, r_work, diag, y, y_work; /* x, r=J*x, and y=D*x have sizes N, M, and K respectively. */
  Vec       damping;                                /* Optional diagonal damping term, stored as a vector. */
  Tao       subsolver, parent;
  PetscReal lambda, epsilon, fc_old;                      /* lambda is the regularizer weight for both the L2-norm Gauss-Newton and L1-norm terms; ||x||_1 is approximated with sum(sqrt(x.^2+epsilon^2)-epsilon), as in the callback sketch after this struct. */
  PetscReal downhill_lambda_change, uphill_lambda_change; /* With the Levenberg-Marquardt (lm) regularizer lambda*diag(J^T J),
                                                               lambda = downhill_lambda_change * lambda on steps that decrease the objective,
                                                               lambda = uphill_lambda_change * lambda on steps that increase the objective. */
  PetscInt  reg_type;
  PetscBool mat_explicit;
} TAO_BRGN;
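/*
  Hypothetical user callback, sketched here to show how the regularizerobjandgrad
  slot above can be filled: it evaluates the smoothed L1 term
  sum(sqrt(x_i^2 + epsilon^2) - epsilon) and its gradient g_i = x_i / sqrt(x_i^2 + epsilon^2),
  assuming D = I, a real-scalar PETSc build, a recent PETSc (PetscCall/PETSC_SUCCESS),
  and a work vector with the same layout as x supplied through the user context.
  SmoothL1Ctx and SmoothL1ObjAndGrad are illustrative names, not part of TAO_BRGN.
*/
typedef struct {
  Vec       work;    /* scratch vector, same layout as x */
  PetscReal epsilon; /* smoothing parameter */
} SmoothL1Ctx;

static PetscErrorCode SmoothL1ObjAndGrad(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx)
{
  SmoothL1Ctx *user = (SmoothL1Ctx *)ctx;
  PetscScalar  sum;
  PetscInt     n;

  PetscFunctionBegin;
  PetscCall(VecGetSize(x, &n));
  PetscCall(VecPointwiseMult(user->work, x, x));                  /* work_i = x_i^2                 */
  PetscCall(VecShift(user->work, user->epsilon * user->epsilon)); /* work_i = x_i^2 + eps^2         */
  PetscCall(VecSqrtAbs(user->work));                              /* work_i = sqrt(x_i^2 + eps^2)   */
  PetscCall(VecSum(user->work, &sum));
  *f = PetscRealPart(sum) - (PetscReal)n * user->epsilon;         /* sum(sqrt(x_i^2+eps^2) - eps)   */
  PetscCall(VecPointwiseDivide(g, x, user->work));                /* g_i = x_i / sqrt(x_i^2+eps^2)  */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* In application code such a callback would typically be registered with
   TaoBRGNSetRegularizerObjectiveAndGradientRoutine(), passing a SmoothL1Ctx as the user context. */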