Actual source code: taosolver.c

  1: #include <petsc/private/taoimpl.h>
  2: #include <petsc/private/snesimpl.h>

  4: PetscBool         TaoRegisterAllCalled = PETSC_FALSE;
  5: PetscFunctionList TaoList              = NULL;

  7: PetscClassId TAO_CLASSID;

  9: PetscLogEvent TAO_Solve;
 10: PetscLogEvent TAO_ObjectiveEval;
 11: PetscLogEvent TAO_GradientEval;
 12: PetscLogEvent TAO_ObjGradEval;
 13: PetscLogEvent TAO_HessianEval;
 14: PetscLogEvent TAO_JacobianEval;
 15: PetscLogEvent TAO_ConstraintsEval;

 17: const char *TaoSubSetTypes[] = {"subvec", "mask", "matrixfree", "TaoSubSetType", "TAO_SUBSET_", NULL};

 19: struct _n_TaoMonitorDrawCtx {
 20:   PetscViewer viewer;
 21:   PetscInt    howoften; /* when > 0 uses iteration % howoften, when negative only final solution plotted */
 22: };

 24: static PetscErrorCode KSPPreSolve_TAOEW_Private(KSP ksp, Vec b, Vec x, Tao tao)
 25: {
 26:   SNES snes_ewdummy = tao->snes_ewdummy;

 28:   if (!snes_ewdummy) return 0;
 29:   /* populate snes_ewdummy struct values used in KSPPreSolve_SNESEW */
 30:   snes_ewdummy->vec_func = b;
 31:   snes_ewdummy->rtol     = tao->gttol;
 32:   snes_ewdummy->iter     = tao->niter;
 33:   VecNorm(b, NORM_2, &snes_ewdummy->norm);
 34:   KSPPreSolve_SNESEW(ksp, b, x, snes_ewdummy);
 35:   snes_ewdummy->vec_func = NULL;
 36:   return 0;
 37: }

 39: static PetscErrorCode KSPPostSolve_TAOEW_Private(KSP ksp, Vec b, Vec x, Tao tao)
 40: {
 41:   SNES snes_ewdummy = tao->snes_ewdummy;

 43:   if (!snes_ewdummy) return 0;
 44:   KSPPostSolve_SNESEW(ksp, b, x, snes_ewdummy);
 45:   return 0;
 46: }

 48: static PetscErrorCode TaoSetUpEW_Private(Tao tao)
 49: {
 50:   SNESKSPEW  *kctx;
 51:   const char *ewprefix;

 53:   if (!tao->ksp) return 0;
 54:   if (tao->ksp_ewconv) {
 55:     if (!tao->snes_ewdummy) SNESCreate(PetscObjectComm((PetscObject)tao), &tao->snes_ewdummy);
 56:     tao->snes_ewdummy->ksp_ewconv = PETSC_TRUE;
 57:     KSPSetPreSolve(tao->ksp, (PetscErrorCode(*)(KSP, Vec, Vec, void *))KSPPreSolve_TAOEW_Private, tao);
 58:     KSPSetPostSolve(tao->ksp, (PetscErrorCode(*)(KSP, Vec, Vec, void *))KSPPostSolve_TAOEW_Private, tao);

 60:     KSPGetOptionsPrefix(tao->ksp, &ewprefix);
 61:     kctx = (SNESKSPEW *)tao->snes_ewdummy->kspconvctx;
 62:     SNESEWSetFromOptions_Private(kctx, PetscObjectComm((PetscObject)tao), ewprefix);
 63:   } else SNESDestroy(&tao->snes_ewdummy);
 64:   return 0;
 65: }

 67: /*@
 68:   TaoCreate - Creates a Tao solver

 70:   Collective

 72:   Input Parameter:
 73: . comm - MPI communicator

 75:   Output Parameter:
 76: . newtao - the new Tao context

 78:   Available methods include:
 79: +    `TAONLS` - nls Newton's method with line search for unconstrained minimization
 80: .    `TAONTR` - ntr Newton's method with trust region for unconstrained minimization
 81: .    `TAONTL` - ntl Newton's method with trust region, line search for unconstrained minimization
 82: .    `TAOLMVM` - lmvm Limited memory variable metric method for unconstrained minimization
 83: .    `TAOCG` - cg Nonlinear conjugate gradient method for unconstrained minimization
 84: .    `TAONM` - nm Nelder-Mead algorithm for derivative-free unconstrained minimization
 85: .    `TAOTRON` - tron Newton Trust Region method for bound constrained minimization
 86: .    `TAOGPCG` - gpcg Newton Trust Region method for quadratic bound constrained minimization
 87: .    `TAOBLMVM` - blmvm Limited memory variable metric method for bound constrained minimization
 88: .    `TAOLCL` - lcl Linearly constrained Lagrangian method for pde-constrained minimization
 89: -    `TAOPOUNDERS` - pounders Model-based algorithm for nonlinear least squares

 91:    Options Database Keys:
 92: .   -tao_type - select which method Tao should use

 94:    Level: beginner

 96: .seealso: `Tao`, `TaoSolve()`, `TaoDestroy()`, `TaoSetFromOptions()`, `TaoSetType()`
 97: @*/
 98: PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao)
 99: {
100:   Tao tao;

103:   TaoInitializePackage();
104:   TaoLineSearchInitializePackage();
105:   PetscHeaderCreate(tao, TAO_CLASSID, "Tao", "Optimization solver", "Tao", comm, TaoDestroy, TaoView);

107:   /* Set non-NULL defaults */
108:   tao->ops->convergencetest = TaoDefaultConvergenceTest;

110:   tao->max_it    = 10000;
111:   tao->max_funcs = -1;
112: #if defined(PETSC_USE_REAL_SINGLE)
113:   tao->gatol = 1e-5;
114:   tao->grtol = 1e-5;
115:   tao->crtol = 1e-5;
116:   tao->catol = 1e-5;
117: #else
118:   tao->gatol = 1e-8;
119:   tao->grtol = 1e-8;
120:   tao->crtol = 1e-8;
121:   tao->catol = 1e-8;
122: #endif
123:   tao->gttol   = 0.0;
124:   tao->steptol = 0.0;
125:   tao->trust0  = PETSC_INFINITY;
126:   tao->fmin    = PETSC_NINFINITY;

128:   tao->hist_reset = PETSC_TRUE;

130:   TaoResetStatistics(tao);
131:   *newtao = tao;
132:   return 0;
133: }
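/*
   A minimal usage sketch (illustrative, not part of this source file). It assumes user-supplied
   FormObjective() and FormGradient() callbacks, a user context `user`, and a solution vector `x`
   created elsewhere:

     Tao tao;
     PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
     PetscCall(TaoSetType(tao, TAOLMVM));
     PetscCall(TaoSetSolution(tao, x));
     PetscCall(TaoSetObjective(tao, FormObjective, &user));
     PetscCall(TaoSetGradient(tao, NULL, FormGradient, &user));
     PetscCall(TaoSetFromOptions(tao));
     PetscCall(TaoSolve(tao));
     PetscCall(TaoDestroy(&tao));
*/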

135: /*@
136:   TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u

138:   Collective

140:   Input Parameter:
141: . tao - the Tao context

143:   Notes:
144:   The user must set up the Tao with calls to `TaoSetSolution()`, `TaoSetObjective()`, `TaoSetGradient()`, and (if using 2nd order method) `TaoSetHessian()`.

146:   You should call `TaoGetConvergedReason()` or run with -tao_converged_reason to determine if the optimization algorithm actually succeeded or
147:   why it failed.

149:   Level: beginner

151: .seealso: `Tao`, `TaoCreate()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoGetConvergedReason()`, `TaoSetUp()`
152:  @*/
153: PetscErrorCode TaoSolve(Tao tao)
154: {
155:   static PetscBool set = PETSC_FALSE;

158:   PetscCall(PetscCitationsRegister("@TechReport{tao-user-ref,\n"
159:                                    "title   = {Toolkit for Advanced Optimization (TAO) Users Manual},\n"
160:                                    "author  = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n"
161:                                    "Institution = {Argonne National Laboratory},\n"
162:                                    "Year   = 2014,\n"
163:                                    "Number = {ANL/MCS-TM-322 - Revision 3.5},\n"
164:                                    "url    = {https://www.mcs.anl.gov/research/projects/tao/}\n}\n",
165:                                    &set));
166:   tao->header_printed = PETSC_FALSE;
167:   TaoSetUp(tao);
168:   TaoResetStatistics(tao);
169:   if (tao->linesearch) TaoLineSearchReset(tao->linesearch);

171:   PetscLogEventBegin(TAO_Solve, tao, 0, 0, 0);
172:   PetscTryTypeMethod(tao, solve);
173:   PetscLogEventEnd(TAO_Solve, tao, 0, 0, 0);

175:   VecViewFromOptions(tao->solution, (PetscObject)tao, "-tao_view_solution");

177:   tao->ntotalits += tao->niter;

179:   if (tao->printreason) {
180:     PetscViewer viewer = PETSC_VIEWER_STDOUT_(((PetscObject)tao)->comm);
181:     PetscViewerASCIIAddTab(viewer, ((PetscObject)tao)->tablevel);
182:     if (tao->reason > 0) {
183:       PetscViewerASCIIPrintf(viewer, "  TAO %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)tao)->prefix ? ((PetscObject)tao)->prefix : "", TaoConvergedReasons[tao->reason], tao->niter);
184:     } else {
185:       PetscViewerASCIIPrintf(viewer, "  TAO %s solve did not converge due to %s iteration %" PetscInt_FMT "\n", ((PetscObject)tao)->prefix ? ((PetscObject)tao)->prefix : "", TaoConvergedReasons[tao->reason], tao->niter);
186:     }
187:     PetscViewerASCIISubtractTab(viewer, ((PetscObject)tao)->tablevel);
188:   }
189:   TaoViewFromOptions(tao, NULL, "-tao_view");
190:   return 0;
191: }
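/*
   A sketch of checking the outcome after TaoSolve(), as recommended in the notes above
   (assumes `tao` has already been configured):

     TaoConvergedReason reason;
     PetscCall(TaoSolve(tao));
     PetscCall(TaoGetConvergedReason(tao, &reason));
     if (reason <= 0) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Tao did not converge (reason %d)\n", (int)reason));
*/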

193: /*@
194:   TaoSetUp - Sets up the internal data structures for the later use
195:   of a Tao solver

197:   Collective

199:   Input Parameter:
200: . tao - the Tao context

202:   Notes:
203:   The user will not need to explicitly call `TaoSetUp()`, as it will
204:   automatically be called in `TaoSolve()`.  However, if the user
205:   desires to call it explicitly, it should come after `TaoCreate()`
206:   and any TaoSetSomething() routines, but before `TaoSolve()`.

208:   Level: advanced

210: .seealso: `Tao`, `TaoCreate()`, `TaoSolve()`
211: @*/
212: PetscErrorCode TaoSetUp(Tao tao)
213: {
215:   if (tao->setupcalled) return 0;
216:   TaoSetUpEW_Private(tao);
218:   PetscTryTypeMethod(tao, setup);
219:   tao->setupcalled = PETSC_TRUE;
220:   return 0;
221: }

223: /*@C
224:   TaoDestroy - Destroys the Tao context that was created with `TaoCreate()`

226:   Collective

228:   Input Parameter:
229: . tao - the Tao context

231:   Level: beginner

233: .seealso: `Tao`, `TaoCreate()`, `TaoSolve()`
234: @*/
235: PetscErrorCode TaoDestroy(Tao *tao)
236: {
237:   if (!*tao) return 0;
239:   if (--((PetscObject)*tao)->refct > 0) {
240:     *tao = NULL;
241:     return 0;
242:   }

244:   if ((*tao)->ops->destroy) (*((*tao))->ops->destroy)(*tao);
245:   KSPDestroy(&(*tao)->ksp);
246:   SNESDestroy(&(*tao)->snes_ewdummy);
247:   TaoLineSearchDestroy(&(*tao)->linesearch);

249:   if ((*tao)->ops->convergencedestroy) {
250:     (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);
251:     if ((*tao)->jacobian_state_inv) MatDestroy(&(*tao)->jacobian_state_inv);
252:   }
253:   VecDestroy(&(*tao)->solution);
254:   VecDestroy(&(*tao)->gradient);
255:   VecDestroy(&(*tao)->ls_res);

257:   if ((*tao)->gradient_norm) {
258:     PetscObjectDereference((PetscObject)(*tao)->gradient_norm);
259:     VecDestroy(&(*tao)->gradient_norm_tmp);
260:   }

262:   VecDestroy(&(*tao)->XL);
263:   VecDestroy(&(*tao)->XU);
264:   VecDestroy(&(*tao)->IL);
265:   VecDestroy(&(*tao)->IU);
266:   VecDestroy(&(*tao)->DE);
267:   VecDestroy(&(*tao)->DI);
268:   VecDestroy(&(*tao)->constraints);
269:   VecDestroy(&(*tao)->constraints_equality);
270:   VecDestroy(&(*tao)->constraints_inequality);
271:   VecDestroy(&(*tao)->stepdirection);
272:   MatDestroy(&(*tao)->hessian_pre);
273:   MatDestroy(&(*tao)->hessian);
274:   MatDestroy(&(*tao)->ls_jac);
275:   MatDestroy(&(*tao)->ls_jac_pre);
276:   MatDestroy(&(*tao)->jacobian_pre);
277:   MatDestroy(&(*tao)->jacobian);
278:   MatDestroy(&(*tao)->jacobian_state_pre);
279:   MatDestroy(&(*tao)->jacobian_state);
280:   MatDestroy(&(*tao)->jacobian_state_inv);
281:   MatDestroy(&(*tao)->jacobian_design);
282:   MatDestroy(&(*tao)->jacobian_equality);
283:   MatDestroy(&(*tao)->jacobian_equality_pre);
284:   MatDestroy(&(*tao)->jacobian_inequality);
285:   MatDestroy(&(*tao)->jacobian_inequality_pre);
286:   ISDestroy(&(*tao)->state_is);
287:   ISDestroy(&(*tao)->design_is);
288:   VecDestroy(&(*tao)->res_weights_v);
289:   TaoCancelMonitors(*tao);
290:   if ((*tao)->hist_malloc) PetscFree4((*tao)->hist_obj, (*tao)->hist_resid, (*tao)->hist_cnorm, (*tao)->hist_lits);
291:   if ((*tao)->res_weights_n) {
292:     PetscFree((*tao)->res_weights_rows);
293:     PetscFree((*tao)->res_weights_cols);
294:     PetscFree((*tao)->res_weights_w);
295:   }
296:   PetscHeaderDestroy(tao);
297:   return 0;
298: }

300: /*@
301:    TaoKSPSetUseEW - Sets the solver to use the `SNES` Eisenstat-Walker method for
302:    computing the relative tolerance for the linear solvers.

304:    Logically Collective

306:    Input Parameters:
307: +  tao - Tao context
308: -  flag - `PETSC_TRUE` or `PETSC_FALSE`

310:    Notes:
311:    See `SNESKSPSetUseEW()` for customization details.

313:    Level: advanced

315:    Reference:
316:    S. C. Eisenstat and H. F. Walker, "Choosing the forcing terms in an
317:    inexact Newton method", SISC 17 (1), pp.16-32, 1996.

319: .seealso: `Tao`, `SNESKSPSetUseEW()`
320: @*/
321: PetscErrorCode TaoKSPSetUseEW(Tao tao, PetscBool flag)
322: {
325:   tao->ksp_ewconv = flag;
326:   return 0;
327: }
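/*
   A sketch of enabling the Eisenstat-Walker test for a Newton-type solver; the same effect is
   available at runtime with -tao_ksp_ew (the choice of TAONLS is illustrative):

     PetscCall(TaoSetType(tao, TAONLS));
     PetscCall(TaoKSPSetUseEW(tao, PETSC_TRUE));
*/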

329: /*@
330:   TaoSetFromOptions - Sets various Tao parameters from user
331:   options.

333:   Collective

335:   Input Parameter:
336: . tao - the Tao solver context

338:   Options Database Keys:
339: + -tao_type <type> - The algorithm that Tao uses (lmvm, nls, etc.)
340: . -tao_gatol <gatol> - absolute error tolerance for ||gradient||
341: . -tao_grtol <grtol> - relative error tolerance for ||gradient||
342: . -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient
343: . -tao_max_it <max> - sets maximum number of iterations
344: . -tao_max_funcs <max> - sets maximum number of function evaluations
345: . -tao_fmin <fmin> - stop if function value reaches fmin
346: . -tao_steptol <tol> - stop if trust region radius less than <tol>
347: . -tao_trust0 <t> - initial trust region radius
348: . -tao_monitor - prints function value and residual at each iteration
349: . -tao_smonitor - same as -tao_monitor, but prints fewer digits of the residual as it gets smaller
350: . -tao_cmonitor - prints function value, residual, and constraint norm at each iteration
351: . -tao_view_solution - prints solution vector at each iteration
352: . -tao_view_ls_residual - prints least-squares residual vector at each iteration
353: . -tao_view_stepdirection - prints step direction vector at each iteration
354: . -tao_view_gradient - prints gradient vector at each iteration
355: . -tao_draw_solution - graphically view solution vector at each iteration
356: . -tao_draw_step - graphically view step vector at each iteration
357: . -tao_draw_gradient - graphically view gradient at each iteration
358: . -tao_fd_gradient - use gradient computed with finite differences
359: . -tao_fd_hessian - use hessian computed with finite differences
360: . -tao_mf_hessian - use matrix-free hessian computed with finite differences
361: . -tao_cancelmonitors - cancels all monitors (except those set with command line)
362: . -tao_view - prints information about the Tao after solving
363: - -tao_converged_reason - prints the reason Tao stopped iterating

365:   Notes:
366:   To see all options, run your program with the -help option or consult the
367:   user's manual. This routine should be called after `TaoCreate()` but before `TaoSolve()`.

369:   Level: beginner

371: .seealso: `Tao`, `TaoCreate()`, `TaoSolve()`
372: @*/
373: PetscErrorCode TaoSetFromOptions(Tao tao)
374: {
375:   TaoType     default_type = TAOLMVM;
376:   char        type[256], monfilename[PETSC_MAX_PATH_LEN];
377:   PetscViewer monviewer;
378:   PetscBool   flg;
379:   MPI_Comm    comm;

382:   PetscObjectGetComm((PetscObject)tao, &comm);

384:   if (((PetscObject)tao)->type_name) default_type = ((PetscObject)tao)->type_name;

386:   PetscObjectOptionsBegin((PetscObject)tao);
387:   /* Check for type from options */
388:   PetscOptionsFList("-tao_type", "Tao Solver type", "TaoSetType", TaoList, default_type, type, 256, &flg);
389:   if (flg) {
390:     TaoSetType(tao, type);
391:   } else if (!((PetscObject)tao)->type_name) {
392:     TaoSetType(tao, default_type);
393:   }

395:   /* Tao solvers do not set the prefix, set it here if not yet done
396:      We do it after SetType since solver may have been changed */
397:   if (tao->linesearch) {
398:     const char *prefix;
399:     TaoLineSearchGetOptionsPrefix(tao->linesearch, &prefix);
400:     if (!prefix) TaoLineSearchSetOptionsPrefix(tao->linesearch, ((PetscObject)(tao))->prefix);
401:   }

403:   PetscOptionsReal("-tao_catol", "Stop if constraints violations within", "TaoSetConstraintTolerances", tao->catol, &tao->catol, &flg);
404:   if (flg) tao->catol_changed = PETSC_TRUE;
405:   PetscOptionsReal("-tao_crtol", "Stop if relative constraint violations within", "TaoSetConstraintTolerances", tao->crtol, &tao->crtol, &flg);
406:   if (flg) tao->crtol_changed = PETSC_TRUE;
407:   PetscOptionsReal("-tao_gatol", "Stop if norm of gradient less than", "TaoSetTolerances", tao->gatol, &tao->gatol, &flg);
408:   if (flg) tao->gatol_changed = PETSC_TRUE;
409:   PetscOptionsReal("-tao_grtol", "Stop if norm of gradient divided by the function value is less than", "TaoSetTolerances", tao->grtol, &tao->grtol, &flg);
410:   if (flg) tao->grtol_changed = PETSC_TRUE;
411:   PetscOptionsReal("-tao_gttol", "Stop if the norm of the gradient is less than the norm of the initial gradient times tol", "TaoSetTolerances", tao->gttol, &tao->gttol, &flg);
412:   if (flg) tao->gttol_changed = PETSC_TRUE;
413:   PetscOptionsInt("-tao_max_it", "Stop if iteration number exceeds", "TaoSetMaximumIterations", tao->max_it, &tao->max_it, &flg);
414:   if (flg) tao->max_it_changed = PETSC_TRUE;
415:   PetscOptionsInt("-tao_max_funcs", "Stop if number of function evaluations exceeds", "TaoSetMaximumFunctionEvaluations", tao->max_funcs, &tao->max_funcs, &flg);
416:   if (flg) tao->max_funcs_changed = PETSC_TRUE;
417:   PetscOptionsReal("-tao_fmin", "Stop if function less than", "TaoSetFunctionLowerBound", tao->fmin, &tao->fmin, &flg);
418:   if (flg) tao->fmin_changed = PETSC_TRUE;
419:   PetscOptionsReal("-tao_steptol", "Stop if step size or trust region radius less than", "", tao->steptol, &tao->steptol, &flg);
420:   if (flg) tao->steptol_changed = PETSC_TRUE;
421:   PetscOptionsReal("-tao_trust0", "Initial trust region radius", "TaoSetTrustRegionRadius", tao->trust0, &tao->trust0, &flg);
422:   if (flg) tao->trust0_changed = PETSC_TRUE;
423:   PetscOptionsString("-tao_view_solution", "view solution vector after each evaluation", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
424:   if (flg) {
425:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
426:     TaoSetMonitor(tao, TaoSolutionMonitor, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
427:   }

429:   PetscOptionsBool("-tao_converged_reason", "Print reason for Tao converged", "TaoSolve", tao->printreason, &tao->printreason, NULL);
430:   PetscOptionsString("-tao_view_gradient", "view gradient vector after each evaluation", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
431:   if (flg) {
432:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
433:     TaoSetMonitor(tao, TaoGradientMonitor, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
434:   }

436:   PetscOptionsString("-tao_view_stepdirection", "view step direction vector after each iteration", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
437:   if (flg) {
438:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
439:     TaoSetMonitor(tao, TaoStepDirectionMonitor, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
440:   }

442:   PetscOptionsString("-tao_view_residual", "view least-squares residual vector after each evaluation", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
443:   if (flg) {
444:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
445:     TaoSetMonitor(tao, TaoResidualMonitor, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
446:   }

448:   PetscOptionsString("-tao_monitor", "Use the default convergence monitor", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
449:   if (flg) {
450:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
451:     TaoSetMonitor(tao, TaoMonitorDefault, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
452:   }

454:   PetscOptionsString("-tao_gmonitor", "Use the convergence monitor with extra globalization info", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
455:   if (flg) {
456:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
457:     TaoSetMonitor(tao, TaoDefaultGMonitor, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
458:   }

460:   PetscOptionsString("-tao_smonitor", "Use the short convergence monitor", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
461:   if (flg) {
462:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
463:     TaoSetMonitor(tao, TaoDefaultSMonitor, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
464:   }

466:   PetscOptionsString("-tao_cmonitor", "Use the default convergence monitor with constraint norm", "TaoSetMonitor", "stdout", monfilename, sizeof(monfilename), &flg);
467:   if (flg) {
468:     PetscViewerASCIIOpen(comm, monfilename, &monviewer);
469:     TaoSetMonitor(tao, TaoDefaultCMonitor, monviewer, (PetscErrorCode(*)(void **))PetscViewerDestroy);
470:   }

472:   flg = PETSC_FALSE;
473:   PetscOptionsBool("-tao_cancelmonitors", "cancel all monitors and call any registered destroy routines", "TaoCancelMonitors", flg, &flg, NULL);
474:   if (flg) TaoCancelMonitors(tao);

476:   flg = PETSC_FALSE;
477:   PetscOptionsBool("-tao_draw_solution", "Plot solution vector at each iteration", "TaoSetMonitor", flg, &flg, NULL);
478:   if (flg) {
479:     TaoMonitorDrawCtx drawctx;
480:     PetscInt          howoften = 1;
481:     TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &drawctx);
482:     TaoSetMonitor(tao, TaoDrawSolutionMonitor, drawctx, (PetscErrorCode(*)(void **))TaoMonitorDrawCtxDestroy);
483:   }

485:   flg = PETSC_FALSE;
486:   PetscOptionsBool("-tao_draw_step", "plots step direction at each iteration", "TaoSetMonitor", flg, &flg, NULL);
487:   if (flg) TaoSetMonitor(tao, TaoDrawStepMonitor, NULL, NULL);

489:   flg = PETSC_FALSE;
490:   PetscOptionsBool("-tao_draw_gradient", "plots gradient at each iteration", "TaoSetMonitor", flg, &flg, NULL);
491:   if (flg) {
492:     TaoMonitorDrawCtx drawctx;
493:     PetscInt          howoften = 1;
494:     TaoMonitorDrawCtxCreate(PetscObjectComm((PetscObject)tao), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &drawctx);
495:     TaoSetMonitor(tao, TaoDrawGradientMonitor, drawctx, (PetscErrorCode(*)(void **))TaoMonitorDrawCtxDestroy);
496:   }
497:   flg = PETSC_FALSE;
498:   PetscOptionsBool("-tao_fd_gradient", "compute gradient using finite differences", "TaoDefaultComputeGradient", flg, &flg, NULL);
499:   if (flg) TaoSetGradient(tao, NULL, TaoDefaultComputeGradient, NULL);
500:   flg = PETSC_FALSE;
501:   PetscOptionsBool("-tao_fd_hessian", "compute hessian using finite differences", "TaoDefaultComputeHessian", flg, &flg, NULL);
502:   if (flg) {
503:     Mat H;

505:     MatCreate(PetscObjectComm((PetscObject)tao), &H);
506:     MatSetType(H, MATAIJ);
507:     TaoSetHessian(tao, H, H, TaoDefaultComputeHessian, NULL);
508:     MatDestroy(&H);
509:   }
510:   flg = PETSC_FALSE;
511:   PetscOptionsBool("-tao_mf_hessian", "compute matrix-free hessian using finite differences", "TaoDefaultComputeHessianMFFD", flg, &flg, NULL);
512:   if (flg) {
513:     Mat H;

515:     MatCreate(PetscObjectComm((PetscObject)tao), &H);
516:     TaoSetHessian(tao, H, H, TaoDefaultComputeHessianMFFD, NULL);
517:     MatDestroy(&H);
518:   }
519:   flg = PETSC_FALSE;
520:   PetscOptionsBool("-tao_recycle_history", "enable recycling/re-using information from the previous TaoSolve() call for some algorithms", "TaoSetRecycleHistory", flg, &flg, NULL);
521:   if (flg) TaoSetRecycleHistory(tao, PETSC_TRUE);
522:   PetscOptionsEnum("-tao_subset_type", "subset type", "", TaoSubSetTypes, (PetscEnum)tao->subset_type, (PetscEnum *)&tao->subset_type, NULL);

524:   if (tao->ksp) {
525:     PetscOptionsBool("-tao_ksp_ew", "Use Eisenstat-Walker linear system convergence test", "TaoKSPSetUseEW", tao->ksp_ewconv, &tao->ksp_ewconv, NULL);
526:     TaoKSPSetUseEW(tao, tao->ksp_ewconv);
527:   }

529:   PetscTryTypeMethod(tao, setfromoptions, PetscOptionsObject);

531:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
532:   PetscObjectProcessOptionsHandlers((PetscObject)tao, PetscOptionsObject);
533:   PetscOptionsEnd();

535:   if (tao->linesearch) TaoLineSearchSetFromOptions(tao->linesearch);
536:   return 0;
537: }
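/*
   With TaoSetFromOptions() in place, a solver is typically configured entirely from the command
   line, for example (the program name is illustrative):

     ./myprog -tao_type lmvm -tao_gatol 1e-6 -tao_max_it 200 -tao_monitor -tao_view
*/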

539: /*@C
540:    TaoViewFromOptions - View a Tao object based on values in the options database

542:    Collective

544:    Input Parameters:
545: +  A - the Tao context
546: .  obj - Optional object that provides the prefix for the option names
547: -  name - command line option

549:    Level: intermediate
550: .seealso: `Tao`, `TaoView`, `PetscObjectViewFromOptions()`, `TaoCreate()`
551: @*/
552: PetscErrorCode TaoViewFromOptions(Tao A, PetscObject obj, const char name[])
553: {
555:   PetscObjectViewFromOptions((PetscObject)A, obj, name);
556:   return 0;
557: }

559: /*@C
560:   TaoView - Prints information about the Tao object

562:   Collective

564:   Input Parameters:
565: + tao - the Tao context
566: - viewer - visualization context

568:   Options Database Key:
569: . -tao_view - Calls `TaoView()` at the end of `TaoSolve()`

571:   Notes:
572:   The available visualization contexts include
573: +     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
574: -     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
575:          output where only the first processor opens
576:          the file.  All other processors send their
577:          data to the first processor to print.

579:   Level: beginner

581: .seealso: `PetscViewerASCIIOpen()`
582: @*/
583: PetscErrorCode TaoView(Tao tao, PetscViewer viewer)
584: {
585:   PetscBool isascii, isstring;
586:   TaoType   type;

589:   if (!viewer) PetscViewerASCIIGetStdout(((PetscObject)tao)->comm, &viewer);

593:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);
594:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring);
595:   if (isascii) {
596:     PetscObjectPrintClassNamePrefixType((PetscObject)tao, viewer);

598:     if (tao->ops->view) {
599:       PetscViewerASCIIPushTab(viewer);
600:       PetscUseTypeMethod(tao, view, viewer);
601:       PetscViewerASCIIPopTab(viewer);
602:     }
603:     if (tao->linesearch) {
604:       PetscViewerASCIIPushTab(viewer);
605:       TaoLineSearchView(tao->linesearch, viewer);
606:       PetscViewerASCIIPopTab(viewer);
607:     }
608:     if (tao->ksp) {
609:       PetscViewerASCIIPushTab(viewer);
610:       KSPView(tao->ksp, viewer);
611:       PetscViewerASCIIPrintf(viewer, "total KSP iterations: %" PetscInt_FMT "\n", tao->ksp_tot_its);
612:       PetscViewerASCIIPopTab(viewer);
613:     }

615:     PetscViewerASCIIPushTab(viewer);

617:     if (tao->XL || tao->XU) PetscViewerASCIIPrintf(viewer, "Active Set subset type: %s\n", TaoSubSetTypes[tao->subset_type]);

619:     PetscViewerASCIIPrintf(viewer, "convergence tolerances: gatol=%g,", (double)tao->gatol);
620:     PetscViewerASCIIPrintf(viewer, " steptol=%g,", (double)tao->steptol);
621:     PetscViewerASCIIPrintf(viewer, " gttol=%g\n", (double)tao->gttol);
622:     PetscViewerASCIIPrintf(viewer, "Residual in Function/Gradient:=%g\n", (double)tao->residual);

624:     if (tao->constrained) {
625:       PetscViewerASCIIPrintf(viewer, "convergence tolerances:");
626:       PetscViewerASCIIPrintf(viewer, " catol=%g,", (double)tao->catol);
627:       PetscViewerASCIIPrintf(viewer, " crtol=%g\n", (double)tao->crtol);
628:       PetscViewerASCIIPrintf(viewer, "Residual in Constraints:=%g\n", (double)tao->cnorm);
629:     }

631:     if (tao->trust < tao->steptol) {
632:       PetscViewerASCIIPrintf(viewer, "convergence tolerances: steptol=%g\n", (double)tao->steptol);
633:       PetscViewerASCIIPrintf(viewer, "Final trust region radius:=%g\n", (double)tao->trust);
634:     }

636:     if (tao->fmin > -1.e25) PetscViewerASCIIPrintf(viewer, "convergence tolerances: function minimum=%g\n", (double)tao->fmin);
637:     PetscViewerASCIIPrintf(viewer, "Objective value=%g\n", (double)tao->fc);

639:     PetscViewerASCIIPrintf(viewer, "total number of iterations=%" PetscInt_FMT ",          ", tao->niter);
640:     PetscViewerASCIIPrintf(viewer, "              (max: %" PetscInt_FMT ")\n", tao->max_it);

642:     if (tao->nfuncs > 0) {
643:       PetscViewerASCIIPrintf(viewer, "total number of function evaluations=%" PetscInt_FMT ",", tao->nfuncs);
644:       PetscViewerASCIIPrintf(viewer, "                max: %" PetscInt_FMT "\n", tao->max_funcs);
645:     }
646:     if (tao->ngrads > 0) {
647:       PetscViewerASCIIPrintf(viewer, "total number of gradient evaluations=%" PetscInt_FMT ",", tao->ngrads);
648:       PetscViewerASCIIPrintf(viewer, "                max: %" PetscInt_FMT "\n", tao->max_funcs);
649:     }
650:     if (tao->nfuncgrads > 0) {
651:       PetscViewerASCIIPrintf(viewer, "total number of function/gradient evaluations=%" PetscInt_FMT ",", tao->nfuncgrads);
652:       PetscViewerASCIIPrintf(viewer, "    (max: %" PetscInt_FMT ")\n", tao->max_funcs);
653:     }
654:     if (tao->nhess > 0) PetscViewerASCIIPrintf(viewer, "total number of Hessian evaluations=%" PetscInt_FMT "\n", tao->nhess);
655:     if (tao->nconstraints > 0) PetscViewerASCIIPrintf(viewer, "total number of constraint function evaluations=%" PetscInt_FMT "\n", tao->nconstraints);
656:     if (tao->njac > 0) PetscViewerASCIIPrintf(viewer, "total number of Jacobian evaluations=%" PetscInt_FMT "\n", tao->njac);

658:     if (tao->reason > 0) {
659:       PetscViewerASCIIPrintf(viewer, "Solution converged: ");
660:       switch (tao->reason) {
661:       case TAO_CONVERGED_GATOL:
662:         PetscViewerASCIIPrintf(viewer, " ||g(X)|| <= gatol\n");
663:         break;
664:       case TAO_CONVERGED_GRTOL:
665:         PetscViewerASCIIPrintf(viewer, " ||g(X)||/|f(X)| <= grtol\n");
666:         break;
667:       case TAO_CONVERGED_GTTOL:
668:         PetscViewerASCIIPrintf(viewer, " ||g(X)||/||g(X0)|| <= gttol\n");
669:         break;
670:       case TAO_CONVERGED_STEPTOL:
671:         PetscViewerASCIIPrintf(viewer, " Steptol -- step size small\n");
672:         break;
673:       case TAO_CONVERGED_MINF:
674:         PetscViewerASCIIPrintf(viewer, " Minf --  f < fmin\n");
675:         break;
676:       case TAO_CONVERGED_USER:
677:         PetscViewerASCIIPrintf(viewer, " User Terminated\n");
678:         break;
679:       default:
680:         PetscViewerASCIIPrintf(viewer, "\n");
681:         break;
682:       }
683:     } else {
684:       PetscViewerASCIIPrintf(viewer, "Solver terminated: %d", tao->reason);
685:       switch (tao->reason) {
686:       case TAO_DIVERGED_MAXITS:
687:         PetscViewerASCIIPrintf(viewer, " Maximum Iterations\n");
688:         break;
689:       case TAO_DIVERGED_NAN:
690:         PetscViewerASCIIPrintf(viewer, " NAN or Inf encountered\n");
691:         break;
692:       case TAO_DIVERGED_MAXFCN:
693:         PetscViewerASCIIPrintf(viewer, " Maximum Function Evaluations\n");
694:         break;
695:       case TAO_DIVERGED_LS_FAILURE:
696:         PetscViewerASCIIPrintf(viewer, " Line Search Failure\n");
697:         break;
698:       case TAO_DIVERGED_TR_REDUCTION:
699:         PetscViewerASCIIPrintf(viewer, " Trust Region too small\n");
700:         break;
701:       case TAO_DIVERGED_USER:
702:         PetscViewerASCIIPrintf(viewer, " User Terminated\n");
703:         break;
704:       default:
705:         PetscViewerASCIIPrintf(viewer, "\n");
706:         break;
707:       }
708:     }
709:     PetscViewerASCIIPopTab(viewer);
710:   } else if (isstring) {
711:     TaoGetType(tao, &type);
712:     PetscViewerStringSPrintf(viewer, " %-3.3s", type);
713:   }
714:   return 0;
715: }

717: /*@
718:   TaoSetRecycleHistory - Sets the boolean flag to enable/disable re-using
719:   iterate information from the previous `TaoSolve()`. This feature is disabled by
720:   default.

722:   For conjugate gradient methods (`TAOBNCG`), this re-uses the latest search direction
723:   from the previous `TaoSolve()` call when computing the first search direction in a
724:   new solution. By default, CG methods set the first search direction to the
725:   negative gradient.

727:   For quasi-Newton family of methods (`TAOBQNLS`, `TAOBQNKLS`, `TAOBQNKTR`, `TAOBQNKTL`), this re-uses
728:   the accumulated quasi-Newton Hessian approximation from the previous `TaoSolve()`
729:   call. By default, QN family of methods reset the initial Hessian approximation to
730:   the identity matrix.

732:   For any other algorithm, this setting has no effect.

734:   Logically collective

736:   Input Parameters:
737: + tao - the Tao context
738: - recycle - boolean flag

740:   Options Database Keys:
741: . -tao_recycle_history <true,false> - reuse the history

743:   Level: intermediate

745: .seealso: `TaoGetRecycleHistory()`, `TAOBNCG`, `TAOBQNLS`, `TAOBQNKLS`, `TAOBQNKTR`, `TAOBQNKTL`

747: @*/
748: PetscErrorCode TaoSetRecycleHistory(Tao tao, PetscBool recycle)
749: {
752:   tao->recycle = recycle;
753:   return 0;
754: }
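/*
   A sketch of recycling information across successive solves (assumes a supporting type such as
   TAOBNCG has been selected and that the problem changes only mildly between solves):

     PetscCall(TaoSetRecycleHistory(tao, PETSC_TRUE));
     PetscCall(TaoSolve(tao));   // first solve accumulates the history
     // ... update the problem data ...
     PetscCall(TaoSolve(tao));   // second solve starts from the recycled history
*/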

756: /*@
757:   TaoGetRecycleHistory - Retrieve the boolean flag for re-using iterate information
758:   from the previous `TaoSolve()`. This feature is disabled by default.

760:   Logically collective

762:   Input Parameter:
763: . tao - the Tao context

765:   Output Parameter:
766: . recycle - boolean flag

768:   Level: intermediate

770: .seealso: `TaoSetRecycleHistory()`, `TAOBNCG`, `TAOBQNLS`, `TAOBQNKLS`, `TAOBQNKTR`, `TAOBQNKTL`

772: @*/
773: PetscErrorCode TaoGetRecycleHistory(Tao tao, PetscBool *recycle)
774: {
777:   *recycle = tao->recycle;
778:   return 0;
779: }

781: /*@
782:   TaoSetTolerances - Sets parameters used in Tao convergence tests

784:   Logically collective

786:   Input Parameters:
787: + tao - the Tao context
788: . gatol - stop if norm of gradient is less than this
789: . grtol - stop if relative norm of gradient is less than this
790: - gttol - stop if norm of gradient is reduced by this factor

792:   Options Database Keys:
793: + -tao_gatol <gatol> - Sets gatol
794: . -tao_grtol <grtol> - Sets grtol
795: - -tao_gttol <gttol> - Sets gttol

797:   Stopping Criteria:
798: $ ||g(X)||                            <= gatol
799: $ ||g(X)|| / |f(X)|                   <= grtol
800: $ ||g(X)|| / ||g(X0)||                <= gttol

802:   Notes:
803:   Use PETSC_DEFAULT to leave one or more tolerances unchanged.

805:   Level: beginner

807: .seealso: `TaoGetTolerances()`

809: @*/
810: PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol)
811: {

817:   if (gatol != PETSC_DEFAULT) {
818:     if (gatol < 0) {
819:       PetscInfo(tao, "Tried to set negative gatol -- ignored.\n");
820:     } else {
821:       tao->gatol         = PetscMax(0, gatol);
822:       tao->gatol_changed = PETSC_TRUE;
823:     }
824:   }

826:   if (grtol != PETSC_DEFAULT) {
827:     if (grtol < 0) {
828:       PetscInfo(tao, "Tried to set negative grtol -- ignored.\n");
829:     } else {
830:       tao->grtol         = PetscMax(0, grtol);
831:       tao->grtol_changed = PETSC_TRUE;
832:     }
833:   }

835:   if (gttol != PETSC_DEFAULT) {
836:     if (gttol < 0) {
837:       PetscInfo(tao, "Tried to set negative gttol -- ignored.\n");
838:     } else {
839:       tao->gttol         = PetscMax(0, gttol);
840:       tao->gttol_changed = PETSC_TRUE;
841:     }
842:   }
843:   return 0;
844: }
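/*
   A sketch of tightening only the absolute gradient tolerance; PETSC_DEFAULT leaves the other
   tolerances unchanged:

     PetscCall(TaoSetTolerances(tao, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT));
*/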

846: /*@
847:   TaoSetConstraintTolerances - Sets constraint tolerance parameters used in Tao convergence tests

849:   Logically collective

851:   Input Parameters:
852: + tao - the Tao context
853: . catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
854: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

856:   Options Database Keys:
857: + -tao_catol <catol> - Sets catol
858: - -tao_crtol <crtol> - Sets crtol

860:   Notes:
861:   Use PETSC_DEFAULT to leave any tolerance unchanged.

863:   Level: intermediate

865: .seealso: `TaoGetTolerances()`, `TaoGetConstraintTolerances()`, `TaoSetTolerances()`

867: @*/
868: PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
869: {

874:   if (catol != PETSC_DEFAULT) {
875:     if (catol < 0) {
876:       PetscInfo(tao, "Tried to set negative catol -- ignored.\n");
877:     } else {
878:       tao->catol         = PetscMax(0, catol);
879:       tao->catol_changed = PETSC_TRUE;
880:     }
881:   }

883:   if (crtol != PETSC_DEFAULT) {
884:     if (crtol < 0) {
885:       PetscInfo(tao, "Tried to set negative crtol -- ignored.\n");
886:     } else {
887:       tao->crtol         = PetscMax(0, crtol);
888:       tao->crtol_changed = PETSC_TRUE;
889:     }
890:   }
891:   return 0;
892: }

894: /*@
895:   TaoGetConstraintTolerances - Gets constraint tolerance parameters used in Tao convergence tests

897:   Not Collective

899:   Input Parameter:
900: . tao - the Tao context

902:   Output Parameters:
903: + catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to be used
904: - crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to be used

906:   Level: intermediate

908: .seealso: `TaoGetTolerances()`, `TaoSetTolerances()`, `TaoSetConstraintTolerances()`

910: @*/
911: PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
912: {
914:   if (catol) *catol = tao->catol;
915:   if (crtol) *crtol = tao->crtol;
916:   return 0;
917: }

919: /*@
920:    TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
921:    When an approximate solution with an objective value below this number
922:    has been found, the solver will terminate.

924:    Logically Collective

926:    Input Parameters:
927: +  tao - the Tao solver context
928: -  fmin - the objective value bound below which the solver terminates

930:    Options Database Keys:
931: .    -tao_fmin <fmin> - sets the minimum function value

933:    Level: intermediate

935: .seealso: `TaoSetTolerances()`
936: @*/
937: PetscErrorCode TaoSetFunctionLowerBound(Tao tao, PetscReal fmin)
938: {
941:   tao->fmin         = fmin;
942:   tao->fmin_changed = PETSC_TRUE;
943:   return 0;
944: }

946: /*@
947:    TaoGetFunctionLowerBound - Gets the bound on the solution objective value.
948:    When an approximate solution with an objective value below this number
949:    has been found, the solver will terminate.

951:    Not collective

953:    Input Parameter:
954: .  tao - the Tao solver context

956:    Output Parameter:
957: .  fmin - the minimum function value

959:    Level: intermediate

961: .seealso: `TaoSetFunctionLowerBound()`
962: @*/
963: PetscErrorCode TaoGetFunctionLowerBound(Tao tao, PetscReal *fmin)
964: {
967:   *fmin = tao->fmin;
968:   return 0;
969: }

971: /*@
972:    TaoSetMaximumFunctionEvaluations - Sets a maximum number of
973:    function evaluations.

975:    Logically Collective

977:    Input Parameters:
978: +  tao - the Tao solver context
979: -  nfcn - the maximum number of function evaluations (>=0)

981:    Options Database Keys:
982: .    -tao_max_funcs <nfcn> - sets the maximum number of function evaluations

984:    Level: intermediate

986: .seealso: `TaoSetTolerances()`, `TaoSetMaximumIterations()`
987: @*/

989: PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao, PetscInt nfcn)
990: {
993:   if (nfcn >= 0) {
994:     tao->max_funcs = PetscMax(0, nfcn);
995:   } else {
996:     tao->max_funcs = -1;
997:   }
998:   tao->max_funcs_changed = PETSC_TRUE;
999:   return 0;
1000: }

1002: /*@
1003:    TaoGetMaximumFunctionEvaluations - Gets a maximum number of
1004:    function evaluations.

1006:    Logically Collective

1008:    Input Parameter:
1009: .  tao - the Tao solver context

1011:    Output Parameter:
1012: .  nfcn - the maximum number of function evaluations

1014:    Level: intermediate

1016: .seealso: `TaoSetMaximumFunctionEvaluations()`, `TaoGetMaximumIterations()`
1017: @*/

1019: PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao, PetscInt *nfcn)
1020: {
1023:   *nfcn = tao->max_funcs;
1024:   return 0;
1025: }

1027: /*@
1028:    TaoGetCurrentFunctionEvaluations - Get current number of
1029:    function evaluations.

1031:    Not Collective

1033:    Input Parameter:
1034: .  tao - the Tao solver context

1036:    Output Parameter:
1037: .  nfuncs - the current number of function evaluations (maximum between gradient and function evaluations)

1039:    Level: intermediate

1041: .seealso: `TaoSetMaximumFunctionEvaluations()`, `TaoGetMaximumFunctionEvaluations()`, `TaoGetMaximumIterations()`
1042: @*/

1044: PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao, PetscInt *nfuncs)
1045: {
1048:   *nfuncs = PetscMax(tao->nfuncs, tao->nfuncgrads);
1049:   return 0;
1050: }

1052: /*@
1053:    TaoSetMaximumIterations - Sets a maximum number of iterates.

1055:    Logically Collective

1057:    Input Parameters:
1058: +  tao - the Tao solver context
1059: -  maxits - the maximum number of iterates (>=0)

1061:    Options Database Keys:
1062: .    -tao_max_it <its> - sets the maximum number of iterations

1064:    Level: intermediate

1066: .seealso: `TaoSetTolerances()`, `TaoSetMaximumFunctionEvaluations()`
1067: @*/
1068: PetscErrorCode TaoSetMaximumIterations(Tao tao, PetscInt maxits)
1069: {
1072:   tao->max_it         = PetscMax(0, maxits);
1073:   tao->max_it_changed = PETSC_TRUE;
1074:   return 0;
1075: }

1077: /*@
1078:    TaoGetMaximumIterations - Gets a maximum number of iterates that will be used

1080:    Not Collective

1082:    Input Parameter:
1083: .  tao - the Tao solver context

1085:    Output Parameter:
1086: .  maxits - the maximum number of iterates

1088:    Level: intermediate

1090: .seealso: `TaoSetMaximumIterations()`, `TaoGetMaximumFunctionEvaluations()`
1091: @*/
1092: PetscErrorCode TaoGetMaximumIterations(Tao tao, PetscInt *maxits)
1093: {
1096:   *maxits = tao->max_it;
1097:   return 0;
1098: }

1100: /*@
1101:    TaoSetInitialTrustRegionRadius - Sets the initial trust region radius.

1103:    Logically collective

1105:    Input Parameters:
1106: +  tao - a Tao optimization solver
1107: -  radius - the trust region radius

1109:    Level: intermediate

1111:    Options Database Key:
1112: .  -tao_trust0 <t0> - sets initial trust region radius

1114: .seealso: `TaoGetTrustRegionRadius()`, `TaoSetTrustRegionTolerance()`, `TAONTR`
1115: @*/
1116: PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius)
1117: {
1120:   tao->trust0         = PetscMax(0.0, radius);
1121:   tao->trust0_changed = PETSC_TRUE;
1122:   return 0;
1123: }

1125: /*@
1126:    TaoGetInitialTrustRegionRadius - Gets the initial trust region radius.

1128:    Not Collective

1130:    Input Parameter:
1131: .  tao - a Tao optimization solver

1133:    Output Parameter:
1134: .  radius - the trust region radius

1136:    Level: intermediate

1138: .seealso: `TaoSetInitialTrustRegionRadius()`, `TaoGetCurrentTrustRegionRadius()`, `TAONTR`
1139: @*/
1140: PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius)
1141: {
1144:   *radius = tao->trust0;
1145:   return 0;
1146: }

1148: /*@
1149:    TaoGetCurrentTrustRegionRadius - Gets the current trust region radius.

1151:    Not Collective

1153:    Input Parameter:
1154: .  tao - a Tao optimization solver

1156:    Output Parameter:
1157: .  radius - the trust region radius

1159:    Level: intermediate

1161: .seealso: `TaoSetInitialTrustRegionRadius()`, `TaoGetInitialTrustRegionRadius()`, `TAONTR`
1162: @*/
1163: PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius)
1164: {
1167:   *radius = tao->trust;
1168:   return 0;
1169: }

1171: /*@
1172:   TaoGetTolerances - gets the current values of tolerances

1174:   Not Collective

1176:   Input Parameter:
1177: . tao - the Tao context

1179:   Output Parameters:
1180: + gatol - stop if norm of gradient is less than this
1181: . grtol - stop if relative norm of gradient is less than this
1182: - gttol - stop if norm of gradient is reduced by this factor

1184:   Level: intermediate

1186:   Note:
1187:   NULL can be used as an argument if not all tolerances values are needed

1189: .seealso: `Tao`, `TaoSetTolerances()`
1190: @*/
1191: PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol)
1192: {
1194:   if (gatol) *gatol = tao->gatol;
1195:   if (grtol) *grtol = tao->grtol;
1196:   if (gttol) *gttol = tao->gttol;
1197:   return 0;
1198: }
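/*
   A sketch of querying a single tolerance; NULL is passed for the values that are not needed:

     PetscReal gatol;
     PetscCall(TaoGetTolerances(tao, &gatol, NULL, NULL));
*/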

1200: /*@
1201:   TaoGetKSP - Gets the linear solver used by the optimization solver.
1202:   Application writers should use `TaoGetKSP()` if they need direct access
1203:   to the PETSc `KSP` object.

1205:   Not Collective

1207:    Input Parameter:
1208: .  tao - the Tao solver

1210:    Output Parameter:
1211: .  ksp - the KSP linear solver used in the optimization solver

1213:    Level: intermediate

1215: .seealso: `Tao`, `KSP`
1216: @*/
1217: PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp)
1218: {
1221:   *ksp = tao->ksp;
1222:   return 0;
1223: }
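/*
   A sketch of customizing the inner linear solver (assumes the selected Tao type creates a KSP;
   otherwise the returned ksp is NULL and should not be used):

     KSP ksp;
     PetscCall(TaoGetKSP(tao, &ksp));
     if (ksp) {
       PetscCall(KSPSetType(ksp, KSPCG));
       PetscCall(KSPSetTolerances(ksp, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
     }
*/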

1225: /*@
1226:    TaoGetLinearSolveIterations - Gets the total number of linear iterations
1227:    used by the Tao solver

1229:    Not Collective

1231:    Input Parameter:
1232: .  tao - Tao context

1234:    Output Parameter:
1235: .  lits - number of linear iterations

1237:    Notes:
1238:    This counter is reset to zero for each successive call to TaoSolve()

1240:    Level: intermediate

1242: .seealso: `Tao`, `TaoGetKSP()`
1243: @*/
1244: PetscErrorCode TaoGetLinearSolveIterations(Tao tao, PetscInt *lits)
1245: {
1248:   *lits = tao->ksp_tot_its;
1249:   return 0;
1250: }

1252: /*@
1253:   TaoGetLineSearch - Gets the line search used by the optimization solver.
1254:   Application writers should use `TaoGetLineSearch()` if they need direct access
1255:   to the TaoLineSearch object.

1257:   Not Collective

1259:    Input Parameter:
1260: .  tao - the Tao solver

1262:    Output Parameter:
1263: .  ls - the line search used in the optimization solver

1265:    Level: intermediate

1267: @*/
1268: PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls)
1269: {
1272:   *ls = tao->linesearch;
1273:   return 0;
1274: }

1276: /*@
1277:   TaoAddLineSearchCounts - Adds the number of function evaluations spent
1278:   in the line search to the running total.

1280:    Input Parameter:
1281: .  tao - the Tao solver

1284:    Level: developer

1286: .seealso: `TaoGetLineSearch()`, `TaoLineSearchApply()`
1287: @*/
1288: PetscErrorCode TaoAddLineSearchCounts(Tao tao)
1289: {
1290:   PetscBool flg;
1291:   PetscInt  nfeval, ngeval, nfgeval;

1294:   if (tao->linesearch) {
1295:     TaoLineSearchIsUsingTaoRoutines(tao->linesearch, &flg);
1296:     if (!flg) {
1297:       TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch, &nfeval, &ngeval, &nfgeval);
1298:       tao->nfuncs += nfeval;
1299:       tao->ngrads += ngeval;
1300:       tao->nfuncgrads += nfgeval;
1301:     }
1302:   }
1303:   return 0;
1304: }

1306: /*@
1307:   TaoGetSolution - Returns the vector with the current Tao solution

1309:   Not Collective

1311:   Input Parameter:
1312: . tao - the Tao context

1314:   Output Parameter:
1315: . X - the current solution

1317:   Level: intermediate

1319:   Note:
1320:   The returned vector will be the same object that was passed into `TaoSetSolution()`

1322: .seealso: `Tao`, `TaoSetSolution()`, `TaoSolve()`
1323: @*/
1324: PetscErrorCode TaoGetSolution(Tao tao, Vec *X)
1325: {
1328:   *X = tao->solution;
1329:   return 0;
1330: }

1332: /*@
1333:    TaoResetStatistics - Initialize the statistics used by Tao for all of the solvers.
1334:    These statistics include the iteration number, residual norms, and convergence status.
1335:    This routine gets called before solving each optimization problem.

1337:    Collective

1339:    Input Parameter:
1340: .  tao - the Tao context

1342:    Level: developer

1344: .seealso: `Tao`, `TaoCreate()`, `TaoSolve()`
1345: @*/
1346: PetscErrorCode TaoResetStatistics(Tao tao)
1347: {
1349:   tao->niter        = 0;
1350:   tao->nfuncs       = 0;
1351:   tao->nfuncgrads   = 0;
1352:   tao->ngrads       = 0;
1353:   tao->nhess        = 0;
1354:   tao->njac         = 0;
1355:   tao->nconstraints = 0;
1356:   tao->ksp_its      = 0;
1357:   tao->ksp_tot_its  = 0;
1358:   tao->reason       = TAO_CONTINUE_ITERATING;
1359:   tao->residual     = 0.0;
1360:   tao->cnorm        = 0.0;
1361:   tao->step         = 0.0;
1362:   tao->lsflag       = PETSC_FALSE;
1363:   if (tao->hist_reset) tao->hist_len = 0;
1364:   return 0;
1365: }

1367: /*@C
1368:   TaoSetUpdate - Sets the general-purpose update function called
1369:   at the beginning of every iteration of the optimization algorithm. Specifically
1370:   it is called at the top of every iteration, after the new solution and the gradient
1371:   are determined, but before the Hessian is computed (if applicable).

1373:   Logically Collective

1375:   Input Parameters:
1376: + tao - The Tao solver context
1377: . func - The update function
1378: - ctx - The user-defined context for the update function (may be NULL)

1379:   Calling sequence of func:
1380: $ PetscErrorCode func(Tao tao, PetscInt step, void *ctx);

1382: . step - The current step of the iteration

1384:   Level: advanced

1386: .seealso: `Tao`, `TaoSolve()`
1387: @*/
1388: PetscErrorCode TaoSetUpdate(Tao tao, PetscErrorCode (*func)(Tao, PetscInt, void *), void *ctx)
1389: {
1391:   tao->ops->update = func;
1392:   tao->user_update = ctx;
1393:   return 0;
1394: }
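/*
   A sketch of an update callback matching the calling sequence above (the function name is
   illustrative):

     static PetscErrorCode MyUpdate(Tao tao, PetscInt step, void *ctx)
     {
       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "starting iteration %" PetscInt_FMT "\n", step));
       return 0;
     }

     PetscCall(TaoSetUpdate(tao, MyUpdate, NULL));
*/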

1396: /*@C
1397:   TaoSetConvergenceTest - Sets the function that is to be used to test
1398:   for convergence of the iterative minimization solution.  The new convergence
1399:   testing routine will replace Tao's default convergence test.

1401:   Logically Collective

1403:   Input Parameters:
1404: + tao - the Tao object
1405: . conv - the routine to test for convergence
1406: - ctx - [optional] context for private data for the convergence routine
1407:         (may be NULL)

1409:   Calling sequence of conv:
1410: $   PetscErrorCode conv(Tao tao, void *ctx)

1412: + tao - the Tao object
1413: - ctx - [optional] convergence context

1415:   Note:
1416:   The new convergence testing routine should call `TaoSetConvergedReason()`.

1418:   Level: advanced

1420: .seealso: `Tao`, `TaoSolve()`, `TaoSetConvergedReason()`, `TaoGetSolutionStatus()`, `TaoGetTolerances()`, `TaoSetMonitor()`

1422: @*/
1423: PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao, void *), void *ctx)
1424: {
1426:   tao->ops->convergencetest = conv;
1427:   tao->cnvP                 = ctx;
1428:   return 0;
1429: }
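/*
   A sketch of a custom convergence test; it must report its decision through
   TaoSetConvergedReason() (the 0.5 objective threshold is illustrative):

     static PetscErrorCode MyConvergenceTest(Tao tao, void *ctx)
     {
       PetscInt  its;
       PetscReal f;

       PetscCall(TaoGetSolutionStatus(tao, &its, &f, NULL, NULL, NULL, NULL));
       if (f < 0.5) PetscCall(TaoSetConvergedReason(tao, TAO_CONVERGED_USER));
       else if (its >= 100) PetscCall(TaoSetConvergedReason(tao, TAO_DIVERGED_MAXITS));
       return 0;
     }

     PetscCall(TaoSetConvergenceTest(tao, MyConvergenceTest, NULL));
*/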

1431: /*@C
1432:    TaoSetMonitor - Sets an additional function that is to be used at every
1433:    iteration of the solver to display the iteration's
1434:    progress.

1436:    Logically Collective

1438:    Input Parameters:
1439: +  tao - the Tao solver context
1440: .  mymonitor - monitoring routine
1441: -  mctx - [optional] user-defined context for private data for the
1442:           monitor routine (may be NULL)

1444:    Calling sequence of mymonitor:
1445: .vb
1446:      PetscErrorCode mymonitor(Tao tao,void *mctx)
1447: .ve

1449: +    tao - the Tao solver context
1450: -    mctx - [optional] monitoring context

1452:    Options Database Keys:
1453: +    -tao_monitor        - sets the default monitor `TaoMonitorDefault()`
1454: .    -tao_smonitor       - sets short monitor
1455: .    -tao_cmonitor       - same as smonitor plus constraint norm
1456: .    -tao_view_solution   - view solution at each iteration
1457: .    -tao_view_gradient   - view gradient at each iteration
1458: .    -tao_view_ls_residual - view least-squares residual vector at each iteration
1459: -    -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database.

1461:    Notes:
1462:    Several different monitoring routines may be set by calling
1463:    `TaoSetMonitor()` multiple times; all will be called in the
1464:    order in which they were set.

1466:    Fortran Note:
1467:     Only one monitor function may be set

1469:    Level: intermediate

1471: .seealso: `Tao`, `TaoSolve()`, `TaoMonitorDefault()`, `TaoCancelMonitors()`, `TaoSetDestroyRoutine()`, `TaoView()`
1472: @*/
1473: PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void *), void *ctx, PetscErrorCode (*dest)(void **))
1474: {
1475:   PetscInt  i;
1476:   PetscBool identical;


1481:   for (i = 0; i < tao->numbermonitors; i++) {
1482:     PetscMonitorCompare((PetscErrorCode(*)(void))func, ctx, dest, (PetscErrorCode(*)(void))tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i], &identical);
1483:     if (identical) return 0;
1484:   }
1485:   tao->monitor[tao->numbermonitors]        = func;
1486:   tao->monitorcontext[tao->numbermonitors] = (void *)ctx;
1487:   tao->monitordestroy[tao->numbermonitors] = dest;
1488:   ++tao->numbermonitors;
1489:   return 0;
1490: }
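/*
   A sketch of a minimal user monitor registered with TaoSetMonitor() (the names are illustrative;
   no destroy routine is needed here, so NULL is passed for it):

     static PetscErrorCode MyMonitor(Tao tao, void *mctx)
     {
       PetscInt  its;
       PetscReal f;

       PetscCall(TaoGetSolutionStatus(tao, &its, &f, NULL, NULL, NULL, NULL));
       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT ": f = %g\n", its, (double)f));
       return 0;
     }

     PetscCall(TaoSetMonitor(tao, MyMonitor, NULL, NULL));
*/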

1492: /*@
1493:    TaoCancelMonitors - Clears all the monitor functions for a Tao object.

1495:    Logically Collective

1497:    Input Parameter:
1498: .  tao - the Tao solver context

1500:    Options Database Key:
1501: .  -tao_cancelmonitors - cancels all monitors that have been hardwired
1502:     into a code by calls to `TaoSetMonitor()`, but does not cancel those
1503:     set via the options database

1505:    Notes:
1506:    There is no way to clear one specific monitor from a Tao object.

1508:    Level: advanced

1510: .seealso: `Tao`, `TaoMonitorDefault()`, `TaoSetMonitor()`
1511: @*/
1512: PetscErrorCode TaoCancelMonitors(Tao tao)
1513: {
1514:   PetscInt i;

1517:   for (i = 0; i < tao->numbermonitors; i++) {
1518:     if (tao->monitordestroy[i]) (*tao->monitordestroy[i])(&tao->monitorcontext[i]);
1519:   }
1520:   tao->numbermonitors = 0;
1521:   return 0;
1522: }

1524: /*@
1525:    TaoMonitorDefault - Default routine for monitoring progress of the
1526:    Tao solvers.  This monitor prints the function value and gradient
1527:    norm at each iteration.  It can be turned on from the command line using the
1528:    -tao_monitor option

1530:    Collective

1532:    Input Parameters:
1533: +  tao - the Tao context
1534: -  ctx - `PetscViewer` context or NULL

1536:    Options Database Keys:
1537: .  -tao_monitor - turn on default monitoring

1539:    Level: advanced

1541: .seealso: `TaoDefaultSMonitor()`, `TaoSetMonitor()`
1542: @*/
1543: PetscErrorCode TaoMonitorDefault(Tao tao, void *ctx)
1544: {
1545:   PetscInt    its, tabs;
1546:   PetscReal   fct, gnorm;
1547:   PetscViewer viewer = (PetscViewer)ctx;

1551:   its   = tao->niter;
1552:   fct   = tao->fc;
1553:   gnorm = tao->residual;
1554:   PetscViewerASCIIGetTab(viewer, &tabs);
1555:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1556:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1557:     PetscViewerASCIIPrintf(viewer, "  Iteration information for %s solve.\n", ((PetscObject)tao)->prefix);
1558:     tao->header_printed = PETSC_TRUE;
1559:   }
1560:   PetscViewerASCIIPrintf(viewer, "%3" PetscInt_FMT " TAO,", its);
1561:   PetscViewerASCIIPrintf(viewer, "  Function value: %g,", (double)fct);
1562:   if (gnorm >= PETSC_INFINITY) {
1563:     PetscViewerASCIIPrintf(viewer, "  Residual: Inf \n");
1564:   } else {
1565:     PetscViewerASCIIPrintf(viewer, "  Residual: %g \n", (double)gnorm);
1566:   }
1567:   PetscViewerASCIISetTab(viewer, tabs);
1568:   return 0;
1569: }

1571: /*@
1572:    TaoDefaultGMonitor - Default routine for monitoring progress of the
1573:    Tao solvers, with extra detail on the globalization method.
1574:    This monitor prints the function value and gradient norm at each
1575:    iteration, as well as the step size and trust radius. Note that the
1576:    step size and trust radius may be the same for some algorithms.
1577:    It can be turned on from the command line using the
1578:    -tao_gmonitor option

1580:    Collective

1582:    Input Parameters:
1583: +  tao - the Tao context
1584: -  ctx - `PetscViewer` context or NULL

1586:    Options Database Keys:
1587: .  -tao_gmonitor - turn on monitoring with globalization information

1589:    Level: advanced

1591: .seealso: `TaoDefaultSMonitor()`, `TaoSetMonitor()`
1592: @*/
1593: PetscErrorCode TaoDefaultGMonitor(Tao tao, void *ctx)
1594: {
1595:   PetscInt    its, tabs;
1596:   PetscReal   fct, gnorm, stp, tr;
1597:   PetscViewer viewer = (PetscViewer)ctx;

1601:   its   = tao->niter;
1602:   fct   = tao->fc;
1603:   gnorm = tao->residual;
1604:   stp   = tao->step;
1605:   tr    = tao->trust;
1606:   PetscViewerASCIIGetTab(viewer, &tabs);
1607:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1608:   if (its == 0 && ((PetscObject)tao)->prefix && !tao->header_printed) {
1609:     PetscViewerASCIIPrintf(viewer, "  Iteration information for %s solve.\n", ((PetscObject)tao)->prefix);
1610:     tao->header_printed = PETSC_TRUE;
1611:   }
1612:   PetscViewerASCIIPrintf(viewer, "%3" PetscInt_FMT " TAO,", its);
1613:   PetscViewerASCIIPrintf(viewer, "  Function value: %g,", (double)fct);
1614:   if (gnorm >= PETSC_INFINITY) {
1615:     PetscViewerASCIIPrintf(viewer, "  Residual: Inf,");
1616:   } else {
1617:     PetscViewerASCIIPrintf(viewer, "  Residual: %g,", (double)gnorm);
1618:   }
1619:   PetscViewerASCIIPrintf(viewer, "  Step: %g,  Trust: %g\n", (double)stp, (double)tr);
1620:   PetscViewerASCIISetTab(viewer, tabs);
1621:   return 0;
1622: }

1624: /*@
1625:    TaoDefaultSMonitor - Default routine for monitoring progress of the
1626:    solver. Same as `TaoMonitorDefault()` except
1627:    it prints fewer digits of the residual as the residual gets smaller.
1628:    This is because the later digits are meaningless and are often
1629:    different on different machines; by using this routine different
1630:    machines will usually generate the same output. It can be turned on
1631:    by using the -tao_smonitor option

1633:    Collective

1635:    Input Parameters:
1636: +  tao - the Tao context
1637: -  ctx - `PetscViewer` context of type ASCII

1639:    Options Database Keys:
1640: .  -tao_smonitor - turn on default short monitoring

1642:    Level: advanced

1644: .seealso: `TaoMonitorDefault()`, `TaoSetMonitor()`
1645: @*/
1646: PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx)
1647: {
1648:   PetscInt    its, tabs;
1649:   PetscReal   fct, gnorm;
1650:   PetscViewer viewer = (PetscViewer)ctx;

1654:   its   = tao->niter;
1655:   fct   = tao->fc;
1656:   gnorm = tao->residual;
1657:   PetscViewerASCIIGetTab(viewer, &tabs);
1658:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1659:   PetscViewerASCIIPrintf(viewer, "iter = %3" PetscInt_FMT ",", its);
1660:   PetscViewerASCIIPrintf(viewer, " Function value %g,", (double)fct);
1661:   if (gnorm >= PETSC_INFINITY) {
1662:     PetscViewerASCIIPrintf(viewer, " Residual: Inf \n");
1663:   } else if (gnorm > 1.e-6) {
1664:     PetscViewerASCIIPrintf(viewer, " Residual: %g \n", (double)gnorm);
1665:   } else if (gnorm > 1.e-11) {
1666:     PetscViewerASCIIPrintf(viewer, " Residual: < 1.0e-6 \n");
1667:   } else {
1668:     PetscViewerASCIIPrintf(viewer, " Residual: < 1.0e-11 \n");
1669:   }
1670:   PetscViewerASCIISetTab(viewer, tabs);
1671:   return 0;
1672: }

1674: /*@
1675:    TaoDefaultCMonitor - same as `TaoMonitorDefault()` except
1676:    it prints the norm of the constraints function. It can be turned on
1677:    from the command line using the -tao_cmonitor option

1679:    Collective

1681:    Input Parameters:
1682: +  tao - the Tao context
1683: -  ctx - `PetscViewer` context or NULL

1685:    Options Database Keys:
1686: .  -tao_cmonitor - monitor the constraints

1688:    Level: advanced

1690: .seealso: `TaoMonitorDefault()`, `TaoSetMonitor()`
1691: @*/
1692: PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx)
1693: {
1694:   PetscInt    its, tabs;
1695:   PetscReal   fct, gnorm;
1696:   PetscViewer viewer = (PetscViewer)ctx;

1700:   its   = tao->niter;
1701:   fct   = tao->fc;
1702:   gnorm = tao->residual;
1703:   PetscViewerASCIIGetTab(viewer, &tabs);
1704:   PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);
1705:   PetscViewerASCIIPrintf(viewer, "iter = %" PetscInt_FMT ",", its);
1706:   PetscViewerASCIIPrintf(viewer, " Function value: %g,", (double)fct);
1707:   PetscViewerASCIIPrintf(viewer, "  Residual: %g ", (double)gnorm);
1708:   PetscViewerASCIIPrintf(viewer, "  Constraint: %g \n", (double)tao->cnorm);
1709:   PetscViewerASCIISetTab(viewer, tabs);
1710:   return 0;
1711: }

1713: /*@C
1714:    TaoSolutionMonitor - Views the solution at each iteration.
1715:    It can be turned on from the command line using the
1716:    -tao_view_solution option

1718:    Collective

1720:    Input Parameters:
1721: +  tao - the Tao context
1722: -  ctx - `PetscViewer` context or NULL

1724:    Options Database Keys:
1725: .  -tao_view_solution - view the solution

1727:    Level: advanced

1729: .seealso: `TaoDefaultSMonitor()`, `TaoSetMonitor()`
1730: @*/
1731: PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx)
1732: {
1733:   PetscViewer viewer = (PetscViewer)ctx;

1737:   VecView(tao->solution, viewer);
1738:   return 0;
1739: }

1741: /*@C
1742:    TaoGradientMonitor - Views the gradient at each iteration.
1743:    It can be turned on from the command line using the
1744:    -tao_view_gradient option

1746:    Collective

1748:    Input Parameters:
1749: +  tao - the Tao context
1750: -  ctx - `PetscViewer` context or NULL

1752:    Options Database Keys:
1753: .  -tao_view_gradient - view the gradient at each iteration

1755:    Level: advanced

1757: .seealso: `TaoDefaultSMonitor()`, `TaoSetMonitor()`
1758: @*/
1759: PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx)
1760: {
1761:   PetscViewer viewer = (PetscViewer)ctx;

1765:   VecView(tao->gradient, viewer);
1766:   return 0;
1767: }

1769: /*@C
1770:    TaoStepDirectionMonitor - Views the step-direction at each iteration

1772:    Collective

1774:    Input Parameters:
1775: +  tao - the Tao context
1776: -  ctx - `PetscViewer` context or NULL

1778:    Options Database Keys:
1779: .  -tao_view_stepdirection - view the step direction at each iteration

1781:    Level: advanced

1783: .seealso: `TaoDefaultSMonitor()`, `TaoSetMonitor()`
1784: @*/
1785: PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx)
1786: {
1787:   PetscViewer viewer = (PetscViewer)ctx;

1791:   VecView(tao->stepdirection, viewer);
1792:   return 0;
1793: }

1795: /*@C
1796:    TaoDrawSolutionMonitor - Plots the solution at each iteration.
1797:    It can be turned on from the command line using the
1798:    -tao_draw_solution option

1800:    Collective

1802:    Input Parameters:
1803: +  tao - the Tao context
1804: -  ctx - `TaoMonitorDrawCtx` context

1806:    Options Database Keys:
1807: .  -tao_draw_solution - draw the solution at each iteration

1809:    Level: advanced

1811: .seealso: `TaoSolutionMonitor()`, `TaoSetMonitor()`, `TaoDrawGradientMonitor`, `TaoMonitorDraw`
1812: @*/
1813: PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx)
1814: {
1815:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1818:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return 0;
1819:   VecView(tao->solution, ictx->viewer);
1820:   return 0;
1821: }

1823: /*@C
1824:    TaoDrawGradientMonitor - Plots the gradient at each iteration.
1825:    It can be turned on from the command line using the
1826:    -tao_draw_gradient option

1828:    Collective

1830:    Input Parameters:
1831: +  tao - the Tao context
1832: -  ctx - `TaoMonitorDrawCtx` context

1834:    Options Database Keys:
1835: .  -tao_draw_gradient - draw the gradient at each iteration

1837:    Level: advanced

1839: .seealso: `TaoGradientMonitor()`, `TaoSetMonitor()`, `TaoDrawSolutionMonitor`
1840: @*/
1841: PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx)
1842: {
1843:   TaoMonitorDrawCtx ictx = (TaoMonitorDrawCtx)ctx;

1846:   if (!(((ictx->howoften > 0) && (!(tao->niter % ictx->howoften))) || ((ictx->howoften == -1) && tao->reason))) return 0;
1847:   VecView(tao->gradient, ictx->viewer);
1848:   return 0;
1849: }

1851: /*@C
1852:    TaoDrawStepMonitor - Plots the step direction at each iteration

1854:    Collective

1856:    Input Parameters:
1857: +  tao - the Tao context
1858: -  ctx - `PetscViewer` context

1860:    Options Database Keys:
1861: .  -tao_draw_step - draw the step direction at each iteration

1863:    Level: advanced

1865: .seealso: `TaoSetMonitor()`, `TaoDrawSolutionMonitor`
1866: @*/
1867: PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx)
1868: {
1869:   PetscViewer viewer = (PetscViewer)ctx;

1873:   VecView(tao->stepdirection, viewer);
1874:   return 0;
1875: }

1877: /*@C
1878:    TaoResidualMonitor - Views the least-squares residual at each iteration

1880:    Collective

1882:    Input Parameters:
1883: +  tao - the Tao context
1884: -  ctx - `PetscViewer` context or NULL

1886:    Options Database Keys:
1887: .  -tao_view_ls_residual - view the least-squares residual at each iteration

1889:    Level: advanced

1891: .seealso: `TaoDefaultSMonitor()`, `TaoSetMonitor()`
1892: @*/
1893: PetscErrorCode TaoResidualMonitor(Tao tao, void *ctx)
1894: {
1895:   PetscViewer viewer = (PetscViewer)ctx;

1899:   VecView(tao->ls_res, viewer);
1900:   return 0;
1901: }

1903: /*@
1904:    TaoDefaultConvergenceTest - Determines whether the solver should continue iterating
1905:    or terminate.

1907:    Collective

1909:    Input Parameters:
1910: +  tao - the Tao context
1911: -  dummy - unused dummy context

1916:    Notes:
1917:    This routine checks the residual in the optimality conditions, the
1918:    relative residual in the optimality conditions, the number of function
1919:    evaluations, and the function value to test convergence; the result is stored
1920:    as the converged reason of the Tao object.  Some solvers may use different convergence routines.
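
   Example Usage:
   A sketch (MyExtraCriterionMet() and myctx are hypothetical placeholders) of a user-defined test,
   set with `TaoSetConvergenceTest()`, that applies the default tests first and then adds an
   application-specific stopping criterion:
.vb
   static PetscErrorCode MyConvergenceTest(Tao tao, void *ctx)
   {
     TaoConvergedReason reason;

     TaoDefaultConvergenceTest(tao, NULL);   /* run the standard tests first */
     TaoGetConvergedReason(tao, &reason);
     if (reason == TAO_CONTINUE_ITERATING && MyExtraCriterionMet(ctx)) { /* MyExtraCriterionMet() is a placeholder */
       TaoSetConvergedReason(tao, TAO_CONVERGED_USER);
     }
     return 0;
   }

   TaoSetConvergenceTest(tao, MyConvergenceTest, myctx);
.ve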

1922:    Level: developer

1924: .seealso: `TaoSetTolerances()`, `TaoGetConvergedReason()`, `TaoSetConvergedReason()`
1925: @*/

1927: PetscErrorCode TaoDefaultConvergenceTest(Tao tao, void *dummy)
1928: {
1929:   PetscInt           niter = tao->niter, nfuncs = PetscMax(tao->nfuncs, tao->nfuncgrads);
1930:   PetscInt           max_funcs = tao->max_funcs;
1931:   PetscReal          gnorm = tao->residual, gnorm0 = tao->gnorm0;
1932:   PetscReal          f = tao->fc, steptol = tao->steptol, trradius = tao->step;
1933:   PetscReal          gatol = tao->gatol, grtol = tao->grtol, gttol = tao->gttol;
1934:   PetscReal          catol = tao->catol, crtol = tao->crtol;
1935:   PetscReal          fmin = tao->fmin, cnorm = tao->cnorm;
1936:   TaoConvergedReason reason = tao->reason;

1939:   if (reason != TAO_CONTINUE_ITERATING) return 0;

1941:   if (PetscIsInfOrNanReal(f)) {
1942:     PetscInfo(tao, "Failed to converge, function value is Inf or NaN\n");
1943:     reason = TAO_DIVERGED_NAN;
1944:   } else if (f <= fmin && cnorm <= catol) {
1945:     PetscInfo(tao, "Converged due to function value %g < minimum function value %g\n", (double)f, (double)fmin);
1946:     reason = TAO_CONVERGED_MINF;
1947:   } else if (gnorm <= gatol && cnorm <= catol) {
1948:     PetscInfo(tao, "Converged due to residual norm ||g(X)||=%g < %g\n", (double)gnorm, (double)gatol);
1949:     reason = TAO_CONVERGED_GATOL;
1950:   } else if (f != 0 && PetscAbsReal(gnorm / f) <= grtol && cnorm <= crtol) {
1951:     PetscInfo(tao, "Converged due to residual ||g(X)||/|f(X)| =%g < %g\n", (double)(gnorm / f), (double)grtol);
1952:     reason = TAO_CONVERGED_GRTOL;
1953:   } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm / gnorm0 < gttol) && cnorm <= crtol) {
1954:     PetscInfo(tao, "Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n", (double)(gnorm / gnorm0), (double)gttol);
1955:     reason = TAO_CONVERGED_GTTOL;
1956:   } else if (max_funcs >= 0 && nfuncs > max_funcs) {
1957:     PetscInfo(tao, "Exceeded maximum number of function evaluations: %" PetscInt_FMT " > %" PetscInt_FMT "\n", nfuncs, max_funcs);
1958:     reason = TAO_DIVERGED_MAXFCN;
1959:   } else if (tao->lsflag != 0) {
1960:     PetscInfo(tao, "Tao Line Search failure.\n");
1961:     reason = TAO_DIVERGED_LS_FAILURE;
1962:   } else if (trradius < steptol && niter > 0) {
1963:     PetscInfo(tao, "Trust region/step size too small: %g < %g\n", (double)trradius, (double)steptol);
1964:     reason = TAO_CONVERGED_STEPTOL;
1965:   } else if (niter >= tao->max_it) {
1966:     PetscInfo(tao, "Exceeded maximum number of iterations: %" PetscInt_FMT " > %" PetscInt_FMT "\n", niter, tao->max_it);
1967:     reason = TAO_DIVERGED_MAXITS;
1968:   } else {
1969:     reason = TAO_CONTINUE_ITERATING;
1970:   }
1971:   tao->reason = reason;
1972:   return 0;
1973: }

1975: /*@C
1976:    TaoSetOptionsPrefix - Sets the prefix used for searching for all
1977:    Tao options in the database.

1979:    Logically Collective

1981:    Input Parameters:
1982: +  tao - the Tao context
1983: -  prefix - the prefix string to prepend to all Tao option requests

1985:    Notes:
1986:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1987:    The first character of all runtime options is AUTOMATICALLY the hyphen.

1989:    For example, to distinguish between the runtime options for two
1990:    different Tao solvers, one could call
1991: .vb
1992:       TaoSetOptionsPrefix(tao1,"sys1_")
1993:       TaoSetOptionsPrefix(tao2,"sys2_")
1994: .ve

1996:    This would enable use of different options for each system, such as
1997: .vb
1998:       -sys1_tao_type blmvm -sys1_tao_grtol 1.e-3
1999:       -sys2_tao_type lmvm  -sys2_tao_grtol 1.e-4
2000: .ve

2002:    Level: advanced

2004: .seealso: `TaoSetFromOptions()`, `TaoAppendOptionsPrefix()`, `TaoGetOptionsPrefix()`
2005: @*/

2007: PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[])
2008: {
2010:   PetscObjectSetOptionsPrefix((PetscObject)tao, p);
2011:   if (tao->linesearch) TaoLineSearchSetOptionsPrefix(tao->linesearch, p);
2012:   if (tao->ksp) KSPSetOptionsPrefix(tao->ksp, p);
2013:   return 0;
2014: }

2016: /*@C
2017:    TaoAppendOptionsPrefix - Appends to the prefix used for searching for all
2018:    Tao options in the database.

2020:    Logically Collective

2022:    Input Parameters:
2023: +  tao - the Tao solver context
2024: -  prefix - the prefix string to prepend to all Tao option requests

2026:    Note:
2027:    A hyphen (-) must NOT be given at the beginning of the prefix name.
2028:    The first character of all runtime options is automatically the hyphen.

2030:    Level: advanced

2032: .seealso: `TaoSetFromOptions()`, `TaoSetOptionsPrefix()`, `TaoGetOptionsPrefix()`
2033: @*/
2034: PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[])
2035: {
2037:   PetscObjectAppendOptionsPrefix((PetscObject)tao, p);
2038:   if (tao->linesearch) PetscObjectAppendOptionsPrefix((PetscObject)tao->linesearch, p);
2039:   if (tao->ksp) KSPAppendOptionsPrefix(tao->ksp, p);
2040:   return 0;
2041: }

2043: /*@C
2044:   TaoGetOptionsPrefix - Gets the prefix used for searching for all
2045:   Tao options in the database

2047:   Not Collective

2049:   Input Parameter:
2050: . tao - the Tao context

2052:   Output Parameter:
2053: . prefix - pointer to the prefix string used

2055:   Fortran Note:
2056:   The user should pass in a string 'prefix' of
2057:   sufficient length to hold the prefix.

2059:   Level: advanced

2061: .seealso: `TaoSetFromOptions()`, `TaoSetOptionsPrefix()`, `TaoAppendOptionsPrefix()`
2062: @*/
2063: PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[])
2064: {
2066:   PetscObjectGetOptionsPrefix((PetscObject)tao, p);
2067:   return 0;
2068: }

2070: /*@C
2071:    TaoSetType - Sets the algorithm used by the Tao solver.

2073:    Collective

2075:    Input Parameters:
2076: +  tao - the Tao solver context
2077: -  type - a known method

2079:    Options Database Key:
2080: .  -tao_type <type> - Sets the method; use -help for a list
2081:    of available methods (for instance, "-tao_type lmvm" or "-tao_type tron")

2083:    Available methods include:
2084: +    `TAONLS` - nls Newton's method with line search for unconstrained minimization
2085: .    `TAONTR` - ntr Newton's method with trust region for unconstrained minimization
2086: .    `TAONTL` - ntl Newton's method with trust region, line search for unconstrained minimization
2087: .    `TAOLMVM` - lmvm Limited memory variable metric method for unconstrained minimization
2088: .    `TAOCG` - cg Nonlinear conjugate gradient method for unconstrained minimization
2089: .    `TAONM` - nm Nelder-Mead algorithm for derivative-free unconstrained minimization
2090: .    `TAOTRON` - tron Newton Trust Region method for bound constrained minimization
2091: .    `TAOGPCG` - gpcg Newton Trust Region method for quadratic bound constrained minimization
2092: .    `TAOBLMVM` - blmvm Limited memory variable metric method for bound constrained minimization
2093: .    `TAOLCL` - lcl Linearly constrained Lagrangian method for pde-constrained minimization
2094: -    `TAOPOUNDERS` - pounders Model-based algorithm for nonlinear least squares
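
   Example Usage:
   A small sketch: select a method in code while still allowing the options database to override it:
.vb
   TaoSetType(tao, TAOLMVM);   /* default to the limited-memory variable metric method */
   TaoSetFromOptions(tao);     /* -tao_type <type> can still change the method at runtime */
.ve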

2096:   Level: intermediate

2098: .seealso: `Tao`, `TaoCreate()`, `TaoGetType()`, `TaoType`

2100: @*/
2101: PetscErrorCode TaoSetType(Tao tao, TaoType type)
2102: {
2103:   PetscErrorCode (*create_xxx)(Tao);
2104:   PetscBool issame;


2108:   PetscObjectTypeCompare((PetscObject)tao, type, &issame);
2109:   if (issame) return 0;

2111:   PetscFunctionListFind(TaoList, type, (void (**)(void)) & create_xxx);

2114:   /* Destroy the existing solver information */
2115:   PetscTryTypeMethod(tao, destroy);
2116:   KSPDestroy(&tao->ksp);
2117:   TaoLineSearchDestroy(&tao->linesearch);
2118:   tao->ops->setup          = NULL;
2119:   tao->ops->solve          = NULL;
2120:   tao->ops->view           = NULL;
2121:   tao->ops->setfromoptions = NULL;
2122:   tao->ops->destroy        = NULL;

2124:   tao->setupcalled = PETSC_FALSE;

2126:   (*create_xxx)(tao);
2127:   PetscObjectChangeTypeName((PetscObject)tao, type);
2128:   return 0;
2129: }

2131: /*MC
2132:    TaoRegister - Adds a method to the Tao solver package.

2134:    Synopsis:
2135:    PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))

2137:    Not collective

2139:    Input Parameters:
2140: +  sname - name of a new user-defined solver
2141: -  func - routine to Create method context

2143:    Note:
2144:    `TaoRegister()` may be called multiple times to add several user-defined solvers.

2146:    Sample usage:
2147: .vb
2148:    TaoRegister("my_solver",MySolverCreate);
2149: .ve

2151:    Then, your solver can be chosen with the procedural interface via
2152: $     TaoSetType(tao,"my_solver")
2153:    or at runtime via the option
2154: $     -tao_type my_solver

2156:    Level: advanced

2158: .seealso: `Tao`, `TaoSetType()`, `TaoRegisterAll()`, `TaoRegisterDestroy()`
2159: M*/
2160: PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
2161: {
2162:   TaoInitializePackage();
2163:   PetscFunctionListAdd(&TaoList, sname, (void (*)(void))func);
2164:   return 0;
2165: }

2167: /*@C
2168:    TaoRegisterDestroy - Frees the list of minimization solvers that were
2169:    registered by `TaoRegister()`.

2171:    Not Collective

2173:    Level: advanced

2175: .seealso: `TaoRegisterAll()`, `TaoRegister()`
2176: @*/
2177: PetscErrorCode TaoRegisterDestroy(void)
2178: {
2179:   PetscFunctionListDestroy(&TaoList);
2180:   TaoRegisterAllCalled = PETSC_FALSE;
2181:   return 0;
2182: }

2184: /*@
2185:    TaoGetIterationNumber - Gets the number of Tao iterations completed
2186:    at this time.

2188:    Not Collective

2190:    Input Parameter:
2191: .  tao - Tao context

2193:    Output Parameter:
2194: .  iter - iteration number

2196:    Notes:
2197:    For example, during the computation of iteration 2 this would return 1.

2199:    Level: intermediate

2201: .seealso: `TaoGetLinearSolveIterations()`, `TaoGetResidualNorm()`, `TaoGetObjective()`
2202: @*/
2203: PetscErrorCode TaoGetIterationNumber(Tao tao, PetscInt *iter)
2204: {
2207:   *iter = tao->niter;
2208:   return 0;
2209: }

2211: /*@
2212:    TaoGetResidualNorm - Gets the current value of the norm of the residual
2213:    at this time.

2215:    Not Collective

2217:    Input Parameter:
2218: .  tao - Tao context

2220:    Output Parameter:
2221: .  value - the current value

2223:    Level: intermediate

2225:    Developer Note: This is the 2-norm of the residual; we cannot use `TaoGetGradientNorm()` because that has
2226:                    a different meaning. Note that Tao sometimes refers to the gradient as the residual.

2228: .seealso: `TaoGetLinearSolveIterations()`, `TaoGetIterationNumber()`, `TaoGetObjective()`
2229: @*/
2230: PetscErrorCode TaoGetResidualNorm(Tao tao, PetscReal *value)
2231: {
2234:   *value = tao->residual;
2235:   return 0;
2236: }

2238: /*@
2239:    TaoSetIterationNumber - Sets the current iteration number.

2241:    Logically Collective

2243:    Input Parameters:
2244: +  tao - Tao context
2245: -  iter - iteration number

2247:    Level: developer

2249: .seealso: `TaoGetLinearSolveIterations()`
2250: @*/
2251: PetscErrorCode TaoSetIterationNumber(Tao tao, PetscInt iter)
2252: {
2255:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2256:   tao->niter = iter;
2257:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2258:   return 0;
2259: }

2261: /*@
2262:    TaoGetTotalIterationNumber - Gets the total number of Tao iterations
2263:    completed. This number keeps accumulating if multiple solves
2264:    are called with the Tao object.

2266:    Not Collective

2268:    Input Parameter:
2269: .  tao - Tao context

2271:    Output Parameter:
2272: .  iter - iteration number

2274:    Notes:
2275:    The total iteration count is updated after each solve; if a `TaoSolve()` is currently
2276:    in progress, those iterations are not yet counted.

2278:    Level: intermediate

2280: .seealso: `TaoGetLinearSolveIterations()`
2281: @*/
2282: PetscErrorCode TaoGetTotalIterationNumber(Tao tao, PetscInt *iter)
2283: {
2286:   *iter = tao->ntotalits;
2287:   return 0;
2288: }

2290: /*@
2291:    TaoSetTotalIterationNumber - Sets the current total iteration number.

2293:    Logically Collective

2295:    Input Parameters:
2296: +  tao - Tao context
2297: -  iter - iteration number

2299:    Level: developer

2301: .seealso: `TaoGetLinearSolveIterations()`
2302: @*/
2303: PetscErrorCode TaoSetTotalIterationNumber(Tao tao, PetscInt iter)
2304: {
2307:   PetscObjectSAWsTakeAccess((PetscObject)tao);
2308:   tao->ntotalits = iter;
2309:   PetscObjectSAWsGrantAccess((PetscObject)tao);
2310:   return 0;
2311: }

2313: /*@
2314:   TaoSetConvergedReason - Sets the termination flag on a Tao object

2316:   Logically Collective

2318:   Input Parameters:
2319: + tao - the Tao context
2320: - reason - one of
2321: $     `TAO_CONVERGED_ATOL` (2),
2322: $     `TAO_CONVERGED_RTOL` (3),
2323: $     `TAO_CONVERGED_STEPTOL` (4),
2324: $     `TAO_CONVERGED_MINF` (5),
2325: $     `TAO_CONVERGED_USER` (6),
2326: $     `TAO_DIVERGED_MAXITS` (-2),
2327: $     `TAO_DIVERGED_NAN` (-4),
2328: $     `TAO_DIVERGED_MAXFCN` (-5),
2329: $     `TAO_DIVERGED_LS_FAILURE` (-6),
2330: $     `TAO_DIVERGED_TR_REDUCTION` (-7),
2331: $     `TAO_DIVERGED_USER` (-8),
2332: $     `TAO_CONTINUE_ITERATING` (0)

2334:    Level: intermediate

2336: @*/
2337: PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason)
2338: {
2341:   tao->reason = reason;
2342:   return 0;
2343: }

2345: /*@
2346:    TaoGetConvergedReason - Gets the reason the Tao iteration was stopped.

2348:    Not Collective

2350:    Input Parameter:
2351: .  tao - the Tao solver context

2353:    Output Parameter:
2354: .  reason - one of
2355: $  `TAO_CONVERGED_GATOL` (3)           ||g(X)|| < gatol
2356: $  `TAO_CONVERGED_GRTOL` (4)           ||g(X)|| / f(X)  < grtol
2357: $  `TAO_CONVERGED_GTTOL` (5)           ||g(X)|| / ||g(X0)|| < gttol
2358: $  `TAO_CONVERGED_STEPTOL` (6)         step size small
2359: $  `TAO_CONVERGED_MINF` (7)            F < F_min
2360: $  `TAO_CONVERGED_USER` (8)            User defined
2361: $  `TAO_DIVERGED_MAXITS` (-2)          its > maxits
2362: $  `TAO_DIVERGED_NAN` (-4)             Numerical problems
2363: $  `TAO_DIVERGED_MAXFCN` (-5)          fevals > max_funcs
2364: $  `TAO_DIVERGED_LS_FAILURE` (-6)      line search failure
2365: $  `TAO_DIVERGED_TR_REDUCTION` (-7)    trust region failure
2366: $  `TAO_DIVERGED_USER` (-8)             (user defined)
2367: $  `TAO_CONTINUE_ITERATING` (0)

2369:    where
2370: +  X - current solution
2371: .  X0 - initial guess
2372: .  f(X) - current function value
2373: .  f(X*) - true solution (estimated)
2374: .  g(X) - current gradient
2375: .  its - current iterate number
2376: .  maxits - maximum number of iterates
2377: .  fevals - number of function evaluations
2378: -  max_funcs - maximum number of function evaluations
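
   Example Usage:
   A sketch of checking the outcome after a solve; TaoConvergedReasons[] maps the enum value to a
   human-readable string:
.vb
   TaoConvergedReason reason;

   TaoSolve(tao);
   TaoGetConvergedReason(tao, &reason);
   if (reason < 0) PetscPrintf(PETSC_COMM_WORLD, "Tao diverged: %s\n", TaoConvergedReasons[reason]);
.ve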

2380:    Level: intermediate

2382: .seealso: `TaoSetConvergenceTest()`, `TaoSetTolerances()`

2384: @*/
2385: PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason)
2386: {
2389:   *reason = tao->reason;
2390:   return 0;
2391: }

2393: /*@
2394:    TaoGetSolutionStatus - Get the current iterate, objective value,
2395:    residual, infeasibility, and termination reason

2397:    Not Collective

2399:    Input Parameter:
2400: .  tao - the Tao context

2402:    Output Parameters:
2403: +  its - the current iterate number (>=0)
2404: .  f - the current function value
2405: .  gnorm - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.
2406: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2407: .  xdiff - the step length or trust region radius of the most recent iterate.
2408: -  reason - The termination reason, which can equal `TAO_CONTINUE_ITERATING`

2410:    Level: intermediate

2412:    Notes:
2413:    Tao returns the values set by the solvers in the routine `TaoMonitor()`.

2415:    If any of the output arguments are set to NULL, no corresponding value will be returned.
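
   Example Usage:
   A sketch of a user monitor (registered with `TaoSetMonitor()`) that queries the current status;
   outputs that are not needed are passed as NULL:
.vb
   static PetscErrorCode MyMonitor(Tao tao, void *ctx)
   {
     PetscInt  its;
     PetscReal f, gnorm;

     TaoGetSolutionStatus(tao, &its, &f, &gnorm, NULL, NULL, NULL);
     PetscPrintf(PetscObjectComm((PetscObject)tao), "iter %" PetscInt_FMT ": f = %g, residual = %g\n", its, (double)f, (double)gnorm);
     return 0;
   }
.ve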

2417: .seealso: `TaoMonitor()`, `TaoGetConvergedReason()`
2418: @*/
2419: PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason)
2420: {
2422:   if (its) *its = tao->niter;
2423:   if (f) *f = tao->fc;
2424:   if (gnorm) *gnorm = tao->residual;
2425:   if (cnorm) *cnorm = tao->cnorm;
2426:   if (reason) *reason = tao->reason;
2427:   if (xdiff) *xdiff = tao->step;
2428:   return 0;
2429: }

2431: /*@C
2432:    TaoGetType - Gets the current Tao algorithm.

2434:    Not Collective

2436:    Input Parameter:
2437: .  tao - the Tao solver context

2439:    Output Parameter:
2440: .  type - Tao method

2442:    Level: intermediate

2444: .seealso: `Tao`, `TaoType`, `TaoSetType()`
2445: @*/
2446: PetscErrorCode TaoGetType(Tao tao, TaoType *type)
2447: {
2450:   *type = ((PetscObject)tao)->type_name;
2451:   return 0;
2452: }

2454: /*@C
2455:   TaoMonitor - Monitor the solver and the current solution.  This
2456:   routine will record the iteration number and residual statistics,
2457:   and call any monitors specified by the user.

2459:    Input Parameters:
2460: +  tao - the Tao context
2461: .  its - the current iterate number (>=0)
2462: .  f - the current objective function value
2463: .  res - the gradient norm, square root of the duality gap, or other measure indicating distance from optimality.  This measure will be recorded and
2464:           used for some termination tests.
2465: .  cnorm - the infeasibility of the current solution with regard to the constraints.
2466: -  steplength - multiple of the step direction added to the previous iterate.

2471:    Options Database Key:
2472: .  -tao_monitor - Use the default monitor, which prints statistics to standard output
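
   Example Usage:
   A sketch (a fragment; iter, f, gnorm, and steplength are the solver's local iteration data) of the
   call a solver implementation typically makes once per iteration; an unconstrained solver passes
   0.0 for the constraint norm:
.vb
   TaoMonitor(tao, iter, f, gnorm, 0.0, steplength);
.ve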

2474:    Level: developer

2476: .seealso: `Tao`, `TaoGetConvergedReason()`, `TaoMonitorDefault()`, `TaoSetMonitor()`
2477: @*/
2478: PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength)
2479: {
2480:   PetscInt i;

2483:   tao->fc       = f;
2484:   tao->residual = res;
2485:   tao->cnorm    = cnorm;
2486:   tao->step     = steplength;
2487:   if (!its) {
2488:     tao->cnorm0 = cnorm;
2489:     tao->gnorm0 = res;
2490:   }
2492:   for (i = 0; i < tao->numbermonitors; i++) (*tao->monitor[i])(tao, tao->monitorcontext[i]);
2493:   return 0;
2494: }

2496: /*@
2497:    TaoSetConvergenceHistory - Sets the array used to hold the convergence history.

2499:    Logically Collective

2501:    Input Parameters:
2502: +  tao - the Tao solver context
2503: .  obj   - array to hold objective value history
2504: .  resid - array to hold residual history
2505: .  cnorm - array to hold constraint violation history
2506: .  lits - integer array to hold the number of linear iterations for each Tao iteration
2507: .  na  - size of obj, resid, and cnorm
2508: -  reset - `PETSC_TRUE` indicates each new minimization resets the history counter to zero,
2509:            else it continues storing new values for new minimizations after the old ones

2511:    Notes:
2512:    If set, Tao will fill the given arrays with the indicated
2513:    information at each iteration.  If 'obj','resid','cnorm','lits' are
2514:    *all* NULL then space (using size na, or 1000 if na is `PETSC_DECIDE` or
2515:    `PETSC_DEFAULT`) is allocated for the history.
2516:    If not all are NULL, then only the non-NULL information categories
2517:    will be stored, the others will be ignored.

2519:    Any convergence information after iteration number 'na' will not be stored.

2521:    This routine is useful, e.g., when running a code for purposes
2522:    of accurate performance monitoring, when no I/O should be done
2523:    during the section of code that is being timed.
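
   Example Usage:
   A sketch that lets Tao allocate the history storage, runs the solve, and then retrieves the recorded
   data with `TaoGetConvergenceHistory()`:
.vb
   PetscReal *obj, *resid, *cnorm;
   PetscInt  *lits, nhist, i;

   TaoSetConvergenceHistory(tao, NULL, NULL, NULL, NULL, PETSC_DECIDE, PETSC_TRUE);
   TaoSolve(tao);
   TaoGetConvergenceHistory(tao, &obj, &resid, &cnorm, &lits, &nhist);
   for (i = 0; i < nhist; i++) PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT ": f = %g, residual = %g\n", i, (double)obj[i], (double)resid[i]);
.ve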

2525:    Level: intermediate

2527: .seealso: `TaoGetConvergenceHistory()`

2529: @*/
2530: PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal obj[], PetscReal resid[], PetscReal cnorm[], PetscInt lits[], PetscInt na, PetscBool reset)
2531: {

2538:   if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000;
2539:   if (!obj && !resid && !cnorm && !lits) {
2540:     PetscCalloc4(na, &obj, na, &resid, na, &cnorm, na, &lits);
2541:     tao->hist_malloc = PETSC_TRUE;
2542:   }

2544:   tao->hist_obj   = obj;
2545:   tao->hist_resid = resid;
2546:   tao->hist_cnorm = cnorm;
2547:   tao->hist_lits  = lits;
2548:   tao->hist_max   = na;
2549:   tao->hist_reset = reset;
2550:   tao->hist_len   = 0;
2551:   return 0;
2552: }

2554: /*@C
2555:    TaoGetConvergenceHistory - Gets the arrays used that hold the convergence history.

2557:    Collective

2559:    Input Parameter:
2560: .  tao - the Tao context

2562:    Output Parameters:
2563: +  obj   - array used to hold objective value history
2564: .  resid - array used to hold residual history
2565: .  cnorm - array used to hold constraint violation history
2566: .  lits  - integer array used to hold linear solver iteration count
2567: -  nhist  - size of obj, resid, cnorm, and lits

2569:    Notes:
2570:     This routine must be preceded by calls to `TaoSetConvergenceHistory()`
2571:     and `TaoSolve()`; otherwise it returns useless information.

2573:     The calling sequence for this routine in Fortran is
2574: $   call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr)

2576:    This routine is useful, e.g., when running a code for purposes
2577:    of accurate performance monitoring, when no I/O should be done
2578:    during the section of code that is being timed.

2580:    Level: advanced

2582: .seealso: `Tao`, `TaoSolve()`, `TaoSetConvergenceHistory()`

2584: @*/
2585: PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist)
2586: {
2588:   if (obj) *obj = tao->hist_obj;
2589:   if (cnorm) *cnorm = tao->hist_cnorm;
2590:   if (resid) *resid = tao->hist_resid;
2591:   if (lits) *lits = tao->hist_lits;
2592:   if (nhist) *nhist = tao->hist_len;
2593:   return 0;
2594: }

2596: /*@
2597:    TaoSetApplicationContext - Sets the optional user-defined context for
2598:    a solver.

2600:    Logically Collective

2602:    Input Parameters:
2603: +  tao  - the Tao context
2604: -  usrP - optional user context

2606:    Level: intermediate

2608: .seealso: `Tao`, `TaoGetApplicationContext()`, `TaoSetApplicationContext()`
2609: @*/
2610: PetscErrorCode TaoSetApplicationContext(Tao tao, void *usrP)
2611: {
2613:   tao->user = usrP;
2614:   return 0;
2615: }

2617: /*@
2618:    TaoGetApplicationContext - Gets the user-defined context for a
2619:    Tao solver.

2621:    Not Collective

2623:    Input Parameter:
2624: .  tao  - Tao context

2626:    Output Parameter:
2627: .  usrP - user context
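
   Example Usage:
   A sketch using an application-defined struct (`AppCtx` here is illustrative) as the context:
.vb
   typedef struct {
     PetscReal alpha;
   } AppCtx;

   AppCtx user, *ctx;

   TaoSetApplicationContext(tao, &user);
   ...
   TaoGetApplicationContext(tao, &ctx);   /* ctx now points to user */
.ve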

2629:    Level: intermediate

2631: .seealso: `TaoSetApplicationContext()`
2632: @*/
2633: PetscErrorCode TaoGetApplicationContext(Tao tao, void *usrP)
2634: {
2637:   *(void **)usrP = tao->user;
2638:   return 0;
2639: }

2641: /*@
2642:    TaoSetGradientNorm - Sets the matrix used to define the norm that measures the size of the gradient.

2644:    Collective

2646:    Input Parameters:
2647: +  tao  - the Tao context
2648: -  M    - matrix that defines the norm
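
   Example Usage:
   A sketch: once a symmetric positive definite matrix M (e.g. a mass matrix) has been set here,
   `TaoGradientNorm()` reports sqrt(g^T M g) for a gradient g instead of the plain vector norm:
.vb
   TaoSetGradientNorm(tao, M);
   ...
   TaoGradientNorm(tao, g, NORM_2, &gnorm);   /* gnorm = sqrt(g^T M g) */
.ve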

2650:    Level: beginner

2652: .seealso: `Tao`, `TaoGetGradientNorm()`, `TaoGradientNorm()`
2653: @*/
2654: PetscErrorCode TaoSetGradientNorm(Tao tao, Mat M)
2655: {
2658:   PetscObjectReference((PetscObject)M);
2659:   MatDestroy(&tao->gradient_norm);
2660:   VecDestroy(&tao->gradient_norm_tmp);
2661:   tao->gradient_norm = M;
2662:   MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);
2663:   return 0;
2664: }

2666: /*@
2667:    TaoGetGradientNorm - Returns the matrix used to define the norm used for measuring the size of the gradient.

2669:    Not Collective

2671:    Input Parameter:
2672: .  tao  - Tao context

2674:    Output Parameter:
2675: .  M - the matrix used to define the norm

2677:    Level: beginner

2679: .seealso: `Tao`, `TaoSetGradientNorm()`, `TaoGradientNorm()`
2680: @*/
2681: PetscErrorCode TaoGetGradientNorm(Tao tao, Mat *M)
2682: {
2685:   *M = tao->gradient_norm;
2686:   return 0;
2687: }

2689: /*@C
2690:    TaoGradientNorm - Compute the norm of the gradient with respect to the norm the user has set.

2692:    Collective

2694:    Input Parameters:
2695: +  tao      - the Tao context
2696: .  gradient - the gradient vector whose norm is to be computed
2697: -  type     - the norm type

2699:    Output Parameter:
2700: .  gnorm    - the gradient norm

2702:    Level: developer

2704: .seealso: `Tao`, `TaoSetGradientNorm()`, `TaoGetGradientNorm()`
2705: @*/
2706: PetscErrorCode TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm)
2707: {
2712:   if (tao->gradient_norm) {
2713:     PetscScalar gnorms;

2716:     MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);
2717:     VecDot(gradient, tao->gradient_norm_tmp, &gnorms);
2718:     *gnorm = PetscRealPart(PetscSqrtScalar(gnorms));
2719:   } else {
2720:     VecNorm(gradient, type, gnorm);
2721:   }
2722:   return 0;
2723: }

2725: /*@C
2726:    TaoMonitorDrawCtxCreate - Creates the monitor context used by `TaoDrawSolutionMonitor()`

2728:    Collective

2730:    Output Parameter:
2731: .    ctx - the monitor context

2733:    Options Database Key:
2734: .   -tao_draw_solution_initial - show initial guess as well as current solution
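
   Example Usage:
   A sketch that creates a draw context and attaches `TaoDrawSolutionMonitor()` with it; the context
   is destroyed automatically when the monitors are cleared:
.vb
   TaoMonitorDrawCtx ctx;

   TaoMonitorDrawCtxCreate(PETSC_COMM_WORLD, NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, 1, &ctx);
   TaoSetMonitor(tao, TaoDrawSolutionMonitor, ctx, (PetscErrorCode (*)(void **))TaoMonitorDrawCtxDestroy);
.ve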

2736:    Level: intermediate

2738: .seealso: `Tao`, `TaoSetMonitor()`, `TaoMonitorDefault()`, `VecView()`, `TaoMonitorDrawCtxDestroy()`
2739: @*/
2740: PetscErrorCode TaoMonitorDrawCtxCreate(MPI_Comm comm, const char host[], const char label[], int x, int y, int m, int n, PetscInt howoften, TaoMonitorDrawCtx *ctx)
2741: {
2742:   PetscNew(ctx);
2743:   PetscViewerDrawOpen(comm, host, label, x, y, m, n, &(*ctx)->viewer);
2744:   PetscViewerSetFromOptions((*ctx)->viewer);
2745:   (*ctx)->howoften = howoften;
2746:   return 0;
2747: }

2749: /*@C
2750:    TaoMonitorDrawCtxDestroy - Destroys the monitor context for `TaoDrawSolutionMonitor()`

2752:    Collective

2754:    Input Parameter:
2755: .    ictx - the monitor context

2757:    Level: intermediate

2759: .seealso: `TaoSetMonitor()`, `TaoMonitorDefault()`, `VecView()`, `TaoDrawSolutionMonitor()`
2760: @*/
2761: PetscErrorCode TaoMonitorDrawCtxDestroy(TaoMonitorDrawCtx *ictx)
2762: {
2763:   PetscViewerDestroy(&(*ictx)->viewer);
2764:   PetscFree(*ictx);
2765:   return 0;
2766: }