/* taosolver_fg.c: Tao objective/gradient evaluation and callback-setting routines */
1: #include <petsc/private/taoimpl.h>
3: /*@
4: TaoSetSolution - Sets the vector holding the initial guess for the solve
6: Logically Collective
8: Input Parameters:
9: + tao - the `Tao` context
10: - x0 - the initial guess
12: Level: beginner
14: .seealso: [](ch_tao), `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
15: @*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  /* Reference the new vector before destroying the old one so the call is safe
     even when x0 == tao->solution (would otherwise free the vector being kept) */
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  /* Let the callback term know the layout of solution vectors it will receive */
  if (x0) PetscCall(TaoTermSetSolutionTemplate(tao->callbacks, x0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
28: PETSC_INTERN PetscErrorCode TaoTestGradient_Internal(Tao tao, Vec x, Vec g1, PetscViewer viewer, PetscViewer mviewer)
29: {
30: Vec g2, g3;
31: PetscReal hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
32: PetscScalar dot;
34: PetscFunctionBegin;
35: PetscCall(VecDuplicate(x, &g2));
36: PetscCall(VecDuplicate(x, &g3));
38: /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
39: PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));
41: PetscCall(VecNorm(g2, NORM_2, &fdnorm));
42: PetscCall(VecNorm(g1, NORM_2, &hcnorm));
43: PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
44: PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
45: PetscCall(VecDot(g1, g2, &dot));
46: PetscCall(VecCopy(g1, g3));
47: PetscCall(VecAXPY(g3, -1.0, g2));
48: PetscCall(VecNorm(g3, NORM_2, &diffnorm));
49: PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
50: PetscCall(PetscViewerASCIIPrintf(viewer, " ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
51: PetscCall(PetscViewerASCIIPrintf(viewer, " 2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
52: PetscCall(PetscViewerASCIIPrintf(viewer, " max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));
54: if (mviewer) {
55: PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded gradient ----------\n"));
56: PetscCall(VecView(g1, mviewer));
57: PetscCall(PetscViewerASCIIPrintf(viewer, " Finite difference gradient ----------\n"));
58: PetscCall(VecView(g2, mviewer));
59: PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded minus finite-difference gradient ----------\n"));
60: PetscCall(VecView(g3, mviewer));
61: }
62: PetscCall(VecDestroy(&g2));
63: PetscCall(VecDestroy(&g3));
64: PetscFunctionReturn(PETSC_SUCCESS);
65: }
67: PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1)
68: {
69: PetscBool complete_print = PETSC_FALSE, test = PETSC_FALSE;
70: MPI_Comm comm;
71: PetscViewer viewer, mviewer;
72: PetscViewerFormat format;
73: PetscInt tabs;
74: static PetscBool directionsprinted = PETSC_FALSE;
76: PetscFunctionBegin;
77: PetscObjectOptionsBegin((PetscObject)tao);
78: PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
79: PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
80: PetscOptionsEnd();
81: if (!test) {
82: if (complete_print) PetscCall(PetscViewerDestroy(&mviewer));
83: PetscFunctionReturn(PETSC_SUCCESS);
84: }
86: PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
87: PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
88: PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
89: PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
90: PetscCall(PetscViewerASCIIPrintf(viewer, " ---------- Testing Gradient -------------\n"));
91: if (!complete_print && !directionsprinted) {
92: PetscCall(PetscViewerASCIIPrintf(viewer, " Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
93: PetscCall(PetscViewerASCIIPrintf(viewer, " of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
94: }
95: if (!directionsprinted) {
96: PetscCall(PetscViewerASCIIPrintf(viewer, " Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
97: PetscCall(PetscViewerASCIIPrintf(viewer, " O(1.e-8), the hand-coded Gradient is probably correct.\n"));
98: directionsprinted = PETSC_TRUE;
99: }
100: if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));
101: PetscCall(TaoTestGradient_Internal(tao, x, g1, viewer, complete_print ? mviewer : NULL));
102: if (complete_print) {
103: PetscCall(PetscViewerPopFormat(mviewer));
104: PetscCall(PetscViewerDestroy(&mviewer));
105: }
106: PetscCall(PetscViewerASCIISetTab(viewer, tabs));
107: PetscFunctionReturn(PETSC_SUCCESS);
108: }
110: /*@
111: TaoComputeGradient - Computes the gradient of the objective function
113: Collective
115: Input Parameters:
116: + tao - the `Tao` context
117: - X - input vector
119: Output Parameter:
120: . G - gradient vector
122: Options Database Keys:
123: + -tao_test_gradient - compare the user provided gradient with one computed via finite differences to check for errors
124: - -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient
126: Level: developer
128: Note:
129: `TaoComputeGradient()` is typically used within the implementation of the optimization method,
130: so most users would not generally call this routine themselves.
132: .seealso: [](ch_tao), `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
133: @*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 3);
  /* Delegate to the objective term; INSERT_VALUES overwrites G rather than accumulating into it */
  PetscCall(TaoTermMappingComputeGradient(&tao->objective_term, X, tao->objective_parameters, INSERT_VALUES, G));
  /* No-op unless -tao_test_gradient was given in the options database */
  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}
147: /*@
148: TaoComputeObjective - Computes the objective function value at a given point
150: Collective
152: Input Parameters:
153: + tao - the `Tao` context
154: - X - input vector
156: Output Parameter:
157: . f - Objective value at X
159: Level: developer
161: Note:
162: `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm
163: so most users would not generally call this routine themselves.
165: .seealso: [](ch_tao), `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
166: @*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  PetscFunctionBegin;
  PetscAssertPointer(f, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  /* The objective value is produced by the mapped objective term at X with the current parameters */
  PetscCall(TaoTermMappingComputeObjective(&tao->objective_term, X, tao->objective_parameters, INSERT_VALUES, f));
  PetscFunctionReturn(PETSC_SUCCESS);
}
178: /*@
179: TaoComputeObjectiveAndGradient - Computes the objective function value at a given point
181: Collective
183: Input Parameters:
184: + tao - the `Tao` context
185: - X - input vector
187: Output Parameters:
188: + f - Objective value at `X`
189: - G - Gradient vector at `X`
191: Level: developer
193: Note:
194: `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
195: so most users would not generally call this routine themselves.
197: .seealso: [](ch_tao), `TaoComputeGradient()`, `TaoSetObjective()`
198: @*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscAssertPointer(f, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 4);
  /* Single combined evaluation; INSERT_VALUES overwrites G rather than accumulating into it */
  PetscCall(TaoTermMappingComputeObjectiveAndGradient(&tao->objective_term, X, tao->objective_parameters, INSERT_VALUES, f, G));
  /* No-op unless -tao_test_gradient was given in the options database */
  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}
213: /*@C
214: TaoSetObjective - Sets the function evaluation routine for minimization
216: Logically Collective
218: Input Parameters:
219: + tao - the `Tao` context
220: . func - the objective function
221: - ctx - [optional] user-defined context for private data for the function evaluation
222: routine (may be `NULL`)
224: Calling sequence of `func`:
225: + tao - the optimizer
226: . x - input vector
227: . f - function value
228: - ctx - [optional] user-defined function context
230: Level: beginner
232: .seealso: [](ch_tao), `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
233: @*/
PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao tao, Vec x, PetscReal *f, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  /* The callback is stored on the callbacks term, which forwards it to the objective evaluation machinery */
  PetscCall(TaoTermCallbacksSetObjective(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
242: /*@C
243: TaoGetObjective - Gets the function evaluation routine for the function to be minimized
245: Not Collective
247: Input Parameter:
248: . tao - the `Tao` context
250: Output Parameters:
251: + func - the objective function
252: - ctx - the user-defined context for private data for the function evaluation
254: Calling sequence of `func`:
255: + tao - the optimizer
256: . x - input vector
257: . f - function value
258: - ctx - [optional] user-defined function context
260: Level: beginner
262: Notes:
263: In addition to specifying an objective function using callbacks such as
264: `TaoSetObjective()` and `TaoSetGradient()`, users can specify
265: objective functions with `TaoAddTerm()`.
267: `TaoGetObjective()` will always return the callback specified with
268: `TaoSetObjective()`, even if the objective function has been changed by
269: calling `TaoAddTerm()`.
271: .seealso: [](ch_tao), `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
272: @*/
PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao tao, Vec x, PetscReal *f, PetscCtx ctx), PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* Only query the callbacks term when the caller asked for at least one of the outputs */
  if (func || ctx) PetscCall(TaoTermCallbacksGetObjective(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
281: /*@C
282: TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
284: Logically Collective
286: Input Parameters:
287: + tao - the `Tao` context
288: . res - the residual vector
289: . func - the residual evaluation routine
290: - ctx - [optional] user-defined context for private data for the function evaluation
291: routine (may be `NULL`)
293: Calling sequence of `func`:
294: + tao - the optimizer
295: . x - input vector
296: . res - function value vector
297: - ctx - [optional] user-defined function context
299: Level: beginner
301: .seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
302: @*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao tao, Vec x, Vec res, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  /* Reference before destroy so the call is safe when res == tao->ls_res */
  PetscCall(PetscObjectReference((PetscObject)res));
  PetscCall(VecDestroy(&tao->ls_res));
  tao->ls_res          = res;
  tao->user_lsresP     = ctx;
  tao->ops->computeresidual = func;
  PetscFunctionReturn(PETSC_SUCCESS);
}
316: /*@
317: TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be given.
319: Collective
321: Input Parameters:
322: + tao - the `Tao` context
323: . sigma_v - vector of weights (diagonal terms only)
324: . n - the number of weights (if using off-diagonal)
325: . rows - index list of rows for `sigma_v`
326: . cols - index list of columns for `sigma_v`
327: - vals - array of weights
329: Level: intermediate
331: Notes:
332: If this function is not provided, or if `sigma_v` and `vals` are both `NULL`, then the
333: identity matrix will be used for weights.
335: Either `sigma_v` or `vals` should be `NULL`
337: .seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
338: @*/
339: PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
340: {
341: PetscInt i;
343: PetscFunctionBegin;
346: PetscCall(PetscObjectReference((PetscObject)sigma_v));
347: PetscCall(VecDestroy(&tao->res_weights_v));
348: tao->res_weights_v = sigma_v;
349: if (vals) {
350: PetscCall(PetscFree(tao->res_weights_rows));
351: PetscCall(PetscFree(tao->res_weights_cols));
352: PetscCall(PetscFree(tao->res_weights_w));
353: PetscCall(PetscMalloc1(n, &tao->res_weights_rows));
354: PetscCall(PetscMalloc1(n, &tao->res_weights_cols));
355: PetscCall(PetscMalloc1(n, &tao->res_weights_w));
356: tao->res_weights_n = n;
357: for (i = 0; i < n; i++) {
358: tao->res_weights_rows[i] = rows[i];
359: tao->res_weights_cols[i] = cols[i];
360: tao->res_weights_w[i] = vals[i];
361: }
362: } else {
363: tao->res_weights_n = 0;
364: tao->res_weights_rows = NULL;
365: tao->res_weights_cols = NULL;
366: }
367: PetscFunctionReturn(PETSC_SUCCESS);
368: }
370: /*@
371: TaoComputeResidual - Computes a least-squares residual vector at a given point
373: Collective
375: Input Parameters:
376: + tao - the `Tao` context
377: - X - input vector
379: Output Parameter:
380: . F - Objective vector at `X`
382: Level: advanced
384: Notes:
385: `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
386: so most users would not generally call this routine themselves.
388: .seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
389: @*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, F, 3);
  /* The residual callback is mandatory for least-squares evaluation */
  PetscCheck(tao->ops->computeresidual, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
  /* Bracket the user callback with logging so the evaluation is attributed to TAO_ResidualEval */
  PetscCall(PetscLogEventBegin(TAO_ResidualEval, tao, X, NULL, NULL));
  PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
  PetscCall(PetscLogEventEnd(TAO_ResidualEval, tao, X, NULL, NULL));
  tao->nres++; /* running count of residual evaluations */
  PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
407: /*@C
408: TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized
410: Logically Collective
412: Input Parameters:
413: + tao - the `Tao` context
414: . g - [optional] the vector to internally hold the gradient computation
415: . func - the gradient function
416: - ctx - [optional] user-defined context for private data for the gradient evaluation
417: routine (may be `NULL`)
419: Calling sequence of `func`:
420: + tao - the optimization solver
421: . x - input vector
422: . g - gradient value (output)
423: - ctx - [optional] user-defined function context
425: Level: beginner
427: .seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
428: @*/
PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao tao, Vec x, Vec g, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  if (g) {
    PetscCheckSameComm(tao, 1, g, 2);
    /* Reference before destroy so the call is safe when g == tao->gradient */
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  /* Store the callback on the callbacks term for use by the evaluation routines */
  PetscCall(TaoTermCallbacksSetGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
444: /*@C
445: TaoGetGradient - Gets the gradient evaluation routine for the function being optimized
447: Not Collective
449: Input Parameter:
450: . tao - the `Tao` context
452: Output Parameters:
453: + g - the vector to internally hold the gradient computation
454: . func - the gradient function
455: - ctx - user-defined context for private data for the gradient evaluation routine
457: Calling sequence of `func`:
458: + tao - the optimizer
459: . x - input vector
460: . g - gradient value (output)
461: - ctx - [optional] user-defined function context
463: Level: beginner
465: Notes:
466: In addition to specifying an objective function using callbacks such as
467: `TaoSetObjective()` and `TaoSetGradient()`, users can specify
468: objective functions with `TaoAddTerm()`.
470: `TaoGetGradient()` will always return the callback specified with
471: `TaoSetGradient()`, even if the objective function has been changed by
472: calling `TaoAddTerm()`.
474: .seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
475: @*/
PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao tao, Vec x, Vec g, PetscCtx ctx), PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* Each output is optional; fill only what the caller asked for */
  if (g) *g = tao->gradient;
  if (func || ctx) PetscCall(TaoTermCallbacksGetGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
485: /*@C
486: TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized
488: Logically Collective
490: Input Parameters:
491: + tao - the `Tao` context
492: . g - [optional] the vector to internally hold the gradient computation
493: . func - the gradient function
494: - ctx - [optional] user-defined context for private data for the gradient evaluation
495: routine (may be `NULL`)
497: Calling sequence of `func`:
498: + tao - the optimization object
499: . x - input vector
500: . f - objective value (output)
501: . g - gradient value (output)
502: - ctx - [optional] user-defined function context
504: Level: beginner
506: Note:
507: For some optimization methods using a combined function can be more efficient.
509: .seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
510: @*/
PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao tao, Vec x, PetscReal *f, Vec g, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  if (g) {
    PetscCheckSameComm(tao, 1, g, 2);
    /* Reference before destroy so the call is safe when g == tao->gradient */
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  /* Store the combined callback on the callbacks term for use by the evaluation routines */
  PetscCall(TaoTermCallbacksSetObjectiveAndGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
526: /*@C
527: TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized
529: Not Collective
531: Input Parameter:
532: . tao - the `Tao` context
534: Output Parameters:
535: + g - the vector to internally hold the gradient computation
536: . func - the gradient function
537: - ctx - user-defined context for private data for the gradient evaluation routine
539: Calling sequence of `func`:
540: + tao - the optimizer
541: . x - input vector
542: . f - objective value (output)
543: . g - gradient value (output)
544: - ctx - [optional] user-defined function context
546: Level: beginner
548: Note:
549: In addition to specifying an objective function using callbacks such as
550: `TaoSetObjectiveAndGradient()`, users can specify
551: objective functions with `TaoAddTerm()`.
553: `TaoGetObjectiveAndGradient()` will always return the callback specified with
554: `TaoSetObjectiveAndGradient()`, even if the objective function has been changed by
555: calling `TaoAddTerm()`.
557: .seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
558: @*/
PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao tao, Vec x, PetscReal *f, Vec g, PetscCtx ctx), PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* Each output is optional; fill only what the caller asked for */
  if (g) *g = tao->gradient;
  if (func || ctx) PetscCall(TaoTermCallbacksGetObjectiveAndGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
568: /*@
569: TaoIsObjectiveDefined - Checks to see if the user has
570: declared an objective-only routine. Useful for determining when
571: it is appropriate to call `TaoComputeObjective()` or
572: `TaoComputeObjectiveAndGradient()`
574: Not Collective
576: Input Parameter:
577: . tao - the `Tao` context
579: Output Parameter:
580: . flg - `PETSC_TRUE` if the `Tao` has this routine `PETSC_FALSE` otherwise
582: Level: developer
584: Note:
585: If the objective of `Tao` has been altered via `TaoAddTerm()`, it will
586: return whether the summation of all terms has this routine.
588: .seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
589: @*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  /* Query the objective term (which may be a sum of terms added via TaoAddTerm()) */
  PetscCall(TaoTermIsObjectiveDefined(tao->objective_term.term, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
598: /*@
599: TaoIsGradientDefined - Checks to see if the user has
600: declared a gradient-only routine. Useful for determining when
601: it is appropriate to call `TaoComputeGradient()` or
602: `TaoComputeObjectiveAndGradient()`
604: Not Collective
606: Input Parameter:
607: . tao - the `Tao` context
609: Output Parameter:
610: . flg - `PETSC_TRUE` if the objective `TaoTerm` has this routine, `PETSC_FALSE` otherwise
612: Level: developer
614: Note:
615: If the objective of `Tao` has been altered via `TaoAddTerm()`, it will
616: return whether the summation of all terms has this routine.
618: .seealso: [](ch_tao), `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
619: @*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  /* Query the objective term (which may be a sum of terms added via TaoAddTerm()) */
  PetscCall(TaoTermIsGradientDefined(tao->objective_term.term, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
628: /*@
629: TaoIsObjectiveAndGradientDefined - Checks to see if the user has
630: declared a joint objective/gradient routine. Useful for determining when
631: it is appropriate to call `TaoComputeObjectiveAndGradient()`
633: Not Collective
635: Input Parameter:
636: . tao - the `Tao` context
638: Output Parameter:
639: . flg - `PETSC_TRUE` if the objective `TaoTerm` has this routine `PETSC_FALSE` otherwise
641: Level: developer
643: Note:
644: If the objective of `Tao` has been altered via `TaoAddTerm()`, it will
645: return whether the summation of all terms has this routine.
647: .seealso: [](ch_tao), `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
648: @*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  /* Query the objective term (which may be a sum of terms added via TaoAddTerm()) */
  PetscCall(TaoTermIsObjectiveAndGradientDefined(tao->objective_term.term, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}