/* taosolver_fg.c: objective, gradient, and least-squares residual interface for Tao solvers */
1: #include <petsc/private/taoimpl.h>
/*@
  TaoSetSolution - Sets the vector holding the initial guess for the solve

  Logically Collective

  Input Parameters:
+ tao - the `Tao` context
- x0  - the initial guess (may be `NULL`)

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  /* Take a reference to x0 BEFORE destroying the old solution so the call is
     safe even when x0 == tao->solution (the destroy would otherwise free it) */
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  /* Give the callback-based objective term a template vector so it knows the
     layout/type of solution vectors */
  if (x0) PetscCall(TaoTermSetSolutionTemplate(tao->callbacks, x0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
28: PETSC_INTERN PetscErrorCode TaoTestGradient_Internal(Tao tao, Vec x, Vec g1, PetscViewer viewer, PetscViewer mviewer)
29: {
30: Vec g2, g3;
31: PetscReal hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
32: PetscScalar dot;
34: PetscFunctionBegin;
35: PetscCall(VecDuplicate(x, &g2));
36: PetscCall(VecDuplicate(x, &g3));
38: /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
39: PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));
41: PetscCall(VecNorm(g2, NORM_2, &fdnorm));
42: PetscCall(VecNorm(g1, NORM_2, &hcnorm));
43: PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
44: PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
45: PetscCall(VecDot(g1, g2, &dot));
46: PetscCall(VecCopy(g1, g3));
47: PetscCall(VecAXPY(g3, -1.0, g2));
48: PetscCall(VecNorm(g3, NORM_2, &diffnorm));
49: PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
50: PetscCall(PetscViewerASCIIPrintf(viewer, " ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
51: PetscCall(PetscViewerASCIIPrintf(viewer, " 2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
52: PetscCall(PetscViewerASCIIPrintf(viewer, " max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));
54: if (mviewer) {
55: PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded gradient ----------\n"));
56: PetscCall(VecView(g1, mviewer));
57: PetscCall(PetscViewerASCIIPrintf(viewer, " Finite difference gradient ----------\n"));
58: PetscCall(VecView(g2, mviewer));
59: PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded minus finite-difference gradient ----------\n"));
60: PetscCall(VecView(g3, mviewer));
61: }
62: PetscCall(VecDestroy(&g2));
63: PetscCall(VecDestroy(&g3));
64: PetscFunctionReturn(PETSC_SUCCESS);
65: }
/* Driven by the -tao_test_gradient / -tao_test_gradient_view options: when requested,
   compare the hand-coded gradient g1 at x against a finite-difference gradient and
   print diagnostics.  A no-op when -tao_test_gradient was not given. */
PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1)
{
  PetscBool         complete_print = PETSC_FALSE, test = PETSC_FALSE;
  MPI_Comm          comm;
  PetscViewer       viewer, mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  /* static: the explanatory banner below is printed at most once per program run */
  static PetscBool directionsprinted = PETSC_FALSE;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
  /* mviewer is created (referenced) only when the option was given, i.e. complete_print is true */
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
  PetscOptionsEnd();
  if (!test) {
    /* not testing: release the viewer the options query may have created */
    if (complete_print) PetscCall(PetscViewerDestroy(&mviewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
  PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
  /* Save and restore the stdout viewer's tab level around our indented output */
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer, " ---------- Testing Gradient -------------\n"));
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));
  PetscCall(TaoTestGradient_Internal(tao, x, g1, viewer, complete_print ? mviewer : NULL));
  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  PetscCall(PetscViewerASCIISetTab(viewer, tabs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+ -tao_test_gradient      - compare the user provided gradient with one computed via finite differences to check for errors
- -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient

  Level: developer

  Note:
  `TaoComputeGradient()` is typically used within the implementation of the optimization method,
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 3);
  /* Delegate to the objective TaoTerm (callbacks or TaoAddTerm() sums) */
  PetscCall(TaoTermMappingComputeGradient(&tao->objective_term, X, tao->objective_parameters, INSERT_VALUES, G));
  /* No-op unless -tao_test_gradient was given */
  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameter:
. f - Objective value at X

  Level: developer

  Note:
  `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  PetscFunctionBegin;
  PetscAssertPointer(f, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  /* Delegate to the objective TaoTerm (callbacks or TaoAddTerm() sums) */
  PetscCall(TaoTermMappingComputeObjective(&tao->objective_term, X, tao->objective_parameters, INSERT_VALUES, f));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameters:
+ f - Objective value at `X`
- G - Gradient vector at `X`

  Level: developer

  Note:
  `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `TaoComputeGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscAssertPointer(f, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 4);
  /* One combined evaluation can be cheaper than separate objective + gradient calls */
  PetscCall(TaoTermMappingComputeObjectiveAndGradient(&tao->objective_term, X, tao->objective_parameters, INSERT_VALUES, f, G));
  /* No-op unless -tao_test_gradient was given */
  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  TaoSetObjective - Sets the function evaluation routine for minimization

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. func - the objective function
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
+ tao - the optimizer
. x   - input vector
. f   - function value
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
@*/
PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao tao, Vec x, PetscReal *f, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  /* Stored on the callbacks term; retrievable with TaoGetObjective() */
  PetscCall(TaoTermCallbacksSetObjective(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  TaoGetObjective - Gets the function evaluation routine for the function to be minimized

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameters:
+ func - the objective function
- ctx  - the user-defined context for private data for the function evaluation

  Calling sequence of `func`:
+ tao - the optimizer
. x   - input vector
. f   - function value
- ctx - [optional] user-defined function context

  Level: beginner

  Notes:
  In addition to specifying an objective function using callbacks such as
  `TaoSetObjective()` and `TaoSetGradient()`, users can specify
  objective functions with `TaoAddTerm()`.

  `TaoGetObjective()` will always return the callback specified with
  `TaoSetObjective()`, even if the objective function has been changed by
  calling `TaoAddTerm()`.

.seealso: [](ch_tao), `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao tao, Vec x, PetscReal *f, PetscCtx ctx), PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* Both outputs are optional; skip the query entirely when neither is requested */
  if (func || ctx) PetscCall(TaoTermCallbacksGetObjective(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. res  - the residual vector
. func - the residual evaluation routine
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
+ tao - the optimizer
. x   - input vector
. res - function value vector
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
@*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao tao, Vec x, Vec res, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  /* Reference res before destroying the old vector so res == tao->ls_res is safe */
  PetscCall(PetscObjectReference((PetscObject)res));
  PetscCall(VecDestroy(&tao->ls_res));
  tao->ls_res = res;
  tao->user_lsresP = ctx;
  tao->ops->computeresidual = func;
  PetscFunctionReturn(PETSC_SUCCESS);
}
316: /*@
317: TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give.
319: Collective
321: Input Parameters:
322: + tao - the `Tao` context
323: . sigma_v - vector of weights (diagonal terms only)
324: . n - the number of weights (if using off-diagonal)
325: . rows - index list of rows for `sigma_v`
326: . cols - index list of columns for `sigma_v`
327: - vals - array of weights
329: Level: intermediate
331: Notes:
332: If this function is not provided, or if `sigma_v` and `vals` are both `NULL`, then the
333: identity matrix will be used for weights.
335: Either `sigma_v` or `vals` should be `NULL`
337: .seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
338: @*/
339: PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
340: {
341: PetscFunctionBegin;
344: PetscCall(PetscObjectReference((PetscObject)sigma_v));
345: PetscCall(VecDestroy(&tao->res_weights_v));
346: tao->res_weights_v = sigma_v;
347: if (vals) {
348: PetscCall(PetscFree(tao->res_weights_rows));
349: PetscCall(PetscFree(tao->res_weights_cols));
350: PetscCall(PetscFree(tao->res_weights_w));
351: PetscCall(PetscMalloc1(n, &tao->res_weights_rows));
352: PetscCall(PetscMalloc1(n, &tao->res_weights_cols));
353: PetscCall(PetscMalloc1(n, &tao->res_weights_w));
354: tao->res_weights_n = n;
355: for (PetscInt i = 0; i < n; i++) {
356: tao->res_weights_rows[i] = rows[i];
357: tao->res_weights_cols[i] = cols[i];
358: tao->res_weights_w[i] = vals[i];
359: }
360: } else {
361: tao->res_weights_n = 0;
362: tao->res_weights_rows = NULL;
363: tao->res_weights_cols = NULL;
364: }
365: PetscFunctionReturn(PETSC_SUCCESS);
366: }
/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective

  Input Parameters:
+ tao - the `Tao` context
- X   - input vector

  Output Parameter:
. F - Objective vector at `X`

  Level: advanced

  Notes:
  `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

.seealso: [](ch_tao), `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, F, 3);
  /* The user must have supplied a residual callback via TaoSetResidualRoutine() */
  PetscCheck(tao->ops->computeresidual, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
  PetscCall(PetscLogEventBegin(TAO_ResidualEval, tao, X, NULL, NULL));
  PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
  PetscCall(PetscLogEventEnd(TAO_ResidualEval, tao, X, NULL, NULL));
  /* Count residual evaluations for -tao_view / monitoring */
  tao->nres++;
  PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. g    - [optional] the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - [optional] user-defined context for private data for the gradient evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
+ tao - the optimization solver
. x   - input vector
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
@*/
PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao tao, Vec x, Vec g, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  if (g) {
    PetscCheckSameComm(tao, 1, g, 2);
    /* Reference before destroy so g == tao->gradient is safe */
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  PetscCall(TaoTermCallbacksSetGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  TaoGetGradient - Gets the gradient evaluation routine for the function being optimized

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameters:
+ g    - the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - user-defined context for private data for the gradient evaluation routine

  Calling sequence of `func`:
+ tao - the optimizer
. x   - input vector
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

  Notes:
  In addition to specifying an objective function using callbacks such as
  `TaoSetObjective()` and `TaoSetGradient()`, users can specify
  objective functions with `TaoAddTerm()`.

  `TaoGetGradient()` will always return the callback specified with
  `TaoSetGradient()`, even if the objective function has been changed by
  calling `TaoAddTerm()`.

.seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao tao, Vec x, Vec g, PetscCtx ctx), PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* All outputs are optional */
  if (g) *g = tao->gradient;
  if (func || ctx) PetscCall(TaoTermCallbacksGetGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized

  Logically Collective

  Input Parameters:
+ tao  - the `Tao` context
. g    - [optional] the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - [optional] user-defined context for private data for the gradient evaluation
         routine (may be `NULL`)

  Calling sequence of `func`:
+ tao - the optimization object
. x   - input vector
. f   - objective value (output)
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

  Note:
  For some optimization methods using a combined function can be more efficient.

.seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
@*/
PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao tao, Vec x, PetscReal *f, Vec g, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  if (g) {
    PetscCheckSameComm(tao, 1, g, 2);
    /* Reference before destroy so g == tao->gradient is safe */
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  PetscCall(TaoTermCallbacksSetObjectiveAndGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameters:
+ g    - the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - user-defined context for private data for the gradient evaluation routine

  Calling sequence of `func`:
+ tao - the optimizer
. x   - input vector
. f   - objective value (output)
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

  Note:
  In addition to specifying an objective function using callbacks such as
  `TaoSetObjectiveAndGradient()`, users can specify
  objective functions with `TaoAddTerm()`.

  `TaoGetObjectiveAndGradient()` will always return the callback specified with
  `TaoSetObjectiveAndGradient()`, even if the objective function has been changed by
  calling `TaoAddTerm()`.

.seealso: [](ch_tao), `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
@*/
PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao tao, Vec x, PetscReal *f, Vec g, PetscCtx ctx), PetscCtxRt ctx)
{
  PetscFunctionBegin;
  /* All outputs are optional */
  if (g) *g = tao->gradient;
  if (func || ctx) PetscCall(TaoTermCallbacksGetObjectiveAndGradient(tao->callbacks, func, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  TaoIsObjectiveDefined - Checks to see if the user has
  declared an objective-only routine.  Useful for determining when
  it is appropriate to call `TaoComputeObjective()` or
  `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. flg - `PETSC_TRUE` if the `Tao` has this routine `PETSC_FALSE` otherwise

  Level: developer

  Note:
  If the objective of `Tao` has been altered via `TaoAddTerm()`, it will
  return whether the summation of all terms has this routine.

.seealso: [](ch_tao), `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  /* Query the objective TaoTerm (covers both callbacks and TaoAddTerm() sums) */
  PetscCall(TaoTermIsObjectiveDefined(tao->objective_term.term, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  TaoIsGradientDefined - Checks to see if the user has
  declared a gradient-only routine.  Useful for determining when
  it is appropriate to call `TaoComputeGradient()` or
  `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. flg - `PETSC_TRUE` if the objective `TaoTerm` has this routine, `PETSC_FALSE` otherwise

  Level: developer

  Note:
  If the objective of `Tao` has been altered via `TaoAddTerm()`, it will
  return whether the summation of all terms has this routine.

.seealso: [](ch_tao), `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  /* Query the objective TaoTerm (covers both callbacks and TaoAddTerm() sums) */
  PetscCall(TaoTermIsGradientDefined(tao->objective_term.term, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  TaoIsObjectiveAndGradientDefined - Checks to see if the user has
  declared a joint objective/gradient routine.  Useful for determining when
  it is appropriate to call `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. flg - `PETSC_TRUE` if the objective `TaoTerm` has this routine `PETSC_FALSE` otherwise

  Level: developer

  Note:
  If the objective of `Tao` has been altered via `TaoAddTerm()`, it will
  return whether the summation of all terms has this routine.

.seealso: [](ch_tao), `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  /* Query the objective TaoTerm (covers both callbacks and TaoAddTerm() sums) */
  PetscCall(TaoTermIsObjectiveAndGradientDefined(tao->objective_term.term, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}