Actual source code: spectraladjointassimilation.c
static char help[] = "Solves a simple data assimilation problem with the one-dimensional advection-diffusion equation using TSAdjoint\n\n";

/*
    Not yet tested in parallel
*/

/* ------------------------------------------------------------------------

    This program uses the one-dimensional advection-diffusion equation,
        u_t = mu*u_xx - a*u_x,
    on the domain 0 <= x <= 1, with periodic boundary conditions,

    to demonstrate solving a data assimilation problem: find the initial conditions
    that produce a given solution at a fixed time.

    The operators are discretized with the spectral element method.

  ------------------------------------------------------------------------- */
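/*
    For reference, the analytic solution sampled by ComputeReference() below (with
    random coefficients c_j generated by ComputeSolutionCoefficients()) is

        u(x,t) = sum_{j=1..ncoeff} c_j sin(2*pi*j*(x + a*t)) exp(-4*pi^2*j^2*mu*t)

    evaluated at t = Tend for the reference data and at t = 0 for the true initial
    conditions.
*/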
/*
   Include "petscts.h" so that we can use TS solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h  - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h   - preconditioners
     petscksp.h    - linear solvers          petscsnes.h - nonlinear solvers
*/

#include <petsctao.h>
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;
typedef struct {
  PetscInt  N;               /* grid points per element */
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of time steps */
  PetscReal Tend;            /* end time */
  PetscReal mu;              /* viscosity */
  PetscReal a;               /* advection speed */
  PetscReal L;               /* total length of domain */
  PetscReal Le;              /* length of one element, L/E */
  PetscReal Tadj;
} PetscParam;
typedef struct {
  Vec reference; /* desired end state */
  Vec grid;      /* total grid */
  Vec grad;
  Vec ic;
  Vec curr_sol;
  Vec joe;
  Vec true_solution; /* actual initial conditions for the final solution */
} PetscData;
typedef struct {
  Vec      grid;  /* total grid */
  Vec      mass;  /* mass matrix for total integration */
  Mat      stiff; /* stiffness matrix */
  Mat      advec;
  Mat      keptstiff;
  PetscGLL gll;
} PetscSEMOperators;
typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
  PetscReal        *solutioncoefficients;
  PetscInt          ncoeff;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionGradient(Tao, Vec, PetscReal *, Vec, void *);
extern PetscErrorCode RHSLaplacian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSAdvection(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode ComputeReference(TS, PetscReal, Vec, AppCtx *);
extern PetscErrorCode MonitorError(Tao, void *);
extern PetscErrorCode MonitorDestroy(void **);
extern PetscErrorCode ComputeSolutionCoefficients(AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
int main(int argc, char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  Tao          tao;
  Vec          u; /* approximate solution vector */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  /* initialize parameters */
  appctx.param.N     = 10;      /* order of the spectral element */
  appctx.param.E     = 8;       /* number of elements */
  appctx.param.L     = 1.0;     /* length of the domain */
  appctx.param.mu    = 0.00001; /* diffusion coefficient */
  appctx.param.a     = 0.0;     /* advection speed */
  appctx.initial_dt  = 1e-4;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 0.01;
  appctx.ncoeff      = 2;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncoeff", &appctx.ncoeff, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-a", &appctx.param.a, NULL));
  appctx.param.Le = appctx.param.L / appctx.param.E;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob            = appctx.param.E * (appctx.param.N - 1);
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. With the periodic
     boundary conditions there are E*(N-1) total grid values, spread as
     equally as possible among the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));
  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(VecDuplicate(u, &appctx.dat.ic));
  PetscCall(VecDuplicate(u, &appctx.dat.true_solution));
  PetscCall(VecDuplicate(u, &appctx.dat.reference));
  PetscCall(VecDuplicate(u, &appctx.SEMop.grid));
  PetscCall(VecDuplicate(u, &appctx.SEMop.mass));
  PetscCall(VecDuplicate(u, &appctx.dat.curr_sol));
  PetscCall(VecDuplicate(u, &appctx.dat.joe));
  PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

  /* convert the locally owned node range into a locally owned element range */
  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);
  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */
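  /*
     The GLL mass matrix is diagonal (lumped), with nodal entries (Le/2)*w_j for GLL
     weight w_j. A node on an element boundary is stored only once (as the j == 0 node
     of the element to its right) but receives a weight contribution from both
     neighboring elements; since the GLL weights are symmetric (w_0 = w_{N-1}), this
     is accounted for below by doubling the j == 0 entry.
  */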
  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind           = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.advec));
  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand side
     as a time-dependent matrix. (The TS argument is not used by
     RHSLaplacian() and RHSAdvection(), so it is safe to call them
     before the TS is created below.)
  */
  PetscCall(RHSLaplacian(appctx.ts, 0.0, u, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
  PetscCall(RHSAdvection(appctx.ts, 0.0, u, appctx.SEMop.advec, appctx.SEMop.advec, &appctx));
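  /* Combine diffusion and advection into a single constant right-hand-side operator and
     keep a copy of it for use by the (commented-out) RHSFunction()/RHSJacobian() path */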
  PetscCall(MatAXPY(appctx.SEMop.stiff, -1.0, appctx.SEMop.advec, DIFFERENT_NONZERO_PATTERN));
  PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));

  /* attach the null space to the matrix; this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))ComputeReference, &appctx));
  PetscCall(TSSetProblemType(appctx.ts, TS_LINEAR));
  PetscCall(TSSetType(appctx.ts, TSRK));
  PetscCall(TSSetDM(appctx.ts, appctx.da));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
  PetscCall(TSSetFromOptions(appctx.ts));
  /* Need to save the initial timestep the user may have set with -ts_dt so it can be reset for each new TSSolve() */
  PetscCall(TSGetTimeStep(appctx.ts, &appctx.initial_dt));
  PetscCall(TSSetRHSFunction(appctx.ts, NULL, TSComputeRHSFunctionLinear, &appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, TSComputeRHSJacobianConstant, &appctx));
  /* PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
     PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx)); */
  /* Set random initial conditions as the initial guess, then compute the analytic reference solution and the analytic (true) initial conditions */
  PetscCall(ComputeSolutionCoefficients(&appctx));
  PetscCall(InitialConditions(appctx.dat.ic, &appctx));
  PetscCall(ComputeReference(appctx.ts, appctx.param.Tend, appctx.dat.reference, &appctx));
  PetscCall(ComputeReference(appctx.ts, 0.0, appctx.dat.true_solution, &appctx));

  /* Set up to save the trajectory before TSSetFromOptions() below so that TSTrajectory options can be captured */
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));
  /* Create TAO solver and set desired solution method */
  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetMonitor(tao, MonitorError, &appctx, MonitorDestroy));
  PetscCall(TaoSetType(tao, TAOBQNLS));
  PetscCall(TaoSetSolution(tao, appctx.dat.ic));
  /* Set routine for function and gradient evaluation */
  PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, (void *)&appctx));
  PetscCall(TaoSetTolerances(tao, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT));
  /* Check for any TAO command line options */
  PetscCall(TaoSetFromOptions(tao));
  PetscCall(TaoSolve(tao));
  PetscCall(TaoDestroy(&tao));
  PetscCall(PetscFree(appctx.solutioncoefficients));
  PetscCall(MatDestroy(&appctx.SEMop.advec));
  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&appctx.dat.ic));
  PetscCall(VecDestroy(&appctx.dat.joe));
  PetscCall(VecDestroy(&appctx.dat.true_solution));
  PetscCall(VecDestroy(&appctx.dat.reference));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
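/*
   A typical run, mirroring the first test at the bottom of this file (the executable
   name depends on how the example is built):

       ./spectraladjointassimilation -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_monitor
*/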
/*
   Computes the coefficients for the analytic solution to the PDE
*/
PetscErrorCode ComputeSolutionCoefficients(AppCtx *appctx)
{
  PetscRandom rand;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(appctx->ncoeff, &appctx->solutioncoefficients));
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
  PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
  for (i = 0; i < appctx->ncoeff; i++) PetscCall(PetscRandomGetValue(rand, &appctx->solutioncoefficients[i]));
  PetscCall(PetscRandomDestroy(&rand));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the (random) initial conditions for the Tao optimization solve
   (these are also the initial conditions for the first TSSolve()).

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, j, lenglob;
  PetscReal          sum, val;
  PetscRandom        rand;

  PetscFunctionBegin;
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
  PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  lenglob = appctx->param.E * (appctx->param.N - 1);
  for (i = 0; i < lenglob; i++) {
    s[i] = 0;
    for (j = 0; j < appctx->ncoeff; j++) {
      PetscCall(PetscRandomGetValue(rand, &val));
      s[i] += val * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
    }
  }
  PetscCall(PetscRandomDestroy(&rand));
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  /* make sure the initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
  PetscCall(VecSum(u, &sum));
  PetscCall(VecShift(u, -sum / lenglob));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
   TrueSolution() computes the true (analytic) initial conditions for the Tao optimization solve,
   i.e., the initial conditions that evolve into the reference solution used in the objective function.

   InitialConditions() computes the initial conditions for the beginning of the Tao iterations.

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode TrueSolution(Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, j, lenglob;
  PetscReal          sum;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  lenglob = appctx->param.E * (appctx->param.N - 1);
  for (i = 0; i < lenglob; i++) {
    s[i] = 0;
    for (j = 0; j < appctx->ncoeff; j++) s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  /* make sure the initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
  PetscCall(VecSum(u, &sum));
  PetscCall(VecShift(u, -sum / lenglob));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Sets the desired profile at the given time

   Input Parameters:
   t - the time at which to evaluate the analytic solution
   obj - vector storing the desired profile
   appctx - user-defined application context
*/
PetscErrorCode ComputeReference(TS ts, PetscReal t, Vec obj, AppCtx *appctx)
{
  PetscScalar       *s, tc;
  const PetscScalar *xg;
  PetscInt           i, j, lenglob;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da, obj, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  lenglob = appctx->param.E * (appctx->param.N - 1);
  for (i = 0; i < lenglob; i++) {
    s[i] = 0;
    for (j = 0; j < appctx->ncoeff; j++) {
      tc = -appctx->param.mu * (j + 1) * (j + 1) * 4.0 * PETSC_PI * PETSC_PI * t;
      s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * (xg[i] + appctx->param.a * t)) * PetscExpReal(tc);
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, obj, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBegin;
  PetscCall(MatMult(appctx->SEMop.keptstiff, globalin, globalout));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBegin;
  PetscCall(MatCopy(appctx->SEMop.keptstiff, A, DIFFERENT_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */

/*
   RHSLaplacian - matrix for diffusion

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
   str - flag indicating matrix structure

   Scales by the inverse of the mass matrix (perhaps that should be pulled out)
*/
PetscErrorCode RHSLaplacian(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element stiffness matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));

  /* scale by the size of the element */
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }

  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

  PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
  xs = xs / (appctx->param.N - 1);
  xn = xn / (appctx->param.N - 1);

  PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
  /*
     loop over local elements
  */
  for (j = xs; j < xs + xn; j++) {
    for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
    PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
   Almost identical to the Laplacian

   Note that the element matrix is NOT scaled by the size of the element like the Laplacian term.
*/
PetscErrorCode RHSAdvection(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element advection matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));

  /* scale by the advection speed; unlike the Laplacian there is no element-size scaling */
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.a;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }

  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

  PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
  xs = xs / (appctx->param.N - 1);
  xn = xn / (appctx->param.N - 1);

  PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
  /*
     loop over local elements
  */
  for (j = xs; j < xs + xn; j++) {
    for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
    PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ------------------------------------------------------------------ */
/*
   FormFunctionGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   ic - the input vector
   ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient()

   Output Parameters:
   f - the newly evaluated function
   G - the newly evaluated gradient

   Notes:

   The forward equation is
        M u_t = F(u)
   which is converted to
        u_t = M^{-1} F(u)
   in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
        M^{-1} J
   where J is the Jacobian of F. Now the adjoint equation is
        M v_t = J^T v
   but TSAdjoint does not solve this since it can only solve the transposed system for the
   Jacobian the user provided. Hence TSAdjoint solves
        w_t = J^T M^{-1} w  (where w = M v)
   since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
   must be careful in initializing the "adjoint equation" and using the result. This is
   why
        G = -2 M (u(T) - u_d)
   below (instead of -2 (u(T) - u_d)).
*/
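/*
   Concretely, the objective computed below is the mass-weighted squared norm

        f(ic) = (u(T;ic) - u_d)^T M (u(T;ic) - u_d)

   where u_d is the reference solution and M is the diagonal GLL mass matrix stored
   in appctx->SEMop.mass.
*/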
PetscErrorCode FormFunctionGradient(Tao tao, Vec ic, PetscReal *f, Vec G, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
  Vec     temp;

  PetscFunctionBegin;
  PetscCall(TSSetTime(appctx->ts, 0.0));
  PetscCall(TSSetStepNumber(appctx->ts, 0));
  PetscCall(TSSetTimeStep(appctx->ts, appctx->initial_dt));
  PetscCall(VecCopy(ic, appctx->dat.curr_sol));

  PetscCall(TSSolve(appctx->ts, appctx->dat.curr_sol));
  PetscCall(VecCopy(appctx->dat.curr_sol, appctx->dat.joe));

  /* Compute the difference between the current ODE solution and target ODE solution */
  PetscCall(VecWAXPY(G, -1.0, appctx->dat.curr_sol, appctx->dat.reference));

  /* Compute the objective/cost function */
  PetscCall(VecDuplicate(G, &temp));
  PetscCall(VecPointwiseMult(temp, G, G));
  PetscCall(VecDot(temp, appctx->SEMop.mass, f));
  PetscCall(VecDestroy(&temp));

  /* Compute initial conditions for the adjoint integration. See Notes above */
  PetscCall(VecScale(G, -2.0));
  PetscCall(VecPointwiseMult(G, G, appctx->SEMop.mass));
  PetscCall(TSSetCostGradients(appctx->ts, 1, &G, NULL));
  PetscCall(TSAdjointSolve(appctx->ts));
  /* PetscCall(VecPointwiseDivide(G, G, appctx->SEMop.mass)); */
  PetscFunctionReturn(PETSC_SUCCESS);
}
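/*
   MonitorError - Prints, at each Tao iteration, the mass-weighted error in the
   reconstructed initial conditions, the objective value, and the mass-weighted
   gradient norm, formatted as a MATLAB/Octave-readable array that is closed by
   MonitorDestroy().
*/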
PetscErrorCode MonitorError(Tao tao, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx;
  Vec       temp, grad;
  PetscReal nrm;
  PetscInt  its;
  PetscReal fct, gnorm;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(appctx->dat.ic, &temp));
  PetscCall(VecWAXPY(temp, -1.0, appctx->dat.ic, appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp, temp, temp));
  PetscCall(VecDot(temp, appctx->SEMop.mass, &nrm));
  nrm = PetscSqrtReal(nrm);
  PetscCall(TaoGetGradient(tao, &grad, NULL, NULL));
  PetscCall(VecPointwiseMult(temp, grad, grad));
  PetscCall(VecDot(temp, appctx->SEMop.mass, &gnorm));
  gnorm = PetscSqrtReal(gnorm);
  PetscCall(VecDestroy(&temp));
  PetscCall(TaoGetIterationNumber(tao, &its));
  PetscCall(TaoGetSolutionStatus(tao, NULL, &fct, NULL, NULL, NULL, NULL));
  if (!its) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%% Iteration Error Objective Gradient-norm\n"));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "history = [\n"));
  }
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%3" PetscInt_FMT " %g %g %g\n", its, (double)nrm, (double)fct, (double)gnorm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode MonitorDestroy(void **ctx)
{
  PetscFunctionBegin;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "];\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*TEST

   build:
      requires: !complex

   test:
      requires: !single
      args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none

   test:
      suffix: cn
      requires: !single
      args: -ts_type cn -ts_dt .003 -pc_type lu -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none

   test:
      suffix: 2
      requires: !single
      args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -a .1 -tao_bqnls_mat_lmvm_scale_type none

TEST*/