Actual source code: ex50.c

static char help[] = "Solves the one-dimensional Burgers equation and compares with the exact solution\n\n";

/*
    Not yet tested in parallel
*/

/* ------------------------------------------------------------------------

   This program uses the one-dimensional Burgers equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= 1, with periodic boundary conditions

   The operators are discretized with the spectral element method

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution used

   See src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */

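/*
   A sketch of the exact solution used for the initial condition and for error
   checking, as implemented in TrueSolution() below:

       u(x,t) = 2*mu*pi*sin(pi*x)*exp(-mu*pi^2*t) / (2 + cos(pi*x)*exp(-mu*pi^2*t))

   See the paper cited above for its derivation.
*/
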
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

typedef struct {
  PetscInt  N;               /* grid points per element */
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of timesteps */
  PetscReal Tend;            /* end time */
  PetscReal mu;              /* viscosity */
  PetscReal L;               /* total length of domain */
  PetscReal Le;              /* length of one element */
  PetscReal Tadj;
} PetscParam;

typedef struct {
  Vec grid;     /* total grid */
  Vec curr_sol; /* current solution */
} PetscData;

typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* mass matrix for total integration */
  Mat      stiff;     /* stiffness matrix */
  Mat      keptstiff; /* unmodified copy of the stiffness matrix */
  Mat      grad;      /* gradient (advection) matrix */
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);

int main(int argc, char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));

  /* initialize parameters */
  appctx.param.N     = 10;   /* number of GLL points per element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* diffusion coefficient */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_INT_MAX;
  appctx.param.Tend  = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
  appctx.param.Le = appctx.param.L / appctx.param.E;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob            = appctx.param.E * (appctx.param.N - 1); /* total number of unique grid points; element endpoints are shared and the domain is periodic */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are E*(N-1)
     total grid values spread equally among all the processors, since the
     boundary conditions are periodic.
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */

  PetscCall(DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass));

  PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

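  /*
     Convert the ownership range from grid points to elements: each element
     contributes N-1 unique grid points because neighboring elements share
     their endpoint and the grid is periodic.
  */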
  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */

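  /*
     Each element occupies an interval of length Le; the GLL nodes are mapped
     from the reference element [-1,1] to physical coordinates, and the lumped
     mass entry at each node is the scaled GLL weight.  The first node of an
     element coincides with the last node of the previous element, so its weight
     is added twice (the GLL weights are symmetric, weights[0] = weights[N-1]).
  */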
  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind           = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   Create matrix data structure; set matrix evaluation routine.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
  /*
   Assemble the time-independent discretized operators: the Laplacian (stiff)
   and the gradient (grad).  These routines have the calling sequence of a TS
   right-hand-side Jacobian function, but the TS argument is not used, so it is
   safe to call them here before the TS has been created.
   */
  PetscCall(RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
  PetscCall(RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));

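  /*
   Keep an unmodified copy of the Laplacian: RHSJacobian() overwrites the matrix
   passed to TSSetRHSJacobian() (SEMop.stiff), while RHSFunction() needs the
   original operator.
   */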
  PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));

  /* attach the null space to the matrix; this is probably not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* likewise attach the null space to the gradient matrix */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
  PetscCall(TSSetType(appctx.ts, TSRK));
  PetscCall(TSSetDM(appctx.ts, appctx.da));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));
  PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));

  /* Set initial conditions for the problem */
  PetscCall(TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx));

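  /*
   Register the exact solution with the TS so that the error can be monitored,
   e.g. with -ts_monitor_error as used by the tests below.
  */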
  PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode (*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetStepNumber(appctx.ts, 0));

  PetscCall(TSSolve(appctx.ts, appctx.dat.curr_sol));

  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(MatDestroy(&appctx.SEMop.grad));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*
   TrueSolution() computes the exact solution for the PDE

   Input Parameters:
   ts - the TS context (unused)
   t - the time at which to evaluate the solution
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, xs, xn;

  PetscFunctionBeginUser;
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  for (i = xs; i < xs + xn; i++) {
    s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

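/*
   RHSFunction - evaluates the discretized right-hand side
       F(u) = K u - u .* (G u),
   where K (keptstiff) applies mu*u_xx and G (grad) applies u_x in the GLL
   spectral element discretization, so F(u) approximates mu*u_xx - u*u_x.
*/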
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBeginUser;
  PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout)); /* grad u */
  PetscCall(VecPointwiseMult(globalout, globalin, globalout)); /* u grad u */
  PetscCall(VecScale(globalout, -1.0));
  PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout)); /* -u (grad u) + K u */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
      K is the discretization of the Laplacian
      G is the discretization of the gradient

      Computes the Jacobian of   K u - diag(u) G u   which is given by
              K - diag(u) G - diag(G u)
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;
  Vec     Gglobalin;

  PetscFunctionBeginUser;
  /*    A = diag(u) G */

  PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
  PetscCall(MatDiagonalScale(A, globalin, NULL));

  /*    A  = A + diag(Gu) */
  PetscCall(VecDuplicate(globalin, &Gglobalin));
  PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
  PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
  PetscCall(VecDestroy(&Gglobalin));

  /*   A = K - A, i.e. the full Jacobian K - diag(u) G - diag(G u) */
  PetscCall(MatScale(A, -1.0));
  PetscCall(MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petscblaslapack.h>
/*
     Matrix-free application of the 1D Laplacian and gradient for GLL spectral elements
*/
PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp, vv;
  PetscInt           i, j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One  = 1, n;
  PetscScalar        _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
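  /*
     Apply the dense N x N element Laplacian to each local element with a BLAS
     gemv; element start indices advance by N-1 since neighboring elements share
     an endpoint.  Contributions at shared nodes accumulate because gemv is
     called with beta = 1 and the local-to-global scatter below uses ADD_VALUES;
     the result is then divided by the lumped mass vector.
  */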
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp;
  PetscInt           j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One  = 1, n;
  PetscScalar        _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscCall(VecScale(y, -1.0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts - the TS context (unused)
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A  - the operator matrix
   BB - optionally different matrix from which the preconditioner is built (unused here)

*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
     Creates the element stiffness matrix for the given gll
     */
    PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");

    /* scale by the size of the element */
    for (i = 0; i < appctx->param.N; i++) {
      vv = -appctx->param.mu * 2.0 / appctx->param.Le;
      for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
    }

    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    /*
     loop over local elements
     */
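    /*
     rowsDM holds the local (ghosted) row indices of element j; the leading 1
     accounts for the single ghost node at the start of the ghosted local
     numbering used by MatSetValuesLocal() (the DMDA has stencil width 1), and
     successive elements overlap by one node.
     */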
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    /* scale the rows by the inverse of the lumped mass vector, then restore the mass vector */
    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Laplacian));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
   matrix for the advection (gradient) operator.

   Input Parameters:
   ts - the TS context (unused)
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A  - the operator matrix
   BB - optionally different preconditioning matrix (unused here)

*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
     Creates the advection matrix for the given gll
     */
    PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Advection));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

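/*
   Example command line (an illustrative invocation, not part of the registered
   tests below):

       ./ex50 -E 20 -N 8 -Tend 1.0 -mu 0.01 -ts_monitor_error

   -E, -N, -Tend, and -mu are read above with PetscOptionsGetInt()/PetscOptionsGetReal();
   -ts_monitor_error reports the error against the exact solution registered
   with TSSetSolutionFunction().
*/
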
/*TEST

    build:
      requires: !complex

    test:
      suffix: 1
      requires: !single

    test:
      suffix: 2
      nsize: 5
      requires: !single

    test:
      suffix: 3
      requires: !single
      args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

    test:
      suffix: 4
      requires: !single
      args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/