/* ex3.c */
/* Help text printed by the -help runtime option; one-line description of this example. */
static char help[] = "Model Equations for Advection-Diffusion\n";
3: /*
4: Page 9, Section 1.2 Model Equations for Advection-Diffusion
6: u_t = a u_x + d u_xx
   The initial conditions used here differ from those in the book.
10: */
12: /*
13: Helpful runtime linear solver options:
14: -pc_type mg -da_refine 2 -snes_monitor -ksp_monitor -ts_view (geometric multigrid with three levels)
16: */
18: /*
19: Include "petscts.h" so that we can use TS solvers. Note that this file
20: automatically includes:
21: petscsys.h - base PETSc routines petscvec.h - vectors
22: petscmat.h - matrices
23: petscis.h - index sets petscksp.h - Krylov subspace methods
24: petscviewer.h - viewers petscpc.h - preconditioners
25: petscksp.h - linear solvers petscsnes.h - nonlinear solvers
26: */
28: #include <petscts.h>
29: #include <petscdm.h>
30: #include <petscdmda.h>
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  PetscScalar a, d; /* coefficients of u_t = a u_x + d u_xx: a = advection speed, d = diffusion strength */
  PetscBool upwind; /* PETSC_TRUE: one-sided (upwind) differencing of the advection term; PETSC_FALSE: centered */
} AppCtx;
/*
   User-defined routines (defined at the bottom of this file)
*/
extern PetscErrorCode InitialConditions(TS, Vec, AppCtx *);                /* fills the solution vector at t = 0 */
extern PetscErrorCode RHSMatrixHeat(TS, PetscReal, Vec, Mat, Mat, void *); /* assembles the RHS Jacobian matrix */
extern PetscErrorCode Solution(TS, PetscReal, Vec, AppCtx *);              /* exact solution, for error monitoring */
int main(int argc, char **argv)
{
  AppCtx appctx;    /* user-defined application context */
  TS ts;            /* timestepping context */
  Vec U;            /* approximate solution vector */
  PetscReal dt;     /* initial time-step size */
  DM da;            /* 1d distributed array managing the periodic grid */
  PetscInt M;       /* global number of grid points */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  /* defaults: pure advection (a = 1, d = 0); overridable at runtime */
  appctx.a = 1.0;
  appctx.d = 0.0;
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-a", &appctx.a, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-d", &appctx.d, NULL));
  appctx.upwind = PETSC_TRUE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-upwind", &appctx.upwind, NULL));

  /* Periodic 1d grid: 60 points by default, 1 dof per point, stencil width 1
     (refinable at runtime with -da_refine / -da_grid_x via DMSetFromOptions) */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, 60, 1, 1, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vector data structure for the approximate solution
  */
  PetscCall(DMCreateGlobalVector(da, &U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));

  /*
     For linear problems with a time-dependent f(U,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand side
     as a time-dependent matrix.
  */
  PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
  PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSMatrixHeat, &appctx));
  /* Solution() takes an AppCtx* as its last argument; cast it to the generic
     void* signature that TSSetSolutionFunction expects */
  PetscCall(TSSetSolutionFunction(ts, (PetscErrorCode (*)(TS, PetscReal, Vec, void *))Solution, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  dt = .48 / (M * M); /* diffusive scaling dt ~ h^2; conservative for the default method */
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetMaxSteps(ts, 1000));
  PetscCall(TSSetMaxTime(ts, 100.0));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetType(ts, TSARKIMEX));
  PetscCall(TSSetFromOptions(ts)); /* may override type, dt, durations, monitors, ... */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(ts, U, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&U));
  PetscCall(DMDestroy(&da));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
144: /* --------------------------------------------------------------------- */
145: /*
146: InitialConditions - Computes the solution at the initial time.
148: Input Parameter:
149: u - uninitialized solution vector (global)
150: appctx - user-defined application context
152: Output Parameter:
153: u - vector with solution at initial time (global)
154: */
155: PetscErrorCode InitialConditions(TS ts, Vec U, AppCtx *appctx)
156: {
157: PetscScalar *u, h;
158: PetscInt i, mstart, mend, xm, M;
159: DM da;
161: PetscFunctionBeginUser;
162: PetscCall(TSGetDM(ts, &da));
163: PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
164: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
165: h = 1.0 / M;
166: mend = mstart + xm;
167: /*
168: Get a pointer to vector data.
169: - For default PETSc vectors, VecGetArray() returns a pointer to
170: the data array. Otherwise, the routine is implementation dependent.
171: - You MUST call VecRestoreArray() when you no longer need access to
172: the array.
173: - Note that the Fortran interface to VecGetArray() differs from the
174: C version. See the users manual for details.
175: */
176: PetscCall(DMDAVecGetArray(da, U, &u));
178: /*
179: We initialize the solution array by simply writing the solution
180: directly into the array locations. Alternatively, we could use
181: VecSetValues() or VecSetValuesLocal().
182: */
183: for (i = mstart; i < mend; i++) u[i] = PetscSinScalar(PETSC_PI * i * 6. * h) + 3. * PetscSinScalar(PETSC_PI * i * 2. * h);
185: /*
186: Restore vector
187: */
188: PetscCall(DMDAVecRestoreArray(da, U, &u));
189: PetscFunctionReturn(PETSC_SUCCESS);
190: }
191: /* --------------------------------------------------------------------- */
192: /*
193: Solution - Computes the exact solution at a given time.
195: Input Parameters:
196: t - current time
197: solution - vector in which exact solution will be computed
198: appctx - user-defined application context
200: Output Parameter:
201: solution - vector with the newly computed exact solution
202: */
203: PetscErrorCode Solution(TS ts, PetscReal t, Vec U, AppCtx *appctx)
204: {
205: PetscScalar *u, ex1, ex2, sc1, sc2, h;
206: PetscInt i, mstart, mend, xm, M;
207: DM da;
209: PetscFunctionBeginUser;
210: PetscCall(TSGetDM(ts, &da));
211: PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
212: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
213: h = 1.0 / M;
214: mend = mstart + xm;
215: /*
216: Get a pointer to vector data.
217: */
218: PetscCall(DMDAVecGetArray(da, U, &u));
220: /*
221: Simply write the solution directly into the array locations.
222: Alternatively, we culd use VecSetValues() or VecSetValuesLocal().
223: */
224: ex1 = PetscExpScalar(-36. * PETSC_PI * PETSC_PI * appctx->d * t);
225: ex2 = PetscExpScalar(-4. * PETSC_PI * PETSC_PI * appctx->d * t);
226: sc1 = PETSC_PI * 6. * h;
227: sc2 = PETSC_PI * 2. * h;
228: for (i = mstart; i < mend; i++) u[i] = PetscSinScalar(sc1 * (PetscReal)i + appctx->a * PETSC_PI * 6. * t) * ex1 + 3. * PetscSinScalar(sc2 * (PetscReal)i + appctx->a * PETSC_PI * 2. * t) * ex2;
230: /*
231: Restore vector
232: */
233: PetscCall(DMDAVecRestoreArray(da, U, &u));
234: PetscFunctionReturn(PETSC_SUCCESS);
235: }
237: /* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine that assembles the right-hand-side
   matrix for u_t = a u_x + d u_xx.  NOTE: despite the historical "Heat" name,
   the assembled matrix contains BOTH the diffusion and the advection terms,
   discretized on the periodic 1d grid.

   Input Parameters:
   ts  - the TS context
   t   - current time (unused: the operator is constant in time)
   U   - global input vector (unused: the problem is linear with constant
         coefficients)
   ctx - user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
        (same as AA in this example)

   Notes:
   Recall that MatSetValues() uses 0-based row and column numbers
   in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts, PetscReal t, Vec U, Mat AA, Mat BB, void *ctx)
{
  Mat A = AA; /* Jacobian matrix */
  AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt mstart, mend; /* locally owned row range [mstart, mend); mutated below when boundary rows are handled */
  PetscInt i, idx[3], M, xm;
  PetscScalar v[3], h;
  DM da;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetInfo(da, 0, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
  h = 1.0 / M; /* uniform grid spacing: M points on the periodic unit interval */
  mend = mstart + xm;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Phase 1 (INSERT_VALUES): diffusion term, standard 3-point stencil
     (d/h^2) [1 -2 1].  The first and last rows wrap around because the
     domain is periodic.
  */

  /* diffusion */
  v[0] = appctx->d / (h * h);
  v[1] = -2.0 * appctx->d / (h * h);
  v[2] = appctx->d / (h * h);
  if (!mstart) {
    /* row 0: left neighbor wraps to M-1 */
    idx[0] = M - 1;
    idx[1] = 0;
    idx[2] = 1;
    PetscCall(MatSetValues(A, 1, &mstart, 3, idx, v, INSERT_VALUES));
    mstart++; /* row 0 handled; the interior loop below starts at 1 */
  }

  if (mend == M) {
    mend--; /* row M-1 handled here; excluded from the interior loop */
    idx[0] = M - 2;
    idx[1] = M - 1;
    idx[2] = 0; /* right neighbor wraps to 0 */
    PetscCall(MatSetValues(A, 1, &mend, 3, idx, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, idx, v, INSERT_VALUES));
  }
  /* Flush assembly: required before switching from INSERT_VALUES to
     ADD_VALUES (the two modes may not be mixed without an intervening
     flush). */
  PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY));

  /* Re-fetch the ownership range: mstart/mend were modified in phase 1. */
  PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
  mend = mstart + xm;
  /*
     Phase 2 (ADD_VALUES): advection term, added on top of the diffusion
     entries.  Either centered or one-sided differencing, per appctx->upwind.
  */
  if (!appctx->upwind) {
    /* advection -- centered differencing: (a/2h) (u_{i+1} - u_{i-1}); zero diagonal contribution */
    v[0] = -.5 * appctx->a / (h);
    v[1] = .5 * appctx->a / (h);
    if (!mstart) {
      idx[0] = M - 1; /* left neighbor of row 0 wraps */
      idx[1] = 1;
      PetscCall(MatSetValues(A, 1, &mstart, 2, idx, v, ADD_VALUES));
      mstart++;
    }

    if (mend == M) {
      mend--;
      idx[0] = M - 2;
      idx[1] = 0; /* right neighbor of row M-1 wraps */
      PetscCall(MatSetValues(A, 1, &mend, 2, idx, v, ADD_VALUES));
    }

    for (i = mstart; i < mend; i++) {
      idx[0] = i - 1;
      idx[1] = i + 1;
      PetscCall(MatSetValues(A, 1, &i, 2, idx, v, ADD_VALUES));
    }
  } else {
    /* advection -- upwinding: one-sided difference (a/h) (u_{i+1} - u_i) */
    v[0] = -appctx->a / (h);
    v[1] = appctx->a / (h);
    if (!mstart) {
      /* row 0 uses only columns {0, 1}; no periodic wrap needed for this stencil */
      idx[0] = 0;
      idx[1] = 1;
      PetscCall(MatSetValues(A, 1, &mstart, 2, idx, v, ADD_VALUES));
      mstart++;
    }

    if (mend == M) {
      mend--;
      idx[0] = M - 1;
      idx[1] = 0; /* right neighbor of row M-1 wraps */
      PetscCall(MatSetValues(A, 1, &mend, 2, idx, v, ADD_VALUES));
    }

    for (i = mstart; i < mend; i++) {
      idx[0] = i;
      idx[1] = i + 1;
      PetscCall(MatSetValues(A, 1, &i, 2, idx, v, ADD_VALUES));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
382: /*TEST
384: test:
385: args: -pc_type mg -da_refine 2 -ts_view -ts_monitor -ts_max_time .3 -mg_levels_ksp_max_it 3
386: requires: double
387: filter: grep -v "total number of"
389: test:
390: suffix: 2
391: args: -pc_type mg -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time .3 -mg_levels_ksp_max_it 3
392: requires: x
393: output_file: output/ex3_1.out
394: requires: double
395: filter: grep -v "total number of"
397: TEST*/