Actual source code: ex12.c

static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
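
/*
   A quick check of the exact solution: with u = (1 + x*x) * (1 + t) we have
      u_t  = 1 + x*x    and    u_xx = 2*(1 + t),
   so u * u_xx / (2*(t+1)^2) = 1 + x*x = u_t, and the boundary and initial
   conditions above are satisfied as well.
*/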

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" so that we can use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx       appctx;               /* user-defined application context */
  TS           ts;                   /* timestepping context */
  Mat          A;                    /* Jacobian matrix data structure */
  Vec          u;                    /* approximate solution vector */
  PetscInt     time_steps_max = 100; /* default max timesteps */
  PetscReal    dt;
  PetscReal    time_total_max = 100.0; /* default max total time */
  PetscOptions options, optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));

  PetscCall(PetscOptionsCreate(&options));
  PetscCall(PetscOptionsSetValue(options, "-ts_monitor", "ascii"));
  PetscCall(PetscOptionsSetValue(options, "-snes_monitor", "ascii"));
  PetscCall(PetscOptionsSetValue(options, "-ksp_monitor", "ascii"));
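
  /*
     The options database created above is attached to the DMDA, the TS, and
     the Jacobian matrix below with PetscObjectSetOptions(), so those objects
     read their runtime options from this private database rather than from
     the global options database.
  */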

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(options, NULL, "-M", &appctx.m, NULL));

  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(PetscObjectSetOptions((PetscObject)appctx.da, options));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));
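
  /*
     The arguments 1, 1 in the DMDACreate1d() call above request one degree of
     freedom per grid point and a stencil width of one, which provides the
     single ghost value needed by the three-point finite difference stencil
     used in RHSFunction() and RHSJacobian().
  */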

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(PetscObjectSetOptions((PetscObject)ts, options));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscObjectSetOptions((PetscObject)A, options));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
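
  /*
     Note that the same matrix A is passed twice to TSSetRHSJacobian(): it is
     used both as the Jacobian and as the matrix from which the preconditioner
     is built.
  */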

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the backward Euler method.
       - Set timestepping duration info.
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     overrides the defaults set by TSSetMaxSteps()/TSSetMaxTime(); note that
     here such options are read from the private options database created
     above, not from the command line.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));
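
  /*
     Because the TS was associated with the private options database created
     above, TSSetFromOptions() picks up the -ts_monitor setting placed in that
     database; the -snes_monitor and -ksp_monitor settings are meant to reach
     the inner nonlinear and linear solvers in the same way.
  */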

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscObjectGetOptions((PetscObject)ts, &optionscopy));
  PetscCheck(options == optionscopy, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "PetscObjectGetOptions() failed");
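
  /*
     The check above verifies that PetscObjectGetOptions() returns the same
     options database that was attached with PetscObjectSetOptions(); this is
     the behavior the example is testing.
  */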

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(PetscOptionsDestroy(&options));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal(); a sketch of the VecSetValues()
     approach follows the loop below.
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }
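
  /*
     For example, the same initialization could be done with VecSetValues(),
     using global indices and a final assembly step (a sketch, not used here):

       for (i = mybase; i < myend; i++) {
         PetscScalar val = 1.0 + (h * (PetscReal)i) * (h * (PetscReal)i);
         PetscCall(VecSetValues(u, 1, &i, &val, INSERT_VALUES));
       }
       PetscCall(VecAssemblyBegin(u));
       PetscCall(VecAssemblyEnd(u));
  */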

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
      Directly access the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
      Directly access the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: the boundary condition is
        u(t,boundary) = g(t,boundary)
     for some function g. Taking the derivative with respect to t gives
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1,   so that u_{t}(t,0) = 1,
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2.
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = 2.0;

  /*
     Handle the interior nodes, where the PDE is replaced by finite
     difference operators.
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
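
  /*
     That is, at each interior node
        F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
     the standard centered difference approximation of u * u_xx / (2*(t+1)^2).
  */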

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix used to construct the preconditioner

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }
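
  /*
     The boundary values of F computed in RHSFunction() are constants (1 and 2),
     so the corresponding Jacobian rows are zero; a zero is inserted explicitly
     on the diagonal so that the diagonal entry is present in the matrix's
     nonzero structure.
  */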

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }
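
  /*
     These stencil values follow from differentiating
        F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i)
     with respect to u_{i-1}, u_i, and u_{i+1}:
        dF_i/du_{i-1} = sc * u_i
        dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc * u_i
  */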

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      requires: !single

TEST*/