Actual source code: ex2.c

  1: static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
  2: timestepping.  Runtime options include:\n\
  3:   -M <xg>, where <xg> = number of grid points\n\
  4:   -debug : Activate debugging printouts\n\
  5:   -nox   : Deactivate x-window graphics\n\n";

  7: /* ------------------------------------------------------------------------

  9:    This program solves the PDE

 11:                u * u_xx
 12:          u_t = ---------
 13:                2*(t+1)^2

 15:     on the domain 0 <= x <= 1, with boundary conditions
 16:          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
 17:     and initial condition
 18:          u(0,x) = 1 + x*x.

 20:     The exact solution is:
 21:          u(t,x) = (1 + x*x) * (1 + t)

 23:     Note that since the solution is linear in time and quadratic in x,
 24:     the finite difference scheme actually computes the "exact" solution.

 26:     By default, we use the backward Euler method.

 28:   ------------------------------------------------------------------------- */
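/*
    With the backward Euler method used below, each timestep requires solving a
    nonlinear algebraic system.  Roughly (a sketch of what TS and SNES do
    internally), the step from u^n to u^{n+1} solves

         u^{n+1} - u^n - dt * F(t^{n+1}, u^{n+1}) = 0,

    where F is the right-hand-side function registered with TSSetRHSFunction(),
    and the Newton iterations inside SNES use the Jacobian J = dF/du registered
    with TSSetRHSJacobian().
*/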

 30: /*
 31:    Include "petscts.h" to use the PETSc timestepping routines. Note that
 32:    this file automatically includes "petscsys.h" and other lower-level
 33:    PETSc include files.

 35:    Include "petscdmda.h" to allow us to use the distributed array data
 36:    structures to manage the parallel grid.
 37: */
 38: #include <petscts.h>
 39: #include <petscdm.h>
 40: #include <petscdmda.h>
 41: #include <petscdraw.h>

 43: /*
 44:    User-defined application context - contains data needed by the
 45:    application-provided callback routines.
 46: */
 47: typedef struct {
 48:   MPI_Comm  comm;      /* communicator */
 49:   DM        da;        /* distributed array data structure */
 50:   Vec       localwork; /* local ghosted work vector */
 51:   Vec       u_local;   /* local ghosted approximate solution vector */
 52:   Vec       solution;  /* global exact solution vector */
 53:   PetscInt  m;         /* total number of grid points */
 54:   PetscReal h;         /* mesh width: h = 1/(m-1) */
 55:   PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
 56: } AppCtx;

 58: /*
 59:    User-defined routines, provided below.
 60: */
 61: extern PetscErrorCode InitialConditions(Vec, AppCtx *);
 62: extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
 63: extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
 64: extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
 65: extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

 67: int main(int argc, char **argv)
 68: {
 69:   AppCtx    appctx;               /* user-defined application context */
 70:   TS        ts;                   /* timestepping context */
 71:   Mat       A;                    /* Jacobian matrix data structure */
 72:   Vec       u;                    /* approximate solution vector */
 73:   PetscInt  time_steps_max = 100; /* default max timesteps */
 74:   PetscReal dt;
 75:   PetscReal time_total_max = 100.0; /* default max total time */
 76:   PetscBool mymonitor      = PETSC_FALSE;
 77:   PetscReal bounds[]       = {1.0, 3.3};

 79:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 80:      Initialize program and set problem parameters
 81:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

 83:   PetscFunctionBeginUser;
 84:   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
 85:   PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

 87:   appctx.comm = PETSC_COMM_WORLD;
 88:   appctx.m    = 60;

 90:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
 91:   PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
 92:   PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));

 94:   appctx.h = 1.0 / (appctx.m - 1.0);
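  /*
     The m grid points include both boundary points x=0 and x=1, so there are
     m-1 intervals and the uniform mesh width is h = 1/(m-1).
  */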

 96:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 97:      Create vector data structures
 98:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

100:   /*
101:      Create distributed array (DMDA) to manage parallel grid and vectors
102:      and to set up the ghost point communication pattern.  There are M
103:      total grid values spread equally among all the processors.
104:   */
105:   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
106:   PetscCall(DMSetFromOptions(appctx.da));
107:   PetscCall(DMSetUp(appctx.da));
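  /*
     The arguments to DMDACreate1d() above request m grid points with 1 degree
     of freedom per point and a stencil width of 1, i.e. one ghost point on each
     side of the local portion; passing NULL lets PETSc decide how many points
     each process owns.
  */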

109:   /*
110:      Extract global and local vectors from DMDA; we use these to store the
111:      approximate solution.  Then duplicate these for remaining vectors that
112:      have the same types.
113:   */
114:   PetscCall(DMCreateGlobalVector(appctx.da, &u));
115:   PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

117:   /*
118:      Create local work vector for use in evaluating right-hand-side function;
119:      create global work vector for storing exact solution.
120:   */
121:   PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
122:   PetscCall(VecDuplicate(u, &appctx.solution));

124:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
125:      Create timestepping solver context; set callback routine for
126:      right-hand-side function evaluation.
127:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

129:   PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
130:   PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
131:   PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

133:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
134:      Set optional user-defined monitoring routine
135:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

137:   if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

139:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
140:      For nonlinear problems, the user can provide a Jacobian evaluation
141:      routine (or use a finite differencing approximation).

143:      Create matrix data structure; set Jacobian evaluation routine.
144:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

146:   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
147:   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
148:   PetscCall(MatSetFromOptions(A));
149:   PetscCall(MatSetUp(A));
150:   PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
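  /*
     As an alternative (not used here): the analytic Jacobian routine can be
     omitted and the Jacobian approximated by finite differences at runtime,
     e.g. with the options -snes_fd (finite-difference Jacobian) or -snes_mf
     (matrix-free Jacobian-vector products).
  */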

152:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
153:      Set solution vector and initial timestep
154:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

156:   dt = appctx.h / 2.0;
157:   PetscCall(TSSetTimeStep(ts, dt));

159:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160:      Customize timestepping solver:
161:        - Set the solution method to be the Backward Euler method.
162:        - Set timestepping duration info
163:      Then set runtime options, which can override these defaults.
164:      For example,
165:           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
166:      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
167:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

169:   PetscCall(TSSetType(ts, TSBEULER));
170:   PetscCall(TSSetMaxSteps(ts, time_steps_max));
171:   PetscCall(TSSetMaxTime(ts, time_total_max));
172:   PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
173:   PetscCall(TSSetFromOptions(ts));

175:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
176:      Solve the problem
177:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

179:   /*
180:      Evaluate initial conditions
181:   */
182:   PetscCall(InitialConditions(u, &appctx));

184:   /*
185:      Run the timestepping solver
186:   */
187:   PetscCall(TSSolve(ts, u));

189:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190:      Free work space.  All PETSc objects should be destroyed when they
191:      are no longer needed.
192:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

194:   PetscCall(TSDestroy(&ts));
195:   PetscCall(VecDestroy(&u));
196:   PetscCall(MatDestroy(&A));
197:   PetscCall(DMDestroy(&appctx.da));
198:   PetscCall(VecDestroy(&appctx.localwork));
199:   PetscCall(VecDestroy(&appctx.solution));
200:   PetscCall(VecDestroy(&appctx.u_local));

202:   /*
203:      Always call PetscFinalize() before exiting a program.  This routine
204:        - finalizes the PETSc libraries as well as MPI
205:        - provides summary and diagnostic information if certain runtime
206:          options are chosen (e.g., -log_view).
207:   */
208:   PetscCall(PetscFinalize());
209:   return 0;
210: }
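/*
   A typical invocation (a sketch; assumes the example has been built as "ex2"
   in the usual PETSc fashion and that an MPI launcher is available):

       mpiexec -n 2 ./ex2 -ts_max_steps 10 -ts_monitor -nox
*/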
211: /* --------------------------------------------------------------------- */
212: /*
213:    InitialConditions - Computes the solution at the initial time.

215:    Input Parameters:
216:    u - uninitialized solution vector (global)
217:    appctx - user-defined application context

219:    Output Parameter:
220:    u - vector with solution at initial time (global)
221: */
222: PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
223: {
224:   PetscScalar *u_localptr, h = appctx->h, x;
225:   PetscInt     i, mybase, myend;

227:   PetscFunctionBeginUser;
228:   /*
229:      Determine starting point of each processor's range of
230:      grid values.
231:   */
232:   PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

234:   /*
235:     Get a pointer to vector data.
236:     - For default PETSc vectors, VecGetArray() returns a pointer to
237:       the data array.  Otherwise, the routine is implementation dependent.
238:     - You MUST call VecRestoreArray() when you no longer need access to
239:       the array.
240:     - Note that the Fortran interface to VecGetArray() differs from the
241:       C version.  See the users manual for details.
242:   */
243:   PetscCall(VecGetArray(u, &u_localptr));

245:   /*
246:      We initialize the solution array by simply writing the solution
247:      directly into the array locations.  Alternatively, we could use
248:      VecSetValues() or VecSetValuesLocal().
249:   */
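  /*
     VecGetArray() exposes only this process's owned block, indexed from 0, so
     the value for global grid point i is written to slot i - mybase.
  */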
250:   for (i = mybase; i < myend; i++) {
251:     x                      = h * (PetscReal)i; /* current location in global grid */
252:     u_localptr[i - mybase] = 1.0 + x * x;
253:   }

255:   /*
256:      Restore vector
257:   */
258:   PetscCall(VecRestoreArray(u, &u_localptr));

260:   /*
261:      Print debugging information if desired
262:   */
263:   if (appctx->debug) {
264:     PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
265:     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
266:   }

268:   PetscFunctionReturn(PETSC_SUCCESS);
269: }
270: /* --------------------------------------------------------------------- */
271: /*
272:    ExactSolution - Computes the exact solution at a given time.

274:    Input Parameters:
275:    t - current time
276:    solution - vector in which exact solution will be computed
277:    appctx - user-defined application context

279:    Output Parameter:
280:    solution - vector with the newly computed exact solution
281: */
282: PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
283: {
284:   PetscScalar *s_localptr, h = appctx->h, x;
285:   PetscInt     i, mybase, myend;

287:   PetscFunctionBeginUser;
288:   /*
289:      Determine starting and ending points of each processor's
290:      range of grid values
291:   */
292:   PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

294:   /*
295:      Get a pointer to vector data.
296:   */
297:   PetscCall(VecGetArray(solution, &s_localptr));

299:   /*
300:      Simply write the solution directly into the array locations.
301:      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
302:   */
303:   for (i = mybase; i < myend; i++) {
304:     x                      = h * (PetscReal)i;
305:     s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
306:   }

308:   /*
309:      Restore vector
310:   */
311:   PetscCall(VecRestoreArray(solution, &s_localptr));
312:   PetscFunctionReturn(PETSC_SUCCESS);
313: }
314: /* --------------------------------------------------------------------- */
315: /*
316:    Monitor - User-provided routine to monitor the solution computed at
317:    each timestep.  This example plots the solution and computes the
318:    error in two different norms.

320:    Input Parameters:
321:    ts     - the timestep context
322:    step   - the count of the current step (with 0 meaning the
323:             initial condition)
324:    time   - the current time
325:    u      - the solution at this timestep
326:    ctx    - the user-provided context for this monitoring routine.
327:             In this case we use the application context which contains
328:             information about the problem size, workspace and the exact
329:             solution.
330: */
331: PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
332: {
333:   AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
334:   PetscReal en2, en2s, enmax;
335:   PetscDraw draw;

337:   PetscFunctionBeginUser;
338:   /*
339:      We use the default X Windows viewer
340:              PETSC_VIEWER_DRAW_(appctx->comm)
341:      that is associated with the current communicator. This saves
342:      the effort of calling PetscViewerDrawOpen() to create the window.
343:      Note that if we wished to plot several items in separate windows we
344:      would create each viewer with PetscViewerDrawOpen() and store them in
345:      the application context, appctx.

347:      Double buffering makes graphics look better.
348:   */
349:   PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
350:   PetscCall(PetscDrawSetDoubleBuffer(draw));
351:   PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

353:   /*
354:      Compute the exact solution at this timestep
355:   */
356:   PetscCall(ExactSolution(time, appctx->solution, appctx));

358:   /*
359:      Print debugging information if desired
360:   */
361:   if (appctx->debug) {
362:     PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
363:     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
364:     PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
365:     PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
366:   }

368:   /*
369:      Compute the 2-norm and max-norm of the error
370:   */
371:   PetscCall(VecAXPY(appctx->solution, -1.0, u));
372:   PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
373:   en2s = PetscSqrtReal(appctx->h) * en2; /* scale by sqrt(h) so the discrete 2-norm approximates the continuous L2 norm */
374:   PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

376:   /*
377:      PetscPrintf() causes only the first processor in this
378:      communicator to print the timestep information.
379:   */
380:   PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g  max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

382:   /*
383:      Print debugging information if desired
384:   */
385:   if (appctx->debug) {
386:     PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
387:     PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
388:   }
389:   PetscFunctionReturn(PETSC_SUCCESS);
390: }
391: /* --------------------------------------------------------------------- */
392: /*
393:    RHSFunction - User-provided routine that evaluates the right-hand-side
394:    function of the ODE.  This routine is set in the main program by
395:    calling TSSetRHSFunction().  We compute:
396:           global_out = F(global_in)

398:    Input Parameters:
399:    ts         - timestepping context
400:    t          - current time
401:    global_in  - vector containing the current iterate
402:    ctx        - (optional) user-provided context for function evaluation.
403:                 In this case we use the appctx defined above.

405:    Output Parameter:
406:    global_out - vector containing the newly evaluated function
407: */
408: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
409: {
410:   AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
411:   DM                 da        = appctx->da;        /* distributed array */
412:   Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
413:   Vec                localwork = appctx->localwork; /* local ghosted work vector */
414:   PetscInt           i, localsize;
415:   PetscMPIInt        rank, size;
416:   PetscScalar       *copyptr, sc;
417:   const PetscScalar *localptr;

419:   PetscFunctionBeginUser;
420:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
421:      Get ready for local function computations
422:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
423:   /*
424:      Scatter ghost points to local vector, using the 2-step process
425:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
426:      By placing code between these two statements, computations can be
427:      done while messages are in transition.
428:   */
429:   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
430:   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

432:   /*
433:       Access directly the values in our local INPUT work array
434:   */
435:   PetscCall(VecGetArrayRead(local_in, &localptr));

437:   /*
438:       Access directly the values in our local OUTPUT work array
439:   */
440:   PetscCall(VecGetArray(localwork, &copyptr));

442:   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

444:   /*
445:       Evaluate our function on the nodes owned by this processor
446:   */
447:   PetscCall(VecGetLocalSize(local_in, &localsize));

449:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
450:      Compute entries for the locally owned part
451:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

453:   /*
454:      Handle boundary conditions: This is done by using the boundary condition
455:         u(t,boundary) = g(t,boundary)
456:      for some function g. Now take the derivative with respect to t to obtain
457:         u_{t}(t,boundary) = g_{t}(t,boundary)

459:      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
460:              and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
461:   */
462:   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
463:   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
464:   if (rank == 0) copyptr[0] = 1.0;
465:   if (rank == size - 1) copyptr[localsize - 1] = 2.0;

467:   /*
468:      Handle the interior nodes where the PDE is replaced by finite
469:      difference operators.
470:   */
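  /*
     For each interior node this evaluates the centered-difference discretization

        F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2)

     of the right-hand side u*u_xx/(2*(t+1)^2); the factor sc computed above
     combines the 1/h^2 of the second difference with the 1/(2*(1+t)^2)
     coefficient.
  */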
471:   for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

473:   /*
474:      Restore vectors
475:   */
476:   PetscCall(VecRestoreArrayRead(local_in, &localptr));
477:   PetscCall(VecRestoreArray(localwork, &copyptr));

479:   /*
480:      Insert values from the local OUTPUT vector into the global
481:      output vector
482:   */
483:   PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
484:   PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

486:   /* Print debugging information if desired */
487:   if (appctx->debug) {
488:     PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
489:     PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
490:   }

492:   PetscFunctionReturn(PETSC_SUCCESS);
493: }
494: /* --------------------------------------------------------------------- */
495: /*
496:    RHSJacobian - User-provided routine to compute the Jacobian of
497:    the nonlinear right-hand-side function of the ODE.

499:    Input Parameters:
500:    ts - the TS context
501:    t - current time
502:    global_in - global input vector
503:    ctx - optional user-defined context, as set by TSSetRHSJacobian()

505:    Output Parameters:
506:    AA - Jacobian matrix
507:    BB - optionally different preconditioning matrix

510:   Notes:
511:   RHSJacobian computes entries for the locally owned part of the Jacobian.
512:    - Currently, all PETSc parallel matrix formats are partitioned by
513:      contiguous chunks of rows across the processors.
514:    - Each processor needs to insert only elements that it owns
515:      locally (but any non-local elements will be sent to the
516:      appropriate processor during matrix assembly).
517:    - Always specify global rows and columns of matrix entries when
518:      using MatSetValues().
519:    - Here, we set all entries for a particular row at once.
520:    - Note that MatSetValues() uses 0-based row and column numbers
521:      in Fortran as well as in C.
522: */
523: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
524: {
525:   AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
526:   Vec                local_in = appctx->u_local; /* local ghosted input vector */
527:   DM                 da       = appctx->da;      /* distributed array */
528:   PetscScalar        v[3], sc;
529:   const PetscScalar *localptr;
530:   PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

532:   PetscFunctionBeginUser;
533:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
534:      Get ready for local Jacobian computations
535:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
536:   /*
537:      Scatter ghost points to local vector, using the 2-step process
538:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
539:      By placing code between these two statements, computations can be
540:      done while messages are in transition.
541:   */
542:   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
543:   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

545:   /*
546:      Get pointer to vector data
547:   */
548:   PetscCall(VecGetArrayRead(local_in, &localptr));

550:   /*
551:      Get starting and ending locally owned rows of the matrix
552:   */
553:   PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
554:   mstart = mstarts;
555:   mend   = mends;

557:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
558:      Compute entries for the locally owned part of the Jacobian.
559:       - Currently, all PETSc parallel matrix formats are partitioned by
560:         contiguous chunks of rows across the processors.
561:       - Each processor needs to insert only elements that it owns
562:         locally (but any non-local elements will be sent to the
563:         appropriate processor during matrix assembly).
564:       - Here, we set all entries for a particular row at once.
565:       - We can set matrix entries using either
566:         MatSetValuesLocal() or MatSetValues().
567:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

569:   /*
570:      Set matrix rows corresponding to boundary data
571:   */
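  /*
     The boundary equations set in RHSFunction() are du/dt = 1 and du/dt = 2,
     which do not depend on u, so the corresponding Jacobian rows are zero;
     inserting an explicit 0.0 keeps the diagonal location allocated in the
     matrix nonzero structure.
  */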
572:   if (mstart == 0) {
573:     v[0] = 0.0;
574:     PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
575:     mstart++;
576:   }
577:   if (mend == appctx->m) {
578:     mend--;
579:     v[0] = 0.0;
580:     PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
581:   }

583:   /*
584:      Set matrix rows corresponding to interior data.  We construct the
585:      matrix one row at a time.
586:   */
587:   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
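  /*
     For an interior row i, F_i = sc * u_i * (u_{i+1} - 2*u_i + u_{i-1}), so the
     nonzero Jacobian entries are
        dF_i/du_{i-1} = sc*u_i,
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
        dF_i/du_{i+1} = sc*u_i.
     The index is = i - mstart + 1 converts the global row number into an index
     into the ghosted local array localptr (on the first process mstart was
     incremented by one above, which compensates for the missing left ghost point).
  */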
588:   for (i = mstart; i < mend; i++) {
589:     idx[0] = i - 1;
590:     idx[1] = i;
591:     idx[2] = i + 1;
592:     is     = i - mstart + 1;
593:     v[0]   = sc * localptr[is];
594:     v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
595:     v[2]   = sc * localptr[is];
596:     PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
597:   }

599:   /*
600:      Restore vector
601:   */
602:   PetscCall(VecRestoreArrayRead(local_in, &localptr));

604:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
605:      Complete the matrix assembly process and set some options
606:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
607:   /*
608:      Assemble matrix, using the 2-step process:
609:        MatAssemblyBegin(), MatAssemblyEnd()
610:      Computations can be done while messages are in transition
611:      by placing code between these two statements.
612:   */
613:   PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
614:   PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
615:   if (BB != AA) {
616:     PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
617:     PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
618:   }

620:   /*
621:      Set an option to indicate that we will never add a new nonzero location
622:      to the matrix. If we do, it will generate an error.
623:   */
624:   PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

626:   PetscFunctionReturn(PETSC_SUCCESS);
627: }

629: /*TEST

631:     test:
632:       args: -nox -ts_dt 10 -mymonitor
633:       nsize: 2
634:       requires: !single

636:     test:
637:       suffix: tut_1
638:       nsize: 1
639:       args: -ts_max_steps 10 -ts_monitor

641:     test:
642:       suffix: tut_2
643:       nsize: 4
644:       args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

646:     test:
647:       suffix: tut_3
648:       nsize: 4
649:       args: -ts_max_steps 10 -ts_monitor -M 128

651: TEST*/