Actual source code: ex21.c


  2: static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
  3: timestepping.  Runtime options include:\n\
  4:   -M <xg>, where <xg> = number of grid points\n\
  5:   -debug : Activate debugging printouts\n\
  6:   -nox   : Deactivate x-window graphics\n\
  7:   -ul    : lower bound\n\
  8:   -uh    : upper bound\n\n";

 10: /* ------------------------------------------------------------------------

 12:    This is a variation of ex2.c to solve the PDE

 14:                u * u_xx
 15:          u_t = ---------
 16:                2*(t+1)^2

 18:     with box constraints on the interior grid points
 19:     ul <= u(t,x) <= uh with x != 0,1
 20:     on the domain 0 <= x <= 1, with boundary conditions
 21:          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
 22:     and initial condition
 23:          u(0,x) = 1 + x*x.

 25:     The exact solution is:
 26:          u(t,x) = (1 + x*x) * (1 + t)

 28:     By default we use the backward Euler method.

 30:   ------------------------------------------------------------------------- */
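
/*
   A quick check that the exact solution stated above satisfies the problem:
   for u(t,x) = (1 + x*x) * (1 + t) we have u_t = 1 + x*x and u_xx = 2*(1 + t), so

        u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t) * 2*(1 + t) / (2*(t+1)^2) = 1 + x*x = u_t,

   and u(t,0) = t + 1, u(t,1) = 2*t + 2, u(0,x) = 1 + x*x as required.
*/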

 32: /*
 33:    Include "petscts.h" to use the PETSc timestepping routines. Note that
 34:    this file automatically includes "petscsys.h" and other lower-level
 35:    PETSc include files.

 37:    Include "petscdmda.h" to allow us to use the distributed array data
 38:    structures to manage the parallel grid.
 39: */
 40: #include <petscts.h>
 41: #include <petscdm.h>
 42: #include <petscdmda.h>
 43: #include <petscdraw.h>

 45: /*
 46:    User-defined application context - contains data needed by the
 47:    application-provided callback routines.
 48: */
 49: typedef struct {
 50:   MPI_Comm  comm;      /* communicator */
 51:   DM        da;        /* distributed array data structure */
 52:   Vec       localwork; /* local ghosted work vector */
 53:   Vec       u_local;   /* local ghosted approximate solution vector */
 54:   Vec       solution;  /* global exact solution vector */
 55:   PetscInt  m;         /* total number of grid points */
 56:   PetscReal h;         /* mesh width: h = 1/(m-1) */
 57:   PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
 58: } AppCtx;

 60: /*
 61:    User-defined routines, provided below.
 62: */
 63: extern PetscErrorCode InitialConditions(Vec, AppCtx *);
 64: extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
 65: extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
 66: extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
 67: extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
 68: extern PetscErrorCode SetBounds(Vec, Vec, PetscScalar, PetscScalar, AppCtx *);

 70: int main(int argc, char **argv)
 71: {
 72:   AppCtx      appctx;                /* user-defined application context */
 73:   TS          ts;                    /* timestepping context */
 74:   Mat         A;                     /* Jacobian matrix data structure */
 75:   Vec         u;                     /* approximate solution vector */
 76:   Vec         r;                     /* residual vector */
 77:   PetscInt    time_steps_max = 1000; /* default max timesteps */
 78:   PetscReal   dt;
 79:   PetscReal   time_total_max = 100.0; /* default max total time */
 80:   Vec         xl, xu;                 /* Lower and upper bounds on variables */
 81:   PetscScalar ul = 0.0, uh = 3.0;
 82:   PetscBool   mymonitor;
 83:   PetscReal   bounds[] = {1.0, 3.3};

 85:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 86:      Initialize program and set problem parameters
 87:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

 89:   PetscFunctionBeginUser;
 90:   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
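  /* bounds[] = {1.0, 3.3} fixes the vertical plotting range used by the default draw
     viewer when the solution is plotted in the Monitor() routine below */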
 91:   PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

 93:   appctx.comm = PETSC_COMM_WORLD;
 94:   appctx.m    = 60;
 95:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
 96:   PetscCall(PetscOptionsGetScalar(NULL, NULL, "-ul", &ul, NULL));
 97:   PetscCall(PetscOptionsGetScalar(NULL, NULL, "-uh", &uh, NULL));
 98:   PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
 99:   PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
100:   appctx.h = 1.0 / (appctx.m - 1.0);

102:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
103:      Create vector data structures
104:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

106:   /*
107:      Create distributed array (DMDA) to manage parallel grid and vectors
108:      and to set up the ghost point communication pattern.  There are M
109:      total grid values spread equally among all the processors.
110:   */
111:   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
112:   PetscCall(DMSetFromOptions(appctx.da));
113:   PetscCall(DMSetUp(appctx.da));

115:   /*
116:      Extract global and local vectors from DMDA; we use these to store the
117:      approximate solution.  Then duplicate these for remaining vectors that
118:      have the same types.
119:   */
120:   PetscCall(DMCreateGlobalVector(appctx.da, &u));
121:   PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

123:   /*
124:      Create local work vector for use in evaluating right-hand-side function;
125:      create global work vector for storing exact solution.
126:   */
127:   PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
128:   PetscCall(VecDuplicate(u, &appctx.solution));

130:   /* Create residual vector */
131:   PetscCall(VecDuplicate(u, &r));
132:   /* Create lower and upper bound vectors */
133:   PetscCall(VecDuplicate(u, &xl));
134:   PetscCall(VecDuplicate(u, &xu));
135:   PetscCall(SetBounds(xl, xu, ul, uh, &appctx));

137:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
138:      Create timestepping solver context; set callback routine for
139:      right-hand-side function evaluation.
140:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

142:   PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
143:   PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
144:   PetscCall(TSSetRHSFunction(ts, r, RHSFunction, &appctx));

146:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
147:      Set optional user-defined monitoring routine
148:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

150:   if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

152:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
153:      For nonlinear problems, the user can provide a Jacobian evaluation
154:      routine (or use a finite differencing approximation).

156:      Create matrix data structure; set Jacobian evaluation routine.
157:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

159:   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
160:   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
161:   PetscCall(MatSetFromOptions(A));
162:   PetscCall(MatSetUp(A));
163:   PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

165:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
166:      Set solution vector and initial timestep
167:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

169:   dt = appctx.h / 2.0;
170:   PetscCall(TSSetTimeStep(ts, dt));

172:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
173:      Customize timestepping solver:
174:        - Set the solution method to be the Backward Euler method.
175:        - Set timestepping duration info
176:      Then set runtime options, which can override these defaults.
177:      For example,
178:           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
179:      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
180:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

182:   PetscCall(TSSetType(ts, TSBEULER));
183:   PetscCall(TSSetMaxSteps(ts, time_steps_max));
184:   PetscCall(TSSetMaxTime(ts, time_total_max));
185:   PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
186:   /* Set lower and upper bound on the solution vector for each time step */
187:   PetscCall(TSVISetVariableBounds(ts, xl, xu));
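  /* These bounds are enforced by a variational-inequality nonlinear solver,
     e.g. -snes_type vinewtonrsls as used in the test at the end of this file */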
188:   PetscCall(TSSetFromOptions(ts));

190:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
191:      Solve the problem
192:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

194:   /*
195:      Evaluate initial conditions
196:   */
197:   PetscCall(InitialConditions(u, &appctx));

199:   /*
200:      Run the timestepping solver
201:   */
202:   PetscCall(TSSolve(ts, u));

204:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
205:      Free work space.  All PETSc objects should be destroyed when they
206:      are no longer needed.
207:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

209:   PetscCall(VecDestroy(&r));
210:   PetscCall(VecDestroy(&xl));
211:   PetscCall(VecDestroy(&xu));
212:   PetscCall(TSDestroy(&ts));
213:   PetscCall(VecDestroy(&u));
214:   PetscCall(MatDestroy(&A));
215:   PetscCall(DMDestroy(&appctx.da));
216:   PetscCall(VecDestroy(&appctx.localwork));
217:   PetscCall(VecDestroy(&appctx.solution));
218:   PetscCall(VecDestroy(&appctx.u_local));

220:   /*
221:      Always call PetscFinalize() before exiting a program.  This routine
222:        - finalizes the PETSc libraries as well as MPI
223:        - provides summary and diagnostic information if certain runtime
224:          options are chosen (e.g., -log_view).
225:   */
226:   PetscCall(PetscFinalize());
227:   return 0;
228: }
229: /* --------------------------------------------------------------------- */
230: /*
231:    InitialConditions - Computes the solution at the initial time.

233:    Input Parameters:
234:    u - uninitialized solution vector (global)
235:    appctx - user-defined application context

237:    Output Parameter:
238:    u - vector with solution at initial time (global)
239: */
240: PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
241: {
242:   PetscScalar *u_localptr, h = appctx->h, x;
243:   PetscInt     i, mybase, myend;

245:   PetscFunctionBeginUser;
246:   /*
247:      Determine starting point of each processor's range of
248:      grid values.
249:   */
250:   PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

252:   /*
253:     Get a pointer to vector data.
254:     - For default PETSc vectors, VecGetArray() returns a pointer to
255:       the data array.  Otherwise, the routine is implementation dependent.
256:     - You MUST call VecRestoreArray() when you no longer need access to
257:       the array.
258:     - Note that the Fortran interface to VecGetArray() differs from the
259:       C version.  See the users manual for details.
260:   */
261:   PetscCall(VecGetArray(u, &u_localptr));

263:   /*
264:      We initialize the solution array by simply writing the solution
265:      directly into the array locations.  Alternatively, we could use
266:      VecSetValues() or VecSetValuesLocal().
267:   */
268:   for (i = mybase; i < myend; i++) {
269:     x                      = h * (PetscReal)i; /* current location in global grid */
270:     u_localptr[i - mybase] = 1.0 + x * x;
271:   }

273:   /*
274:      Restore vector
275:   */
276:   PetscCall(VecRestoreArray(u, &u_localptr));

278:   /*
279:      Print debugging information if desired
280:   */
281:   if (appctx->debug) {
282:     PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
283:     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
284:   }

286:   PetscFunctionReturn(PETSC_SUCCESS);
287: }

289: /* --------------------------------------------------------------------- */
290: /*
291:   SetBounds - Sets the lower and upper bounds on the interior points

293:   Input parameters:
294:   xl - vector of lower bounds
295:   xu - vector of upper bounds
296:   ul - constant lower bound for all variables
297:   uh - constant upper bound for all variables
298:   appctx - Application context
299:  */
300: PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh, AppCtx *appctx)
301: {
302:   PetscScalar *l, *u;
303:   PetscMPIInt  rank, size;
304:   PetscInt     localsize;

306:   PetscFunctionBeginUser;
307:   PetscCall(VecSet(xl, ul));
308:   PetscCall(VecSet(xu, uh));
309:   PetscCall(VecGetLocalSize(xl, &localsize));
310:   PetscCall(VecGetArray(xl, &l));
311:   PetscCall(VecGetArray(xu, &u));

313:   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
314:   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
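  /* The two physical boundary points x = 0 and x = 1 carry Dirichlet data, so they are left
     unconstrained: their bounds are relaxed to -infinity/+infinity below and only the
     interior grid points remain restricted to [ul, uh] */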
315:   if (rank == 0) {
316:     l[0] = -PETSC_INFINITY;
317:     u[0] = PETSC_INFINITY;
318:   }
319:   if (rank == size - 1) {
320:     l[localsize - 1] = -PETSC_INFINITY;
321:     u[localsize - 1] = PETSC_INFINITY;
322:   }
323:   PetscCall(VecRestoreArray(xl, &l));
324:   PetscCall(VecRestoreArray(xu, &u));
325:   PetscFunctionReturn(PETSC_SUCCESS);
326: }

328: /* --------------------------------------------------------------------- */
329: /*
330:    ExactSolution - Computes the exact solution at a given time.

332:    Input Parameters:
333:    t - current time
334:    solution - vector in which exact solution will be computed
335:    appctx - user-defined application context

337:    Output Parameter:
338:    solution - vector with the newly computed exact solution
339: */
340: PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
341: {
342:   PetscScalar *s_localptr, h = appctx->h, x;
343:   PetscInt     i, mybase, myend;

345:   PetscFunctionBeginUser;
346:   /*
347:      Determine starting and ending points of each processor's
348:      range of grid values
349:   */
350:   PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

352:   /*
353:      Get a pointer to vector data.
354:   */
355:   PetscCall(VecGetArray(solution, &s_localptr));

357:   /*
358:      Simply write the solution directly into the array locations.
359:      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
360:   */
361:   for (i = mybase; i < myend; i++) {
362:     x                      = h * (PetscReal)i;
363:     s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
364:   }

366:   /*
367:      Restore vector
368:   */
369:   PetscCall(VecRestoreArray(solution, &s_localptr));
370:   PetscFunctionReturn(PETSC_SUCCESS);
371: }
372: /* --------------------------------------------------------------------- */
373: /*
374:    Monitor - User-provided routine to monitor the solution computed at
375:    each timestep.  This example plots the solution and computes the
376:    error in two different norms.

378:    Input Parameters:
379:    ts     - the timestep context
380:    step   - the count of the current step (with 0 meaning the
381:             initial condition)
382:    time   - the current time
383:    u      - the solution at this timestep
384:    ctx    - the user-provided context for this monitoring routine.
385:             In this case we use the application context which contains
386:             information about the problem size, workspace and the exact
387:             solution.
388: */
389: PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
390: {
391:   AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
392:   PetscReal en2, en2s, enmax;
393:   PetscDraw draw;

395:   PetscFunctionBeginUser;
396:   /*
397:      We use the default X windows viewer
398:              PETSC_VIEWER_DRAW_(appctx->comm)
399:      that is associated with the current communicator. This saves
400:      the effort of calling PetscViewerDrawOpen() to create the window.
401:      Note that if we wished to plot several items in separate windows we
402:      would create each viewer with PetscViewerDrawOpen() and store them in
403:      the application context, appctx.

 405:      Double buffering makes graphics look better.
406:   */
407:   PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
408:   PetscCall(PetscDrawSetDoubleBuffer(draw));
409:   PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

411:   /*
412:      Compute the exact solution at this timestep
413:   */
414:   PetscCall(ExactSolution(time, appctx->solution, appctx));

416:   /*
417:      Print debugging information if desired
418:   */
419:   if (appctx->debug) {
420:     PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
421:     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
422:     PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
423:     PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
424:   }

426:   /*
427:      Compute the 2-norm and max-norm of the error
428:   */
429:   PetscCall(VecAXPY(appctx->solution, -1.0, u));
430:   PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
 431:   en2s = PetscSqrtReal(appctx->h) * en2; /* scale by sqrt(h) so en2s approximates the continuous L2 norm of the error */
432:   PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

434:   /*
435:      PetscPrintf() causes only the first processor in this
436:      communicator to print the timestep information.
437:   */
 438:   PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g, 2-norm error = %g, max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

440:   /*
441:      Print debugging information if desired
442:    */
443:   /*  if (appctx->debug) {
444:      PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
445:      PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
446:    } */
447:   PetscFunctionReturn(PETSC_SUCCESS);
448: }
449: /* --------------------------------------------------------------------- */
450: /*
 451:    RHSFunction - User-provided routine that evaluates the right-hand-side
452:    function of the ODE.  This routine is set in the main program by
453:    calling TSSetRHSFunction().  We compute:
454:           global_out = F(global_in)

456:    Input Parameters:
 457:    ts         - timestepping context
458:    t          - current time
459:    global_in  - vector containing the current iterate
460:    ctx        - (optional) user-provided context for function evaluation.
461:                 In this case we use the appctx defined above.

463:    Output Parameter:
464:    global_out - vector containing the newly evaluated function
465: */
466: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
467: {
468:   AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
469:   DM                 da        = appctx->da;        /* distributed array */
470:   Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
471:   Vec                localwork = appctx->localwork; /* local ghosted work vector */
472:   PetscInt           i, localsize;
473:   PetscMPIInt        rank, size;
474:   PetscScalar       *copyptr, sc;
475:   const PetscScalar *localptr;

477:   PetscFunctionBeginUser;
478:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
479:      Get ready for local function computations
480:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
481:   /*
482:      Scatter ghost points to local vector, using the 2-step process
483:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
484:      By placing code between these two statements, computations can be
485:      done while messages are in transition.
486:   */
487:   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
488:   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

490:   /*
491:       Access directly the values in our local INPUT work array
492:   */
493:   PetscCall(VecGetArrayRead(local_in, &localptr));

495:   /*
496:       Access directly the values in our local OUTPUT work array
497:   */
498:   PetscCall(VecGetArray(localwork, &copyptr));

500:   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

502:   /*
503:       Evaluate our function on the nodes owned by this processor
504:   */
505:   PetscCall(VecGetLocalSize(local_in, &localsize));

507:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
508:      Compute entries for the locally owned part
509:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

511:   /*
512:      Handle boundary conditions: This is done by using the boundary condition
513:         u(t,boundary) = g(t,boundary)
514:      for some function g. Now take the derivative with respect to t to obtain
515:         u_{t}(t,boundary) = g_{t}(t,boundary)

517:      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
 518:              and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
519:   */
520:   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
521:   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
522:   if (rank == 0) copyptr[0] = 1.0;
523:   if (rank == size - 1) copyptr[localsize - 1] = (t < .5) ? 2.0 : 0.0;
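  /* Note: for t >= 0.5 the right-boundary rate above is 0 rather than the exact value 2,
     so the computed solution stops following the exact boundary data; presumably this is
     done so that the upper bound uh becomes active and the variational-inequality solver
     is actually exercised */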

525:   /*
 526:      Handle the interior nodes where the PDE is replaced by finite
527:      difference operators.
528:   */
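  /* Each interior value is F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
     i.e. the centered second difference approximating u*u_xx / (2*(t+1)^2),
     with sc = 1/(2*h^2*(1+t)^2) computed above */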
529:   for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

531:   /*
532:      Restore vectors
533:   */
534:   PetscCall(VecRestoreArrayRead(local_in, &localptr));
535:   PetscCall(VecRestoreArray(localwork, &copyptr));

537:   /*
538:      Insert values from the local OUTPUT vector into the global
539:      output vector
540:   */
541:   PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
542:   PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

544:   /* Print debugging information if desired */
545:   /*  if (appctx->debug) {
546:      PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
547:      PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
548:    } */

550:   PetscFunctionReturn(PETSC_SUCCESS);
551: }
552: /* --------------------------------------------------------------------- */
553: /*
554:    RHSJacobian - User-provided routine to compute the Jacobian of
555:    the nonlinear right-hand-side function of the ODE.

557:    Input Parameters:
558:    ts - the TS context
559:    t - current time
560:    global_in - global input vector
 561:    ctx - optional user-defined context, as set by TSSetRHSJacobian()

563:    Output Parameters:
564:    AA - Jacobian matrix
 565:    B  - optionally different matrix used to construct the preconditioner

568:   Notes:
569:   RHSJacobian computes entries for the locally owned part of the Jacobian.
570:    - Currently, all PETSc parallel matrix formats are partitioned by
571:      contiguous chunks of rows across the processors.
572:    - Each processor needs to insert only elements that it owns
573:      locally (but any non-local elements will be sent to the
574:      appropriate processor during matrix assembly).
575:    - Always specify global row and columns of matrix entries when
576:      using MatSetValues().
577:    - Here, we set all entries for a particular row at once.
578:    - Note that MatSetValues() uses 0-based row and column numbers
579:      in Fortran as well as in C.
580: */
581: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat B, void *ctx)
582: {
583:   AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
584:   Vec                local_in = appctx->u_local; /* local ghosted input vector */
585:   DM                 da       = appctx->da;      /* distributed array */
586:   PetscScalar        v[3], sc;
587:   const PetscScalar *localptr;
588:   PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

590:   PetscFunctionBeginUser;
591:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
592:      Get ready for local Jacobian computations
593:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
594:   /*
595:      Scatter ghost points to local vector, using the 2-step process
596:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
597:      By placing code between these two statements, computations can be
598:      done while messages are in transition.
599:   */
600:   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
601:   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

603:   /*
604:      Get pointer to vector data
605:   */
606:   PetscCall(VecGetArrayRead(local_in, &localptr));

608:   /*
609:      Get starting and ending locally owned rows of the matrix
610:   */
611:   PetscCall(MatGetOwnershipRange(B, &mstarts, &mends));
612:   mstart = mstarts;
613:   mend   = mends;

615:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
616:      Compute entries for the locally owned part of the Jacobian.
617:       - Currently, all PETSc parallel matrix formats are partitioned by
618:         contiguous chunks of rows across the processors.
619:       - Each processor needs to insert only elements that it owns
620:         locally (but any non-local elements will be sent to the
621:         appropriate processor during matrix assembly).
622:       - Here, we set all entries for a particular row at once.
 623:       - We can set matrix entries using either
624:         MatSetValuesLocal() or MatSetValues().
625:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

627:   /*
628:      Set matrix rows corresponding to boundary data
629:   */
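  /* The boundary right-hand sides (the prescribed rates at x = 0 and x = 1) do not depend
     on u, so each of these Jacobian rows contains only a zero diagonal entry */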
630:   if (mstart == 0) {
631:     v[0] = 0.0;
632:     PetscCall(MatSetValues(B, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
633:     mstart++;
634:   }
635:   if (mend == appctx->m) {
636:     mend--;
637:     v[0] = 0.0;
638:     PetscCall(MatSetValues(B, 1, &mend, 1, &mend, v, INSERT_VALUES));
639:   }

641:   /*
642:      Set matrix rows corresponding to interior data.  We construct the
643:      matrix one row at a time.
644:   */
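  /* For F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i) the Jacobian row is
        dF_i/du_{i-1} = sc * u_i
        dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc * u_i
     which are exactly the values v[0], v[1], v[2] inserted below */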
645:   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
646:   for (i = mstart; i < mend; i++) {
647:     idx[0] = i - 1;
648:     idx[1] = i;
649:     idx[2] = i + 1;
650:     is     = i - mstart + 1;
651:     v[0]   = sc * localptr[is];
652:     v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
653:     v[2]   = sc * localptr[is];
654:     PetscCall(MatSetValues(B, 1, &i, 3, idx, v, INSERT_VALUES));
655:   }

657:   /*
658:      Restore vector
659:   */
660:   PetscCall(VecRestoreArrayRead(local_in, &localptr));

662:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
663:      Complete the matrix assembly process and set some options
664:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
665:   /*
666:      Assemble matrix, using the 2-step process:
667:        MatAssemblyBegin(), MatAssemblyEnd()
668:      Computations can be done while messages are in transition
669:      by placing code between these two statements.
670:   */
671:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
672:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

674:   /*
 675:      Set an option to indicate that we will never add a new nonzero location
676:      to the matrix. If we do, it will generate an error.
677:   */
678:   PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

680:   PetscFunctionReturn(PETSC_SUCCESS);
681: }

683: /*TEST

685:     test:
686:       args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
687:       requires: !single

689: TEST*/