Actual source code: ex24.c

static char help[] = "Test TSComputeIJacobian()\n\n";

#include <petscsys.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar u, v;
} Field;
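/* Two coupled fields per grid point, stored interleaved; the DMDA in main() is
   created with dof = 2 to match this layout. */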

typedef struct {
  PetscReal D1, D2, gamma, kappa;
} AppCtx;
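/* D1 and D2 are the diffusion coefficients of u and v; gamma and kappa are the
   reaction-rate constants of the Gray-Scott model whose residual is quoted in
   the comments inside RHSJacobian(). */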

PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec U, Mat A, Mat BB, void *ctx)
{
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, vc;
  Field     **u;
  Vec         localU;
  MatStencil  stencil[6], rowstencil;
  PetscScalar entries[6];

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);
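  /* sx = 1/hx^2 and sy = 1/hy^2 are the finite-difference scalings of the
     5-point Laplacian; the domain is 2.5 x 2.5 with periodic boundaries. */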

  /*
     Scatter ghost points to the local vector using the two-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;
  /*
     Compute function over the locally owned part of the grid
  */
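  /* Each Jacobian row has six nonzeros: the four diffusion neighbors, the
     diagonal, and the coupling to the other component at the same point. */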
  for (j = ys; j < ys + ym; j++) {
    stencil[0].j = j - 1;
    stencil[1].j = j + 1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0;
    rowstencil.j = j;
    for (i = xs; i < xs + xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /*  uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
          uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

          vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
          vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
          f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc); */
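      /* The u-row entries below are the derivatives of f[j][i].u above:
           d(f.u)/du = -2*D1*(sx+sy) - vc*vc - gamma   on the diagonal,
           d(f.u)/du =  D1*sx or D1*sy                 at the four neighbors,
           d(f.u)/dv = -2.0*uc*vc                      at the same point. */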

      stencil[0].i = i;
      stencil[0].c = 0;
      entries[0]   = appctx->D1 * sy;
      stencil[1].i = i;
      stencil[1].c = 0;
      entries[1]   = appctx->D1 * sy;
      stencil[2].i = i - 1;
      stencil[2].c = 0;
      entries[2]   = appctx->D1 * sx;
      stencil[3].i = i + 1;
      stencil[3].c = 0;
      entries[3]   = appctx->D1 * sx;
      stencil[4].i = i;
      stencil[4].c = 0;
      entries[4]   = -2.0 * appctx->D1 * (sx + sy) - vc * vc - appctx->gamma;
      stencil[5].i = i;
      stencil[5].c = 1;
      entries[5]   = -2.0 * uc * vc;
      rowstencil.i = i;
      rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
      stencil[0].c = 1;
      entries[0]   = appctx->D2 * sy;
      stencil[1].c = 1;
      entries[1]   = appctx->D2 * sy;
      stencil[2].c = 1;
      entries[2]   = appctx->D2 * sx;
      stencil[3].c = 1;
      entries[3]   = appctx->D2 * sx;
      stencil[4].c = 1;
      entries[4]   = -2.0 * appctx->D2 * (sx + sy) + 2.0 * uc * vc - appctx->gamma - appctx->kappa;
      stencil[5].c = 0;
      entries[5]   = vc * vc;
      rowstencil.c = 1;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
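      /* The v-row entries set above are, analogously, the derivatives of f[j][i].v:
           d(f.v)/dv = -2*D2*(sx+sy) + 2*uc*vc - gamma - kappa on the diagonal,
           d(f.v)/dv =  D2*sx or D2*sy at the four neighbors, and d(f.v)/du = vc*vc. */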
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19.0 * xm * ym));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
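  /* Seed v with a smooth sin^2 bump on [1, 1.5] x [1, 1.5], zero elsewhere,
     and set u = 1 - 2v so the state is (1, 0) away from the seed. */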
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}
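
/*
   A minimal sketch (hypothetical helper, not part of the original test) of the
   identity that main() exercises: for a problem registered only through
   TSSetRHSJacobian(), TSComputeIJacobian() should produce shift*I - Jrhs, so
   the same matrix can be rebuilt by hand from the RHS Jacobian alone. This
   function is illustrative only and is not called below.
*/
static PetscErrorCode CheckShiftIdentity(TS ts, PetscReal t, Vec U, PetscReal shift, Mat IJac, AppCtx *appctx)
{
  Mat       J;
  PetscReal norm;

  PetscFunctionBeginUser;
  PetscCall(MatDuplicate(IJac, MAT_DO_NOT_COPY_VALUES, &J)); /* same nonzero pattern as IJac */
  PetscCall(RHSJacobian(ts, t, U, J, J, appctx));            /* J = Jrhs */
  PetscCall(MatScale(J, -1.0));                              /* J = -Jrhs */
  PetscCall(MatShift(J, shift));                             /* J = shift*I - Jrhs */
  PetscCall(MatAXPY(J, -1.0, IJac, SAME_NONZERO_PATTERN));
  PetscCall(MatNorm(J, NORM_INFINITY, &norm));
  if (norm > 100.0 * PETSC_MACHINE_EPSILON) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "shift*I - Jrhs does not match TSComputeIJacobian(): norm %g\n", (double)norm));
  PetscCall(MatDestroy(&J));
  PetscFunctionReturn(PETSC_SUCCESS);
}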

int main(int argc, char **argv)
{
  TS        ts;
  Vec       U, Udot;
  Mat       Jac, Jac2;
  DM        da;
  AppCtx    appctx;
  PetscReal t = 0, shift, norm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));
  PetscCall(DMSetMatType(da, MATAIJ));
  PetscCall(DMCreateMatrix(da, &Jac));
  PetscCall(DMCreateMatrix(da, &Jac2));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &U));
  PetscCall(VecDuplicate(U, &Udot));
  PetscCall(VecSet(Udot, 0.0));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSJacobian(ts, Jac, Jac, RHSJacobian, &appctx));
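  /* Only an RHS Jacobian is supplied, so TSComputeIJacobian() assembles
     shift*I - Jrhs for the implicit form F(t,U,Udot) = Udot - RHS(t,U). */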

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, U));
  PetscCall(TSSetSolution(ts, U));
  PetscCall(TSSetFromOptions(ts));
  PetscCall(TSSetUp(ts));

  shift = 2.;
  PetscCall(TSComputeIJacobian(ts, t, U, Udot, shift, Jac2, Jac2, PETSC_FALSE));
  shift = 1.;
  PetscCall(TSComputeIJacobian(ts, t, U, Udot, shift, Jac, Jac, PETSC_FALSE));
  shift = 2.;
  PetscCall(TSComputeIJacobian(ts, t, U, Udot, shift, Jac, Jac, PETSC_FALSE));
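  /* Jac was assembled with shift = 1 and then recomputed with shift = 2; if
     TSComputeIJacobian() correctly rebuilds shift*I - Jrhs rather than reusing
     the stale shifted matrix, Jac must now equal Jac2, which was assembled
     directly with shift = 2. */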
  PetscCall(MatAXPY(Jac, -1, Jac2, SAME_NONZERO_PATTERN));
  PetscCall(MatNorm(Jac, NORM_INFINITY, &norm));
  if (norm > 100.0 * PETSC_MACHINE_EPSILON) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\nIncorrect behaviour of TSComputeIJacobian(): the two matrices should be identical.\n", (double)norm));
  PetscCall(MatDestroy(&Jac));
  PetscCall(MatDestroy(&Jac2));
  PetscCall(VecDestroy(&U));
  PetscCall(VecDestroy(&Udot));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  test:

TEST*/