Actual source code: ex73.c

/*T
   Concepts: KSP^solving a system of linear equations
   Concepts: KSP^Laplacian, 2d
   Processors: n
T*/

/*
This example was derived from src/ksp/ksp/tutorials ex29.c

Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

   -div \rho grad u = f,  0 < x,y < 1,

with forcing function

   f = e^{-x^2/\nu} e^{-y^2/\nu}

with Dirichlet boundary conditions

   u = f(x,y) for x = 0, x = 1, y = 0, y = 1

or pure Neumann boundary conditions.
*/

static char help[] = "Solves 2D inhomogeneous Laplacian. Demonstrates using the PCTelescopeSetUseCoarseDM functionality of PCTelescope via a DMShell\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

PetscErrorCode ComputeMatrix_ShellDA(KSP,Mat,Mat,void*);
PetscErrorCode ComputeMatrix_DMDA(DM,Mat,Mat,void*);
PetscErrorCode ComputeRHS_DMDA(DM,Vec,void*);
PetscErrorCode DMShellCreate_ShellDA(DM,DM*);
PetscErrorCode DMFieldScatter_ShellDA(DM,Vec,ScatterMode,DM,Vec);
PetscErrorCode DMStateScatter_ShellDA(DM,ScatterMode,DM);

typedef enum {DIRICHLET, NEUMANN} BCType;

typedef struct {
  PetscReal rho;
  PetscReal nu;
  BCType    bcType;
  MPI_Comm  comm;
} UserContext;

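/* Allocate a UserContext and populate it from the options database:
   -rho (the conductivity), -nu (the width of the Gaussian source) and
   -bc_type (dirichlet or neumann). The caller owns the returned struct
   and releases it with PetscFree(). */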
PetscErrorCode UserContextCreate(MPI_Comm comm,UserContext **ctx)
{
  UserContext    *user;
  const char     *bcTypes[2] = {"dirichlet","neumann"};
  PetscInt       bc;

  PetscCalloc1(1,&user);
  user->comm = comm;
  PetscOptionsBegin(comm, "", "Options for the inhomogeneous Poisson equation", "DM");
  user->rho = 1.0;
  PetscOptionsReal("-rho", "The conductivity", "ex29.c", user->rho, &user->rho, NULL);
  user->nu = 0.1;
  PetscOptionsReal("-nu", "The width of the Gaussian source", "ex29.c", user->nu, &user->nu, NULL);
  bc = (PetscInt)DIRICHLET;
  PetscOptionsEList("-bc_type","Type of boundary condition","ex29.c",bcTypes,2,bcTypes[0],&bc,NULL);
  user->bcType = (BCType)bc;
  PetscOptionsEnd();
  *ctx = user;
  return(0);
}

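/* Split comm into `number` sub-communicators using an interlaced PetscSubcomm.
   The child communicator with color 0 becomes the next (smaller) level of the
   hierarchy. */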
PetscErrorCode CommCoarsen(MPI_Comm comm,PetscInt number,PetscSubcomm *p)
{
  PetscSubcomm   psubcomm;

  PetscSubcommCreate(comm,&psubcomm);
  PetscSubcommSetNumber(psubcomm,number);
  PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_INTERLACED);
  *p = psubcomm;
  return(0);
}

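/* Build n nested levels of sub-communicators: pscommlist[n-1] coarsens comm,
   and each pscommlist[k] coarsens the child communicator of pscommlist[k+1]
   (only ranks with color 0 participate in the next level). Run with
   -view_hierarchy to print the communicator size of each level. */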
PetscErrorCode CommHierarchyCreate(MPI_Comm comm,PetscInt n,PetscInt number[],PetscSubcomm pscommlist[])
{
  PetscInt       k;
  PetscBool      view_hierarchy = PETSC_FALSE;

  for (k=0; k<n; k++) {
    pscommlist[k] = NULL;
  }

  if (n < 1) return(0);

  CommCoarsen(comm,number[n-1],&pscommlist[n-1]);
  for (k=n-2; k>=0; k--) {
    MPI_Comm comm_k = PetscSubcommChild(pscommlist[k+1]);
    if (pscommlist[k+1]->color == 0) {
      CommCoarsen(comm_k,number[k],&pscommlist[k]);
    }
  }

  PetscOptionsGetBool(NULL,NULL,"-view_hierarchy",&view_hierarchy,NULL);
  if (view_hierarchy) {
    PetscMPIInt size;

    MPI_Comm_size(comm,&size);
    PetscPrintf(comm,"level[%D] size %d\n",n,(int)size);
    for (k=n-1; k>=0; k--) {
      if (pscommlist[k]) {
        MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);

        if (pscommlist[k]->color == 0) {
          MPI_Comm_size(comm_k,&size);
          PetscPrintf(comm_k,"level[%D] size %d\n",k,(int)size);
        }
      }
    }
  }
  return(0);
}

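/* Given a global grid index (i,j), determine which rank of the Mp x Np
   repartitioned process grid owns it, together with its (pi,pj) coordinates
   in that grid. */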
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode _DMDADetermineRankFromGlobalIJ_2d(PetscInt i,PetscInt j,PetscInt Mp,PetscInt Np,
                                                        PetscInt start_i[],PetscInt start_j[],
                                                        PetscInt span_i[],PetscInt span_j[],
                                                        PetscMPIInt *_pi,PetscMPIInt *_pj,PetscMPIInt *rank_re)
{
  PetscInt pi,pj,n;

  *rank_re = -1;
  pi = pj = -1;
  if (_pi) {
    for (n=0; n<Mp; n++) {
      if ((i >= start_i[n]) && (i < start_i[n]+span_i[n])) {
        pi = n;
        break;
      }
    }
    if (pi == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ij] pi cannot be determined : range %D, val %D",Mp,i);
    *_pi = (PetscMPIInt)pi;
  }

  if (_pj) {
    for (n=0; n<Np; n++) {
      if ((j >= start_j[n]) && (j < start_j[n]+span_j[n])) {
        pj = n;
        break;
      }
    }
    if (pj == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ij] pj cannot be determined : range %D, val %D",Np,j);
    *_pj = (PetscMPIInt)pj;
  }

  *rank_re = (PetscMPIInt)(pi + pj * Mp);
  return(0);
}

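/* Compute the first global row owned by rank_re in the repartitioned layout
   by summing the subdomain sizes of all ranks preceding it in row-major
   order. */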
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode _DMDADetermineGlobalS0_2d(PetscMPIInt rank_re,PetscInt Mp_re,PetscInt Np_re,
                                                PetscInt range_i_re[],PetscInt range_j_re[],PetscInt *s0)
{
  PetscInt    i,j,start_IJ = 0;
  PetscMPIInt rank_ij;

  *s0 = -1;
  for (j=0; j<Np_re; j++) {
    for (i=0; i<Mp_re; i++) {
      rank_ij = (PetscMPIInt)(i + j*Mp_re);
      if (rank_ij < rank_re) {
        start_IJ += range_i_re[i]*range_j_re[j];
      }
    }
  }
  *s0 = start_IJ;
  return(0);
}

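/* Build the scalar permutation matrix P that maps the fine DMDA's global
   ordering to the ordering induced by the repartitioned DMDA (dmrepart).
   Since dmrepart exists only on the sub-communicator, its ownership ranges
   are broadcast from rank 0 so every rank of dmf's communicator can
   construct its rows of P. */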
/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode DMDACreatePermutation_2d(DM dmrepart,DM dmf,Mat *mat)
{
  PetscInt       k,sum,Mp_re = 0,Np_re = 0;
  PetscInt       nx,ny,sr,er,Mr,ndof;
  PetscInt       i,j,location,startI[2],endI[2],lenI[2];
  const PetscInt *_range_i_re = NULL,*_range_j_re = NULL;
  PetscInt       *range_i_re,*range_j_re;
  PetscInt       *start_i_re,*start_j_re;
  MPI_Comm       comm;
  Vec            V;
  Mat            Pscalar;

  PetscInfo(dmf,"setting up the permutation matrix (DMDA-2D)\n");
  PetscObjectGetComm((PetscObject)dmf,&comm);

  _range_i_re = _range_j_re = NULL;
  /* Query the repartitioned DMDA (it exists only on the child communicator) */
  if (dmrepart) {
    DMDAGetInfo(dmrepart,NULL,NULL,NULL,NULL,&Mp_re,&Np_re,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
    DMDAGetOwnershipRanges(dmrepart,&_range_i_re,&_range_j_re,NULL);
  }

  /* note - assume rank 0 always participates */
  MPI_Bcast(&Mp_re,1,MPIU_INT,0,comm);
  MPI_Bcast(&Np_re,1,MPIU_INT,0,comm);

  PetscCalloc1(Mp_re,&range_i_re);
  PetscCalloc1(Np_re,&range_j_re);

  if (_range_i_re) {PetscArraycpy(range_i_re,_range_i_re,Mp_re);}
  if (_range_j_re) {PetscArraycpy(range_j_re,_range_j_re,Np_re);}

  MPI_Bcast(range_i_re,Mp_re,MPIU_INT,0,comm);
  MPI_Bcast(range_j_re,Np_re,MPIU_INT,0,comm);

  PetscMalloc1(Mp_re,&start_i_re);
  PetscMalloc1(Np_re,&start_j_re);

  sum = 0;
  for (k=0; k<Mp_re; k++) {
    start_i_re[k] = sum;
    sum += range_i_re[k];
  }

  sum = 0;
  for (k=0; k<Np_re; k++) {
    start_j_re[k] = sum;
    sum += range_j_re[k];
  }

  /* Create permutation */
  DMDAGetInfo(dmf,NULL,&nx,&ny,NULL,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);
  DMGetGlobalVector(dmf,&V);
  VecGetSize(V,&Mr);
  VecGetOwnershipRange(V,&sr,&er);
  DMRestoreGlobalVector(dmf,&V);
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm,&Pscalar);
  MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
  MatSetType(Pscalar,MATAIJ);
  MatSeqAIJSetPreallocation(Pscalar,1,NULL);
  MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

  DMDAGetCorners(dmf,NULL,NULL,NULL,&lenI[0],&lenI[1],NULL);
  DMDAGetCorners(dmf,&startI[0],&startI[1],NULL,&endI[0],&endI[1],NULL);
  endI[0] += startI[0];
  endI[1] += startI[1];

  for (j=startI[1]; j<endI[1]; j++) {
    for (i=startI[0]; i<endI[0]; i++) {
      PetscMPIInt rank_ijk_re,rank_reI[] = {0,0};
      PetscInt    s0_re;
      PetscInt    ii,jj,local_ijk_re,mapped_ijk;
      PetscInt    lenI_re[] = {0,0};

      location = (i - startI[0]) + (j - startI[1])*lenI[0];
      _DMDADetermineRankFromGlobalIJ_2d(i,j,Mp_re,Np_re,
                                             start_i_re,start_j_re,
                                             range_i_re,range_j_re,
                                             &rank_reI[0],&rank_reI[1],&rank_ijk_re);

      _DMDADetermineGlobalS0_2d(rank_ijk_re,Mp_re,Np_re,range_i_re,range_j_re,&s0_re);

      ii = i - start_i_re[ rank_reI[0] ];
      if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error ii");
      jj = j - start_j_re[ rank_reI[1] ];
      if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error jj");

      lenI_re[0] = range_i_re[ rank_reI[0] ];
      lenI_re[1] = range_j_re[ rank_reI[1] ];
      local_ijk_re = ii + jj * lenI_re[0];
      mapped_ijk = s0_re + local_ijk_re;
      MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
    }
  }
  MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);

  *mat = Pscalar;

  PetscFree(range_i_re);
  PetscFree(range_j_re);
  PetscFree(start_i_re);
  PetscFree(start_j_re);
  return(0);
}

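/* Create the VecScatter that moves the (permuted) fine vector onto the ranks
   participating in the repartitioned DM, and compose it, the index set, and
   two work vectors ("isin", "scatter", "xtmp", "xp") with the fine DMDA. */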
/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode PCTelescopeSetUp_dmda_scatters(DM dmf,DM dmc)
{
  Vec            xred,yred,xtmp,x,xp;
  VecScatter     scatter;
  IS             isin;
  PetscInt       m,bs,st,ed;
  MPI_Comm       comm;
  VecType        vectype;

  PetscObjectGetComm((PetscObject)dmf,&comm);
  DMCreateGlobalVector(dmf,&x);
  VecGetBlockSize(x,&bs);
  VecGetType(x,&vectype);

  /* cannot use VecDuplicate as xp is already composed with dmf */
  /*VecDuplicate(x,&xp);*/
  {
    PetscInt m,M;

    VecGetSize(x,&M);
    VecGetLocalSize(x,&m);
    VecCreate(comm,&xp);
    VecSetSizes(xp,m,M);
    VecSetBlockSize(xp,bs);
    VecSetType(xp,vectype);
  }

  m = 0;
  xred = NULL;
  yred = NULL;
  if (dmc) {
    DMCreateGlobalVector(dmc,&xred);
    VecDuplicate(xred,&yred);
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,ed-st,st,1,&isin);
    VecGetLocalSize(xred,&m);
  } else {
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,vectype);
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);

  PetscObjectCompose((PetscObject)dmf,"isin",(PetscObject)isin);
  PetscObjectCompose((PetscObject)dmf,"scatter",(PetscObject)scatter);
  PetscObjectCompose((PetscObject)dmf,"xtmp",(PetscObject)xtmp);
  PetscObjectCompose((PetscObject)dmf,"xp",(PetscObject)xp);

  VecDestroy(&xred);
  VecDestroy(&yred);
  VecDestroy(&x);
  return(0);
}

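/* Matrix creation callback for the DMShell: delegate to the wrapped DMDA and,
   for Neumann boundary conditions, attach the constant nullspace so the
   Krylov method can handle the singular operator. */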
PetscErrorCode DMCreateMatrix_ShellDA(DM dm,Mat *A)
{
  DM             da;
  MPI_Comm       comm;
  PetscMPIInt    size;
  UserContext    *ctx = NULL;
  PetscInt       M,N;

  DMShellGetContext(dm,&da);
  PetscObjectGetComm((PetscObject)da,&comm);
  MPI_Comm_size(comm,&size);
  DMCreateMatrix(da,A);
  MatGetSize(*A,&M,&N);
  PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA (%D x %D)\n",(PetscInt)size,M,N);

  DMGetApplicationContext(dm,&ctx);
  if (ctx->bcType == NEUMANN) {
    MatNullSpace nullspace = NULL;
    PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: using neumann bcs\n",(PetscInt)size);

    MatGetNullSpace(*A,&nullspace);
    if (!nullspace) {
      PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: operator does not have nullspace - attaching\n",(PetscInt)size);
      MatNullSpaceCreate(comm,PETSC_TRUE,0,0,&nullspace);
      MatSetNullSpace(*A,nullspace);
      MatNullSpaceDestroy(&nullspace);
    } else {
      PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: operator already has a nullspace\n",(PetscInt)size);
    }
  }
  return(0);
}

PetscErrorCode DMCreateGlobalVector_ShellDA(DM dm,Vec *x)
{
  DM             da;

  DMShellGetContext(dm,&da);
  DMCreateGlobalVector(da,x);
  VecSetDM(*x,dm);
  return(0);
}

PetscErrorCode DMCreateLocalVector_ShellDA(DM dm,Vec *x)
{
  DM             da;

  DMShellGetContext(dm,&da);
  DMCreateLocalVector(da,x);
  VecSetDM(*x,dm);
  return(0);
}

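/* Coarsening callback for the DMShell: return the coarse DM previously
   attached with DMSetCoarseDM(); the hierarchy must already be defined. */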
PetscErrorCode DMCoarsen_ShellDA(DM dm,MPI_Comm comm,DM *dmc)
{
  *dmc = NULL;
  DMGetCoarseDM(dm,dmc);
  if (!*dmc) {
    SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"The coarse DM should never be NULL. The DM hierarchy should have already been defined");
  } else {
    PetscObjectReference((PetscObject)(*dmc));
  }
  return(0);
}

PetscErrorCode DMCreateInterpolation_ShellDA(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  DM             da1,da2;

  DMShellGetContext(dm1,&da1);
  DMShellGetContext(dm2,&da2);
  DMCreateInterpolation(da1,da2,mat,vec);
  return(0);
}

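/* Couple a fine/coarse DMShell pair for use with PCTelescope: build the
   permutation matrix and scatters, then compose the field/state scatter
   callbacks. "PCTelescopeFieldScatter" and "PCTelescopeStateScatter" are the
   function names PCTelescope is expected to query on the parent (fine) DM
   when -pc_telescope_use_coarse_dm is active. */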
PetscErrorCode DMShellDASetUp_TelescopeDMScatter(DM dmf_shell,DM dmc_shell)
{
  Mat            P = NULL;
  DM             dmf = NULL,dmc = NULL;

  DMShellGetContext(dmf_shell,&dmf);
  if (dmc_shell) {
    DMShellGetContext(dmc_shell,&dmc);
  }
  DMDACreatePermutation_2d(dmc,dmf,&P);
  PetscObjectCompose((PetscObject)dmf,"P",(PetscObject)P);
  PCTelescopeSetUp_dmda_scatters(dmf,dmc);
  PetscObjectComposeFunction((PetscObject)dmf_shell,"PCTelescopeFieldScatter",DMFieldScatter_ShellDA);
  PetscObjectComposeFunction((PetscObject)dmf_shell,"PCTelescopeStateScatter",DMStateScatter_ShellDA);
  return(0);
}

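/* Scatter a fine-level field to the sub-communicator: apply P^T to permute x
   into the repartitioned ordering, scatter onto the participating ranks, then
   copy the local entries into the coarse vector xc. */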
PetscErrorCode DMShellDAFieldScatter_Forward(DM dmf,Vec x,DM dmc,Vec xc)
{
  Mat               P = NULL;
  Vec               xp = NULL,xtmp = NULL;
  VecScatter        scatter = NULL;
  const PetscScalar *x_array;
  PetscInt          i,st,ed;

  PetscObjectQuery((PetscObject)dmf,"P",(PetscObject*)&P);
  PetscObjectQuery((PetscObject)dmf,"xp",(PetscObject*)&xp);
  PetscObjectQuery((PetscObject)dmf,"scatter",(PetscObject*)&scatter);
  PetscObjectQuery((PetscObject)dmf,"xtmp",(PetscObject*)&xtmp);
  if (!P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  if (!xp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xp\" to be composed with the parent (fine) DM");
  if (!scatter) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"scatter\" to be composed with the parent (fine) DM");
  if (!xtmp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xtmp\" to be composed with the parent (fine) DM");

  MatMultTranspose(P,x,xp);

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

  /* copy vector entries into xred */
  VecGetArrayRead(xtmp,&x_array);
  if (xc) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xc,&st,&ed);

    VecGetArray(xc,&LA_xred);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    VecRestoreArray(xc,&LA_xred);
  }
  VecRestoreArrayRead(xtmp,&x_array);
  return(0);
}

PetscErrorCode DMShellDAFieldScatter_Reverse(DM dmf,Vec y,DM dmc,Vec yc)
{
  Mat            P = NULL;
  Vec            xp = NULL,xtmp = NULL;
  VecScatter     scatter = NULL;
  PetscScalar    *array;
  PetscInt       i,st,ed;

  PetscObjectQuery((PetscObject)dmf,"P",(PetscObject*)&P);
  PetscObjectQuery((PetscObject)dmf,"xp",(PetscObject*)&xp);
  PetscObjectQuery((PetscObject)dmf,"scatter",(PetscObject*)&scatter);
  PetscObjectQuery((PetscObject)dmf,"xtmp",(PetscObject*)&xtmp);

  if (!P) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  if (!xp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xp\" to be composed with the parent (fine) DM");
  if (!scatter) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"scatter\" to be composed with the parent (fine) DM");
  if (!xtmp) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require \"xtmp\" to be composed with the parent (fine) DM");

  /* return vector */
  VecGetArray(xtmp,&array);
  if (yc) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yc,&st,&ed);
    VecGetArrayRead(yc,&LA_yred);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    VecRestoreArrayRead(yc,&LA_yred);
  }
  VecRestoreArray(xtmp,&array);
  VecScatterBegin(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  MatMult(P,xp,y);
  return(0);
}

PetscErrorCode DMFieldScatter_ShellDA(DM dmf_shell,Vec x,ScatterMode mode,DM dmc_shell,Vec xc)
{
  DM             dmf = NULL,dmc = NULL;

  DMShellGetContext(dmf_shell,&dmf);
  if (dmc_shell) {
    DMShellGetContext(dmc_shell,&dmc);
  }
  if (mode == SCATTER_FORWARD) {
    DMShellDAFieldScatter_Forward(dmf,x,dmc,xc);
  } else if (mode == SCATTER_REVERSE) {
    DMShellDAFieldScatter_Reverse(dmf,x,dmc,xc);
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell),PETSC_ERR_SUP,"Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return(0);
}

PetscErrorCode DMStateScatter_ShellDA(DM dmf_shell,ScatterMode mode,DM dmc_shell)
{
  PetscMPIInt    size_f = 0,size_c = 0;

  MPI_Comm_size(PetscObjectComm((PetscObject)dmf_shell),&size_f);
  if (dmc_shell) {
    MPI_Comm_size(PetscObjectComm((PetscObject)dmc_shell),&size_c);
  }
  if (mode == SCATTER_FORWARD) {
    PetscPrintf(PetscObjectComm((PetscObject)dmf_shell),"User supplied state scatter (fine [size %d]-> coarse [size %d])\n",(int)size_f,(int)size_c);
  } else if (mode == SCATTER_REVERSE) {
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell),PETSC_ERR_SUP,"Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return(0);
}

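/* Wrap a DMDA in a DMShell, forwarding vector/matrix creation, coarsening and
   interpolation to the underlying DMDA. */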
PetscErrorCode DMShellCreate_ShellDA(DM da,DM *dms)
{
  if (da) {
    DMShellCreate(PetscObjectComm((PetscObject)da),dms);
    DMShellSetContext(*dms,da);
    DMShellSetCreateGlobalVector(*dms,DMCreateGlobalVector_ShellDA);
    DMShellSetCreateLocalVector(*dms,DMCreateLocalVector_ShellDA);
    DMShellSetCreateMatrix(*dms,DMCreateMatrix_ShellDA);
    DMShellSetCoarsen(*dms,DMCoarsen_ShellDA);
    DMShellSetCreateInterpolation(*dms,DMCreateInterpolation_ShellDA);
  } else {
    *dms = NULL;
  }
  return(0);
}

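/* Destroy a DMShell created by DMShellCreate_ShellDA(), including the
   permutation matrix, scatter, index set and work vectors composed with the
   wrapped DMDA. */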
PetscErrorCode DMDestroyShellDMDA(DM *_dm)
{
  DM             dm,da = NULL;

  if (!_dm) return(0);
  dm = *_dm;
  if (!dm) return(0);

  DMShellGetContext(dm,&da);
  if (da) {
    Vec        vec;
    VecScatter scatter = NULL;
    IS         is = NULL;
    Mat        P = NULL;

    PetscObjectQuery((PetscObject)da,"P",(PetscObject*)&P);
    MatDestroy(&P);

    vec = NULL;
    PetscObjectQuery((PetscObject)da,"xp",(PetscObject*)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da,"scatter",(PetscObject*)&scatter);
    VecScatterDestroy(&scatter);

    vec = NULL;
    PetscObjectQuery((PetscObject)da,"xtmp",(PetscObject*)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da,"isin",(PetscObject*)&is);
    ISDestroy(&is);

    DMDestroy(&da);
  }
  DMDestroy(&dm);
  *_dm = NULL;
  return(0);
}

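/* Minimal two-level setup: a 17x17 DMDA (wrapped in a DMShell) on
   PETSC_COMM_WORLD, plus a same-size DMDA on rank 0 only, connected via
   DMSetCoarseDM() and the telescope scatters. */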
PetscErrorCode HierarchyCreate_Basic(DM *dm_f,DM *dm_c,UserContext *ctx)
{
  DM             dm,dmc,dm_shell,dmc_shell;
  PetscMPIInt    rank;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,17,17,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dm);
  DMSetFromOptions(dm);
  DMSetUp(dm);
  DMDASetUniformCoordinates(dm,0,1,0,1,0,0);
  DMDASetFieldName(dm,0,"Pressure");
  DMShellCreate_ShellDA(dm,&dm_shell);
  DMSetApplicationContext(dm_shell,ctx);

  dmc = NULL;
  dmc_shell = NULL;
  if (rank == 0) {
    DMDACreate2d(PETSC_COMM_SELF,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,17,17,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dmc);
    DMSetFromOptions(dmc);
    DMSetUp(dmc);
    DMDASetUniformCoordinates(dmc,0,1,0,1,0,0);
    DMDASetFieldName(dmc,0,"Pressure");
    DMShellCreate_ShellDA(dmc,&dmc_shell);
    DMSetApplicationContext(dmc_shell,ctx);
  }

  DMSetCoarseDM(dm_shell,dmc_shell);
  DMShellDASetUp_TelescopeDMScatter(dm_shell,dmc_shell);

  *dm_f = dm_shell;
  *dm_c = dmc_shell;
  return(0);
}

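/* General setup: for each of the -ndecomps communicator levels create a root
   DMDA and refine it (-level_nrefs - 1) times, wrap every DMDA in a DMShell,
   chain the shells coarse-to-fine with DMSetCoarseDM(), and install the
   telescope scatters where the communicator changes. dmlist[] runs coarse to
   fine: the DMs on the most reduced communicator (d = 0) come first, those on
   PETSC_COMM_WORLD (d = ndecomps-1) last. */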
PetscErrorCode HierarchyCreate(PetscInt *_nd,PetscInt *_nref,MPI_Comm **_cl,DM **_dl)
{
  PetscInt       d,k,ndecomps,ncoarsen,found,nx;
  PetscInt       levelrefs,*number;
  PetscSubcomm   *pscommlist;
  MPI_Comm       *commlist;
  DM             *dalist,*dmlist;
  PetscBool      set;

  ndecomps = 1;
  PetscOptionsGetInt(NULL,NULL,"-ndecomps",&ndecomps,NULL);
  ncoarsen = ndecomps - 1;
  if (ncoarsen < 0) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"-ndecomps must be >= 1");

  levelrefs = 2;
  PetscOptionsGetInt(NULL,NULL,"-level_nrefs",&levelrefs,NULL);
  PetscMalloc1(ncoarsen+1,&number);
  for (k=0; k<ncoarsen+1; k++) {
    number[k] = 2;
  }
  found = ncoarsen;
  set = PETSC_FALSE;
  PetscOptionsGetIntArray(NULL,NULL,"-level_comm_red_factor",number,&found,&set);
  if (set) {
    if (found != ncoarsen) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_USER,"Expected %D values for -level_comm_red_factor. Found %D",ncoarsen,found);
  }

  PetscMalloc1(ncoarsen+1,&pscommlist);
  for (k=0; k<ncoarsen+1; k++) {
    pscommlist[k] = NULL;
  }

  PetscMalloc1(ndecomps,&commlist);
  for (k=0; k<ndecomps; k++) {
    commlist[k] = MPI_COMM_NULL;
  }
  PetscMalloc1(ndecomps*levelrefs,&dalist);
  PetscMalloc1(ndecomps*levelrefs,&dmlist);
  for (k=0; k<ndecomps*levelrefs; k++) {
    dalist[k] = NULL;
    dmlist[k] = NULL;
  }

  CommHierarchyCreate(PETSC_COMM_WORLD,ncoarsen,number,pscommlist);
  for (k=0; k<ncoarsen; k++) {
    if (pscommlist[k]) {
      MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);
      if (pscommlist[k]->color == 0) {
        PetscCommDuplicate(comm_k,&commlist[k],NULL);
      }
    }
  }
  PetscCommDuplicate(PETSC_COMM_WORLD,&commlist[ndecomps-1],NULL);

  for (k=0; k<ncoarsen; k++) {
    if (pscommlist[k]) {
      PetscSubcommDestroy(&pscommlist[k]);
    }
  }

  nx = 17;
  PetscOptionsGetInt(NULL,NULL,"-m",&nx,NULL);
  for (d=0; d<ndecomps; d++) {
    DM   dmroot = NULL;
    char name[PETSC_MAX_PATH_LEN];

    if (commlist[d] != MPI_COMM_NULL) {
      DMDACreate2d(commlist[d],DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,nx,nx,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dmroot);
      DMSetUp(dmroot);
      DMDASetUniformCoordinates(dmroot,0,1,0,1,0,0);
      DMDASetFieldName(dmroot,0,"Pressure");
      PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"root-decomp-%D",d);
      PetscObjectSetName((PetscObject)dmroot,name);
      /*DMView(dmroot,PETSC_VIEWER_STDOUT_(commlist[d]));*/
    }

    dalist[d*levelrefs + 0] = dmroot;
    for (k=1; k<levelrefs; k++) {
      DM dmref = NULL;

      if (commlist[d] != MPI_COMM_NULL) {
        DMRefine(dalist[d*levelrefs + (k-1)],MPI_COMM_NULL,&dmref);
        PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"ref%D-decomp-%D",k,d);
        PetscObjectSetName((PetscObject)dmref,name);
        DMDAGetInfo(dmref,NULL,&nx,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
        /*DMView(dmref,PETSC_VIEWER_STDOUT_(commlist[d]));*/
      }
      dalist[d*levelrefs + k] = dmref;
    }
    MPI_Allreduce(MPI_IN_PLACE,&nx,1,MPIU_INT,MPI_MAX,PETSC_COMM_WORLD);
  }

  /* create the hierarchy of DMShells */
  for (d=0; d<ndecomps; d++) {
    char name[PETSC_MAX_PATH_LEN];

    UserContext *ctx = NULL;
    if (commlist[d] != MPI_COMM_NULL) {
      UserContextCreate(commlist[d],&ctx);
      for (k=0; k<levelrefs; k++) {
        DMShellCreate_ShellDA(dalist[d*levelrefs + k],&dmlist[d*levelrefs + k]);
        DMSetApplicationContext(dmlist[d*levelrefs + k],ctx);
        PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"level%D-decomp-%D",k,d);
        PetscObjectSetName((PetscObject)dmlist[d*levelrefs + k],name);
      }
    }
  }

  /* set all the coarse DMs */
  for (k=1; k<ndecomps*levelrefs; k++) { /* skip first DM as it doesn't have a coarse representation */
    DM dmfine = NULL,dmcoarse = NULL;

    dmfine = dmlist[k];
    dmcoarse = dmlist[k-1];
    if (dmfine) {
      DMSetCoarseDM(dmfine,dmcoarse);
    }
  }

  /* do special setup on the fine DM coupling different decompositions */
  for (d=1; d<ndecomps; d++) { /* skip first decomposition as it doesn't have a coarse representation */
    DM dmfine = NULL,dmcoarse = NULL;

    dmfine = dmlist[d*levelrefs + 0];
    dmcoarse = dmlist[(d-1)*levelrefs + (levelrefs-1)];
    if (dmfine) {
      DMShellDASetUp_TelescopeDMScatter(dmfine,dmcoarse);
    }
  }

  PetscFree(number);
  for (k=0; k<ncoarsen; k++) {
    PetscSubcommDestroy(&pscommlist[k]);
  }
  PetscFree(pscommlist);

  if (_nd) {
    *_nd = ndecomps;
  }
  if (_nref) {
    *_nref = levelrefs;
  }
  if (_cl) {
    *_cl = commlist;
  } else {
    for (k=0; k<ndecomps; k++) {
      if (commlist[k] != MPI_COMM_NULL) {
        PetscCommDestroy(&commlist[k]);
      }
    }
    PetscFree(commlist);
  }
  if (_dl) {
    *_dl = dmlist;
    PetscFree(dalist);
  } else {
    for (k=0; k<ndecomps*levelrefs; k++) {
      DMDestroy(&dmlist[k]);
      DMDestroy(&dalist[k]);
    }
    PetscFree(dmlist);
    PetscFree(dalist);
  }
  return(0);
}

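/* -tid 1: exercise construction and destruction of the communicator/DM
   hierarchy without performing a solve. */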
PetscErrorCode test_hierarchy(void)
{
  PetscInt       d,k,nd,nref;
  MPI_Comm       *comms;
  DM             *dms;

  HierarchyCreate(&nd,&nref,&comms,&dms);

  /* destroy user context */
  for (d=0; d<nd; d++) {
    DM first = dms[d*nref+0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first,&ctx);
      if (ctx) { PetscFree(ctx); }
      DMSetApplicationContext(first,NULL);
    }
    for (k=1; k<nref; k++) {
      DM dm = dms[d*nref+k];
      if (dm) {
        DMSetApplicationContext(dm,NULL);
      }
    }
  }

  /* destroy DMs */
  for (k=0; k<nd*nref; k++) {
    if (dms[k]) {
      DMDestroyShellDMDA(&dms[k]);
    }
  }
  PetscFree(dms);

  /* destroy communicators */
  for (k=0; k<nd; k++) {
    if (comms[k] != MPI_COMM_NULL) {
      PetscCommDestroy(&comms[k]);
    }
  }
  PetscFree(comms);
  return(0);
}

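/* -tid 0: solve on the two-level hierarchy from HierarchyCreate_Basic() using
   PCTelescope with the user-provided coarse DM. A typical invocation
   (mirroring the basic_dirichlet test below) would be, e.g.,
     mpiexec -n 4 ./ex73 -tid 0 -ksp_type gcr -pc_type telescope \
             -telescope_pc_type lu -telescope_ksp_type fgmres */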
PetscErrorCode test_basic(void)
{
  DM             dmF,dmdaF = NULL,dmC = NULL;
  Mat            A;
  Vec            x,b;
  KSP            ksp;
  PC             pc;
  UserContext    *user = NULL;

  UserContextCreate(PETSC_COMM_WORLD,&user);
  HierarchyCreate_Basic(&dmF,&dmC,user);
  DMShellGetContext(dmF,&dmdaF);

  DMCreateMatrix(dmF,&A);
  DMCreateGlobalVector(dmF,&x);
  DMCreateGlobalVector(dmF,&b);
  ComputeRHS_DMDA(dmdaF,b,user);

  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetComputeOperators(ksp,ComputeMatrix_ShellDA,user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp,dmF);
  KSPSetDMActive(ksp,PETSC_TRUE);
  KSPSetFromOptions(ksp);
  KSPGetPC(ksp,&pc);
  PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);

  KSPSolve(ksp,b,x);

  if (dmC) {
    DMDestroyShellDMDA(&dmC);
  }
  DMDestroyShellDMDA(&dmF);
  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);
  PetscFree(user);
  return(0);
}

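/* -tid 2: solve with multigrid over the full hierarchy; coarse levels of PCMG
   can themselves use PCTelescope with the user-defined coarse DM, e.g.
   (abbreviated from the mg_2lv_2mg test below)
     mpiexec -n 6 ./ex73 -tid 2 -ksp_type gcr -pc_type mg -pc_mg_levels 3 \
             -level_nrefs 3 -ndecomps 2 -mg_coarse_pc_type telescope \
             -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg */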
PetscErrorCode test_mg(void)
{
  DM             dmF,dmdaF = NULL,*dms = NULL;
  Mat            A;
  Vec            x,b;
  KSP            ksp;
  PetscInt       k,d,nd,nref;
  MPI_Comm       *comms = NULL;
  UserContext    *user = NULL;

  HierarchyCreate(&nd,&nref,&comms,&dms);
  dmF = dms[nd*nref-1];

  DMShellGetContext(dmF,&dmdaF);
  DMGetApplicationContext(dmF,&user);

  DMCreateMatrix(dmF,&A);
  DMCreateGlobalVector(dmF,&x);
  DMCreateGlobalVector(dmF,&b);
  ComputeRHS_DMDA(dmdaF,b,user);

  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetComputeOperators(ksp,ComputeMatrix_ShellDA,user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp,dmF);
  KSPSetDMActive(ksp,PETSC_TRUE);
  KSPSetFromOptions(ksp);

  KSPSolve(ksp,b,x);

  for (d=0; d<nd; d++) {
    DM first = dms[d*nref+0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first,&ctx);
      if (ctx) { PetscFree(ctx); }
      DMSetApplicationContext(first,NULL);
    }
    for (k=1; k<nref; k++) {
      DM dm = dms[d*nref+k];
      if (dm) {
        DMSetApplicationContext(dm,NULL);
      }
    }
  }

  for (k=0; k<nd*nref; k++) {
    if (dms[k]) {
      DMDestroyShellDMDA(&dms[k]);
    }
  }
  PetscFree(dms);

  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);

  for (k=0; k<nd; k++) {
    if (comms[k] != MPI_COMM_NULL) {
      PetscCommDestroy(&comms[k]);
    }
  }
  PetscFree(comms);
  return(0);
}

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       test_id = 0;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-tid",&test_id,NULL);
  switch (test_id) {
  case 0:
    test_basic();
    break;
  case 1:
    test_hierarchy();
    break;
  case 2:
    test_mg();
    break;
  default:
    SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"-tid must be 0,1,2");
  }
  ierr = PetscFinalize();
  return ierr;
}

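/* Assemble the forcing term f = exp(-x^2/nu) exp(-y^2/nu) scaled by the cell
   area Hx*Hy. For Neumann boundary conditions the constant component is
   projected out so the right-hand side is consistent with the singular
   operator. */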
PetscErrorCode ComputeRHS_DMDA(DM da,Vec b,void *ctx)
{
  UserContext    *user = (UserContext*)ctx;
  PetscInt       i,j,mx,my,xm,ym,xs,ys;
  PetscScalar    Hx,Hy;
  PetscScalar    **array;
  PetscBool      isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da,DMDA,&isda);
  if (!isda) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"DM provided must be a DMDA");
  DMDAGetInfo(da,NULL,&mx,&my,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  Hx   = 1.0 / (PetscReal)(mx-1);
  Hy   = 1.0 / (PetscReal)(my-1);
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAVecGetArray(da,b,&array);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      array[j][i] = PetscExpScalar(-((PetscReal)i*Hx)*((PetscReal)i*Hx)/user->nu)*PetscExpScalar(-((PetscReal)j*Hy)*((PetscReal)j*Hy)/user->nu)*Hx*Hy;
    }
  }
  DMDAVecRestoreArray(da, b, &array);
  VecAssemblyBegin(b);
  VecAssemblyEnd(b);

  /* force the right-hand side to be consistent for the singular matrix */
  /* note this is really a hack; normally the model would provide a consistent right-hand side */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,0,&nullspace);
    MatNullSpaceRemove(nullspace,b);
    MatNullSpaceDestroy(&nullspace);
  }
  return(0);
}

PetscErrorCode ComputeRho(PetscInt i, PetscInt j, PetscInt mx, PetscInt my, PetscReal centerRho, PetscReal *rho)
{
  if ((i > mx/3.0) && (i < 2.0*mx/3.0) && (j > my/3.0) && (j < 2.0*my/3.0)) {
    *rho = centerRho;
  } else {
    *rho = 1.0;
  }
  return(0);
}

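/* Assemble the standard 5-point finite-difference stencil for
   -div(rho grad u): off-diagonal entries -rho*Hx/Hy (north/south) and
   -rho*Hy/Hx (east/west), diagonal 2*rho*(Hx/Hy + Hy/Hx). Dirichlet boundary
   rows keep only the diagonal; Neumann rows sum the coefficients of the
   neighbours actually present. */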
PetscErrorCode ComputeMatrix_DMDA(DM da,Mat J,Mat jac,void *ctx)
{
  UserContext    *user = (UserContext*)ctx;
  PetscReal      centerRho;
  PetscInt       i,j,mx,my,xm,ym,xs,ys;
  PetscScalar    v[5];
  PetscReal      Hx,Hy,HydHx,HxdHy,rho;
  MatStencil     row, col[5];
  PetscBool      isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da,DMDA,&isda);
  if (!isda) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"DM provided must be a DMDA");
  MatZeroEntries(jac);
  centerRho = user->rho;
  DMDAGetInfo(da,NULL,&mx,&my,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  Hx        = 1.0 / (PetscReal)(mx-1);
  Hy        = 1.0 / (PetscReal)(my-1);
  HxdHy     = Hx/Hy;
  HydHx     = Hy/Hx;
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      row.i = i; row.j = j;
      ComputeRho(i, j, mx, my, centerRho, &rho);
      if (i==0 || j==0 || i==mx-1 || j==my-1) {
        if (user->bcType == DIRICHLET) {
          v[0] = 2.0*rho*(HxdHy + HydHx);
          MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
        } else if (user->bcType == NEUMANN) {
          PetscInt numx = 0, numy = 0, num = 0;
          if (j!=0) {
            v[num] = -rho*HxdHy;  col[num].i = i;    col[num].j = j-1;
            numy++; num++;
          }
          if (i!=0) {
            v[num] = -rho*HydHx;  col[num].i = i-1;  col[num].j = j;
            numx++; num++;
          }
          if (i!=mx-1) {
            v[num] = -rho*HydHx;  col[num].i = i+1;  col[num].j = j;
            numx++; num++;
          }
          if (j!=my-1) {
            v[num] = -rho*HxdHy;  col[num].i = i;    col[num].j = j+1;
            numy++; num++;
          }
          v[num] = numx*rho*HydHx + numy*rho*HxdHy;  col[num].i = i; col[num].j = j;
          num++;
          MatSetValuesStencil(jac,1,&row,num,col,v,INSERT_VALUES);
        }
      } else {
        v[0] = -rho*HxdHy;              col[0].i = i;   col[0].j = j-1;
        v[1] = -rho*HydHx;              col[1].i = i-1; col[1].j = j;
        v[2] = 2.0*rho*(HxdHy + HydHx); col[2].i = i;   col[2].j = j;
        v[3] = -rho*HydHx;              col[3].i = i+1; col[3].j = j;
        v[4] = -rho*HxdHy;              col[4].i = i;   col[4].j = j+1;
        MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return(0);
}

PetscErrorCode ComputeMatrix_ShellDA(KSP ksp,Mat J,Mat jac,void *ctx)
{
  DM             dm,da;

  KSPGetDM(ksp,&dm);
  DMShellGetContext(dm,&da);
  ComputeMatrix_DMDA(da,J,jac,ctx);
  return(0);
}

/*TEST

  test:
    suffix: basic_dirichlet
    nsize: 4
    args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type lu -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr

  test:
    suffix: basic_neumann
    nsize: 4
    requires: !single
    args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type jacobi -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr -bc_type neumann

  test:
    suffix: mg_2lv_2mg
    nsize: 6
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 2 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -level_comm_red_factor 6 -mg_coarse_telescope_mg_coarse_pc_type lu

  test:
    suffix: mg_3lv_2mg
    nsize: 4
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu -m 5

  test:
    suffix: mg_3lv_2mg_customcommsize
    nsize: 12
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm  -m 5 -level_comm_red_factor 2,6 -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu

 TEST*/