Actual source code: ex73.c


/*
This example was derived from src/ksp/ksp/tutorials ex29.c

Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

   -div \rho grad u = f,  0 < x,y < 1,

with forcing function

   f = e^{-x^2/\nu} e^{-y^2/\nu}

with Dirichlet boundary conditions

   u = f(x,y) for x = 0, x = 1, y = 0, y = 1

or pure Neumann boundary conditions.
*/
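
/*
  Discretization note: ComputeMatrix_DMDA() below assembles the standard
  vertex-centered 5-point finite-difference stencil on a uniform mx x my grid
  with spacings Hx = 1/(mx-1) and Hy = 1/(my-1). For an interior node (i,j)
  the assembled row is

    south/north: -rho*Hx/Hy    west/east: -rho*Hy/Hx    centre: 2*rho*(Hx/Hy + Hy/Hx)

  i.e. the usual stencil scaled by Hx*Hy, matching the Hx*Hy scaling of the
  right-hand side in ComputeRHS_DMDA().
*/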

static char help[] = "Solves 2D inhomogeneous Laplacian. Demonstrates using PCTelescopeSetCoarseDM functionality of PCTelescope via a DMShell\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

PetscErrorCode ComputeMatrix_ShellDA(KSP, Mat, Mat, void *);
PetscErrorCode ComputeMatrix_DMDA(DM, Mat, Mat, void *);
PetscErrorCode ComputeRHS_DMDA(DM, Vec, void *);
PetscErrorCode DMShellCreate_ShellDA(DM, DM *);
PetscErrorCode DMFieldScatter_ShellDA(DM, Vec, ScatterMode, DM, Vec);
PetscErrorCode DMStateScatter_ShellDA(DM, ScatterMode, DM);

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;

typedef struct {
  PetscReal rho;
  PetscReal nu;
  BCType    bcType;
  MPI_Comm  comm;
} UserContext;

PetscErrorCode UserContextCreate(MPI_Comm comm, UserContext **ctx)
{
  UserContext *user;
  const char  *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt     bc;

  PetscCalloc1(1, &user);
  user->comm = comm;
  PetscOptionsBegin(comm, "", "Options for the inhomogeneous Poisson equation", "DM");
  user->rho = 1.0;
  PetscOptionsReal("-rho", "The conductivity", "ex29.c", user->rho, &user->rho, NULL);
  user->nu = 0.1;
  PetscOptionsReal("-nu", "The width of the Gaussian source", "ex29.c", user->nu, &user->nu, NULL);
  bc = (PetscInt)DIRICHLET;
  PetscOptionsEList("-bc_type", "Type of boundary condition", "ex29.c", bcTypes, 2, bcTypes[0], &bc, NULL);
  user->bcType = (BCType)bc;
  PetscOptionsEnd();
  *ctx = user;
  return 0;
}
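
/*
  The options registered above are queried from the PETSc options database, so
  the coefficients can be set on the command line, for example

    ./ex73 -rho 2.0 -nu 0.05 -bc_type neumann

  The defaults are rho = 1.0, nu = 0.1 and Dirichlet boundary conditions.
*/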

PetscErrorCode CommCoarsen(MPI_Comm comm, PetscInt number, PetscSubcomm *p)
{
  PetscSubcomm psubcomm;

  PetscSubcommCreate(comm, &psubcomm);
  PetscSubcommSetNumber(psubcomm, number);
  PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_INTERLACED);
  *p = psubcomm;
  return 0;
}
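
/*
  A minimal sketch (the helper below is illustrative only and is not used by
  the tests in this file) of what CommCoarsen() produces: with 8 ranks and
  number = 2, PETSC_SUBCOMM_INTERLACED deals the parent ranks out round-robin,
  giving two child communicators of size 4 - ranks {0,2,4,6} (color 0) and
  {1,3,5,7} (color 1). The hierarchy routines below keep only the color-0 child.
*/
PETSC_UNUSED static PetscErrorCode CommCoarsenDemo(MPI_Comm comm)
{
  PetscSubcomm ps;
  PetscMPIInt  child_size;

  CommCoarsen(comm, 2, &ps);
  MPI_Comm_size(PetscSubcommChild(ps), &child_size);
  /* prints once per child communicator, from that child's rank 0 */
  PetscPrintf(PetscSubcommChild(ps), "color %d: child comm size %d\n", (int)ps->color, (int)child_size);
  PetscSubcommDestroy(&ps);
  return 0;
}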

PetscErrorCode CommHierarchyCreate(MPI_Comm comm, PetscInt n, PetscInt number[], PetscSubcomm pscommlist[])
{
  PetscInt  k;
  PetscBool view_hierarchy = PETSC_FALSE;

  for (k = 0; k < n; k++) pscommlist[k] = NULL;

  if (n < 1) return 0;

  CommCoarsen(comm, number[n - 1], &pscommlist[n - 1]);
  for (k = n - 2; k >= 0; k--) {
    MPI_Comm comm_k = PetscSubcommChild(pscommlist[k + 1]);
    if (pscommlist[k + 1]->color == 0) CommCoarsen(comm_k, number[k], &pscommlist[k]);
  }

  PetscOptionsGetBool(NULL, NULL, "-view_hierarchy", &view_hierarchy, NULL);
  if (view_hierarchy) {
    PetscMPIInt size;

    MPI_Comm_size(comm, &size);
    PetscPrintf(comm, "level[%" PetscInt_FMT "] size %d\n", n, (int)size);
    for (k = n - 1; k >= 0; k--) {
      if (pscommlist[k]) {
        MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);

        if (pscommlist[k]->color == 0) {
          MPI_Comm_size(comm_k, &size);
          PetscPrintf(comm_k, "level[%" PetscInt_FMT "] size %d\n", k, (int)size);
        }
      }
    }
  }
  return 0;
}
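
/*
  With -view_hierarchy the routine above reports the communicator size at every
  level. As a sketch, on 8 ranks with n = 2 and number = {2, 2} one would
  expect output of the form

    level[2] size 8
    level[1] size 4
    level[0] size 2

  since each level keeps 1/number[k] of the ranks of the level above it.
*/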

/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode _DMDADetermineRankFromGlobalIJ_2d(PetscInt i, PetscInt j, PetscInt Mp, PetscInt Np, PetscInt start_i[], PetscInt start_j[], PetscInt span_i[], PetscInt span_j[], PetscMPIInt *_pi, PetscMPIInt *_pj, PetscMPIInt *rank_re)
{
  PetscInt pi, pj, n;

  *rank_re = -1;
  pi = pj = -1;
  if (_pi) {
    for (n = 0; n < Mp; n++) {
      if ((i >= start_i[n]) && (i < start_i[n] + span_i[n])) {
        pi = n;
        break;
      }
    }
    *_pi = (PetscMPIInt)pi;
  }

  if (_pj) {
    for (n = 0; n < Np; n++) {
      if ((j >= start_j[n]) && (j < start_j[n] + span_j[n])) {
        pj = n;
        break;
      }
    }
    *_pj = (PetscMPIInt)pj;
  }

  *rank_re = (PetscMPIInt)(pi + pj * Mp);
  return 0;
}

/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode _DMDADetermineGlobalS0_2d(PetscMPIInt rank_re, PetscInt Mp_re, PetscInt Np_re, PetscInt range_i_re[], PetscInt range_j_re[], PetscInt *s0)
{
  PetscInt    i, j, start_IJ = 0;
  PetscMPIInt rank_ij;

  *s0 = -1;
  for (j = 0; j < Np_re; j++) {
    for (i = 0; i < Mp_re; i++) {
      rank_ij = (PetscMPIInt)(i + j * Mp_re);
      if (rank_ij < rank_re) start_IJ += range_i_re[i] * range_j_re[j];
    }
  }
  *s0 = start_IJ;
  return 0;
}
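
/*
  Worked example for the two helpers above (the values are hypothetical):
  suppose a 5x5 grid is repartitioned onto a 2x2 rank grid with
  range_i_re = {3,2} and range_j_re = {3,2}. The global node (i,j) = (4,4)
  falls in pi = 1, pj = 1, i.e. rank_re = 1 + 1*2 = 3. The ranks numbered
  before rank 3 own 3*3 + 2*3 + 3*2 = 21 nodes, so s0 = 21; rank 3 itself owns
  the remaining 2*2 = 4 nodes, and 21 + 4 = 25 = 5*5 accounts for the grid.
*/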

/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode DMDACreatePermutation_2d(DM dmrepart, DM dmf, Mat *mat)
{
  PetscInt        k, sum, Mp_re = 0, Np_re = 0;
  PetscInt        nx, ny, sr, er, Mr, ndof;
  PetscInt        i, j, location, startI[2], endI[2], lenI[2];
  const PetscInt *_range_i_re = NULL, *_range_j_re = NULL;
  PetscInt       *range_i_re, *range_j_re;
  PetscInt       *start_i_re, *start_j_re;
  MPI_Comm        comm;
  Vec             V;
  Mat             Pscalar;

  PetscInfo(dmf, "setting up the permutation matrix (DMDA-2D)\n");
  PetscObjectGetComm((PetscObject)dmf, &comm);

  _range_i_re = _range_j_re = NULL;
  /* query the repartitioned DMDA, which is defined only on the child communicator */
  if (dmrepart) {
    DMDAGetInfo(dmrepart, NULL, NULL, NULL, NULL, &Mp_re, &Np_re, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
    DMDAGetOwnershipRanges(dmrepart, &_range_i_re, &_range_j_re, NULL);
  }

  /* note - assume rank 0 always participates */
  MPI_Bcast(&Mp_re, 1, MPIU_INT, 0, comm);
  MPI_Bcast(&Np_re, 1, MPIU_INT, 0, comm);

  PetscCalloc1(Mp_re, &range_i_re);
  PetscCalloc1(Np_re, &range_j_re);

  if (_range_i_re) PetscArraycpy(range_i_re, _range_i_re, Mp_re);
  if (_range_j_re) PetscArraycpy(range_j_re, _range_j_re, Np_re);

  MPI_Bcast(range_i_re, Mp_re, MPIU_INT, 0, comm);
  MPI_Bcast(range_j_re, Np_re, MPIU_INT, 0, comm);

  PetscMalloc1(Mp_re, &start_i_re);
  PetscMalloc1(Np_re, &start_j_re);

  sum = 0;
  for (k = 0; k < Mp_re; k++) {
    start_i_re[k] = sum;
    sum += range_i_re[k];
  }

  sum = 0;
  for (k = 0; k < Np_re; k++) {
    start_j_re[k] = sum;
    sum += range_j_re[k];
  }

  /* Create permutation */
  DMDAGetInfo(dmf, NULL, &nx, &ny, NULL, NULL, NULL, NULL, &ndof, NULL, NULL, NULL, NULL, NULL);
  DMGetGlobalVector(dmf, &V);
  VecGetSize(V, &Mr);
  VecGetOwnershipRange(V, &sr, &er);
  DMRestoreGlobalVector(dmf, &V);
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm, &Pscalar);
  MatSetSizes(Pscalar, (er - sr), (er - sr), Mr, Mr);
  MatSetType(Pscalar, MATAIJ);
  MatSeqAIJSetPreallocation(Pscalar, 1, NULL);
  MatMPIAIJSetPreallocation(Pscalar, 1, NULL, 1, NULL);

  DMDAGetCorners(dmf, NULL, NULL, NULL, &lenI[0], &lenI[1], NULL);
  DMDAGetCorners(dmf, &startI[0], &startI[1], NULL, &endI[0], &endI[1], NULL);
  endI[0] += startI[0];
  endI[1] += startI[1];

  for (j = startI[1]; j < endI[1]; j++) {
    for (i = startI[0]; i < endI[0]; i++) {
      PetscMPIInt rank_ijk_re, rank_reI[] = {0, 0};
      PetscInt    s0_re;
      PetscInt    ii, jj, local_ijk_re, mapped_ijk;
      PetscInt    lenI_re[] = {0, 0};

      location = (i - startI[0]) + (j - startI[1]) * lenI[0];
      _DMDADetermineRankFromGlobalIJ_2d(i, j, Mp_re, Np_re, start_i_re, start_j_re, range_i_re, range_j_re, &rank_reI[0], &rank_reI[1], &rank_ijk_re);

      _DMDADetermineGlobalS0_2d(rank_ijk_re, Mp_re, Np_re, range_i_re, range_j_re, &s0_re);

      ii = i - start_i_re[rank_reI[0]];
      jj = j - start_j_re[rank_reI[1]];

      lenI_re[0]   = range_i_re[rank_reI[0]];
      lenI_re[1]   = range_j_re[rank_reI[1]];
      local_ijk_re = ii + jj * lenI_re[0];
      mapped_ijk   = s0_re + local_ijk_re;
      MatSetValue(Pscalar, sr + location, mapped_ijk, 1.0, INSERT_VALUES);
    }
  }
  MatAssemblyBegin(Pscalar, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar, MAT_FINAL_ASSEMBLY);

  *mat = Pscalar;

  PetscFree(range_i_re);
  PetscFree(range_j_re);
  PetscFree(start_i_re);
  PetscFree(start_j_re);
  return 0;
}
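
/*
  Pscalar has exactly one unit entry per row: it maps the ordering of the fine
  (parent) DMDA to the ordering the repartitioned DMDA uses for the same
  global grid. It is applied with MatMultTranspose() when restricting a field
  to the coarse decomposition and with MatMult() when prolonging it back; see
  DMShellDAFieldScatter_Forward() and DMShellDAFieldScatter_Reverse() below.
*/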

/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode PCTelescopeSetUp_dmda_scatters(DM dmf, DM dmc)
{
  Vec        xred, yred, xtmp, x, xp;
  VecScatter scatter;
  IS         isin;
  PetscInt   m, bs, st, ed;
  MPI_Comm   comm;
  VecType    vectype;

  PetscObjectGetComm((PetscObject)dmf, &comm);
  DMCreateGlobalVector(dmf, &x);
  VecGetBlockSize(x, &bs);
  VecGetType(x, &vectype);

  /* cannot use VecDuplicate as xp is already composed with dmf */
  /*VecDuplicate(x,&xp);*/
  {
    PetscInt m, M;

    VecGetSize(x, &M);
    VecGetLocalSize(x, &m);
    VecCreate(comm, &xp);
    VecSetSizes(xp, m, M);
    VecSetBlockSize(xp, bs);
    VecSetType(xp, vectype);
  }

  m    = 0;
  xred = NULL;
  yred = NULL;
  if (dmc) {
    DMCreateGlobalVector(dmc, &xred);
    VecDuplicate(xred, &yred);
    VecGetOwnershipRange(xred, &st, &ed);
    ISCreateStride(comm, ed - st, st, 1, &isin);
    VecGetLocalSize(xred, &m);
  } else {
    VecGetOwnershipRange(x, &st, &ed);
    ISCreateStride(comm, 0, st, 1, &isin);
  }
  ISSetBlockSize(isin, bs);
  VecCreate(comm, &xtmp);
  VecSetSizes(xtmp, m, PETSC_DECIDE);
  VecSetBlockSize(xtmp, bs);
  VecSetType(xtmp, vectype);
  VecScatterCreate(x, isin, xtmp, NULL, &scatter);

  PetscObjectCompose((PetscObject)dmf, "isin", (PetscObject)isin);
  PetscObjectCompose((PetscObject)dmf, "scatter", (PetscObject)scatter);
  PetscObjectCompose((PetscObject)dmf, "xtmp", (PetscObject)xtmp);
  PetscObjectCompose((PetscObject)dmf, "xp", (PetscObject)xp);

  VecDestroy(&xred);
  VecDestroy(&yred);
  VecDestroy(&x);
  return 0;
}
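
/*
  Rather than returning the IS, VecScatter and work vectors created above, this
  routine composes them with the fine DMDA under the names "isin", "scatter",
  "xtmp" and "xp". The scatter routines below recover them with
  PetscObjectQuery(), and DMDestroyShellDMDA() releases them, e.g.

    VecScatter scatter = NULL;
    PetscObjectQuery((PetscObject)dmf, "scatter", (PetscObject *)&scatter);
*/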

PetscErrorCode DMCreateMatrix_ShellDA(DM dm, Mat *A)
{
  DM           da;
  MPI_Comm     comm;
  PetscMPIInt  size;
  UserContext *ctx = NULL;
  PetscInt     M, N;

  DMShellGetContext(dm, &da);
  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_size(comm, &size);
  DMCreateMatrix(da, A);
  MatGetSize(*A, &M, &N);
  PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA (%" PetscInt_FMT " x %" PetscInt_FMT ")\n", (PetscInt)size, M, N);

  DMGetApplicationContext(dm, &ctx);
  if (ctx->bcType == NEUMANN) {
    MatNullSpace nullspace = NULL;
    PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA: using neumann bcs\n", (PetscInt)size);

    MatGetNullSpace(*A, &nullspace);
    if (!nullspace) {
      PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA: operator does not have nullspace - attaching\n", (PetscInt)size);
      MatNullSpaceCreate(comm, PETSC_TRUE, 0, 0, &nullspace);
      MatSetNullSpace(*A, nullspace);
      MatNullSpaceDestroy(&nullspace);
    } else {
      PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA: operator already has a nullspace\n", (PetscInt)size);
    }
  }
  return 0;
}

PetscErrorCode DMCreateGlobalVector_ShellDA(DM dm, Vec *x)
{
  DM da;

  DMShellGetContext(dm, &da);
  DMCreateGlobalVector(da, x);
  VecSetDM(*x, dm);
  return 0;
}

PetscErrorCode DMCreateLocalVector_ShellDA(DM dm, Vec *x)
{
  DM da;

  DMShellGetContext(dm, &da);
  DMCreateLocalVector(da, x);
  VecSetDM(*x, dm);
  return 0;
}

PetscErrorCode DMCoarsen_ShellDA(DM dm, MPI_Comm comm, DM *dmc)
{
  *dmc = NULL;
  DMGetCoarseDM(dm, dmc);
  if (!*dmc) {
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "The coarse DM should never be NULL. The DM hierarchy should have already been defined");
  } else {
    PetscObjectReference((PetscObject)(*dmc));
  }
  return 0;
}

PetscErrorCode DMCreateInterpolation_ShellDA(DM dm1, DM dm2, Mat *mat, Vec *vec)
{
  DM da1, da2;

  DMShellGetContext(dm1, &da1);
  DMShellGetContext(dm2, &da2);
  DMCreateInterpolation(da1, da2, mat, vec);
  return 0;
}

PetscErrorCode DMShellDASetUp_TelescopeDMScatter(DM dmf_shell, DM dmc_shell)
{
  Mat P   = NULL;
  DM  dmf = NULL, dmc = NULL;

  DMShellGetContext(dmf_shell, &dmf);
  if (dmc_shell) DMShellGetContext(dmc_shell, &dmc);
  DMDACreatePermutation_2d(dmc, dmf, &P);
  PetscObjectCompose((PetscObject)dmf, "P", (PetscObject)P);
  PCTelescopeSetUp_dmda_scatters(dmf, dmc);
  PetscObjectComposeFunction((PetscObject)dmf_shell, "PCTelescopeFieldScatter", DMFieldScatter_ShellDA);
  PetscObjectComposeFunction((PetscObject)dmf_shell, "PCTelescopeStateScatter", DMStateScatter_ShellDA);
  return 0;
}
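
/*
  PCTelescope, when told to reuse the user-defined coarse DM (via
  PCTelescopeSetUseCoarseDM() or -pc_telescope_use_coarse_dm), is expected to
  look up the two functions composed above by name with
  PetscObjectQueryFunction() and call them to move fields and state between the
  fine and coarse communicators.
*/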

PetscErrorCode DMShellDAFieldScatter_Forward(DM dmf, Vec x, DM dmc, Vec xc)
{
  Mat                P  = NULL;
  Vec                xp = NULL, xtmp = NULL;
  VecScatter         scatter = NULL;
  const PetscScalar *x_array;
  PetscInt           i, st, ed;

  PetscObjectQuery((PetscObject)dmf, "P", (PetscObject *)&P);
  PetscObjectQuery((PetscObject)dmf, "xp", (PetscObject *)&xp);
  PetscObjectQuery((PetscObject)dmf, "scatter", (PetscObject *)&scatter);
  PetscObjectQuery((PetscObject)dmf, "xtmp", (PetscObject *)&xtmp);
  PetscCheck(P, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  PetscCheck(xp, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require \"xp\" to be composed with the parent (fine) DM");
  PetscCheck(scatter, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require \"scatter\" to be composed with the parent (fine) DM");
  PetscCheck(xtmp, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require \"xtmp\" to be composed with the parent (fine) DM");

  MatMultTranspose(P, x, xp);

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD);

  /* copy vector entries into xred */
  VecGetArrayRead(xtmp, &x_array);
  if (xc) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xc, &st, &ed);

    VecGetArray(xc, &LA_xred);
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    VecRestoreArray(xc, &LA_xred);
  }
  VecRestoreArrayRead(xtmp, &x_array);
  return 0;
}

PetscErrorCode DMShellDAFieldScatter_Reverse(DM dmf, Vec y, DM dmc, Vec yc)
{
  Mat          P  = NULL;
  Vec          xp = NULL, xtmp = NULL;
  VecScatter   scatter = NULL;
  PetscScalar *array;
  PetscInt     i, st, ed;

  PetscObjectQuery((PetscObject)dmf, "P", (PetscObject *)&P);
  PetscObjectQuery((PetscObject)dmf, "xp", (PetscObject *)&xp);
  PetscObjectQuery((PetscObject)dmf, "scatter", (PetscObject *)&scatter);
  PetscObjectQuery((PetscObject)dmf, "xtmp", (PetscObject *)&xtmp);
  PetscCheck(P, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  PetscCheck(xp, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require \"xp\" to be composed with the parent (fine) DM");
  PetscCheck(scatter, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require \"scatter\" to be composed with the parent (fine) DM");
  PetscCheck(xtmp, PETSC_COMM_SELF, PETSC_ERR_SUP, "Require \"xtmp\" to be composed with the parent (fine) DM");

  /* return vector */
  VecGetArray(xtmp, &array);
  if (yc) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yc, &st, &ed);
    VecGetArrayRead(yc, &LA_yred);
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    VecRestoreArrayRead(yc, &LA_yred);
  }
  VecRestoreArray(xtmp, &array);
  VecScatterBegin(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE);
  VecScatterEnd(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE);
  MatMult(P, xp, y);
  return 0;
}

PetscErrorCode DMFieldScatter_ShellDA(DM dmf_shell, Vec x, ScatterMode mode, DM dmc_shell, Vec xc)
{
  DM dmf = NULL, dmc = NULL;

  DMShellGetContext(dmf_shell, &dmf);
  if (dmc_shell) DMShellGetContext(dmc_shell, &dmc);
  if (mode == SCATTER_FORWARD) {
    DMShellDAFieldScatter_Forward(dmf, x, dmc, xc);
  } else if (mode == SCATTER_REVERSE) {
    DMShellDAFieldScatter_Reverse(dmf, x, dmc, xc);
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell), PETSC_ERR_SUP, "Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return 0;
}

PetscErrorCode DMStateScatter_ShellDA(DM dmf_shell, ScatterMode mode, DM dmc_shell)
{
  PetscMPIInt size_f = 0, size_c = 0;

  MPI_Comm_size(PetscObjectComm((PetscObject)dmf_shell), &size_f);
  if (dmc_shell) MPI_Comm_size(PetscObjectComm((PetscObject)dmc_shell), &size_c);
  if (mode == SCATTER_FORWARD) {
    PetscPrintf(PetscObjectComm((PetscObject)dmf_shell), "User supplied state scatter (fine [size %d]-> coarse [size %d])\n", (int)size_f, (int)size_c);
  } else if (mode == SCATTER_REVERSE) {
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell), PETSC_ERR_SUP, "Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return 0;
}

PetscErrorCode DMShellCreate_ShellDA(DM da, DM *dms)
{
  if (da) {
    DMShellCreate(PetscObjectComm((PetscObject)da), dms);
    DMShellSetContext(*dms, da);
    DMShellSetCreateGlobalVector(*dms, DMCreateGlobalVector_ShellDA);
    DMShellSetCreateLocalVector(*dms, DMCreateLocalVector_ShellDA);
    DMShellSetCreateMatrix(*dms, DMCreateMatrix_ShellDA);
    DMShellSetCoarsen(*dms, DMCoarsen_ShellDA);
    DMShellSetCreateInterpolation(*dms, DMCreateInterpolation_ShellDA);
  } else {
    *dms = NULL;
  }
  return 0;
}

PetscErrorCode DMDestroyShellDMDA(DM *_dm)
{
  DM dm, da = NULL;

  if (!_dm) return 0;
  dm = *_dm;
  if (!dm) return 0;

  DMShellGetContext(dm, &da);
  if (da) {
    Vec        vec;
    VecScatter scatter = NULL;
    IS         is      = NULL;
    Mat        P       = NULL;

    PetscObjectQuery((PetscObject)da, "P", (PetscObject *)&P);
    MatDestroy(&P);

    vec = NULL;
    PetscObjectQuery((PetscObject)da, "xp", (PetscObject *)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da, "scatter", (PetscObject *)&scatter);
    VecScatterDestroy(&scatter);

    vec = NULL;
    PetscObjectQuery((PetscObject)da, "xtmp", (PetscObject *)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da, "isin", (PetscObject *)&is);
    ISDestroy(&is);

    DMDestroy(&da);
  }
  PetscObjectComposeFunction((PetscObject)dm, "PCTelescopeFieldScatter", NULL);
  PetscObjectComposeFunction((PetscObject)dm, "PCTelescopeStateScatter", NULL);
  DMDestroy(&dm);
  *_dm = NULL;
  return 0;
}

PetscErrorCode HierarchyCreate_Basic(DM *dm_f, DM *dm_c, UserContext *ctx)
{
  DM          dm, dmc, dm_shell, dmc_shell;
  PetscMPIInt rank;

  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 17, 17, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &dm);
  DMSetFromOptions(dm);
  DMSetUp(dm);
  DMDASetUniformCoordinates(dm, 0, 1, 0, 1, 0, 0);
  DMDASetFieldName(dm, 0, "Pressure");
  DMShellCreate_ShellDA(dm, &dm_shell);
  DMSetApplicationContext(dm_shell, ctx);

  dmc       = NULL;
  dmc_shell = NULL;
  if (rank == 0) {
    DMDACreate2d(PETSC_COMM_SELF, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 17, 17, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &dmc);
    DMSetFromOptions(dmc);
    DMSetUp(dmc);
    DMDASetUniformCoordinates(dmc, 0, 1, 0, 1, 0, 0);
    DMDASetFieldName(dmc, 0, "Pressure");
    DMShellCreate_ShellDA(dmc, &dmc_shell);
    DMSetApplicationContext(dmc_shell, ctx);
  }

  DMSetCoarseDM(dm_shell, dmc_shell);
  DMShellDASetUp_TelescopeDMScatter(dm_shell, dmc_shell);

  *dm_f = dm_shell;
  *dm_c = dmc_shell;
  return 0;
}
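
/*
  HierarchyCreate_Basic() thus builds the smallest telescope configuration: a
  17x17 DMDA over all of PETSC_COMM_WORLD plus, on rank 0 only, a second 17x17
  DMDA on PETSC_COMM_SELF, each wrapped in a DMShell and linked with
  DMSetCoarseDM(). HierarchyCreate() below generalizes this to -ndecomps
  decompositions with -level_nrefs refinements each.
*/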

PetscErrorCode HierarchyCreate(PetscInt *_nd, PetscInt *_nref, MPI_Comm **_cl, DM **_dl)
{
  PetscInt      d, k, ndecomps, ncoarsen, found, nx;
  PetscInt      levelrefs, *number;
  PetscSubcomm *pscommlist;
  MPI_Comm     *commlist;
  DM           *dalist, *dmlist;
  PetscBool     set;

  ndecomps = 1;
  PetscOptionsGetInt(NULL, NULL, "-ndecomps", &ndecomps, NULL);
  ncoarsen = ndecomps - 1;

  levelrefs = 2;
  PetscOptionsGetInt(NULL, NULL, "-level_nrefs", &levelrefs, NULL);
  PetscMalloc1(ncoarsen + 1, &number);
  for (k = 0; k < ncoarsen + 1; k++) number[k] = 2;
  found = ncoarsen;
  set   = PETSC_FALSE;
  PetscOptionsGetIntArray(NULL, NULL, "-level_comm_red_factor", number, &found, &set);

  PetscMalloc1(ncoarsen + 1, &pscommlist);
  for (k = 0; k < ncoarsen + 1; k++) pscommlist[k] = NULL;

  PetscMalloc1(ndecomps, &commlist);
  for (k = 0; k < ndecomps; k++) commlist[k] = MPI_COMM_NULL;
  PetscMalloc1(ndecomps * levelrefs, &dalist);
  PetscMalloc1(ndecomps * levelrefs, &dmlist);
  for (k = 0; k < ndecomps * levelrefs; k++) {
    dalist[k] = NULL;
    dmlist[k] = NULL;
  }

  CommHierarchyCreate(PETSC_COMM_WORLD, ncoarsen, number, pscommlist);
  for (k = 0; k < ncoarsen; k++) {
    if (pscommlist[k]) {
      MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);
      if (pscommlist[k]->color == 0) PetscCommDuplicate(comm_k, &commlist[k], NULL);
    }
  }
  PetscCommDuplicate(PETSC_COMM_WORLD, &commlist[ndecomps - 1], NULL);

  for (k = 0; k < ncoarsen; k++) {
    if (pscommlist[k]) PetscSubcommDestroy(&pscommlist[k]);
  }

  nx = 17;
  PetscOptionsGetInt(NULL, NULL, "-m", &nx, NULL);
  for (d = 0; d < ndecomps; d++) {
    DM   dmroot = NULL;
    char name[PETSC_MAX_PATH_LEN];

    if (commlist[d] != MPI_COMM_NULL) {
      DMDACreate2d(commlist[d], DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, nx, nx, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &dmroot);
      DMSetUp(dmroot);
      DMDASetUniformCoordinates(dmroot, 0, 1, 0, 1, 0, 0);
      DMDASetFieldName(dmroot, 0, "Pressure");
      PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "root-decomp-%" PetscInt_FMT, d);
      PetscObjectSetName((PetscObject)dmroot, name);
      /*DMView(dmroot,PETSC_VIEWER_STDOUT_(commlist[d]));*/
    }

    dalist[d * levelrefs + 0] = dmroot;
    for (k = 1; k < levelrefs; k++) {
      DM dmref = NULL;

      if (commlist[d] != MPI_COMM_NULL) {
        DMRefine(dalist[d * levelrefs + (k - 1)], MPI_COMM_NULL, &dmref);
        PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "ref%" PetscInt_FMT "-decomp-%" PetscInt_FMT, k, d);
        PetscObjectSetName((PetscObject)dmref, name);
        DMDAGetInfo(dmref, NULL, &nx, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
        /*DMView(dmref,PETSC_VIEWER_STDOUT_(commlist[d]));*/
      }
      dalist[d * levelrefs + k] = dmref;
    }
    MPI_Allreduce(MPI_IN_PLACE, &nx, 1, MPIU_INT, MPI_MAX, PETSC_COMM_WORLD);
  }

  /* create the hierarchy of DMShell's */
  for (d = 0; d < ndecomps; d++) {
    char         name[PETSC_MAX_PATH_LEN];
    UserContext *ctx = NULL;

    if (commlist[d] != MPI_COMM_NULL) {
      UserContextCreate(commlist[d], &ctx);
      for (k = 0; k < levelrefs; k++) {
        DMShellCreate_ShellDA(dalist[d * levelrefs + k], &dmlist[d * levelrefs + k]);
        DMSetApplicationContext(dmlist[d * levelrefs + k], ctx);
        PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "level%" PetscInt_FMT "-decomp-%" PetscInt_FMT, k, d);
        PetscObjectSetName((PetscObject)dmlist[d * levelrefs + k], name);
      }
    }
  }

  /* set all the coarse DMs */
  for (k = 1; k < ndecomps * levelrefs; k++) { /* skip first DM as it doesn't have a coarse representation */
    DM dmfine = NULL, dmcoarse = NULL;

    dmfine   = dmlist[k];
    dmcoarse = dmlist[k - 1];
    if (dmfine) DMSetCoarseDM(dmfine, dmcoarse);
  }

  /* do special setup on the fine DM coupling different decompositions */
  for (d = 1; d < ndecomps; d++) { /* skip first decomposition as it doesn't have a coarse representation */
    DM dmfine = NULL, dmcoarse = NULL;

    dmfine   = dmlist[d * levelrefs + 0];
    dmcoarse = dmlist[(d - 1) * levelrefs + (levelrefs - 1)];
    if (dmfine) DMShellDASetUp_TelescopeDMScatter(dmfine, dmcoarse);
  }

  PetscFree(number);
  for (k = 0; k < ncoarsen; k++) PetscSubcommDestroy(&pscommlist[k]);
  PetscFree(pscommlist);

  if (_nd) *_nd = ndecomps;
  if (_nref) *_nref = levelrefs;
  if (_cl) {
    *_cl = commlist;
  } else {
    for (k = 0; k < ndecomps; k++) {
      if (commlist[k] != MPI_COMM_NULL) PetscCommDestroy(&commlist[k]);
    }
    PetscFree(commlist);
  }
  if (_dl) {
    *_dl = dmlist;
    PetscFree(dalist);
  } else {
    for (k = 0; k < ndecomps * levelrefs; k++) {
      DMDestroy(&dmlist[k]);
      DMDestroy(&dalist[k]);
    }
    PetscFree(dmlist);
    PetscFree(dalist);
  }
  return 0;
}

PetscErrorCode test_hierarchy(void)
{
  PetscInt  d, k, nd, nref;
  MPI_Comm *comms;
  DM       *dms;

  HierarchyCreate(&nd, &nref, &comms, &dms);

  /* destroy user context */
  for (d = 0; d < nd; d++) {
    DM first = dms[d * nref + 0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first, &ctx);
      if (ctx) PetscFree(ctx);
      DMSetApplicationContext(first, NULL);
    }
    for (k = 1; k < nref; k++) {
      DM dm = dms[d * nref + k];
      if (dm) DMSetApplicationContext(dm, NULL);
    }
  }

  /* destroy DMs */
  for (k = 0; k < nd * nref; k++) {
    if (dms[k]) DMDestroyShellDMDA(&dms[k]);
  }
  PetscFree(dms);

  /* destroy communicators */
  for (k = 0; k < nd; k++) {
    if (comms[k] != MPI_COMM_NULL) PetscCommDestroy(&comms[k]);
  }
  PetscFree(comms);
  return 0;
}

PetscErrorCode test_basic(void)
{
  DM           dmF, dmdaF = NULL, dmC = NULL;
  Mat          A;
  Vec          x, b;
  KSP          ksp;
  PC           pc;
  UserContext *user = NULL;

  UserContextCreate(PETSC_COMM_WORLD, &user);
  HierarchyCreate_Basic(&dmF, &dmC, user);
  DMShellGetContext(dmF, &dmdaF);

  DMCreateMatrix(dmF, &A);
  DMCreateGlobalVector(dmF, &x);
  DMCreateGlobalVector(dmF, &b);
  ComputeRHS_DMDA(dmdaF, b, user);

  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetComputeOperators(ksp, ComputeMatrix_ShellDA, user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp, dmF);
  KSPSetDMActive(ksp, PETSC_TRUE);
  KSPSetFromOptions(ksp);
  KSPGetPC(ksp, &pc);
  PCTelescopeSetUseCoarseDM(pc, PETSC_TRUE);

  KSPSolve(ksp, b, x);

  if (dmC) DMDestroyShellDMDA(&dmC);
  DMDestroyShellDMDA(&dmF);
  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);
  PetscFree(user);
  return 0;
}

PetscErrorCode test_mg(void)
{
  DM           dmF, dmdaF = NULL, *dms = NULL;
  Mat          A;
  Vec          x, b;
  KSP          ksp;
  PetscInt     k, d, nd, nref;
  MPI_Comm    *comms = NULL;
  UserContext *user  = NULL;

  HierarchyCreate(&nd, &nref, &comms, &dms);
  dmF = dms[nd * nref - 1];

  DMShellGetContext(dmF, &dmdaF);
  DMGetApplicationContext(dmF, &user);

  DMCreateMatrix(dmF, &A);
  DMCreateGlobalVector(dmF, &x);
  DMCreateGlobalVector(dmF, &b);
  ComputeRHS_DMDA(dmdaF, b, user);

  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetComputeOperators(ksp, ComputeMatrix_ShellDA, user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp, dmF);
  KSPSetDMActive(ksp, PETSC_TRUE);
  KSPSetFromOptions(ksp);

  KSPSolve(ksp, b, x);

  for (d = 0; d < nd; d++) {
    DM first = dms[d * nref + 0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first, &ctx);
      if (ctx) PetscFree(ctx);
      DMSetApplicationContext(first, NULL);
    }
    for (k = 1; k < nref; k++) {
      DM dm = dms[d * nref + k];
      if (dm) DMSetApplicationContext(dm, NULL);
    }
  }

  for (k = 0; k < nd * nref; k++) {
    if (dms[k]) DMDestroyShellDMDA(&dms[k]);
  }
  PetscFree(dms);

  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);

  for (k = 0; k < nd; k++) {
    if (comms[k] != MPI_COMM_NULL) PetscCommDestroy(&comms[k]);
  }
  PetscFree(comms);
  return 0;
}

int main(int argc, char **argv)
{
  PetscInt test_id = 0;

  PetscInitialize(&argc, &argv, (char *)0, help);
  PetscOptionsGetInt(NULL, NULL, "-tid", &test_id, NULL);
  switch (test_id) {
  case 0:
    test_basic();
    break;
  case 1:
    test_hierarchy();
    break;
  case 2:
    test_mg();
    break;
  default:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "-tid must be 0,1,2");
  }
  PetscFinalize();
  return 0;
}
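
/*
  Example invocation (a sketch based on the basic_dirichlet test at the end of
  this file):

    mpiexec -n 4 ./ex73 -tid 0 -ksp_type gcr -ksp_monitor_short \
      -pc_type telescope -telescope_pc_type lu -telescope_ksp_type fgmres

  -tid selects the driver: 0 = test_basic, 1 = test_hierarchy, 2 = test_mg.
*/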

PetscErrorCode ComputeRHS_DMDA(DM da, Vec b, void *ctx)
{
  UserContext  *user = (UserContext *)ctx;
  PetscInt      i, j, mx, my, xm, ym, xs, ys;
  PetscScalar   Hx, Hy;
  PetscScalar **array;
  PetscBool     isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da, DMDA, &isda);
  PetscCheck(isda, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "DM provided must be a DMDA");
  DMDAGetInfo(da, NULL, &mx, &my, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  Hx = 1.0 / (PetscReal)(mx - 1);
  Hy = 1.0 / (PetscReal)(my - 1);
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);
  DMDAVecGetArray(da, b, &array);
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) array[j][i] = PetscExpScalar(-((PetscReal)i * Hx) * ((PetscReal)i * Hx) / user->nu) * PetscExpScalar(-((PetscReal)j * Hy) * ((PetscReal)j * Hy) / user->nu) * Hx * Hy;
  }
  DMDAVecRestoreArray(da, b, &array);
  VecAssemblyBegin(b);
  VecAssemblyEnd(b);

  /* force the right-hand side to be consistent for the singular matrix */
  /* note this is really a hack; normally the model would provide a consistent right-hand side */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace);
    MatNullSpaceRemove(nullspace, b);
    MatNullSpaceDestroy(&nullspace);
  }
  return 0;
}

PetscErrorCode ComputeRho(PetscInt i, PetscInt j, PetscInt mx, PetscInt my, PetscReal centerRho, PetscReal *rho)
{
  /* the conductivity is centerRho inside the central third of the domain and 1 elsewhere */
  if ((i > mx / 3.0) && (i < 2.0 * mx / 3.0) && (j > my / 3.0) && (j < 2.0 * my / 3.0)) {
    *rho = centerRho;
  } else {
    *rho = 1.0;
  }
  return 0;
}

PetscErrorCode ComputeMatrix_DMDA(DM da, Mat J, Mat jac, void *ctx)
{
  UserContext *user = (UserContext *)ctx;
  PetscReal    centerRho;
  PetscInt     i, j, mx, my, xm, ym, xs, ys;
  PetscScalar  v[5];
  PetscReal    Hx, Hy, HydHx, HxdHy, rho;
  MatStencil   row, col[5];
  PetscBool    isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da, DMDA, &isda);
  PetscCheck(isda, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "DM provided must be a DMDA");
  MatZeroEntries(jac);
  centerRho = user->rho;
  DMDAGetInfo(da, NULL, &mx, &my, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  Hx    = 1.0 / (PetscReal)(mx - 1);
  Hy    = 1.0 / (PetscReal)(my - 1);
  HxdHy = Hx / Hy;
  HydHx = Hy / Hx;
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      row.i = i;
      row.j = j;
      ComputeRho(i, j, mx, my, centerRho, &rho);
      if (i == 0 || j == 0 || i == mx - 1 || j == my - 1) {
        if (user->bcType == DIRICHLET) {
          v[0] = 2.0 * rho * (HxdHy + HydHx);
          MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES);
        } else if (user->bcType == NEUMANN) {
          PetscInt numx = 0, numy = 0, num = 0;
          if (j != 0) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j - 1;
            numy++;
            num++;
          }
          if (i != 0) {
            v[num]     = -rho * HydHx;
            col[num].i = i - 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (i != mx - 1) {
            v[num]     = -rho * HydHx;
            col[num].i = i + 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (j != my - 1) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j + 1;
            numy++;
            num++;
          }
          v[num]     = numx * rho * HydHx + numy * rho * HxdHy;
          col[num].i = i;
          col[num].j = j;
          num++;
          MatSetValuesStencil(jac, 1, &row, num, col, v, INSERT_VALUES);
        }
      } else {
        v[0]     = -rho * HxdHy;
        col[0].i = i;
        col[0].j = j - 1;
        v[1]     = -rho * HydHx;
        col[1].i = i - 1;
        col[1].j = j;
        v[2]     = 2.0 * rho * (HxdHy + HydHx);
        col[2].i = i;
        col[2].j = j;
        v[3]     = -rho * HydHx;
        col[3].i = i + 1;
        col[3].j = j;
        v[4]     = -rho * HxdHy;
        col[4].i = i;
        col[4].j = j + 1;
        MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY);
  return 0;
}

PetscErrorCode ComputeMatrix_ShellDA(KSP ksp, Mat J, Mat jac, void *ctx)
{
  DM dm, da;

  KSPGetDM(ksp, &dm);
  DMShellGetContext(dm, &da);
  ComputeMatrix_DMDA(da, J, jac, ctx);
  return 0;
}

/*TEST

  test:
    suffix: basic_dirichlet
    nsize: 4
    args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type lu -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr

  test:
    suffix: basic_neumann
    nsize: 4
    requires: !single
    args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type jacobi -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr -bc_type neumann

  test:
    suffix: mg_2lv_2mg
    nsize: 6
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 2 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -level_comm_red_factor 6 -mg_coarse_telescope_mg_coarse_pc_type lu

  test:
    suffix: mg_3lv_2mg
    nsize: 4
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu -m 5

  test:
    suffix: mg_3lv_2mg_customcommsize
    nsize: 12
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -m 5 -level_comm_red_factor 2,6 -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu

 TEST*/