Actual source code: telescope_dmda.c

  1: #include <petsc/private/matimpl.h>
  2: #include <petsc/private/pcimpl.h>
  3: #include <petsc/private/dmimpl.h>
  4: #include <petscksp.h>
  5: #include <petscdm.h>
  6: #include <petscdmda.h>

  8: #include "../src/ksp/pc/impls/telescope/telescope.h"

 10: static PetscBool  cited      = PETSC_FALSE;
 11: static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
 12:                                "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
 13:                                "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
 14:                                "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
 15:                                "  series    = {PASC '16},\n"
 16:                                "  isbn      = {978-1-4503-4126-4},\n"
 17:                                "  location  = {Lausanne, Switzerland},\n"
 18:                                "  pages     = {5:1--5:12},\n"
 19:                                "  articleno = {5},\n"
 20:                                "  numpages  = {12},\n"
 21:                                "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
 22:                                "  doi       = {10.1145/2929908.2929913},\n"
 23:                                "  acmid     = {2929913},\n"
 24:                                "  publisher = {ACM},\n"
 25:                                "  address   = {New York, NY, USA},\n"
 26:                                "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
 27:                                "  year      = {2016}\n"
 28:                                "}\n";
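
/*
  A minimal usage sketch (illustrative only, error checking omitted; ComputeMatrix is a
  placeholder for a user-supplied KSPComputeOperators callback): attach a DMDA to the
  outer KSP and select this preconditioner from the options database.

    KSP ksp;
    DM  da;
    KSPCreate(PETSC_COMM_WORLD, &ksp);
    DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR,
                 65, 65, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da);
    DMSetFromOptions(da);
    DMSetUp(da);
    KSPSetDM(ksp, da);
    KSPSetComputeOperators(ksp, ComputeMatrix, NULL);
    KSPSetFromOptions(ksp);

  Run-time options along the lines of
    -pc_type telescope -pc_telescope_reduction_factor 4 -telescope_pc_type mg
  then cause the routines in this file to be used during PCSetUp()/PCApply().
*/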

 30: static PetscErrorCode _DMDADetermineRankFromGlobalIJK(PetscInt dim, PetscInt i, PetscInt j, PetscInt k, PetscInt Mp, PetscInt Np, PetscInt Pp, PetscInt start_i[], PetscInt start_j[], PetscInt start_k[], PetscInt span_i[], PetscInt span_j[], PetscInt span_k[], PetscMPIInt *_pi, PetscMPIInt *_pj, PetscMPIInt *_pk, PetscMPIInt *rank_re)
 31: {
 32:   PetscInt pi, pj, pk, n;

 34:   PetscFunctionBegin;
 35:   *rank_re = -1;
 36:   if (_pi) *_pi = -1;
 37:   if (_pj) *_pj = -1;
 38:   if (_pk) *_pk = -1;
 39:   pi = pj = pk = -1;
 40:   if (_pi) {
 41:     for (n = 0; n < Mp; n++) {
 42:       if ((i >= start_i[n]) && (i < start_i[n] + span_i[n])) {
 43:         pi = n;
 44:         break;
 45:       }
 46:     }
 47:     PetscCheck(pi != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ijk] pi cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Mp, i);
 48:     *_pi = pi;
 49:   }

 51:   if (_pj) {
 52:     for (n = 0; n < Np; n++) {
 53:       if ((j >= start_j[n]) && (j < start_j[n] + span_j[n])) {
 54:         pj = n;
 55:         break;
 56:       }
 57:     }
 58:     PetscCheck(pj != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ijk] pj cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Np, j);
 59:     *_pj = pj;
 60:   }

 62:   if (_pk) {
 63:     for (n = 0; n < Pp; n++) {
 64:       if ((k >= start_k[n]) && (k < start_k[n] + span_k[n])) {
 65:         pk = n;
 66:         break;
 67:       }
 68:     }
 69:     PetscCheck(pk != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ijk] pk cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Pp, k);
 70:     *_pk = pk;
 71:   }

 73:   switch (dim) {
 74:   case 1:
 75:     *rank_re = pi;
 76:     break;
 77:   case 2:
 78:     *rank_re = pi + pj * Mp;
 79:     break;
 80:   case 3:
 81:     *rank_re = pi + pj * Mp + pk * (Mp * Np);
 82:     break;
 83:   }
 84:   PetscFunctionReturn(PETSC_SUCCESS);
 85: }
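
/*
  For example, with dim = 2, Mp = 2 ranks in the i direction with span_i = {3, 4}
  (so start_i = {0, 3}) and Np = 2 ranks in j with span_j = {2, 2} (start_j = {0, 2}),
  the global point (i, j) = (5, 1) lies in pi = 1, pj = 0 and is owned by
  rank_re = pi + pj * Mp = 1.
*/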

 87: static PetscErrorCode _DMDADetermineGlobalS0(PetscInt dim, PetscMPIInt rank_re, PetscInt Mp_re, PetscInt Np_re, PetscInt Pp_re, PetscInt range_i_re[], PetscInt range_j_re[], PetscInt range_k_re[], PetscInt *s0)
 88: {
 89:   PetscInt i, j, k, start_IJK = 0;
 90:   PetscInt rank_ijk;

 92:   PetscFunctionBegin;
 93:   switch (dim) {
 94:   case 1:
 95:     for (i = 0; i < Mp_re; i++) {
 96:       rank_ijk = i;
 97:       if (rank_ijk < rank_re) start_IJK += range_i_re[i];
 98:     }
 99:     break;
100:   case 2:
101:     for (j = 0; j < Np_re; j++) {
102:       for (i = 0; i < Mp_re; i++) {
103:         rank_ijk = i + j * Mp_re;
104:         if (rank_ijk < rank_re) start_IJK += range_i_re[i] * range_j_re[j];
105:       }
106:     }
107:     break;
108:   case 3:
109:     for (k = 0; k < Pp_re; k++) {
110:       for (j = 0; j < Np_re; j++) {
111:         for (i = 0; i < Mp_re; i++) {
112:           rank_ijk = i + j * Mp_re + k * Mp_re * Np_re;
113:           if (rank_ijk < rank_re) start_IJK += range_i_re[i] * range_j_re[j] * range_k_re[k];
114:         }
115:       }
116:     }
117:     break;
118:   }
119:   *s0 = start_IJK;
120:   PetscFunctionReturn(PETSC_SUCCESS);
121: }
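
/*
  Continuing the example above (Mp_re = Np_re = 2, range_i_re = {3, 4}, range_j_re = {2, 2}),
  rank_re = 3 owns the (pi, pj) = (1, 1) subdomain; the lower-ranked subdomains contain
  3*2 + 4*2 + 3*2 = 20 points, so *s0 = 20 is the first global index of rank 3 in the
  repartitioned ordering.
*/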

123: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors2d(PC_Telescope sred, DM dm, DM subdm)
124: {
125:   DM         cdm;
126:   Vec        coor, coor_natural, perm_coors;
127:   PetscInt   i, j, si, sj, ni, nj, M, N, Ml, Nl, c, nidx;
128:   PetscInt  *fine_indices;
129:   IS         is_fine, is_local;
130:   VecScatter sctx;

132:   PetscFunctionBegin;
133:   PetscCall(DMGetCoordinates(dm, &coor));
134:   if (!coor) PetscFunctionReturn(PETSC_SUCCESS);
135:   if (PCTelescope_isActiveRank(sred)) PetscCall(DMDASetUniformCoordinates(subdm, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
136:   /* Get the coordinate vector from the distributed array */
137:   PetscCall(DMGetCoordinateDM(dm, &cdm));
138:   PetscCall(DMDACreateNaturalVector(cdm, &coor_natural));

140:   PetscCall(DMDAGlobalToNaturalBegin(cdm, coor, INSERT_VALUES, coor_natural));
141:   PetscCall(DMDAGlobalToNaturalEnd(cdm, coor, INSERT_VALUES, coor_natural));
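  /*
   In the natural ordering, the coordinate entries of grid point (i, j) sit at scalar
   positions 2*(i + j*M) and 2*(i + j*M) + 1, independently of how either DMDA is
   distributed; the index computation below relies on this.
  */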

143:   /* determine the natural-ordering indices of the coordinate entries required by this rank of the repartitioned DMDA */
144:   PetscCall(DMDAGetInfo(dm, NULL, &M, &N, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
145:   if (PCTelescope_isActiveRank(sred)) {
146:     PetscCall(DMDAGetCorners(subdm, &si, &sj, NULL, &ni, &nj, NULL));
147:     Ml = ni;
148:     Nl = nj;
149:   } else {
150:     si = sj = 0;
151:     ni = nj = 0;
152:     Ml = Nl = 0;
153:   }

155:   PetscCall(PetscMalloc1(Ml * Nl * 2, &fine_indices));
156:   c = 0;
157:   if (PCTelescope_isActiveRank(sred)) {
158:     for (j = sj; j < sj + nj; j++) {
159:       for (i = si; i < si + ni; i++) {
160:         nidx                = (i) + (j)*M;
161:         fine_indices[c]     = 2 * nidx;
162:         fine_indices[c + 1] = 2 * nidx + 1;
163:         c                   = c + 2;
164:       }
165:     }
166:     PetscCheck(c == Ml * Nl * 2, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c %" PetscInt_FMT " should equal 2 * Ml %" PetscInt_FMT " * Nl %" PetscInt_FMT, c, Ml, Nl);
167:   }

169:   /* generate scatter */
170:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), Ml * Nl * 2, fine_indices, PETSC_USE_POINTER, &is_fine));
171:   PetscCall(ISCreateStride(PETSC_COMM_SELF, Ml * Nl * 2, 0, 1, &is_local));

173:   /* scatter */
174:   PetscCall(VecCreate(PETSC_COMM_SELF, &perm_coors));
175:   PetscCall(VecSetSizes(perm_coors, PETSC_DECIDE, Ml * Nl * 2));
176:   PetscCall(VecSetType(perm_coors, VECSEQ));

178:   PetscCall(VecScatterCreate(coor_natural, is_fine, perm_coors, is_local, &sctx));
179:   PetscCall(VecScatterBegin(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));
180:   PetscCall(VecScatterEnd(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));
181:   /* access */
182:   if (PCTelescope_isActiveRank(sred)) {
183:     Vec                _coors;
184:     const PetscScalar *LA_perm;
185:     PetscScalar       *LA_coors;

187:     PetscCall(DMGetCoordinates(subdm, &_coors));
188:     PetscCall(VecGetArrayRead(perm_coors, &LA_perm));
189:     PetscCall(VecGetArray(_coors, &LA_coors));
190:     for (i = 0; i < Ml * Nl * 2; i++) LA_coors[i] = LA_perm[i];
191:     PetscCall(VecRestoreArray(_coors, &LA_coors));
192:     PetscCall(VecRestoreArrayRead(perm_coors, &LA_perm));
193:   }

195:   /* update local coords */
196:   if (PCTelescope_isActiveRank(sred)) {
197:     DM  _dmc;
198:     Vec _coors, _coors_local;
199:     PetscCall(DMGetCoordinateDM(subdm, &_dmc));
200:     PetscCall(DMGetCoordinates(subdm, &_coors));
201:     PetscCall(DMGetCoordinatesLocal(subdm, &_coors_local));
202:     PetscCall(DMGlobalToLocalBegin(_dmc, _coors, INSERT_VALUES, _coors_local));
203:     PetscCall(DMGlobalToLocalEnd(_dmc, _coors, INSERT_VALUES, _coors_local));
204:   }
205:   PetscCall(VecScatterDestroy(&sctx));
206:   PetscCall(ISDestroy(&is_fine));
207:   PetscCall(PetscFree(fine_indices));
208:   PetscCall(ISDestroy(&is_local));
209:   PetscCall(VecDestroy(&perm_coors));
210:   PetscCall(VecDestroy(&coor_natural));
211:   PetscFunctionReturn(PETSC_SUCCESS);
212: }

214: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors3d(PC_Telescope sred, DM dm, DM subdm)
215: {
216:   DM         cdm;
217:   Vec        coor, coor_natural, perm_coors;
218:   PetscInt   i, j, k, si, sj, sk, ni, nj, nk, M, N, P, Ml, Nl, Pl, c, nidx;
219:   PetscInt  *fine_indices;
220:   IS         is_fine, is_local;
221:   VecScatter sctx;

223:   PetscFunctionBegin;
224:   PetscCall(DMGetCoordinates(dm, &coor));
225:   if (!coor) PetscFunctionReturn(PETSC_SUCCESS);

227:   if (PCTelescope_isActiveRank(sred)) PetscCall(DMDASetUniformCoordinates(subdm, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));

229:   /* Get the coordinate vector from the distributed array */
230:   PetscCall(DMGetCoordinateDM(dm, &cdm));
231:   PetscCall(DMDACreateNaturalVector(cdm, &coor_natural));
232:   PetscCall(DMDAGlobalToNaturalBegin(cdm, coor, INSERT_VALUES, coor_natural));
233:   PetscCall(DMDAGlobalToNaturalEnd(cdm, coor, INSERT_VALUES, coor_natural));

235:   /* determine the natural-ordering indices of the coordinate entries required by this rank of the repartitioned DMDA */
236:   PetscCall(DMDAGetInfo(dm, NULL, &M, &N, &P, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));

238:   if (PCTelescope_isActiveRank(sred)) {
239:     PetscCall(DMDAGetCorners(subdm, &si, &sj, &sk, &ni, &nj, &nk));
240:     Ml = ni;
241:     Nl = nj;
242:     Pl = nk;
243:   } else {
244:     si = sj = sk = 0;
245:     ni = nj = nk = 0;
246:     Ml = Nl = Pl = 0;
247:   }

249:   PetscCall(PetscMalloc1(Ml * Nl * Pl * 3, &fine_indices));

251:   c = 0;
252:   if (PCTelescope_isActiveRank(sred)) {
253:     for (k = sk; k < sk + nk; k++) {
254:       for (j = sj; j < sj + nj; j++) {
255:         for (i = si; i < si + ni; i++) {
256:           nidx                = (i) + (j)*M + (k)*M * N;
257:           fine_indices[c]     = 3 * nidx;
258:           fine_indices[c + 1] = 3 * nidx + 1;
259:           fine_indices[c + 2] = 3 * nidx + 2;
260:           c                   = c + 3;
261:         }
262:       }
263:     }
264:   }

266:   /* generate scatter */
267:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), Ml * Nl * Pl * 3, fine_indices, PETSC_USE_POINTER, &is_fine));
268:   PetscCall(ISCreateStride(PETSC_COMM_SELF, Ml * Nl * Pl * 3, 0, 1, &is_local));

270:   /* scatter */
271:   PetscCall(VecCreate(PETSC_COMM_SELF, &perm_coors));
272:   PetscCall(VecSetSizes(perm_coors, PETSC_DECIDE, Ml * Nl * Pl * 3));
273:   PetscCall(VecSetType(perm_coors, VECSEQ));
274:   PetscCall(VecScatterCreate(coor_natural, is_fine, perm_coors, is_local, &sctx));
275:   PetscCall(VecScatterBegin(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));
276:   PetscCall(VecScatterEnd(sctx, coor_natural, perm_coors, INSERT_VALUES, SCATTER_FORWARD));

278:   /* access */
279:   if (PCTelescope_isActiveRank(sred)) {
280:     Vec                _coors;
281:     const PetscScalar *LA_perm;
282:     PetscScalar       *LA_coors;

284:     PetscCall(DMGetCoordinates(subdm, &_coors));
285:     PetscCall(VecGetArrayRead(perm_coors, &LA_perm));
286:     PetscCall(VecGetArray(_coors, &LA_coors));
287:     for (i = 0; i < Ml * Nl * Pl * 3; i++) LA_coors[i] = LA_perm[i];
288:     PetscCall(VecRestoreArray(_coors, &LA_coors));
289:     PetscCall(VecRestoreArrayRead(perm_coors, &LA_perm));
290:   }

292:   /* update local coords */
293:   if (PCTelescope_isActiveRank(sred)) {
294:     DM  _dmc;
295:     Vec _coors, _coors_local;

297:     PetscCall(DMGetCoordinateDM(subdm, &_dmc));
298:     PetscCall(DMGetCoordinates(subdm, &_coors));
299:     PetscCall(DMGetCoordinatesLocal(subdm, &_coors_local));
300:     PetscCall(DMGlobalToLocalBegin(_dmc, _coors, INSERT_VALUES, _coors_local));
301:     PetscCall(DMGlobalToLocalEnd(_dmc, _coors, INSERT_VALUES, _coors_local));
302:   }

304:   PetscCall(VecScatterDestroy(&sctx));
305:   PetscCall(ISDestroy(&is_fine));
306:   PetscCall(PetscFree(fine_indices));
307:   PetscCall(ISDestroy(&is_local));
308:   PetscCall(VecDestroy(&perm_coors));
309:   PetscCall(VecDestroy(&coor_natural));
310:   PetscFunctionReturn(PETSC_SUCCESS);
311: }

313: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
314: {
315:   PetscInt     dim;
316:   DM           dm, subdm;
317:   PetscSubcomm psubcomm;
318:   MPI_Comm     comm;
319:   Vec          coor;

321:   PetscFunctionBegin;
322:   PetscCall(PCGetDM(pc, &dm));
323:   PetscCall(DMGetCoordinates(dm, &coor));
324:   if (!coor) PetscFunctionReturn(PETSC_SUCCESS);
325:   psubcomm = sred->psubcomm;
326:   comm     = PetscSubcommParent(psubcomm);
327:   subdm    = ctx->dmrepart;

329:   PetscCall(PetscInfo(pc, "PCTelescope: setting up the coordinates (DMDA)\n"));
330:   PetscCall(DMDAGetInfo(dm, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
331:   switch (dim) {
332:   case 1:
333:     SETERRQ(comm, PETSC_ERR_SUP, "Telescope: DMDA (1D) repartitioning not provided");
334:   case 2:
335:     PetscCall(PCTelescopeSetUp_dmda_repart_coors2d(sred, dm, subdm));
336:     break;
337:   case 3:
338:     PetscCall(PCTelescopeSetUp_dmda_repart_coors3d(sred, dm, subdm));
339:     break;
340:   }
341:   PetscFunctionReturn(PETSC_SUCCESS);
342: }

344: /* setup repartitioned dm */
345: static PetscErrorCode PCTelescopeSetUp_dmda_repart(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
346: {
347:   DM                    dm;
348:   PetscInt              dim, nx, ny, nz, ndof, nsw, sum, k;
349:   DMBoundaryType        bx, by, bz;
350:   DMDAStencilType       stencil;
351:   const PetscInt       *_range_i_re;
352:   const PetscInt       *_range_j_re;
353:   const PetscInt       *_range_k_re;
354:   DMDAInterpolationType itype;
355:   PetscInt              refine_x, refine_y, refine_z;
356:   MPI_Comm              comm, subcomm;
357:   const char           *prefix;

359:   PetscFunctionBegin;
360:   comm    = PetscSubcommParent(sred->psubcomm);
361:   subcomm = PetscSubcommChild(sred->psubcomm);
362:   PetscCall(PCGetDM(pc, &dm));

364:   PetscCall(DMDAGetInfo(dm, &dim, &nx, &ny, &nz, NULL, NULL, NULL, &ndof, &nsw, &bx, &by, &bz, &stencil));
365:   PetscCall(DMDAGetInterpolationType(dm, &itype));
366:   PetscCall(DMDAGetRefinementFactor(dm, &refine_x, &refine_y, &refine_z));

368:   ctx->dmrepart = NULL;
369:   _range_i_re = _range_j_re = _range_k_re = NULL;
370:   /* Create DMDA on the child communicator */
371:   if (PCTelescope_isActiveRank(sred)) {
372:     switch (dim) {
373:     case 1:
374:       PetscCall(PetscInfo(pc, "PCTelescope: setting up the DMDA on comm subset (1D)\n"));
375:       /* PetscCall(DMDACreate1d(subcomm,bx,nx,ndof,nsw,NULL,&ctx->dmrepart)); */
376:       ny = nz = 1;
377:       by = bz = DM_BOUNDARY_NONE;
378:       break;
379:     case 2:
380:       PetscCall(PetscInfo(pc, "PCTelescope: setting up the DMDA on comm subset (2D)\n"));
381:       /* PetscCall(DMDACreate2d(subcomm,bx,by,stencil,nx,ny, PETSC_DECIDE,PETSC_DECIDE,
382:          ndof,nsw, NULL,NULL,&ctx->dmrepart)); */
383:       nz = 1;
384:       bz = DM_BOUNDARY_NONE;
385:       break;
386:     case 3:
387:       PetscCall(PetscInfo(pc, "PCTelescope: setting up the DMDA on comm subset (3D)\n"));
388:       /* PetscCall(DMDACreate3d(subcomm,bx,by,bz,stencil,nx,ny,nz,
389:          PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,NULL,&ctx->dmrepart)); */
390:       break;
391:     }
392:     /*
 393:      The APIs DMDACreate1d(), DMDACreate2d(), DMDACreate3d() do not allow us to set/append
 394:      a unique option prefix for the DM, so we expose the contents of those APIs here.
395:      This allows users to control the partitioning of the subDM.
396:     */
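    /*
     For example (the exact option prefix depends on where this PC sits in the solver
     hierarchy; for an unprefixed outer KSP it is "telescope_repart_"), options along
     the lines of
       -telescope_repart_da_processors_x 2 -telescope_repart_da_processors_y 1
     could be used to prescribe the process grid of the repartitioned DMDA.
    */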
397:     PetscCall(DMDACreate(subcomm, &ctx->dmrepart));
398:     /* Set unique option prefix name */
399:     PetscCall(KSPGetOptionsPrefix(sred->ksp, &prefix));
400:     PetscCall(DMSetOptionsPrefix(ctx->dmrepart, prefix));
401:     PetscCall(DMAppendOptionsPrefix(ctx->dmrepart, "repart_"));
402:     /* standard setup from DMDACreate{1,2,3}d() */
403:     PetscCall(DMSetDimension(ctx->dmrepart, dim));
404:     PetscCall(DMDASetSizes(ctx->dmrepart, nx, ny, nz));
405:     PetscCall(DMDASetNumProcs(ctx->dmrepart, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE));
406:     PetscCall(DMDASetBoundaryType(ctx->dmrepart, bx, by, bz));
407:     PetscCall(DMDASetDof(ctx->dmrepart, ndof));
408:     PetscCall(DMDASetStencilType(ctx->dmrepart, stencil));
409:     PetscCall(DMDASetStencilWidth(ctx->dmrepart, nsw));
410:     PetscCall(DMDASetOwnershipRanges(ctx->dmrepart, NULL, NULL, NULL));
411:     PetscCall(DMSetFromOptions(ctx->dmrepart));
412:     PetscCall(DMSetUp(ctx->dmrepart));
 413:     /* Set refinement factors and interpolation type from the parent */
414:     PetscCall(DMDASetRefinementFactor(ctx->dmrepart, refine_x, refine_y, refine_z));
415:     PetscCall(DMDASetInterpolationType(ctx->dmrepart, itype));

417:     PetscCall(DMDAGetInfo(ctx->dmrepart, NULL, NULL, NULL, NULL, &ctx->Mp_re, &ctx->Np_re, &ctx->Pp_re, NULL, NULL, NULL, NULL, NULL, NULL));
418:     PetscCall(DMDAGetOwnershipRanges(ctx->dmrepart, &_range_i_re, &_range_j_re, &_range_k_re));

420:     ctx->dmrepart->ops->creatematrix              = dm->ops->creatematrix;
421:     ctx->dmrepart->ops->createdomaindecomposition = dm->ops->createdomaindecomposition;
422:   }

424:   /* generate ranges for repartitioned dm */
425:   /* note - assume rank 0 always participates */
426:   /* TODO: use a single MPI call */
427:   PetscCallMPI(MPI_Bcast(&ctx->Mp_re, 1, MPIU_INT, 0, comm));
428:   PetscCallMPI(MPI_Bcast(&ctx->Np_re, 1, MPIU_INT, 0, comm));
429:   PetscCallMPI(MPI_Bcast(&ctx->Pp_re, 1, MPIU_INT, 0, comm));

431:   PetscCall(PetscCalloc3(ctx->Mp_re, &ctx->range_i_re, ctx->Np_re, &ctx->range_j_re, ctx->Pp_re, &ctx->range_k_re));

433:   if (_range_i_re) PetscCall(PetscArraycpy(ctx->range_i_re, _range_i_re, ctx->Mp_re));
434:   if (_range_j_re) PetscCall(PetscArraycpy(ctx->range_j_re, _range_j_re, ctx->Np_re));
435:   if (_range_k_re) PetscCall(PetscArraycpy(ctx->range_k_re, _range_k_re, ctx->Pp_re));

437:   /* TODO: use a single MPI call */
438:   PetscCallMPI(MPI_Bcast(ctx->range_i_re, ctx->Mp_re, MPIU_INT, 0, comm));
439:   PetscCallMPI(MPI_Bcast(ctx->range_j_re, ctx->Np_re, MPIU_INT, 0, comm));
440:   PetscCallMPI(MPI_Bcast(ctx->range_k_re, ctx->Pp_re, MPIU_INT, 0, comm));

442:   PetscCall(PetscMalloc3(ctx->Mp_re, &ctx->start_i_re, ctx->Np_re, &ctx->start_j_re, ctx->Pp_re, &ctx->start_k_re));

444:   sum = 0;
445:   for (k = 0; k < ctx->Mp_re; k++) {
446:     ctx->start_i_re[k] = sum;
447:     sum += ctx->range_i_re[k];
448:   }

450:   sum = 0;
451:   for (k = 0; k < ctx->Np_re; k++) {
452:     ctx->start_j_re[k] = sum;
453:     sum += ctx->range_j_re[k];
454:   }

456:   sum = 0;
457:   for (k = 0; k < ctx->Pp_re; k++) {
458:     ctx->start_k_re[k] = sum;
459:     sum += ctx->range_k_re[k];
460:   }

462:   /* attach repartitioned dm to child ksp */
463:   {
464:     PetscErrorCode (*dmksp_func)(KSP, Mat, Mat, void *);
465:     void *dmksp_ctx;

467:     PetscCall(DMKSPGetComputeOperators(dm, &dmksp_func, &dmksp_ctx));

469:     /* attach dm to ksp on sub communicator */
470:     if (PCTelescope_isActiveRank(sred)) {
471:       PetscCall(KSPSetDM(sred->ksp, ctx->dmrepart));

473:       if (!dmksp_func || sred->ignore_kspcomputeoperators) {
474:         PetscCall(KSPSetDMActive(sred->ksp, PETSC_FALSE));
475:       } else {
476:         /* sub ksp inherits dmksp_func and context provided by user */
477:         PetscCall(KSPSetComputeOperators(sred->ksp, dmksp_func, dmksp_ctx));
478:         PetscCall(KSPSetDMActive(sred->ksp, PETSC_TRUE));
479:       }
480:     }
481:   }
482:   PetscFunctionReturn(PETSC_SUCCESS);
483: }
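
/*
  The permutation matrices built below contain exactly one unit entry per row: row r, a
  global scalar index in the parent DMDA ordering, maps to the column holding the same
  grid point in the repartitioned DMDA ordering; MatCreateMAIJ() then expands the scalar
  map to ndof components. PCApply_Telescope_dmda() applies xp = P^T x to move a parent
  vector into the repartitioned ordering and y = P xp to move it back.
*/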

485: static PetscErrorCode PCTelescopeSetUp_dmda_permutation_3d(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
486: {
487:   DM       dm;
488:   MPI_Comm comm;
489:   Mat      Pscalar, P;
490:   PetscInt ndof;
491:   PetscInt i, j, k, location, startI[3], endI[3], lenI[3], nx, ny, nz;
492:   PetscInt sr, er, Mr;
493:   Vec      V;

495:   PetscFunctionBegin;
496:   PetscCall(PetscInfo(pc, "PCTelescope: setting up the permutation matrix (DMDA-3D)\n"));
497:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));

499:   PetscCall(PCGetDM(pc, &dm));
500:   PetscCall(DMDAGetInfo(dm, NULL, &nx, &ny, &nz, NULL, NULL, NULL, &ndof, NULL, NULL, NULL, NULL, NULL));

502:   PetscCall(DMGetGlobalVector(dm, &V));
503:   PetscCall(VecGetSize(V, &Mr));
504:   PetscCall(VecGetOwnershipRange(V, &sr, &er));
505:   PetscCall(DMRestoreGlobalVector(dm, &V));
506:   sr = sr / ndof;
507:   er = er / ndof;
508:   Mr = Mr / ndof;

510:   PetscCall(MatCreate(comm, &Pscalar));
511:   PetscCall(MatSetSizes(Pscalar, (er - sr), (er - sr), Mr, Mr));
512:   PetscCall(MatSetType(Pscalar, MATAIJ));
513:   PetscCall(MatSeqAIJSetPreallocation(Pscalar, 1, NULL));
514:   PetscCall(MatMPIAIJSetPreallocation(Pscalar, 1, NULL, 1, NULL));

516:   PetscCall(DMDAGetCorners(dm, NULL, NULL, NULL, &lenI[0], &lenI[1], &lenI[2]));
517:   PetscCall(DMDAGetCorners(dm, &startI[0], &startI[1], &startI[2], &endI[0], &endI[1], &endI[2]));
518:   endI[0] += startI[0];
519:   endI[1] += startI[1];
520:   endI[2] += startI[2];

522:   for (k = startI[2]; k < endI[2]; k++) {
523:     for (j = startI[1]; j < endI[1]; j++) {
524:       for (i = startI[0]; i < endI[0]; i++) {
525:         PetscMPIInt rank_ijk_re, rank_reI[3];
526:         PetscInt    s0_re;
527:         PetscInt    ii, jj, kk, local_ijk_re, mapped_ijk;
528:         PetscInt    lenI_re[3];

530:         location = (i - startI[0]) + (j - startI[1]) * lenI[0] + (k - startI[2]) * lenI[0] * lenI[1];
531:         PetscCall(_DMDADetermineRankFromGlobalIJK(3, i, j, k, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->start_i_re, ctx->start_j_re, ctx->start_k_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &rank_reI[0], &rank_reI[1], &rank_reI[2], &rank_ijk_re));
532:         PetscCall(_DMDADetermineGlobalS0(3, rank_ijk_re, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &s0_re));
533:         ii = i - ctx->start_i_re[rank_reI[0]];
534:         PetscCheck(ii >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm3d] index error ii");
535:         jj = j - ctx->start_j_re[rank_reI[1]];
536:         PetscCheck(jj >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm3d] index error jj");
537:         kk = k - ctx->start_k_re[rank_reI[2]];
538:         PetscCheck(kk >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm3d] index error kk");
539:         lenI_re[0]   = ctx->range_i_re[rank_reI[0]];
540:         lenI_re[1]   = ctx->range_j_re[rank_reI[1]];
541:         lenI_re[2]   = ctx->range_k_re[rank_reI[2]];
542:         local_ijk_re = ii + jj * lenI_re[0] + kk * lenI_re[0] * lenI_re[1];
543:         mapped_ijk   = s0_re + local_ijk_re;
544:         PetscCall(MatSetValue(Pscalar, sr + location, mapped_ijk, 1.0, INSERT_VALUES));
545:       }
546:     }
547:   }
548:   PetscCall(MatAssemblyBegin(Pscalar, MAT_FINAL_ASSEMBLY));
549:   PetscCall(MatAssemblyEnd(Pscalar, MAT_FINAL_ASSEMBLY));
550:   PetscCall(MatCreateMAIJ(Pscalar, ndof, &P));
551:   PetscCall(MatDestroy(&Pscalar));
552:   ctx->permutation = P;
553:   PetscFunctionReturn(PETSC_SUCCESS);
554: }

556: static PetscErrorCode PCTelescopeSetUp_dmda_permutation_2d(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
557: {
558:   DM       dm;
559:   MPI_Comm comm;
560:   Mat      Pscalar, P;
561:   PetscInt ndof;
562:   PetscInt i, j, location, startI[2], endI[2], lenI[2], nx, ny, nz;
563:   PetscInt sr, er, Mr;
564:   Vec      V;

566:   PetscFunctionBegin;
567:   PetscCall(PetscInfo(pc, "PCTelescope: setting up the permutation matrix (DMDA-2D)\n"));
568:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
569:   PetscCall(PCGetDM(pc, &dm));
570:   PetscCall(DMDAGetInfo(dm, NULL, &nx, &ny, &nz, NULL, NULL, NULL, &ndof, NULL, NULL, NULL, NULL, NULL));
571:   PetscCall(DMGetGlobalVector(dm, &V));
572:   PetscCall(VecGetSize(V, &Mr));
573:   PetscCall(VecGetOwnershipRange(V, &sr, &er));
574:   PetscCall(DMRestoreGlobalVector(dm, &V));
575:   sr = sr / ndof;
576:   er = er / ndof;
577:   Mr = Mr / ndof;

579:   PetscCall(MatCreate(comm, &Pscalar));
580:   PetscCall(MatSetSizes(Pscalar, (er - sr), (er - sr), Mr, Mr));
581:   PetscCall(MatSetType(Pscalar, MATAIJ));
582:   PetscCall(MatSeqAIJSetPreallocation(Pscalar, 1, NULL));
583:   PetscCall(MatMPIAIJSetPreallocation(Pscalar, 1, NULL, 1, NULL));

585:   PetscCall(DMDAGetCorners(dm, NULL, NULL, NULL, &lenI[0], &lenI[1], NULL));
586:   PetscCall(DMDAGetCorners(dm, &startI[0], &startI[1], NULL, &endI[0], &endI[1], NULL));
587:   endI[0] += startI[0];
588:   endI[1] += startI[1];

590:   for (j = startI[1]; j < endI[1]; j++) {
591:     for (i = startI[0]; i < endI[0]; i++) {
592:       PetscMPIInt rank_ijk_re, rank_reI[3];
593:       PetscInt    s0_re;
594:       PetscInt    ii, jj, local_ijk_re, mapped_ijk;
595:       PetscInt    lenI_re[3];

597:       location = (i - startI[0]) + (j - startI[1]) * lenI[0];
598:       PetscCall(_DMDADetermineRankFromGlobalIJK(2, i, j, 0, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->start_i_re, ctx->start_j_re, ctx->start_k_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &rank_reI[0], &rank_reI[1], NULL, &rank_ijk_re));

600:       PetscCall(_DMDADetermineGlobalS0(2, rank_ijk_re, ctx->Mp_re, ctx->Np_re, ctx->Pp_re, ctx->range_i_re, ctx->range_j_re, ctx->range_k_re, &s0_re));

602:       ii = i - ctx->start_i_re[rank_reI[0]];
603:       PetscCheck(ii >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm2d] index error ii");
604:       jj = j - ctx->start_j_re[rank_reI[1]];
605:       PetscCheck(jj >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm2d] index error jj");

607:       lenI_re[0]   = ctx->range_i_re[rank_reI[0]];
608:       lenI_re[1]   = ctx->range_j_re[rank_reI[1]];
609:       local_ijk_re = ii + jj * lenI_re[0];
610:       mapped_ijk   = s0_re + local_ijk_re;
611:       PetscCall(MatSetValue(Pscalar, sr + location, mapped_ijk, 1.0, INSERT_VALUES));
612:     }
613:   }
614:   PetscCall(MatAssemblyBegin(Pscalar, MAT_FINAL_ASSEMBLY));
615:   PetscCall(MatAssemblyEnd(Pscalar, MAT_FINAL_ASSEMBLY));
616:   PetscCall(MatCreateMAIJ(Pscalar, ndof, &P));
617:   PetscCall(MatDestroy(&Pscalar));
618:   ctx->permutation = P;
619:   PetscFunctionReturn(PETSC_SUCCESS);
620: }
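
/*
  The scatter created below gathers the (already permuted) parent-ordered vector into the
  work vector xtmp, which lives on the parent communicator but whose local length equals
  the ownership range of the repartitioned vector xred on active ranks and is zero on
  ranks outside the sub-communicator.
*/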

622: static PetscErrorCode PCTelescopeSetUp_dmda_scatters(PC pc, PC_Telescope sred, PC_Telescope_DMDACtx *ctx)
623: {
624:   Vec        xred, yred, xtmp, x, xp;
625:   VecScatter scatter;
626:   IS         isin;
627:   Mat        B;
628:   PetscInt   m, bs, st, ed;
629:   MPI_Comm   comm;

631:   PetscFunctionBegin;
632:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
633:   PetscCall(PCGetOperators(pc, NULL, &B));
634:   PetscCall(MatCreateVecs(B, &x, NULL));
635:   PetscCall(MatGetBlockSize(B, &bs));
636:   PetscCall(VecDuplicate(x, &xp));
637:   m    = 0;
638:   xred = NULL;
639:   yred = NULL;
640:   if (PCTelescope_isActiveRank(sred)) {
641:     PetscCall(DMCreateGlobalVector(ctx->dmrepart, &xred));
642:     PetscCall(VecDuplicate(xred, &yred));
643:     PetscCall(VecGetOwnershipRange(xred, &st, &ed));
644:     PetscCall(ISCreateStride(comm, ed - st, st, 1, &isin));
645:     PetscCall(VecGetLocalSize(xred, &m));
646:   } else {
647:     PetscCall(VecGetOwnershipRange(x, &st, &ed));
648:     PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
649:   }
650:   PetscCall(ISSetBlockSize(isin, bs));
651:   PetscCall(VecCreate(comm, &xtmp));
652:   PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
653:   PetscCall(VecSetBlockSize(xtmp, bs));
654:   PetscCall(VecSetType(xtmp, ((PetscObject)x)->type_name));
655:   PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));
656:   sred->xred    = xred;
657:   sred->yred    = yred;
658:   sred->isin    = isin;
659:   sred->scatter = scatter;
660:   sred->xtmp    = xtmp;

662:   ctx->xp = xp;
663:   PetscCall(VecDestroy(&x));
664:   PetscFunctionReturn(PETSC_SUCCESS);
665: }

667: PetscErrorCode PCTelescopeSetUp_dmda(PC pc, PC_Telescope sred)
668: {
669:   PC_Telescope_DMDACtx *ctx;
670:   PetscInt              dim;
671:   DM                    dm;
672:   MPI_Comm              comm;

674:   PetscFunctionBegin;
675:   PetscCall(PetscInfo(pc, "PCTelescope: setup (DMDA)\n"));
676:   PetscCall(PetscNew(&ctx));
677:   sred->dm_ctx = (void *)ctx;

679:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
680:   PetscCall(PCGetDM(pc, &dm));
681:   PetscCall(DMDAGetInfo(dm, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));

683:   PetscCall(PCTelescopeSetUp_dmda_repart(pc, sred, ctx));
684:   PetscCall(PCTelescopeSetUp_dmda_repart_coors(pc, sred, ctx));
685:   switch (dim) {
686:   case 1:
687:     SETERRQ(comm, PETSC_ERR_SUP, "Telescope: DMDA (1D) repartitioning not provided");
688:   case 2:
689:     PetscCall(PCTelescopeSetUp_dmda_permutation_2d(pc, sred, ctx));
690:     break;
691:   case 3:
692:     PetscCall(PCTelescopeSetUp_dmda_permutation_3d(pc, sred, ctx));
693:     break;
694:   }
695:   PetscCall(PCTelescopeSetUp_dmda_scatters(pc, sred, ctx));
696:   PetscFunctionReturn(PETSC_SUCCESS);
697: }
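
/*
  The routine below is used when no KSPComputeOperators callback is available (or it is
  being ignored): the operator B is permuted into the repartitioned ordering via a PtAP
  product with the permutation matrix, the locally required rows are extracted, and the
  sequential pieces are concatenated into a parallel matrix on the sub-communicator.
*/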

699: static PetscErrorCode PCTelescopeMatCreate_dmda_dmactivefalse(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
700: {
701:   PC_Telescope_DMDACtx *ctx;
702:   MPI_Comm              comm, subcomm;
703:   Mat                   Bperm, Bred, B, P;
704:   PetscInt              nr, nc;
705:   IS                    isrow, iscol;
706:   Mat                   Blocal, *_Blocal;

708:   PetscFunctionBegin;
709:   PetscCall(PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (DMDA)\n"));
710:   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
711:   subcomm = PetscSubcommChild(sred->psubcomm);
712:   ctx     = (PC_Telescope_DMDACtx *)sred->dm_ctx;

714:   PetscCall(PCGetOperators(pc, NULL, &B));
715:   PetscCall(MatGetSize(B, &nr, &nc));

717:   P = ctx->permutation;
718:   PetscCall(MatPtAP(B, P, MAT_INITIAL_MATRIX, 1.1, &Bperm));

720:   /* Get submatrices */
721:   isrow = sred->isin;
722:   PetscCall(ISCreateStride(comm, nc, 0, 1, &iscol));

724:   PetscCall(MatCreateSubMatrices(Bperm, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal));
725:   Blocal = *_Blocal;
726:   Bred   = NULL;
727:   if (PCTelescope_isActiveRank(sred)) {
728:     PetscInt mm;

730:     if (reuse != MAT_INITIAL_MATRIX) Bred = *A;
731:     PetscCall(MatGetSize(Blocal, &mm, NULL));
732:     /* PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,PETSC_DECIDE,reuse,&Bred)); */
733:     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred));
734:   }
735:   *A = Bred;

737:   PetscCall(ISDestroy(&iscol));
738:   PetscCall(MatDestroy(&Bperm));
739:   PetscCall(MatDestroyMatrices(1, &_Blocal));
740:   PetscFunctionReturn(PETSC_SUCCESS);
741: }

743: PetscErrorCode PCTelescopeMatCreate_dmda(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
744: {
745:   DM dm;
746:   PetscErrorCode (*dmksp_func)(KSP, Mat, Mat, void *);
747:   void *dmksp_ctx;

749:   PetscFunctionBegin;
750:   PetscCall(PCGetDM(pc, &dm));
751:   PetscCall(DMKSPGetComputeOperators(dm, &dmksp_func, &dmksp_ctx));
752:   /* We assume that dmksp_func == NULL is equivalent to dmActive = PETSC_FALSE */
753:   if (dmksp_func && !sred->ignore_kspcomputeoperators) {
754:     DM  dmrepart;
755:     Mat Ak;

757:     *A = NULL;
758:     if (PCTelescope_isActiveRank(sred)) {
759:       PetscCall(KSPGetDM(sred->ksp, &dmrepart));
760:       if (reuse == MAT_INITIAL_MATRIX) {
761:         PetscCall(DMCreateMatrix(dmrepart, &Ak));
762:         *A = Ak;
763:       } else if (reuse == MAT_REUSE_MATRIX) {
764:         Ak = *A;
765:       }
766:       /*
767:        There is no need to explicitly assemble the operator now;
768:        the sub-KSP will call the method provided to KSPSetComputeOperators() during KSPSetUp()
769:       */
770:     }
771:   } else {
772:     PetscCall(PCTelescopeMatCreate_dmda_dmactivefalse(pc, sred, reuse, A));
773:   }
774:   PetscFunctionReturn(PETSC_SUCCESS);
775: }
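
/*
  The routine below pushes the (near) nullspace vectors attached to the parent operator
  through the same permutation + scatter path used in PCApply_Telescope_dmda(), so that
  the redundant operator on the sub-communicator carries an equivalent (near) nullspace.
*/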

777: static PetscErrorCode PCTelescopeSubNullSpaceCreate_dmda_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
778: {
779:   PetscBool             has_const;
780:   PetscInt              i, k, n = 0;
781:   const Vec            *vecs;
782:   Vec                  *sub_vecs = NULL;
783:   MPI_Comm              subcomm;
784:   PC_Telescope_DMDACtx *ctx;

786:   PetscFunctionBegin;
787:   ctx     = (PC_Telescope_DMDACtx *)sred->dm_ctx;
788:   subcomm = PetscSubcommChild(sred->psubcomm);
789:   PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

791:   if (PCTelescope_isActiveRank(sred)) {
792:     /* create new vectors */
793:     if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
794:   }

796:   /* copy entries */
797:   for (k = 0; k < n; k++) {
798:     const PetscScalar *x_array;
799:     PetscScalar       *LA_sub_vec;
800:     PetscInt           st, ed;

802:     /* permute vector into ordering associated with re-partitioned dmda */
803:     PetscCall(MatMultTranspose(ctx->permutation, vecs[k], ctx->xp));

805:     /* pull the permuted vector xp into xtmp */
806:     PetscCall(VecScatterBegin(sred->scatter, ctx->xp, sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));
807:     PetscCall(VecScatterEnd(sred->scatter, ctx->xp, sred->xtmp, INSERT_VALUES, SCATTER_FORWARD));

809:     /* copy vector entries into sub_vecs[k] */
810:     PetscCall(VecGetArrayRead(sred->xtmp, &x_array));
811:     if (sub_vecs) {
812:       if (sub_vecs[k]) {
813:         PetscCall(VecGetOwnershipRange(sub_vecs[k], &st, &ed));
814:         PetscCall(VecGetArray(sub_vecs[k], &LA_sub_vec));
815:         for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
816:         PetscCall(VecRestoreArray(sub_vecs[k], &LA_sub_vec));
817:       }
818:     }
819:     PetscCall(VecRestoreArrayRead(sred->xtmp, &x_array));
820:   }

822:   if (PCTelescope_isActiveRank(sred)) {
823:     /* create new (near) nullspace for redundant object */
824:     PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
825:     PetscCall(VecDestroyVecs(n, &sub_vecs));
826:     PetscCheck(!nullspace->remove, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
827:     PetscCheck(!nullspace->rmctx, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
828:   }
829:   PetscFunctionReturn(PETSC_SUCCESS);
830: }

832: PetscErrorCode PCTelescopeMatNullSpaceCreate_dmda(PC pc, PC_Telescope sred, Mat sub_mat)
833: {
834:   Mat B;

836:   PetscFunctionBegin;
837:   PetscCall(PCGetOperators(pc, NULL, &B));
838:   {
839:     MatNullSpace nullspace, sub_nullspace;
840:     PetscCall(MatGetNullSpace(B, &nullspace));
841:     if (nullspace) {
842:       PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (DMDA)\n"));
843:       PetscCall(PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc, sred, nullspace, &sub_nullspace));
844:       if (PCTelescope_isActiveRank(sred)) {
845:         PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
846:         PetscCall(MatNullSpaceDestroy(&sub_nullspace));
847:       }
848:     }
849:   }
850:   {
851:     MatNullSpace nearnullspace, sub_nearnullspace;
852:     PetscCall(MatGetNearNullSpace(B, &nearnullspace));
853:     if (nearnullspace) {
854:       PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (DMDA)\n"));
855:       PetscCall(PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc, sred, nearnullspace, &sub_nearnullspace));
856:       if (PCTelescope_isActiveRank(sred)) {
857:         PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
858:         PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
859:       }
860:     }
861:   }
862:   PetscFunctionReturn(PETSC_SUCCESS);
863: }
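
/*
  Application of the preconditioner proceeds in five steps:
    1. permute the input vector into the repartitioned ordering, xp = P^T x;
    2. scatter xp into xtmp so that each active rank holds the entries it owns under the
       repartitioned layout;
    3. copy xtmp into xred and solve with the sub-communicator KSP to obtain yred;
    4. scatter yred back through xtmp into xp;
    5. permute back to the parent ordering, y = P xp.
*/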

865: PetscErrorCode PCApply_Telescope_dmda(PC pc, Vec x, Vec y)
866: {
867:   PC_Telescope          sred = (PC_Telescope)pc->data;
868:   Mat                   perm;
869:   Vec                   xtmp, xp, xred, yred;
870:   PetscInt              i, st, ed;
871:   VecScatter            scatter;
872:   PetscScalar          *array;
873:   const PetscScalar    *x_array;
874:   PC_Telescope_DMDACtx *ctx;

876:   ctx     = (PC_Telescope_DMDACtx *)sred->dm_ctx;
877:   xtmp    = sred->xtmp;
878:   scatter = sred->scatter;
879:   xred    = sred->xred;
880:   yred    = sred->yred;
881:   perm    = ctx->permutation;
882:   xp      = ctx->xp;

884:   PetscFunctionBegin;
885:   PetscCall(PetscCitationsRegister(citation, &cited));

887:   /* permute vector into ordering associated with re-partitioned dmda */
888:   PetscCall(MatMultTranspose(perm, x, xp));

890:   /* pull the permuted vector xp into xtmp */
891:   PetscCall(VecScatterBegin(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));
892:   PetscCall(VecScatterEnd(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));

894:   /* copy vector entries into xred */
895:   PetscCall(VecGetArrayRead(xtmp, &x_array));
896:   if (xred) {
897:     PetscScalar *LA_xred;
898:     PetscCall(VecGetOwnershipRange(xred, &st, &ed));

900:     PetscCall(VecGetArray(xred, &LA_xred));
901:     for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
902:     PetscCall(VecRestoreArray(xred, &LA_xred));
903:   }
904:   PetscCall(VecRestoreArrayRead(xtmp, &x_array));

906:   /* solve */
907:   if (PCTelescope_isActiveRank(sred)) {
908:     PetscCall(KSPSolve(sred->ksp, xred, yred));
909:     PetscCall(KSPCheckSolve(sred->ksp, pc, yred));
910:   }

912:   /* return vector */
913:   PetscCall(VecGetArray(xtmp, &array));
914:   if (yred) {
915:     const PetscScalar *LA_yred;
916:     PetscCall(VecGetOwnershipRange(yred, &st, &ed));
917:     PetscCall(VecGetArrayRead(yred, &LA_yred));
918:     for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
919:     PetscCall(VecRestoreArrayRead(yred, &LA_yred));
920:   }
921:   PetscCall(VecRestoreArray(xtmp, &array));
922:   PetscCall(VecScatterBegin(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE));
923:   PetscCall(VecScatterEnd(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE));
924:   PetscCall(MatMult(perm, xp, y));
925:   PetscFunctionReturn(PETSC_SUCCESS);
926: }

928: PetscErrorCode PCApplyRichardson_Telescope_dmda(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
929: {
930:   PC_Telescope          sred = (PC_Telescope)pc->data;
931:   Mat                   perm;
932:   Vec                   xtmp, xp, yred;
933:   PetscInt              i, st, ed;
934:   VecScatter            scatter;
935:   const PetscScalar    *x_array;
936:   PetscBool             default_init_guess_value = PETSC_FALSE;
937:   PC_Telescope_DMDACtx *ctx;

939:   PetscFunctionBegin;
940:   ctx     = (PC_Telescope_DMDACtx *)sred->dm_ctx;
941:   xtmp    = sred->xtmp;
942:   scatter = sred->scatter;
943:   yred    = sred->yred;
944:   perm    = ctx->permutation;
945:   xp      = ctx->xp;

947:   PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope_dmda only supports max_it = 1");
948:   *reason = (PCRichardsonConvergedReason)0;

950:   if (!zeroguess) {
951:     PetscCall(PetscInfo(pc, "PCTelescopeDMDA: Scattering y for nonzero initial guess\n"));
952:     /* permute vector into ordering associated with re-partitioned dmda */
953:     PetscCall(MatMultTranspose(perm, y, xp));

955:     /* pull the permuted vector xp into xtmp */
956:     PetscCall(VecScatterBegin(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));
957:     PetscCall(VecScatterEnd(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));

959:     /* copy vector entries into yred */
960:     PetscCall(VecGetArrayRead(xtmp, &x_array));
961:     if (yred) {
962:       PetscScalar *LA_yred;
963:       PetscCall(VecGetOwnershipRange(yred, &st, &ed));
964:       PetscCall(VecGetArray(yred, &LA_yred));
965:       for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
966:       PetscCall(VecRestoreArray(yred, &LA_yred));
967:     }
968:     PetscCall(VecRestoreArrayRead(xtmp, &x_array));
969:   }

971:   if (PCTelescope_isActiveRank(sred)) {
972:     PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
973:     if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
974:   }

976:   PetscCall(PCApply_Telescope_dmda(pc, x, y));

978:   if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

980:   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
981:   *outits = 1;
982:   PetscFunctionReturn(PETSC_SUCCESS);
983: }

985: PetscErrorCode PCReset_Telescope_dmda(PC pc)
986: {
987:   PC_Telescope          sred = (PC_Telescope)pc->data;
988:   PC_Telescope_DMDACtx *ctx;

990:   PetscFunctionBegin;
991:   ctx = (PC_Telescope_DMDACtx *)sred->dm_ctx;
992:   PetscCall(VecDestroy(&ctx->xp));
993:   PetscCall(MatDestroy(&ctx->permutation));
994:   PetscCall(DMDestroy(&ctx->dmrepart));
995:   PetscCall(PetscFree3(ctx->range_i_re, ctx->range_j_re, ctx->range_k_re));
996:   PetscCall(PetscFree3(ctx->start_i_re, ctx->start_j_re, ctx->start_k_re));
997:   PetscFunctionReturn(PETSC_SUCCESS);
998: }

1000: static PetscErrorCode DMView_DA_Short_3d(DM dm, PetscViewer v)
1001: {
1002:   PetscInt    M, N, P, m, n, p, ndof, nsw;
1003:   MPI_Comm    comm;
1004:   PetscMPIInt size;
1005:   const char *prefix;

1007:   PetscFunctionBegin;
1008:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1009:   PetscCallMPI(MPI_Comm_size(comm, &size));
1010:   PetscCall(DMGetOptionsPrefix(dm, &prefix));
1011:   PetscCall(DMDAGetInfo(dm, NULL, &M, &N, &P, &m, &n, &p, &ndof, &nsw, NULL, NULL, NULL, NULL));
1012:   if (prefix) PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object:    (%s)    %d MPI processes\n", prefix, size));
1013:   else PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object:    %d MPI processes\n", size));
1014:   PetscCall(PetscViewerASCIIPrintf(v, "  M %" PetscInt_FMT " N %" PetscInt_FMT " P %" PetscInt_FMT " m %" PetscInt_FMT " n %" PetscInt_FMT " p %" PetscInt_FMT " dof %" PetscInt_FMT " overlap %" PetscInt_FMT "\n", M, N, P, m, n, p, ndof, nsw));
1015:   PetscFunctionReturn(PETSC_SUCCESS);
1016: }

1018: static PetscErrorCode DMView_DA_Short_2d(DM dm, PetscViewer v)
1019: {
1020:   PetscInt    M, N, m, n, ndof, nsw;
1021:   MPI_Comm    comm;
1022:   PetscMPIInt size;
1023:   const char *prefix;

1025:   PetscFunctionBegin;
1026:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1027:   PetscCallMPI(MPI_Comm_size(comm, &size));
1028:   PetscCall(DMGetOptionsPrefix(dm, &prefix));
1029:   PetscCall(DMDAGetInfo(dm, NULL, &M, &N, NULL, &m, &n, NULL, &ndof, &nsw, NULL, NULL, NULL, NULL));
1030:   if (prefix) PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object:    (%s)    %d MPI processes\n", prefix, size));
1031:   else PetscCall(PetscViewerASCIIPrintf(v, "DMDA Object:    %d MPI processes\n", size));
1032:   PetscCall(PetscViewerASCIIPrintf(v, "  M %" PetscInt_FMT " N %" PetscInt_FMT " m %" PetscInt_FMT " n %" PetscInt_FMT " dof %" PetscInt_FMT " overlap %" PetscInt_FMT "\n", M, N, m, n, ndof, nsw));
1033:   PetscFunctionReturn(PETSC_SUCCESS);
1034: }

1036: PetscErrorCode DMView_DA_Short(DM dm, PetscViewer v)
1037: {
1038:   PetscInt dim;

1040:   PetscFunctionBegin;
1041:   PetscCall(DMDAGetInfo(dm, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1042:   switch (dim) {
1043:   case 2:
1044:     PetscCall(DMView_DA_Short_2d(dm, v));
1045:     break;
1046:   case 3:
1047:     PetscCall(DMView_DA_Short_3d(dm, v));
1048:     break;
1049:   }
1050:   PetscFunctionReturn(PETSC_SUCCESS);
1051: }