Actual source code: plexcheckinterface.c

#include <petsc/private/dmpleximpl.h>

/* TODO PetscArrayExchangeBegin/End */
/* TODO blocksize */
/* TODO move to API ? */
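/* Nonblocking pairwise exchange of variable-length arrays with known neighbor ranks: array
   lengths are exchanged first (one MPIU_INT per neighbor), then the payloads of datatype dt.
   The output length array and the array of receive buffers are allocated here and ownership
   passes to the caller. */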
static PetscErrorCode ExchangeArrayByRank_Private(PetscObject obj, MPI_Datatype dt, PetscMPIInt nsranks, const PetscMPIInt sranks[], PetscInt ssize[], const void *sarr[], PetscMPIInt nrranks, const PetscMPIInt rranks[], PetscInt *rsize_out[], void **rarr_out[])
{
  PetscMPIInt  r;
  PetscInt    *rsize;
  void       **rarr;
  MPI_Request *sreq, *rreq;
  PetscMPIInt  tag, unitsize;
  MPI_Comm     comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Type_size(dt, &unitsize));
  PetscCall(PetscObjectGetComm(obj, &comm));
  PetscCall(PetscMalloc2(nrranks, &rsize, nrranks, &rarr));
  PetscCall(PetscMalloc2(nrranks, &rreq, nsranks, &sreq));
  /* exchange array size */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) PetscCallMPI(MPIU_Irecv(&rsize[r], 1, MPIU_INT, rranks[r], tag, comm, &rreq[r]));
  for (r = 0; r < nsranks; r++) PetscCallMPI(MPIU_Isend(&ssize[r], 1, MPIU_INT, sranks[r], tag, comm, &sreq[r]));
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  /* exchange array */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) {
    PetscCall(PetscMalloc(rsize[r] * unitsize, &rarr[r]));
    PetscCallMPI(MPIU_Irecv(rarr[r], rsize[r], dt, rranks[r], tag, comm, &rreq[r]));
  }
  for (r = 0; r < nsranks; r++) PetscCallMPI(MPIU_Isend(sarr[r], ssize[r], dt, sranks[r], tag, comm, &sreq[r]));
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(rreq, sreq));
  *rsize_out = rsize;
  *rarr_out  = rarr;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* TODO VecExchangeBegin/End */
/* TODO move to API ? */
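/* Exchange sequential Vecs with neighbor ranks: send svecs[] to sranks[] and receive one Vec per
   rank in rranks[].  Each received array is wrapped in a new sequential Vec that takes ownership
   of the buffer; the caller destroys the Vecs and frees *rvecs. */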
static PetscErrorCode ExchangeVecByRank_Private(PetscObject obj, PetscMPIInt nsranks, const PetscMPIInt sranks[], Vec svecs[], PetscMPIInt nrranks, const PetscMPIInt rranks[], Vec *rvecs[])
{
  PetscMPIInt         r;
  PetscInt           *ssize, *rsize;
  PetscScalar       **rarr;
  const PetscScalar **sarr;
  Vec                *rvecs_;
  MPI_Request        *sreq, *rreq;

  PetscFunctionBegin;
  PetscCall(PetscMalloc4(nsranks, &ssize, nsranks, &sarr, nrranks, &rreq, nsranks, &sreq));
  for (r = 0; r < nsranks; r++) {
    PetscCall(VecGetLocalSize(svecs[r], &ssize[r]));
    PetscCall(VecGetArrayRead(svecs[r], &sarr[r]));
  }
  PetscCall(ExchangeArrayByRank_Private(obj, MPIU_SCALAR, nsranks, sranks, ssize, (const void **)sarr, nrranks, rranks, &rsize, (void ***)&rarr));
  PetscCall(PetscMalloc1(nrranks, &rvecs_));
  for (r = 0; r < nrranks; r++) {
    /* set array in two steps to mimic PETSC_OWN_POINTER */
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, rsize[r], NULL, &rvecs_[r]));
    PetscCall(VecReplaceArray(rvecs_[r], rarr[r]));
  }
  for (r = 0; r < nsranks; r++) PetscCall(VecRestoreArrayRead(svecs[r], &sarr[r]));
  PetscCall(PetscFree2(rsize, rarr));
  PetscCall(PetscFree4(ssize, sarr, rreq, sreq));
  *rvecs = rvecs_;
  PetscFunctionReturn(PETSC_SUCCESS);
}
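
/* Return copies of the rmine/rremote arrays of the SF with each per-rank segment sorted by the
   remote point number, so that both sides of the interface traverse the shared points in the same
   order.  The copies are allocated with PetscMalloc2 and freed by the caller. */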

static PetscErrorCode SortByRemote_Private(PetscSF sf, PetscInt *rmine1[], PetscInt *rremote1[])
{
  PetscInt           nleaves;
  PetscMPIInt        nranks;
  const PetscMPIInt *ranks;
  const PetscInt    *roffset, *rmine, *rremote;
  PetscInt           n, o;

  PetscFunctionBegin;
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));
  nleaves = roffset[nranks];
  PetscCall(PetscMalloc2(nleaves, rmine1, nleaves, rremote1));
  for (PetscMPIInt r = 0; r < nranks; r++) {
    /* simultaneously sort rank-wise portions of rmine & rremote by values in rremote
       - to unify order with the other side */
    o = roffset[r];
    n = roffset[r + 1] - o;
    PetscCall(PetscArraycpy(&(*rmine1)[o], &rmine[o], n));
    PetscCall(PetscArraycpy(&(*rremote1)[o], &rremote[o], n));
    PetscCall(PetscSortIntWithArray(n, &(*rremote1)[o], &(*rmine1)[o]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
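
/* For each root rank of the SF, expand the corresponding segment of points in rmine[] recursively
   down to vertices and gather the local coordinates of those vertices into one sequential Vec per
   rank. */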

static PetscErrorCode GetRecursiveConeCoordinatesPerRank_Private(DM dm, PetscSF sf, PetscInt rmine[], Vec *coordinatesPerRank[])
{
  IS                 pointsPerRank, conesPerRank;
  PetscMPIInt        nranks;
  const PetscMPIInt *ranks;
  const PetscInt    *roffset;
  PetscInt           n, o;

  PetscFunctionBegin;
  PetscCall(DMGetCoordinatesLocalSetUp(dm));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, NULL, NULL));
  PetscCall(PetscMalloc1(nranks, coordinatesPerRank));
  for (PetscMPIInt r = 0; r < nranks; r++) {
    o = roffset[r];
    n = roffset[r + 1] - o;
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, &rmine[o], PETSC_USE_POINTER, &pointsPerRank));
    PetscCall(DMPlexGetConeRecursiveVertices(dm, pointsPerRank, &conesPerRank));
    PetscCall(DMGetCoordinatesLocalTuple(dm, conesPerRank, NULL, &(*coordinatesPerRank)[r]));
    PetscCall(ISDestroy(&pointsPerRank));
    PetscCall(ISDestroy(&conesPerRank));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
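
/* Translate the multi-root indices referenced by the inverse multi-SF imsf back to the original
   root point numbers of sf, keeping imsf's rank-wise ordering.  The result is allocated here and
   freed by the caller. */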

static PetscErrorCode PetscSFComputeMultiRootOriginalNumberingByRank_Private(PetscSF sf, PetscSF imsf, PetscInt *irmine1[])
{
  PetscInt       *mRootsOrigNumbering;
  PetscMPIInt     niranks;
  PetscInt        nileaves;
  const PetscInt *iroffset, *irmine, *degree;
  PetscInt        n, o;

  PetscFunctionBegin;
  PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
  PetscCall(PetscSFGetRootRanks(imsf, &niranks, NULL, &iroffset, &irmine, NULL));
  PetscCheck(nileaves == iroffset[niranks], PETSC_COMM_SELF, PETSC_ERR_PLIB, "nileaves != iroffset[niranks]");
  PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
  PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
  PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, degree, NULL, &mRootsOrigNumbering));
  PetscCall(PetscMalloc1(nileaves, irmine1));
  for (PetscMPIInt r = 0; r < niranks; r++) {
    o = iroffset[r];
    n = iroffset[r + 1] - o;
    for (PetscInt i = 0; i < n; i++) (*irmine1)[o + i] = mRootsOrigNumbering[irmine[o + i]];
  }
  PetscCall(PetscFree(mRootsOrigNumbering));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexCheckInterfaceCones - Check that points on inter-partition interfaces have conforming order of cone points.

  Input Parameter:
. dm - The `DMPLEX` object

  Level: developer

  Notes:
  For example, if there is an edge (rank,index)=(0,2) connecting points cone(0,2)=[(0,0),(0,1)] in this order, and the point `PetscSF` contains connections 0 <- (1,0), 1 <- (1,1) and 2 <- (1,2),
  then this check would pass if the edge (1,2) has cone(1,2)=[(1,0),(1,1)]. By contrast, if cone(1,2)=[(1,1),(1,0)], then this check would fail.

  This is mainly intended for debugging/testing purposes. It does not check cone orientation; for that purpose, use `DMPlexCheckFaces()`.
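
  Example usage (a minimal sketch; it assumes `dm` is a `DMPLEX` with coordinates already set, and `dmDist` is a local variable introduced here only for illustration):
.vb
  DM dmDist;

  PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
  if (dmDist) {
    PetscCall(DMDestroy(&dm));
    dm = dmDist;
  }
  PetscCall(DMPlexCheckInterfaceCones(dm)); /* errors if interface cones do not conform */
.ve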

  For the complete list of DMPlexCheck* functions, see `DMSetFromOptions()`.

  Developer Notes:
  Interface cones are expanded into vertices and then their coordinates are compared.

.seealso: [](ch_unstructured), `DM`, `DMPLEX`, `DMPlexGetCone()`, `DMPlexGetConeSize()`, `DMGetPointSF()`, `DMGetCoordinates()`, `DMSetFromOptions()`
@*/
PetscErrorCode DMPlexCheckInterfaceCones(DM dm)
{
  PetscSF            sf;
  PetscInt           nleaves, nroots;
  PetscMPIInt        nranks;
  const PetscInt    *mine, *roffset, *rmine, *rremote;
  const PetscSFNode *remote;
  const PetscMPIInt *ranks;
  PetscSF            msf, imsf;
  PetscMPIInt        niranks;
  PetscInt           nileaves;
  const PetscMPIInt *iranks;
  const PetscInt    *iroffset, *irmine, *irremote;
  PetscInt          *rmine1, *rremote1; /* rmine and rremote copies simultaneously sorted by rank and rremote */
  PetscInt          *mine_orig_numbering;
  Vec               *sntCoordinatesPerRank;
  Vec               *refCoordinatesPerRank;
  Vec               *recCoordinatesPerRank = NULL;
  PetscInt           r;
  PetscMPIInt        size, rank;
  PetscBool          same;
  PetscBool          verbose = PETSC_FALSE;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
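  /* Overall approach: on the leaf side, sort the shared points consistently with the root side,
     expand their cones down to vertices, and collect the vertex coordinates per root rank; on the
     root side, do the same for the points referenced through the inverse multi-SF; then send the
     leaf-side coordinates to the owning ranks and compare them with the locally computed reference
     coordinates using VecEqual(). */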
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size < 2) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMGetPointSF(dm, &sf));
  if (!sf) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, &mine, &remote));
  if (nroots < 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(dm->coordinates[0].x || dm->coordinates[0].xl, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM coordinates must be set");
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));

  /* Expand sent cones per rank */
  PetscCall(SortByRemote_Private(sf, &rmine1, &rremote1));
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, sf, rmine1, &sntCoordinatesPerRank));

  /* Create inverse SF */
  PetscCall(PetscSFGetMultiSF(sf, &msf));
  PetscCall(PetscSFCreateInverseSF(msf, &imsf));
  PetscCall(PetscSFSetUp(imsf));
  PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
  PetscCall(PetscSFGetRootRanks(imsf, &niranks, &iranks, &iroffset, &irmine, &irremote));

  /* Compute original numbering of multi-roots (referenced points) */
  PetscCall(PetscSFComputeMultiRootOriginalNumberingByRank_Private(sf, imsf, &mine_orig_numbering));

  /* Expand coordinates of the referred cones per rank */
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, imsf, mine_orig_numbering, &refCoordinatesPerRank));

  /* Send the coordinates */
  PetscCall(ExchangeVecByRank_Private((PetscObject)sf, nranks, ranks, sntCoordinatesPerRank, niranks, iranks, &recCoordinatesPerRank));

  /* verbose output */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dm_plex_check_cones_conform_on_interfaces_verbose", &verbose, NULL));
  if (verbose) {
    PetscViewer sv, v = PETSC_VIEWER_STDOUT_WORLD;
    PetscCall(PetscViewerASCIIPrintf(v, "============\nDMPlexCheckInterfaceCones output\n============\n"));
    PetscCall(PetscViewerASCIIPushSynchronized(v));
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
    for (r = 0; r < size; r++) {
      if (r < nranks) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " ranks[r]=%d sntCoordinatesPerRank[r]:\n", r, ranks[r]));
        PetscCall(PetscViewerASCIIPushTab(v));
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(VecView(sntCoordinatesPerRank[r], sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerASCIIPopTab(v));
      } else {
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      }
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  ----------\n"));
    for (r = 0; r < size; r++) {
      if (r < niranks) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " iranks[r]=%d refCoordinatesPerRank[r]:\n", r, iranks[r]));
        PetscCall(PetscViewerASCIIPushTab(v));
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(VecView(refCoordinatesPerRank[r], sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerASCIIPopTab(v));
      } else {
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      }
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  ----------\n"));
    for (r = 0; r < size; r++) {
      if (r < niranks) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " iranks[r]=%d recCoordinatesPerRank[r]:\n", r, iranks[r]));
        PetscCall(PetscViewerASCIIPushTab(v));
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(VecView(recCoordinatesPerRank[r], sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerASCIIPopTab(v));
      } else {
        PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
        PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      }
    }
    PetscCall(PetscViewerASCIIPopSynchronized(v));
  }

  /* Compare recCoordinatesPerRank with refCoordinatesPerRank */
  for (r = 0; r < niranks; r++) {
    PetscCall(VecEqual(refCoordinatesPerRank[r], recCoordinatesPerRank[r], &same));
    PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_PLIB, "interface cones do not conform for remote rank %d", iranks[r]);
  }

  /* destroy sent stuff */
  for (r = 0; r < nranks; r++) PetscCall(VecDestroy(&sntCoordinatesPerRank[r]));
  PetscCall(PetscFree(sntCoordinatesPerRank));
  PetscCall(PetscFree2(rmine1, rremote1));
  PetscCall(PetscSFDestroy(&imsf));

  /* destroy referenced stuff */
  for (r = 0; r < niranks; r++) PetscCall(VecDestroy(&refCoordinatesPerRank[r]));
  PetscCall(PetscFree(refCoordinatesPerRank));
  PetscCall(PetscFree(mine_orig_numbering));

  /* destroy received stuff */
  for (r = 0; r < niranks; r++) PetscCall(VecDestroy(&recCoordinatesPerRank[r]));
  PetscCall(PetscFree(recCoordinatesPerRank));
  PetscFunctionReturn(PETSC_SUCCESS);
}