Actual source code: patch.c
#include <petsc/private/dmpatchimpl.h>
#include <petscdmda.h>
#include <petscsf.h>

/*
Solver loop to update \tau:

  DMZoom(dmc, &dmz)
  DMRefine(dmz, &dmf),
  Scatter Xcoarse -> Xzoom,
  Interpolate Xzoom -> Xfine (note that this may be on subcomms),
  Smooth Xfine using two-step smoother
    normal smoother plus Kaczmarz---moves back and forth from dmzoom to dmfine
  Compute residual Rfine
  Restrict Rfine to Rzoom_restricted
  Scatter Rzoom_restricted -> Rcoarse_restricted
  Compute global residual Rcoarse
  TauCoarse = Rcoarse - Rcoarse_restricted
*/

/*@
  DMPatchZoom - Create patches of a `DMDA` on subsets of processes, indicated by `commz`

  Collective

  Input Parameters:
+ dm    - the `DM`
. lower - the lower left corner of the requested patch
. upper - the upper right corner of the requested patch
- commz - the new communicator for the patch, `MPI_COMM_NULL` indicates that the given rank will not own a patch

  Output Parameters:
+ dmz  - the patch `DM`
. sfz  - the `PetscSF` mapping the patch+halo to the zoomed version (optional)
- sfzr - the `PetscSF` mapping the patch to the restricted zoomed version
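
  Example Usage:
  A minimal calling sequence (a sketch; `da` stands for an existing 2D `DMDA`, and the patch bounds and communicator choice are illustrative, not taken from this file):
.vb
  DM         dmz;
  PetscSF    sfz, sfzr;
  MatStencil lower = {.i = 0, .j = 0, .k = 0, .c = 0};
  MatStencil upper = {.i = 4, .j = 4, .k = 1, .c = 0};

  PetscCall(DMPatchZoom(da, lower, upper, PETSC_COMM_SELF, &dmz, &sfz, &sfzr));
  // ... use sfz/sfzr to move data between da and the patch ...
  PetscCall(PetscSFDestroy(&sfz));
  PetscCall(PetscSFDestroy(&sfzr));
  PetscCall(DMDestroy(&dmz));
.ve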

  Level: intermediate

.seealso: `DMPatchSolve()`, `DMDACreatePatchIS()`
@*/
PetscErrorCode DMPatchZoom(DM dm, MatStencil lower, MatStencil upper, MPI_Comm commz, DM *dmz, PetscSF *sfz, PetscSF *sfzr)
{
  DMDAStencilType st;
  MatStencil      blower, bupper, loclower, locupper;
  IS              is;
  const PetscInt *ranges, *indices;
  PetscInt       *localPoints  = NULL;
  PetscSFNode    *remotePoints = NULL;
  PetscInt        dim, dof;
  PetscInt        M, N, P, rM, rN, rP, halo = 1, sxb, syb, szb, sxr, syr, szr, exr, eyr, ezr, mxb, myb, mzb, i, j, k, l, q;
  PetscMPIInt     size;
  PetscBool       patchis_offproc = PETSC_TRUE;
  Vec             X;

  PetscFunctionBegin;
  if (!sfz) halo = 0; /* no buffered map was requested, so do not expand the patch by a halo */
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  /* Create patch DM */
  PetscCall(DMDAGetInfo(dm, &dim, &M, &N, &P, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, &st));

  /* Get piece for rank r, expanded by halo */
  bupper.i = PetscMin(M, upper.i + halo);
  blower.i = PetscMax(lower.i - halo, 0);
  bupper.j = PetscMin(N, upper.j + halo);
  blower.j = PetscMax(lower.j - halo, 0);
  bupper.k = PetscMin(P, upper.k + halo);
  blower.k = PetscMax(lower.k - halo, 0);
  rM       = bupper.i - blower.i;
  rN       = bupper.j - blower.j;
  rP       = bupper.k - blower.k;

  if (commz != MPI_COMM_NULL) {
    PetscCall(DMDACreate(commz, dmz));
    PetscCall(DMSetDimension(*dmz, dim));
    PetscCall(DMDASetSizes(*dmz, rM, rN, rP));
    PetscCall(DMDASetNumProcs(*dmz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE));
    PetscCall(DMDASetBoundaryType(*dmz, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
    PetscCall(DMDASetDof(*dmz, dof));
    PetscCall(DMDASetStencilType(*dmz, st));
    PetscCall(DMDASetStencilWidth(*dmz, 0));
    PetscCall(DMDASetOwnershipRanges(*dmz, NULL, NULL, NULL));
    PetscCall(DMSetFromOptions(*dmz));
    PetscCall(DMSetUp(*dmz));
    PetscCall(DMDAGetCorners(*dmz, &sxb, &syb, &szb, &mxb, &myb, &mzb));
    sxr = PetscMax(sxb, lower.i - blower.i);
    syr = PetscMax(syb, lower.j - blower.j);
    szr = PetscMax(szb, lower.k - blower.k);
    exr = PetscMin(sxb + mxb, upper.i - blower.i);
    eyr = PetscMin(syb + myb, upper.j - blower.j);
    ezr = PetscMin(szb + mzb, upper.k - blower.k);
    PetscCall(PetscMalloc2(dof * rM * rN * PetscMax(rP, 1), &localPoints, dof * rM * rN * PetscMax(rP, 1), &remotePoints));
  } else {
    sxr = syr = szr = exr = eyr = ezr = sxb = syb = szb = mxb = myb = mzb = 0;
  }

  /* Create SF for restricted map */
  PetscCall(DMCreateGlobalVector(dm, &X));
  PetscCall(VecGetOwnershipRanges(X, &ranges));

  loclower.i = blower.i + sxr;
  locupper.i = blower.i + exr;
  loclower.j = blower.j + syr;
  locupper.j = blower.j + eyr;
  loclower.k = blower.k + szr;
  locupper.k = blower.k + ezr;

  PetscCall(DMDACreatePatchIS(dm, &loclower, &locupper, &is, patchis_offproc));
  PetscCall(ISGetIndices(is, &indices));

  if (dim < 3) {
    /* In 1D/2D the z range collapses to a single layer */
    mzb = 1;
    ezr = 1;
  }
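  /* For each locally owned point of the zoomed patch that lies inside the restricted
     (non-halo) region, record its local offset and find the rank owning the matching
     global point by bisection in the ownership ranges of the coarse vector */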
  q = 0;
  for (k = szb; k < szb + mzb; ++k) {
    if ((k < szr) || (k >= ezr)) continue;
    for (j = syb; j < syb + myb; ++j) {
      if ((j < syr) || (j >= eyr)) continue;
      for (i = sxb; i < sxb + mxb; ++i) {
        for (l = 0; l < dof; l++) {
          const PetscInt lp = l + dof * (((k - szb) * rN + (j - syb)) * rM + i - sxb);
          PetscMPIInt    r;
          PetscInt       ir;

          if ((i < sxr) || (i >= exr)) continue;
          localPoints[q] = lp;
          PetscCall(PetscFindInt(indices[q], size + 1, ranges, &ir));
          PetscCall(PetscMPIIntCast(ir, &r));

          remotePoints[q].rank  = r < 0 ? -(r + 1) - 1 : r;
          remotePoints[q].index = indices[q] - ranges[remotePoints[q].rank];
          ++q;
        }
      }
    }
  }
  PetscCall(ISRestoreIndices(is, &indices));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfzr));
  PetscCall(PetscObjectSetName((PetscObject)*sfzr, "Restricted Map"));
  PetscCall(PetscSFSetGraph(*sfzr, dof * M * N * P, q, localPoints, PETSC_COPY_VALUES, remotePoints, PETSC_COPY_VALUES));

  if (sfz) {
    /* Create SF for buffered map */
    loclower.i = blower.i + sxb;
    locupper.i = blower.i + sxb + mxb;
    loclower.j = blower.j + syb;
    locupper.j = blower.j + syb + myb;
    loclower.k = blower.k + szb;
    locupper.k = blower.k + szb + mzb;

    PetscCall(DMDACreatePatchIS(dm, &loclower, &locupper, &is, patchis_offproc));
    PetscCall(ISGetIndices(is, &indices));
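
    /* Every locally owned point of the buffered (halo-expanded) patch is mapped back
       to its owning rank on the original DMDA */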
    q = 0;
    for (k = szb; k < szb + mzb; ++k) {
      for (j = syb; j < syb + myb; ++j) {
        for (i = sxb; i < sxb + mxb; ++i, ++q) {
          PetscInt    ir;
          PetscMPIInt r;

          localPoints[q] = q;
          PetscCall(PetscFindInt(indices[q], size + 1, ranges, &ir));
          PetscCall(PetscMPIIntCast(ir, &r));
          remotePoints[q].rank  = r < 0 ? -(r + 1) - 1 : r;
          remotePoints[q].index = indices[q] - ranges[remotePoints[q].rank];
        }
      }
    }
    PetscCall(ISRestoreIndices(is, &indices));
    PetscCall(ISDestroy(&is));
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfz));
    PetscCall(PetscObjectSetName((PetscObject)*sfz, "Buffered Map"));
    PetscCall(PetscSFSetGraph(*sfz, M * N * P, q, localPoints, PETSC_COPY_VALUES, remotePoints, PETSC_COPY_VALUES));
  }

  PetscCall(VecDestroy(&X));
  PetscCall(PetscFree2(localPoints, remotePoints));
  PetscFunctionReturn(PETSC_SUCCESS);
}

typedef enum {
  PATCH_COMM_TYPE_WORLD = 0,
  PATCH_COMM_TYPE_SELF  = 1
} PatchCommType;

PetscErrorCode DMPatchSolve(DM dm)
{
  MPI_Comm    comm, commz;
  DM          dmc;
  PetscSF     sfz, sfzr;
  Vec         XC;
  MatStencil  patchSize, commSize, gridRank, lower, upper;
  PetscInt    M, N, P, i, j, k, l, m, n, p = 0;
  PetscMPIInt rank, size;
  PetscInt    debug = 0;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPatchGetCoarse(dm, &dmc));
  PetscCall(DMPatchGetPatchSize(dm, &patchSize));
  PetscCall(DMPatchGetCommSize(dm, &commSize));
  PetscCall(DMGetGlobalVector(dmc, &XC));
  PetscCall(DMDAGetInfo(dmc, NULL, &M, &N, &P, &l, &m, &n, NULL, NULL, NULL, NULL, NULL, NULL));
  M = PetscMax(M, 1);
  l = PetscMax(l, 1);
  N = PetscMax(N, 1);
  m = PetscMax(m, 1);
  P = PetscMax(P, 1);
  n = PetscMax(n, 1);
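
  /* Locate this rank in the l x m x n process grid of the coarse DMDA */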
  gridRank.i = rank % l;
  gridRank.j = rank / l % m;
  gridRank.k = rank / (l * m) % n;

  if (commSize.i * commSize.j * commSize.k == size || commSize.i * commSize.j * commSize.k == 0) {
    /* One patch communicator spanning all processes */
    commSize.i = l;
    commSize.j = m;
    commSize.k = n;
    commz      = comm;
  } else if (commSize.i * commSize.j * commSize.k == 1) {
    /* Each process works on its patches alone */
    commz = PETSC_COMM_SELF;
  } else {
    /* Split the process grid into blocks of commSize processes, one communicator per block */
    PetscInt    newComm = ((gridRank.k / commSize.k) * (m / commSize.j) + gridRank.j / commSize.j) * (l / commSize.i) + gridRank.i / commSize.i;
    PetscInt    newRank = ((gridRank.k % commSize.k) * commSize.j + gridRank.j % commSize.j) * commSize.i + gridRank.i % commSize.i;
    PetscMPIInt newCommi;
    PetscMPIInt newRanki;

    PetscCall(PetscMPIIntCast(newComm, &newCommi));
    PetscCall(PetscMPIIntCast(newRank, &newRanki));
    PetscCallMPI(MPI_Comm_split(comm, newCommi, newRanki, &commz));
    if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Rank %d color %d key %d commz %p\n", rank, newCommi, newRanki, (void *)(MPI_Aint)commz));
  }
  /*
   Assumptions:
     - patchSize divides gridSize
     - commSize divides gridSize
     - commSize divides l, m, n
   Ignore multiple patches per rank for now

   Multiple ranks per patch:
     - l, m, n divides patchSize
     - commSize divides patchSize
  */
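  /* Visit the patches in lexicographic order; for example, an 8x8 grid with 4x4 patches
     yields p = 0..3 with lower corners (0,0), (4,0), (0,4), (4,4) */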
  for (k = 0; k < P; k += PetscMax(patchSize.k, 1)) {
    for (j = 0; j < N; j += PetscMax(patchSize.j, 1)) {
      for (i = 0; i < M; i += PetscMax(patchSize.i, 1), ++p) {
        MPI_Comm commp = MPI_COMM_NULL;
        DM       dmz   = NULL;
#if 0
        DM  dmf     = NULL;
        Mat interpz = NULL;
#endif
        Vec          XZ      = NULL;
        PetscScalar *xcarray = NULL;
        PetscScalar *xzarray = NULL;
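
        /* This rank participates in patch p when its communicator block matches the
           patch's block coordinates in each grid direction */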
        if ((gridRank.k / commSize.k == p / (l / commSize.i * m / commSize.j) % n / commSize.k) && (gridRank.j / commSize.j == p / (l / commSize.i) % m / commSize.j) && (gridRank.i / commSize.i == p % l / commSize.i)) {
          if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Rank %d is accepting Patch %" PetscInt_FMT "\n", rank, p));
          commp = commz;
        }
        /* Zoom to coarse patch */
        lower.i = i;
        lower.j = j;
        lower.k = k;
        lower.c = 0; /* initialize member before the stencil is passed by value, otherwise compiler issues warnings */
        upper.i = i + patchSize.i;
        upper.j = j + patchSize.j;
        upper.k = k + patchSize.k;
        upper.c = 0; /* initialize member before the stencil is passed by value, otherwise compiler issues warnings */
        PetscCall(DMPatchZoom(dmc, lower, upper, commp, &dmz, &sfz, &sfzr));
        if (debug)
          PetscCall(PetscPrintf(comm, "Patch %" PetscInt_FMT ": (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")--(%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")\n", p, lower.i, lower.j, lower.k, upper.i, upper.j, upper.k));
        if (dmz) PetscCall(DMView(dmz, PETSC_VIEWER_STDOUT_(commz)));
        PetscCall(PetscSFView(sfz, PETSC_VIEWER_STDOUT_(comm)));
        PetscCall(PetscSFView(sfzr, PETSC_VIEWER_STDOUT_(comm)));
        /* Scatter Xcoarse -> Xzoom */
        if (dmz) PetscCall(DMGetGlobalVector(dmz, &XZ));
        if (XZ) PetscCall(VecGetArray(XZ, &xzarray));
        PetscCall(VecGetArray(XC, &xcarray));
        PetscCall(PetscSFBcastBegin(sfz, MPIU_SCALAR, xcarray, xzarray, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(sfz, MPIU_SCALAR, xcarray, xzarray, MPI_REPLACE));
        PetscCall(VecRestoreArray(XC, &xcarray));
        if (XZ) PetscCall(VecRestoreArray(XZ, &xzarray));
#if 0
        /* Interpolate Xzoom -> Xfine, note that this may be on subcomms */
        PetscCall(DMRefine(dmz, MPI_COMM_NULL, &dmf));
        PetscCall(DMCreateInterpolation(dmz, dmf, &interpz, NULL));
        PetscCall(DMInterpolate(dmz, interpz, dmf));
        /* Smooth Xfine using two-step smoother, normal smoother plus Kaczmarz---moves back and forth from dmzoom to dmfine */
        /* Compute residual Rfine */
        /* Restrict Rfine to Rzoom_restricted */
#endif
        /* Scatter Rzoom_restricted -> Rcoarse_restricted */
        if (XZ) PetscCall(VecGetArray(XZ, &xzarray));
        PetscCall(VecGetArray(XC, &xcarray));
        PetscCall(PetscSFReduceBegin(sfzr, MPIU_SCALAR, xzarray, xcarray, MPIU_SUM));
        PetscCall(PetscSFReduceEnd(sfzr, MPIU_SCALAR, xzarray, xcarray, MPIU_SUM));
        PetscCall(VecRestoreArray(XC, &xcarray));
        if (XZ) PetscCall(VecRestoreArray(XZ, &xzarray));
        if (dmz) PetscCall(DMRestoreGlobalVector(dmz, &XZ));
        /* Compute global residual Rcoarse */
        /* TauCoarse = Rcoarse - Rcoarse_restricted */

        PetscCall(PetscSFDestroy(&sfz));
        PetscCall(PetscSFDestroy(&sfzr));
        PetscCall(DMDestroy(&dmz));
      }
    }
  }
  PetscCall(DMRestoreGlobalVector(dmc, &XC));
  PetscFunctionReturn(PETSC_SUCCESS);
}
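
/*
  Example (a sketch, not part of this file): driving DMPatchSolve() on a DMPATCH built
  with DMPatchCreateGrid(), assuming that constructor is available as declared in
  petscdmpatch.h; the 2D sizes below are illustrative.

    DM         dm;
    MatStencil patchSize = {.i = 4, .j = 4, .k = 0, .c = 0};
    MatStencil commSize  = {.i = 1, .j = 1, .k = 0, .c = 0};
    MatStencil gridSize  = {.i = 16, .j = 16, .k = 0, .c = 0};

    PetscCall(DMPatchCreateGrid(PETSC_COMM_WORLD, 2, patchSize, commSize, gridSize, &dm));
    PetscCall(DMPatchSolve(dm));
    PetscCall(DMDestroy(&dm));
*/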

static PetscErrorCode DMPatchView_ASCII(DM dm, PetscViewer viewer)
{
  DM_Patch         *mesh = (DM_Patch *)dm->data;
  PetscViewerFormat format;
  const char       *name;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(viewer, &format));
  /* if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) */
  PetscCall(PetscObjectGetName((PetscObject)dm, &name));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Patch DM %s\n", name));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse DM\n"));
  PetscCall(DMView(mesh->dmCoarse, viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMView_Patch(DM dm, PetscViewer viewer)
{
  PetscBool iascii, isbinary;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  if (iascii) PetscCall(DMPatchView_ASCII(dm, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMDestroy_Patch(DM dm)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  if (--mesh->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMDestroy(&mesh->dmCoarse));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(mesh));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMSetUp_Patch(DM dm)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscCall(DMSetUp(mesh->dmCoarse));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateGlobalVector_Patch(DM dm, Vec *g)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscCall(DMCreateGlobalVector(mesh->dmCoarse, g));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateLocalVector_Patch(DM dm, Vec *l)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscCall(DMCreateLocalVector(mesh->dmCoarse, l));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateSubDM_Patch(DM dm, PetscInt numFields, const PetscInt fields[], IS *is, DM *subdm)
{
  SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Tell me to code this");
}

PetscErrorCode DMPatchGetCoarse(DM dm, DM *dmCoarse)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  *dmCoarse = mesh->dmCoarse;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchGetPatchSize(DM dm, MatStencil *patchSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscAssertPointer(patchSize, 2);
  *patchSize = mesh->patchSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchSetPatchSize(DM dm, MatStencil patchSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  mesh->patchSize = patchSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchGetCommSize(DM dm, MatStencil *commSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  PetscAssertPointer(commSize, 2);
  *commSize = mesh->commSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPatchSetCommSize(DM dm, MatStencil commSize)
{
  DM_Patch *mesh = (DM_Patch *)dm->data;

  PetscFunctionBegin;
  mesh->commSize = commSize;
  PetscFunctionReturn(PETSC_SUCCESS);
}
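
/*
  Example (a sketch, not part of this file): configuring the patch and communicator
  sizes on an existing DMPATCH `dm` before solving; the sizes are illustrative.

    MatStencil ps = {.i = 4, .j = 4, .k = 0, .c = 0};
    MatStencil cs = {.i = 1, .j = 1, .k = 0, .c = 0};

    PetscCall(DMPatchSetPatchSize(dm, ps));
    PetscCall(DMPatchSetCommSize(dm, cs));
    PetscCall(DMPatchSolve(dm));
*/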