/* patch.c: DMPatch implementation */
1: #include <petsc/private/dmpatchimpl.h>
2: #include <petscdmda.h>
3: #include <petscsf.h>
/*
Solver loop to update \tau:

  DMZoom(dmc, &dmz)
  DMRefine(dmz, &dmf),
  Scatter Xcoarse -> Xzoom,
  Interpolate Xzoom -> Xfine (note that this may be on subcomms),
  Smooth Xfine using two-step smoother
    normal smoother plus Kaczmarz---moves back and forth from dmzoom to dmfine
  Compute residual Rfine
  Restrict Rfine to Rzoom_restricted
  Scatter Rzoom_restricted -> Rcoarse_restricted
  Compute global residual Rcoarse
  TauCoarse = Rcoarse - Rcoarse_restricted
*/
/*@
  DMPatchZoom - Create patches of a `DMDA` on subsets of processes, indicated by `commz`

  Collective

  Input Parameters:
+ dm    - the `DM`
. lower - the lower left corner of the requested patch
. upper - the upper right corner of the requested patch
- commz - the new communicator for the patch, `MPI_COMM_NULL` indicates that the given rank will not own a patch

  Output Parameters:
+ dmz  - the patch `DM`
. sfz  - the `PetscSF` mapping the patch+halo to the zoomed version (optional)
- sfzr - the `PetscSF` mapping the patch to the restricted zoomed version

  Level: intermediate

.seealso: `DMPatchSolve()`, `DMDACreatePatchIS()`
@*/
PetscErrorCode DMPatchZoom(DM dm, MatStencil lower, MatStencil upper, MPI_Comm commz, DM *dmz, PetscSF *sfz, PetscSF *sfzr)
{
  DMDAStencilType st;
  MatStencil      blower, bupper, loclower, locupper;
  IS              is;
  const PetscInt *ranges, *indices;
  PetscInt       *localPoints  = NULL;
  PetscSFNode    *remotePoints = NULL;
  PetscInt        dim, dof;
  PetscInt        M, N, P, rM, rN, rP, halo = 1, sxb, syb, szb, sxr, syr, szr, exr, eyr, ezr, mxb, myb, mzb, i, j, k, l, q;
  PetscMPIInt     size;
  PetscBool       patchis_offproc = PETSC_TRUE;
  Vec             X;

  PetscFunctionBegin;
  /* No buffered map requested: skip the halo region around the patch */
  if (!sfz) halo = 0;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  /* Create patch DM */
  PetscCall(DMDAGetInfo(dm, &dim, &M, &N, &P, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, &st));

  /* Get piece for rank r, expanded by halo, clipped against the global grid */
  bupper.i = PetscMin(M, upper.i + halo);
  blower.i = PetscMax(lower.i - halo, 0);
  bupper.j = PetscMin(N, upper.j + halo);
  blower.j = PetscMax(lower.j - halo, 0);
  bupper.k = PetscMin(P, upper.k + halo);
  blower.k = PetscMax(lower.k - halo, 0);
  /* Buffered (halo-expanded) patch extent */
  rM = bupper.i - blower.i;
  rN = bupper.j - blower.j;
  rP = bupper.k - blower.k;

  if (commz != MPI_COMM_NULL) {
    /* This rank owns part of the patch: build the zoomed DMDA of size rM x rN x rP on commz */
    PetscCall(DMDACreate(commz, dmz));
    PetscCall(DMSetDimension(*dmz, dim));
    PetscCall(DMDASetSizes(*dmz, rM, rN, rP));
    PetscCall(DMDASetNumProcs(*dmz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE));
    PetscCall(DMDASetBoundaryType(*dmz, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
    PetscCall(DMDASetDof(*dmz, dof));
    PetscCall(DMDASetStencilType(*dmz, st));
    PetscCall(DMDASetStencilWidth(*dmz, 0));
    PetscCall(DMDASetOwnershipRanges(*dmz, NULL, NULL, NULL));
    PetscCall(DMSetFromOptions(*dmz));
    PetscCall(DMSetUp(*dmz));
    PetscCall(DMDAGetCorners(*dmz, &sxb, &syb, &szb, &mxb, &myb, &mzb));
    /* Restricted (halo-free) sub-extent of this rank's piece, in zoomed-local coordinates */
    sxr = PetscMax(sxb, lower.i - blower.i);
    syr = PetscMax(syb, lower.j - blower.j);
    szr = PetscMax(szb, lower.k - blower.k);
    exr = PetscMin(sxb + mxb, upper.i - blower.i);
    eyr = PetscMin(syb + myb, upper.j - blower.j);
    ezr = PetscMin(szb + mzb, upper.k - blower.k);
    /* Scratch arrays sized for the whole buffered patch; reused for both SF graphs */
    PetscCall(PetscMalloc2(dof * rM * rN * PetscMax(rP, 1), &localPoints, dof * rM * rN * PetscMax(rP, 1), &remotePoints));
  } else {
    /* Non-owning rank contributes an empty graph */
    sxr = syr = szr = exr = eyr = ezr = sxb = syb = szb = mxb = myb = mzb = 0;
  }

  /* Create SF for restricted map */
  PetscCall(DMCreateGlobalVector(dm, &X));
  PetscCall(VecGetOwnershipRanges(X, &ranges));

  /* Restricted region expressed in the original DM's global coordinates */
  loclower.i = blower.i + sxr;
  locupper.i = blower.i + exr;
  loclower.j = blower.j + syr;
  locupper.j = blower.j + eyr;
  loclower.k = blower.k + szr;
  locupper.k = blower.k + ezr;

  PetscCall(DMDACreatePatchIS(dm, &loclower, &locupper, &is, patchis_offproc));
  PetscCall(ISGetIndices(is, &indices));

  if (dim < 3) {
    /* Collapse the k loop for 1D/2D grids */
    mzb = 1;
    ezr = 1;
  }
  q = 0;
  for (k = szb; k < szb + mzb; ++k) {
    if ((k < szr) || (k >= ezr)) continue; /* outside restricted region in z */
    for (j = syb; j < syb + myb; ++j) {
      if ((j < syr) || (j >= eyr)) continue; /* outside restricted region in y */
      for (i = sxb; i < sxb + mxb; ++i) {
        for (l = 0; l < dof; l++) {
          /* lp: offset of (i,j,k,l) in the buffered patch's local layout */
          const PetscInt lp = l + dof * (((k - szb) * rN + (j - syb)) * rM + i - sxb);
          PetscMPIInt    r;
          PetscInt       ir;

          if ((i < sxr) || (i >= exr)) continue; /* outside restricted region in x */
          localPoints[q] = lp;
          /* Locate the owner of global index indices[q] in the ownership ranges */
          PetscCall(PetscFindInt(indices[q], size + 1, ranges, &ir));
          PetscCall(PetscMPIIntCast(ir, &r));

          /* PetscFindInt returns -(insertion point)-1 when the index is not an exact range boundary */
          remotePoints[q].rank  = r < 0 ? -(r + 1) - 1 : r;
          remotePoints[q].index = indices[q] - ranges[remotePoints[q].rank];
          ++q;
        }
      }
    }
  }
  PetscCall(ISRestoreIndices(is, &indices));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfzr));
  PetscCall(PetscObjectSetName((PetscObject)*sfzr, "Restricted Map"));
  PetscCall(PetscSFSetGraph(*sfzr, dof * M * N * P, q, localPoints, PETSC_COPY_VALUES, remotePoints, PETSC_COPY_VALUES));

  if (sfz) {
    /* Create SF for buffered map */
    loclower.i = blower.i + sxb;
    locupper.i = blower.i + sxb + mxb;
    loclower.j = blower.j + syb;
    locupper.j = blower.j + syb + myb;
    loclower.k = blower.k + szb;
    locupper.k = blower.k + szb + mzb;

    PetscCall(DMDACreatePatchIS(dm, &loclower, &locupper, &is, patchis_offproc));
    PetscCall(ISGetIndices(is, &indices));

    /* NOTE(review): unlike the restricted map above, this graph indexes grid points
       without a dof factor (nroots M*N*P, not dof*M*N*P) -- confirm intended for dof > 1 */
    q = 0;
    for (k = szb; k < szb + mzb; ++k) {
      for (j = syb; j < syb + myb; ++j) {
        for (i = sxb; i < sxb + mxb; ++i, ++q) {
          PetscInt    ir;
          PetscMPIInt r;

          localPoints[q] = q;
          PetscCall(PetscFindInt(indices[q], size + 1, ranges, &ir));
          PetscCall(PetscMPIIntCast(ir, &r));
          remotePoints[q].rank  = r < 0 ? -(r + 1) - 1 : r;
          remotePoints[q].index = indices[q] - ranges[remotePoints[q].rank];
        }
      }
    }
    PetscCall(ISRestoreIndices(is, &indices));
    PetscCall(ISDestroy(&is));
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfz));
    PetscCall(PetscObjectSetName((PetscObject)*sfz, "Buffered Map"));
    PetscCall(PetscSFSetGraph(*sfz, M * N * P, q, localPoints, PETSC_COPY_VALUES, remotePoints, PETSC_COPY_VALUES));
  }

  PetscCall(VecDestroy(&X));
  PetscCall(PetscFree2(localPoints, remotePoints));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Communicator layout choices for patches: all patches on the world communicator,
   or one patch per rank */
typedef enum {
  PATCH_COMM_TYPE_WORLD = 0,
  PATCH_COMM_TYPE_SELF  = 1
} PatchCommType;
187: PetscErrorCode DMPatchSolve(DM dm)
188: {
189: MPI_Comm comm, commz;
190: DM dmc;
191: PetscSF sfz, sfzr;
192: Vec XC;
193: MatStencil patchSize, commSize, gridRank, lower, upper;
194: PetscInt M, N, P, i, j, k, l, m, n, p = 0;
195: PetscMPIInt rank, size;
196: PetscInt debug = 0;
198: PetscFunctionBegin;
199: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
200: PetscCallMPI(MPI_Comm_rank(comm, &rank));
201: PetscCallMPI(MPI_Comm_size(comm, &size));
202: PetscCall(DMPatchGetCoarse(dm, &dmc));
203: PetscCall(DMPatchGetPatchSize(dm, &patchSize));
204: PetscCall(DMPatchGetCommSize(dm, &commSize));
205: PetscCall(DMPatchGetCommSize(dm, &commSize));
206: PetscCall(DMGetGlobalVector(dmc, &XC));
207: PetscCall(DMDAGetInfo(dmc, NULL, &M, &N, &P, &l, &m, &n, NULL, NULL, NULL, NULL, NULL, NULL));
208: M = PetscMax(M, 1);
209: l = PetscMax(l, 1);
210: N = PetscMax(N, 1);
211: m = PetscMax(m, 1);
212: P = PetscMax(P, 1);
213: n = PetscMax(n, 1);
215: gridRank.i = rank % l;
216: gridRank.j = rank / l % m;
217: gridRank.k = rank / (l * m) % n;
219: if (commSize.i * commSize.j * commSize.k == size || commSize.i * commSize.j * commSize.k == 0) {
220: commSize.i = l;
221: commSize.j = m;
222: commSize.k = n;
223: commz = comm;
224: } else if (commSize.i * commSize.j * commSize.k == 1) {
225: commz = PETSC_COMM_SELF;
226: } else {
227: const PetscMPIInt newComm = (PetscMPIInt)(((gridRank.k / commSize.k) * (m / commSize.j) + gridRank.j / commSize.j) * (l / commSize.i) + (gridRank.i / commSize.i));
228: const PetscMPIInt newRank = (PetscMPIInt)(((gridRank.k % commSize.k) * commSize.j + gridRank.j % commSize.j) * commSize.i + (gridRank.i % commSize.i));
230: PetscCallMPI(MPI_Comm_split(comm, newComm, newRank, &commz));
231: if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Rank %d color %d key %d commz %p\n", rank, newComm, newRank, (void *)(MPI_Aint)commz));
232: }
233: /*
234: Assumptions:
235: - patchSize divides gridSize
236: - commSize divides gridSize
237: - commSize divides l,m,n
238: Ignore multiple patches per rank for now
240: Multiple ranks per patch:
241: - l,m,n divides patchSize
242: - commSize divides patchSize
243: */
244: for (k = 0; k < P; k += PetscMax(patchSize.k, 1)) {
245: for (j = 0; j < N; j += PetscMax(patchSize.j, 1)) {
246: for (i = 0; i < M; i += PetscMax(patchSize.i, 1), ++p) {
247: MPI_Comm commp = MPI_COMM_NULL;
248: DM dmz = NULL;
249: #if 0
250: DM dmf = NULL;
251: Mat interpz = NULL;
252: #endif
253: Vec XZ = NULL;
254: PetscScalar *xcarray = NULL;
255: PetscScalar *xzarray = NULL;
257: if ((gridRank.k / commSize.k == p / (l / commSize.i * m / commSize.j) % n / commSize.k) && (gridRank.j / commSize.j == p / (l / commSize.i) % m / commSize.j) && (gridRank.i / commSize.i == p % l / commSize.i)) {
258: if (debug) PetscCall(PetscPrintf(PETSC_COMM_SELF, "Rank %d is accepting Patch %" PetscInt_FMT "\n", rank, p));
259: commp = commz;
260: }
261: /* Zoom to coarse patch */
262: lower.i = i;
263: lower.j = j;
264: lower.k = k;
265: upper.i = i + patchSize.i;
266: upper.j = j + patchSize.j;
267: upper.k = k + patchSize.k;
268: PetscCall(DMPatchZoom(dmc, lower, upper, commp, &dmz, &sfz, &sfzr));
269: lower.c = 0; /* initialize member, otherwise compiler issues warnings */
270: upper.c = 0; /* initialize member, otherwise compiler issues warnings */
271: if (debug)
272: PetscCall(PetscPrintf(comm, "Patch %" PetscInt_FMT ": (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")--(%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")\n", p, lower.i, lower.j, lower.k, upper.i, upper.j, upper.k));
273: if (dmz) PetscCall(DMView(dmz, PETSC_VIEWER_STDOUT_(commz)));
274: PetscCall(PetscSFView(sfz, PETSC_VIEWER_STDOUT_(comm)));
275: PetscCall(PetscSFView(sfzr, PETSC_VIEWER_STDOUT_(comm)));
276: /* Scatter Xcoarse -> Xzoom */
277: if (dmz) PetscCall(DMGetGlobalVector(dmz, &XZ));
278: if (XZ) PetscCall(VecGetArray(XZ, &xzarray));
279: PetscCall(VecGetArray(XC, &xcarray));
280: PetscCall(PetscSFBcastBegin(sfz, MPIU_SCALAR, xcarray, xzarray, MPI_REPLACE));
281: PetscCall(PetscSFBcastEnd(sfz, MPIU_SCALAR, xcarray, xzarray, MPI_REPLACE));
282: PetscCall(VecRestoreArray(XC, &xcarray));
283: if (XZ) PetscCall(VecRestoreArray(XZ, &xzarray));
284: #if 0
285: /* Interpolate Xzoom -> Xfine, note that this may be on subcomms */
286: PetscCall(DMRefine(dmz, MPI_COMM_NULL, &dmf));
287: PetscCall(DMCreateInterpolation(dmz, dmf, &interpz, NULL));
288: PetscCall(DMInterpolate(dmz, interpz, dmf));
289: /* Smooth Xfine using two-step smoother, normal smoother plus Kaczmarz---moves back and forth from dmzoom to dmfine */
290: /* Compute residual Rfine */
291: /* Restrict Rfine to Rzoom_restricted */
292: #endif
293: /* Scatter Rzoom_restricted -> Rcoarse_restricted */
294: if (XZ) PetscCall(VecGetArray(XZ, &xzarray));
295: PetscCall(VecGetArray(XC, &xcarray));
296: PetscCall(PetscSFReduceBegin(sfzr, MPIU_SCALAR, xzarray, xcarray, MPIU_SUM));
297: PetscCall(PetscSFReduceEnd(sfzr, MPIU_SCALAR, xzarray, xcarray, MPIU_SUM));
298: PetscCall(VecRestoreArray(XC, &xcarray));
299: if (XZ) PetscCall(VecRestoreArray(XZ, &xzarray));
300: if (dmz) PetscCall(DMRestoreGlobalVector(dmz, &XZ));
301: /* Compute global residual Rcoarse */
302: /* TauCoarse = Rcoarse - Rcoarse_restricted */
304: PetscCall(PetscSFDestroy(&sfz));
305: PetscCall(PetscSFDestroy(&sfzr));
306: PetscCall(DMDestroy(&dmz));
307: }
308: }
309: }
310: PetscCall(DMRestoreGlobalVector(dmc, &XC));
311: PetscFunctionReturn(PETSC_SUCCESS);
312: }
314: static PetscErrorCode DMPatchView_ASCII(DM dm, PetscViewer viewer)
315: {
316: DM_Patch *mesh = (DM_Patch *)dm->data;
317: PetscViewerFormat format;
318: const char *name;
320: PetscFunctionBegin;
321: PetscCall(PetscViewerGetFormat(viewer, &format));
322: /* if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) */
323: PetscCall(PetscObjectGetName((PetscObject)dm, &name));
324: PetscCall(PetscViewerASCIIPrintf(viewer, "Patch DM %s\n", name));
325: PetscCall(PetscViewerASCIIPushTab(viewer));
326: PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse DM\n"));
327: PetscCall(DMView(mesh->dmCoarse, viewer));
328: PetscCall(PetscViewerASCIIPopTab(viewer));
329: PetscFunctionReturn(PETSC_SUCCESS);
330: }
332: PetscErrorCode DMView_Patch(DM dm, PetscViewer viewer)
333: {
334: PetscBool iascii, isbinary;
336: PetscFunctionBegin;
339: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
340: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
341: if (iascii) PetscCall(DMPatchView_ASCII(dm, viewer));
342: PetscFunctionReturn(PETSC_SUCCESS);
343: }
345: PetscErrorCode DMDestroy_Patch(DM dm)
346: {
347: DM_Patch *mesh = (DM_Patch *)dm->data;
349: PetscFunctionBegin;
350: if (--mesh->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);
351: PetscCall(DMDestroy(&mesh->dmCoarse));
352: /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
353: PetscCall(PetscFree(mesh));
354: PetscFunctionReturn(PETSC_SUCCESS);
355: }
357: PetscErrorCode DMSetUp_Patch(DM dm)
358: {
359: DM_Patch *mesh = (DM_Patch *)dm->data;
361: PetscFunctionBegin;
363: PetscCall(DMSetUp(mesh->dmCoarse));
364: PetscFunctionReturn(PETSC_SUCCESS);
365: }
367: PetscErrorCode DMCreateGlobalVector_Patch(DM dm, Vec *g)
368: {
369: DM_Patch *mesh = (DM_Patch *)dm->data;
371: PetscFunctionBegin;
373: PetscCall(DMCreateGlobalVector(mesh->dmCoarse, g));
374: PetscFunctionReturn(PETSC_SUCCESS);
375: }
377: PetscErrorCode DMCreateLocalVector_Patch(DM dm, Vec *l)
378: {
379: DM_Patch *mesh = (DM_Patch *)dm->data;
381: PetscFunctionBegin;
383: PetscCall(DMCreateLocalVector(mesh->dmCoarse, l));
384: PetscFunctionReturn(PETSC_SUCCESS);
385: }
PetscErrorCode DMCreateSubDM_Patch(DM dm, PetscInt numFields, const PetscInt fields[], IS *is, DM *subdm)
{
  /* Sub-DM extraction is not implemented for DMPatch; fail loudly rather than silently */
  SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Tell me to code this");
}
392: PetscErrorCode DMPatchGetCoarse(DM dm, DM *dmCoarse)
393: {
394: DM_Patch *mesh = (DM_Patch *)dm->data;
396: PetscFunctionBegin;
398: *dmCoarse = mesh->dmCoarse;
399: PetscFunctionReturn(PETSC_SUCCESS);
400: }
402: PetscErrorCode DMPatchGetPatchSize(DM dm, MatStencil *patchSize)
403: {
404: DM_Patch *mesh = (DM_Patch *)dm->data;
406: PetscFunctionBegin;
408: PetscAssertPointer(patchSize, 2);
409: *patchSize = mesh->patchSize;
410: PetscFunctionReturn(PETSC_SUCCESS);
411: }
413: PetscErrorCode DMPatchSetPatchSize(DM dm, MatStencil patchSize)
414: {
415: DM_Patch *mesh = (DM_Patch *)dm->data;
417: PetscFunctionBegin;
419: mesh->patchSize = patchSize;
420: PetscFunctionReturn(PETSC_SUCCESS);
421: }
423: PetscErrorCode DMPatchGetCommSize(DM dm, MatStencil *commSize)
424: {
425: DM_Patch *mesh = (DM_Patch *)dm->data;
427: PetscFunctionBegin;
429: PetscAssertPointer(commSize, 2);
430: *commSize = mesh->commSize;
431: PetscFunctionReturn(PETSC_SUCCESS);
432: }
434: PetscErrorCode DMPatchSetCommSize(DM dm, MatStencil commSize)
435: {
436: DM_Patch *mesh = (DM_Patch *)dm->data;
438: PetscFunctionBegin;
440: mesh->commSize = commSize;
441: PetscFunctionReturn(PETSC_SUCCESS);
442: }