Actual source code: dadd.c
#include <petsc/private/dmdaimpl.h>

/*@
  DMDACreatePatchIS - Creates an index set corresponding to a logically rectangular patch of the `DMDA`.

  Collective

  Input Parameters:
+ da      - the `DMDA`
. lower   - a `MatStencil` with i, j and k entries corresponding to the lower corner of the patch
. upper   - a `MatStencil` with i, j and k entries corresponding to the upper corner of the patch (exclusive: the patch runs from `lower` up to, but not including, `upper`)
- offproc - indicates whether the returned `IS` will contain off-process indices

  Output Parameter:
. is - the `IS` corresponding to the patch

  Level: developer

  Notes:
  This routine always returns an `IS` on the `DMDA` communicator.

  If `offproc` is set to `PETSC_TRUE`,
  the routine returns an `IS` with all the indices requested, regardless of whether these indices
  are present on the requesting MPI process or not. Thus, it is up to the caller to ensure that
  the indices returned in this mode are appropriate.

  If `offproc` is set to `PETSC_FALSE`,
  the `IS` only returns the subset of indices that are present on the requesting MPI process and there
  is no duplication of indices between multiple MPI processes.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMCreateDomainDecompositionScatters()`
@*/
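/*
  A minimal usage sketch (hypothetical sizes, assuming a 2d DMDA named da with
  M >= 5 and N >= 7): gather the global indices of the patch covering
  2 <= i < 5, 3 <= j < 7 into an IS, including indices owned by other ranks.

    MatStencil lower, upper;
    IS         patch;

    lower.i = 2; lower.j = 3; lower.k = 0;
    upper.i = 5; upper.j = 7; upper.k = 0;
    PetscCall(DMDACreatePatchIS(da, &lower, &upper, &patch, PETSC_TRUE));
    ... use the patch, e.g. with VecScatterCreate() ...
    PetscCall(ISDestroy(&patch));
*/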
PetscErrorCode DMDACreatePatchIS(DM da, MatStencil *lower, MatStencil *upper, IS *is, PetscBool offproc)
{
  PetscInt        ms = 0, ns = 0, ps = 0;
  PetscInt        mw = 0, nw = 0, pw = 0;
  PetscInt        me = 1, ne = 1, pe = 1;
  PetscInt        mr = 0, nr = 0, pr = 0;
  PetscInt        ii, jj, kk;
  PetscInt        si, sj, sk;
  PetscInt        i, k = 0, l, idx = 0;
  PetscInt        base;
  PetscInt        xm = 1, ym = 1, zm = 1;
  PetscInt        ox, oy, oz;
  PetscInt        m, n, p, M, N, P, dof;
  const PetscInt *lx, *ly, *lz;
  PetscInt        nindices;
  PetscInt       *indices;
  DM_DA          *dd = (DM_DA *)da->data;
  PetscBool       skip_i = PETSC_TRUE, skip_j = PETSC_TRUE, skip_k = PETSC_TRUE;
  PetscBool       valid_j = PETSC_FALSE, valid_k = PETSC_FALSE; /* DMDA has at least 1 dimension */

  PetscFunctionBegin;
  M   = dd->M;
  N   = dd->N;
  P   = dd->P;
  m   = dd->m;
  n   = dd->n;
  p   = dd->p;
  dof = dd->w;

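  /* Count the requested indices: nindices starts at -1 and accumulates the product of the
     patch extents, so it stays negative for a well-formed patch and is flipped positive at
     the end; a nonnegative value before the flip signals a malformed (reversed) patch. */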
  nindices = -1;
  if (PetscLikely(upper->i - lower->i)) {
    nindices = nindices * (upper->i - lower->i);
    skip_i   = PETSC_FALSE;
  }
  if (N > 1) {
    valid_j = PETSC_TRUE;
    if (PetscLikely(upper->j - lower->j)) {
      nindices = nindices * (upper->j - lower->j);
      skip_j   = PETSC_FALSE;
    }
  }
  if (P > 1) {
    valid_k = PETSC_TRUE;
    if (PetscLikely(upper->k - lower->k)) {
      nindices = nindices * (upper->k - lower->k);
      skip_k   = PETSC_FALSE;
    }
  }
  if (PetscLikely(nindices < 0)) {
    if (PetscUnlikely(skip_i && skip_j && skip_k)) {
      nindices = 0;
    } else nindices = nindices * (-1);
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Upper stencil is smaller than the lower stencil in at least one dimension! Please check inputs.");

  PetscCall(PetscMalloc1(nindices * dof, &indices));
  PetscCall(DMDAGetOffset(da, &ox, &oy, &oz, NULL, NULL, NULL));

  if (!valid_k) {
    upper->k = 0;
    lower->k = 0;
  }
  if (!valid_j) {
    upper->j = 0;
    lower->j = 0;
  }

  if (offproc) {
    PetscCall(DMDAGetOwnershipRanges(da, &lx, &ly, &lz));
    /* start at index 0 on processor 0 */
    mr = 0;
    nr = 0;
    pr = 0;
    ms = 0;
    ns = 0;
    ps = 0;
    if (lx) me = lx[0];
    if (ly) ne = ly[0];
    if (lz) pe = lz[0];
    /*
       If no indices are to be returned, create an empty is,
       this prevents hanging in while loops
    */
    if (skip_i && skip_j && skip_k) goto createis;
    /*
       do..while loops to ensure the block gets entered once,
       regardless of control condition being met, necessary for
       cases when a subset of skip_i/j/k is true
    */
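    /* Worked example (not from the source, for orientation): on a 4x4 2d DA with dof = 1
       split over a 2x2 process grid, rank 3 owns 2 <= i,j < 4, so for the point (i,j) = (2,3):
       base = ms*ym*zm + ns*M*zm + ps*M*N = 2*2 + 2*4 + 0 = 12, si = 0, sj = 1, and the
       global PETSc index is base + si + xm*sj = 12 + 0 + 2*1 = 14. */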
    if (skip_k) k = upper->k - oz;
    else k = lower->k - oz;
    do {
      PetscInt j;

      if (skip_j) j = upper->j - oy;
      else j = lower->j - oy;
      do {
        if (skip_i) i = upper->i - ox;
        else i = lower->i - ox;
        do {
132: /* "actual" indices rather than ones outside of the domain */
          ii = i;
          jj = j;
          kk = k;
          if (ii < 0) ii = M + ii;
          if (jj < 0) jj = N + jj;
          if (kk < 0) kk = P + kk;
          if (ii > M - 1) ii = ii - M;
          if (jj > N - 1) jj = jj - N;
          if (kk > P - 1) kk = kk - P;
          /* gone out of processor range on x axis */
          while (ii > me - 1 || ii < ms) {
            if (mr == m - 1) {
              ms = 0;
              me = lx[0];
              mr = 0;
            } else {
              mr++;
              ms = me;
              me += lx[mr];
            }
          }
          /* gone out of processor range on y axis */
          while (jj > ne - 1 || jj < ns) {
            if (nr == n - 1) {
              ns = 0;
              ne = ly[0];
              nr = 0;
            } else {
              nr++;
              ns = ne;
              ne += ly[nr];
            }
          }
          /* gone out of processor range on z axis */
          while (kk > pe - 1 || kk < ps) {
            if (pr == p - 1) {
              ps = 0;
              pe = lz[0];
              pr = 0;
            } else {
              pr++;
              ps = pe;
              pe += lz[pr];
            }
          }
          /* compute the vector base on the owning processor, i.e. the global index of its first entry */
          xm   = me - ms;
          ym   = ne - ns;
          zm   = pe - ps;
          base = ms * ym * zm + ns * M * zm + ps * M * N;
          /* compute the local coordinates on owning processor */
          si = ii - ms;
          sj = jj - ns;
          sk = kk - ps;
          for (l = 0; l < dof; l++) {
            indices[idx] = l + dof * (base + si + xm * sj + xm * ym * sk);
            idx++;
          }
          i++;
        } while (i < upper->i - ox);
        j++;
      } while (j < upper->j - oy);
      k++;
    } while (k < upper->k - oz);
  }

  if (!offproc) {
    PetscCall(DMDAGetCorners(da, &ms, &ns, &ps, &mw, &nw, &pw));
    me = ms + mw;
    if (N > 1) ne = ns + nw;
    if (P > 1) pe = ps + pw;
    /* Account for DM offsets */
    ms = ms - ox;
    me = me - ox;
    ns = ns - oy;
    ne = ne - oy;
    ps = ps - oz;
    pe = pe - oz;

    /* compute the vector base on the owning processor, i.e. the global index of its first entry */
    xm   = me - ms;
    ym   = ne - ns;
    zm   = pe - ps;
    base = ms * ym * zm + ns * M * zm + ps * M * N;
    /*
       if no indices are to be returned, create an empty is,
       this prevents hanging in while loops
    */
    if (skip_i && skip_j && skip_k) goto createis;
    /*
       do..while loops to ensure the block gets entered once,
       regardless of control condition being met, necessary for
       cases when a subset of skip_i/j/k is true
    */
    if (skip_k) k = upper->k - oz;
    else k = lower->k - oz;
    do {
      PetscInt j;

      if (skip_j) j = upper->j - oy;
      else j = lower->j - oy;
      do {
        if (skip_i) i = upper->i - ox;
        else i = lower->i - ox;
        do {
          if (k >= ps && k <= pe - 1) {
            if (j >= ns && j <= ne - 1) {
              if (i >= ms && i <= me - 1) {
                /* compute the local coordinates on owning processor */
                si = i - ms;
                sj = j - ns;
                sk = k - ps;
                for (l = 0; l < dof; l++) {
                  indices[idx] = l + dof * (base + si + xm * sj + xm * ym * sk);
                  idx++;
                }
              }
            }
          }
          i++;
        } while (i < upper->i - ox);
        j++;
      } while (j < upper->j - oy);
      k++;
    } while (k < upper->k - oz);

    PetscCall(PetscRealloc((size_t)(idx * sizeof(PetscInt)), (void *)&indices));
  }

createis:
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)da), idx, indices, PETSC_OWN_POINTER, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

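/*
  A sketch of how such a patch IS is typically consumed (mirroring what
  DMCreateDomainDecompositionScatters_DA does below); gvec and pvec are
  assumed to be vectors whose layouts match the IS lengths:

    VecScatter sct;

    PetscCall(VecScatterCreate(gvec, patch, pvec, NULL, &sct));
    PetscCall(VecScatterBegin(sct, gvec, pvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sct, gvec, pvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sct));
*/
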
static PetscErrorCode DMDASubDomainDA_Private(DM dm, PetscInt *nlocal, DM **sdm)
{
  DM           *da;
  PetscInt      dim, size, i, j, k, idx;
  DMDALocalInfo info;
  PetscInt      xsize, ysize, zsize;
  PetscInt      xo, yo, zo;
  PetscInt      xs, ys, zs;
  PetscInt      xm = 1, ym = 1, zm = 1;
  PetscInt      xol, yol, zol;
  PetscInt      m = 1, n = 1, p = 1;
  PetscInt      M, N, P;
  PetscInt      pm, mtmp;

  PetscFunctionBegin;
  PetscCall(DMDAGetLocalInfo(dm, &info));
  PetscCall(DMDAGetOverlap(dm, &xol, &yol, &zol));
  PetscCall(DMDAGetNumLocalSubDomains(dm, &size));
  PetscCall(PetscMalloc1(size, &da));

  if (nlocal) *nlocal = size;

  dim = info.dim;

  M = info.xm;
  N = info.ym;
  P = info.zm;

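  /* Choose a subdomain grid m x n x p with m*n*p == size whose aspect ratio roughly matches
     the local grid M x N x P. For instance (illustrative, not from the source), size = 4 on
     an 8x8 2d local grid yields m = 2, n = 2. */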
  if (dim == 1) {
    m = size;
  } else if (dim == 2) {
    m = (PetscInt)(0.5 + PetscSqrtReal(((PetscReal)M) * ((PetscReal)size) / ((PetscReal)N)));
    while (m > 0) {
      n = size / m;
      if (m * n * p == size) break;
      m--;
    }
  } else if (dim == 3) {
    n = (PetscInt)(0.5 + PetscPowReal(((PetscReal)N * N) * ((PetscReal)size) / ((PetscReal)P * M), (PetscReal)(1. / 3.)));
    if (!n) n = 1;
    while (n > 0) {
      pm = size / n;
      if (n * pm == size) break;
      n--;
    }
    if (!n) n = 1;
    m = (PetscInt)(0.5 + PetscSqrtReal(((PetscReal)M) * ((PetscReal)size) / ((PetscReal)P * n)));
    if (!m) m = 1;
    while (m > 0) {
      p = size / (m * n);
      if (m * n * p == size) break;
      m--;
    }
    if (M > P && m < p) {
      mtmp = m;
      m    = p;
      p    = mtmp;
    }
  }

  zs  = info.zs;
  idx = 0;
  for (k = 0; k < p; k++) {
    ys = info.ys;
    for (j = 0; j < n; j++) {
      xs = info.xs;
      for (i = 0; i < m; i++) {
        if (dim == 1) {
          xm = M / m + ((M % m) > i);
        } else if (dim == 2) {
          xm = M / m + ((M % m) > i);
          ym = N / n + ((N % n) > j);
        } else if (dim == 3) {
          xm = M / m + ((M % m) > i);
          ym = N / n + ((N % n) > j);
          zm = P / p + ((P % p) > k);
        }

        xsize = xm;
        ysize = ym;
        zsize = zm;
        xo    = xs;
        yo    = ys;
        zo    = zs;

        PetscCall(DMDACreate(PETSC_COMM_SELF, &da[idx]));
        PetscCall(DMSetOptionsPrefix(da[idx], "sub_"));
        PetscCall(DMSetDimension(da[idx], info.dim));
        PetscCall(DMDASetDof(da[idx], info.dof));

        PetscCall(DMDASetStencilType(da[idx], info.st));
        PetscCall(DMDASetStencilWidth(da[idx], info.sw));

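        /* Grow the subdomain by the overlap on every side that has a neighbor (or wraps
           periodically), then clip the result back to the physical domain below for
           non-periodic boundaries. */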
        if (info.bx == DM_BOUNDARY_PERIODIC || (xs != 0)) {
          xsize += xol;
          xo -= xol;
        }
        if (info.by == DM_BOUNDARY_PERIODIC || (ys != 0)) {
          ysize += yol;
          yo -= yol;
        }
        if (info.bz == DM_BOUNDARY_PERIODIC || (zs != 0)) {
          zsize += zol;
          zo -= zol;
        }

        if (info.bx == DM_BOUNDARY_PERIODIC || (xs + xm != info.mx)) xsize += xol;
        if (info.by == DM_BOUNDARY_PERIODIC || (ys + ym != info.my)) ysize += yol;
        if (info.bz == DM_BOUNDARY_PERIODIC || (zs + zm != info.mz)) zsize += zol;

        if (info.bx != DM_BOUNDARY_PERIODIC) {
          if (xo < 0) {
            xsize += xo;
            xo = 0;
          }
          if (xo + xsize > info.mx - 1) xsize -= xo + xsize - info.mx;
        }
        if (info.by != DM_BOUNDARY_PERIODIC) {
          if (yo < 0) {
            ysize += yo;
            yo = 0;
          }
          if (yo + ysize > info.my - 1) ysize -= yo + ysize - info.my;
        }
        if (info.bz != DM_BOUNDARY_PERIODIC) {
          if (zo < 0) {
            zsize += zo;
            zo = 0;
          }
          if (zo + zsize > info.mz - 1) zsize -= zo + zsize - info.mz;
        }

        PetscCall(DMDASetSizes(da[idx], xsize, ysize, zsize));
        PetscCall(DMDASetNumProcs(da[idx], 1, 1, 1));
        PetscCall(DMDASetBoundaryType(da[idx], DM_BOUNDARY_GHOSTED, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_GHOSTED));

        /* set up as a block instead */
        PetscCall(DMSetUp(da[idx]));

        /* nonoverlapping region */
        PetscCall(DMDASetNonOverlappingRegion(da[idx], xs, ys, zs, xm, ym, zm));

        /* this alters the behavior of DMDAGetInfo, DMDAGetLocalInfo, DMDAGetCorners, and DMDAGetGhostedCorners and should be used with care */
        PetscCall(DMDASetOffset(da[idx], xo, yo, zo, info.mx, info.my, info.mz));
        xs += xm;
        idx++;
      }
      ys += ym;
    }
    zs += zm;
  }
  *sdm = da;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Creates the scatters that fill the local vector problems on the subdomains from the
   global problem, and move the results back.

   Right now this assumes one subdomain per processor.
*/
PetscErrorCode DMCreateDomainDecompositionScatters_DA(DM dm, PetscInt nsubdms, DM *subdms, VecScatter **iscat, VecScatter **oscat, VecScatter **lscat)
{
  DMDALocalInfo info, subinfo;
  DM            subdm;
  MatStencil    upper, lower;
  IS            idis, isis, odis, osis, gdis;
  Vec           svec, dvec, slvec;
  PetscInt      xm, ym, zm, xs, ys, zs;
  PetscInt      i;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscFunctionBegin;
  /* allocate the arrays of scatters */
  if (iscat) PetscCall(PetscMalloc1(nsubdms, iscat));
  if (oscat) PetscCall(PetscMalloc1(nsubdms, oscat));
  if (lscat) PetscCall(PetscMalloc1(nsubdms, lscat));

  PetscCall(DMDAGetLocalInfo(dm, &info));
  for (i = 0; i < nsubdms; i++) {
    subdm = subdms[i];
    PetscCall(DMDAGetLocalInfo(subdm, &subinfo));
    PetscCall(DMDAGetNonOverlappingRegion(subdm, &xs, &ys, &zs, &xm, &ym, &zm));

    /* create the global and subdomain index sets for the inner domain */
    lower.i = xs;
    lower.j = ys;
    lower.k = zs;
    upper.i = xs + xm;
    upper.j = ys + ym;
    upper.k = zs + zm;
    PetscCall(DMDACreatePatchIS(dm, &lower, &upper, &idis, patchis_offproc));
    PetscCall(DMDACreatePatchIS(subdm, &lower, &upper, &isis, patchis_offproc));

    /* create the global and subdomain index sets for the outer subdomain */
    lower.i = subinfo.xs;
    lower.j = subinfo.ys;
    lower.k = subinfo.zs;
    upper.i = subinfo.xs + subinfo.xm;
    upper.j = subinfo.ys + subinfo.ym;
    upper.k = subinfo.zs + subinfo.zm;
    PetscCall(DMDACreatePatchIS(dm, &lower, &upper, &odis, patchis_offproc));
    PetscCall(DMDACreatePatchIS(subdm, &lower, &upper, &osis, patchis_offproc));

    /* global and subdomain ISes for the local indices of the subdomain */
    /* todo - make this not loop over at nonperiodic boundaries, which will be more involved */
    lower.i = subinfo.gxs;
    lower.j = subinfo.gys;
    lower.k = subinfo.gzs;
    upper.i = subinfo.gxs + subinfo.gxm;
    upper.j = subinfo.gys + subinfo.gym;
    upper.k = subinfo.gzs + subinfo.gzm;
    PetscCall(DMDACreatePatchIS(dm, &lower, &upper, &gdis, patchis_offproc));

    /* form the scatter */
    PetscCall(DMGetGlobalVector(dm, &dvec));
    PetscCall(DMGetGlobalVector(subdm, &svec));
    PetscCall(DMGetLocalVector(subdm, &slvec));

    if (iscat) PetscCall(VecScatterCreate(dvec, idis, svec, isis, &(*iscat)[i]));
    if (oscat) PetscCall(VecScatterCreate(dvec, odis, svec, osis, &(*oscat)[i]));
    if (lscat) PetscCall(VecScatterCreate(dvec, gdis, slvec, NULL, &(*lscat)[i]));

    PetscCall(DMRestoreGlobalVector(dm, &dvec));
    PetscCall(DMRestoreGlobalVector(subdm, &svec));
    PetscCall(DMRestoreLocalVector(subdm, &slvec));

    PetscCall(ISDestroy(&idis));
    PetscCall(ISDestroy(&isis));

    PetscCall(ISDestroy(&odis));
    PetscCall(ISDestroy(&osis));

    PetscCall(ISDestroy(&gdis));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

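/*
  A sketch of applying the scatters created above (names are illustrative): push the
  global solution into subdomain i's overlapped local problem and pull the inner part back.

    Vec gvec, svec;   (global vector of dm, global vector of subdms[i])

    PetscCall(VecScatterBegin(oscat[i], gvec, svec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(oscat[i], gvec, svec, INSERT_VALUES, SCATTER_FORWARD));
    ... solve on the subdomain ...
    PetscCall(VecScatterBegin(iscat[i], svec, gvec, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(iscat[i], svec, gvec, INSERT_VALUES, SCATTER_REVERSE));
*/
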
static PetscErrorCode DMDASubDomainIS_Private(DM dm, PetscInt n, DM *subdm, IS **iis, IS **ois)
{
  PetscInt      i;
  DMDALocalInfo info, subinfo;
  MatStencil    lower, upper;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscFunctionBegin;
  PetscCall(DMDAGetLocalInfo(dm, &info));
  if (iis) PetscCall(PetscMalloc1(n, iis));
  if (ois) PetscCall(PetscMalloc1(n, ois));

  for (i = 0; i < n; i++) {
    PetscCall(DMDAGetLocalInfo(subdm[i], &subinfo));
    if (iis) {
      /* create the inner IS */
      lower.i = info.xs;
      lower.j = info.ys;
      lower.k = info.zs;
      upper.i = info.xs + info.xm;
      upper.j = info.ys + info.ym;
      upper.k = info.zs + info.zm;
      PetscCall(DMDACreatePatchIS(dm, &lower, &upper, &(*iis)[i], patchis_offproc));
    }

    if (ois) {
      /* create the outer IS */
      lower.i = subinfo.xs;
      lower.j = subinfo.ys;
      lower.k = subinfo.zs;
      upper.i = subinfo.xs + subinfo.xm;
      upper.j = subinfo.ys + subinfo.ym;
      upper.k = subinfo.zs + subinfo.zm;
      PetscCall(DMDACreatePatchIS(dm, &lower, &upper, &(*ois)[i], patchis_offproc));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateDomainDecomposition_DA(DM dm, PetscInt *len, char ***names, IS **iis, IS **ois, DM **subdm)
{
  DM      *sdm = NULL;
  PetscInt n = 0, i;

  PetscFunctionBegin;
  PetscCall(DMDASubDomainDA_Private(dm, &n, &sdm));
  if (names) {
    PetscCall(PetscMalloc1(n, names));
    for (i = 0; i < n; i++) (*names)[i] = NULL;
  }
  PetscCall(DMDASubDomainIS_Private(dm, n, sdm, iis, ois));
  if (subdm) *subdm = sdm;
  else {
    for (i = 0; i < n; i++) PetscCall(DMDestroy(&sdm[i]));
  }
  if (len) *len = n;
  PetscFunctionReturn(PETSC_SUCCESS);
}
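
/*
  These hooks are normally reached through the generic interface rather than called
  directly; a sketch (assuming dm is a DMDA set up with overlap and local subdomains):

    PetscInt n;
    char   **names;
    IS      *iis, *ois;
    DM      *subdms;

    PetscCall(DMCreateDomainDecomposition(dm, &n, &names, &iis, &ois, &subdms));
*/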