Actual source code: da1.c

  1: /*
  2:    Code for manipulating distributed regular 1d arrays in parallel.
  3:    This file was created by Peter Mell   6/30/95
  4: */

  6: #include <petsc/private/dmdaimpl.h>

  8: #include <petscdraw.h>
/*
  DMView_DA_1d - DMView() implementation for a one-dimensional DMDA.

  Collective

  Input Parameters:
+ da     - the 1d DMDA being viewed
- viewer - the viewer; ASCII (including the PETSC_VIEWER_LOAD_BALANCE and
           PETSC_VIEWER_ASCII_GLVIS formats), draw, GLVis, binary and
           (when PETSc is configured with MATLAB) MATLAB viewers are handled.
           Any other viewer type is silently ignored.
*/
static PetscErrorCode DMView_DA_1d(DM da, PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   isascii, isdraw, isglvis, isbinary;
  DM_DA      *dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MATLAB)
  PetscBool ismatlab;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank));

  /* identify the concrete viewer type; at most one of these flags becomes true */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab));
#endif
  if (isascii) {
    PetscViewerFormat format;

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      /* report min/avg/max of the number of locally owned grid points over all ranks */
      PetscInt      i, nmax = 0, nmin = PETSC_INT_MAX, navg = 0, *nz, nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
      PetscCall(DMDAGetLocalInfo(da, &info));
      nzlocal = info.xm; /* number of grid points owned by this rank */
      PetscCall(PetscMalloc1(size, &nz));
      PetscCallMPI(MPI_Allgather(&nzlocal, 1, MPIU_INT, nz, 1, MPIU_INT, PetscObjectComm((PetscObject)da)));
      for (i = 0; i < size; i++) {
        nmax = PetscMax(nmax, nz[i]);
        nmin = PetscMin(nmin, nz[i]);
        navg += nz[i];
      }
      PetscCall(PetscFree(nz));
      navg = navg / size;
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n", nmin, navg, nmax));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    if (format != PETSC_VIEWER_ASCII_GLVIS) {
      /* default ASCII output: per-rank global/local sizes and the owned index range */
      DMDALocalInfo info;
      PetscCall(DMDAGetLocalInfo(da, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n", rank, dd->M, dd->m, dd->w, dd->s));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n", info.xs, info.xs + info.xm));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) PetscCall(DMView_DA_GLVis(da, viewer));
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1, ymax = 1, xmin = -1, xmax = dd->M, x;
    PetscInt  base;
    char      node[10]; /* buffer for one printed global point index */
    PetscBool isnull;

    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawSetCoordinates(draw, xmin, ymin, xmax, ymax));

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      ymin = 0.0;
      ymax = 0.3;
      for (PetscInt xmin_tmp = 0; xmin_tmp < dd->M; xmin_tmp++) PetscCall(PetscDrawLine(draw, (double)xmin_tmp, ymin, (double)xmin_tmp, ymax, PETSC_DRAW_BLACK));
      xmin = 0.0;
      xmax = dd->M - 1;
      PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_BLACK));
      PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_BLACK));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));

    PetscDrawCollectiveBegin(draw);
    /* draw my box; dd->xs and dd->xe are stored in dof units, hence the division by dd->w */
    ymin = 0;
    ymax = 0.3;
    xmin = dd->xs / dd->w;
    xmax = (dd->xe / dd->w) - 1;
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_RED));
    PetscCall(PetscDrawLine(draw, xmax, ymin, xmax, ymax, PETSC_DRAW_RED));
    /* Put in index numbers */
    base = dd->base / dd->w; /* first global point index owned by this rank */
    for (x = xmin; x <= xmax; x++) {
      PetscCall(PetscSNPrintf(node, sizeof(node), "%" PetscInt_FMT, base++));
      PetscCall(PetscDrawString(draw, x, ymin, PETSC_DRAW_RED, node));
    }
    PetscDrawCollectiveEnd(draw);
    PetscCall(PetscDrawFlush(draw));
    PetscCall(PetscDrawPause(draw));
    PetscCall(PetscDrawSave(draw));
  } else if (isglvis) {
    PetscCall(DMView_DA_GLVis(da, viewer));
  } else if (isbinary) {
    PetscCall(DMView_DA_Binary(da, viewer));
#if defined(PETSC_HAVE_MATLAB)
  } else if (ismatlab) {
    PetscCall(DMView_DA_Matlab(da, viewer));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  DMSetUp_DA_1D - Completes the construction of a one-dimensional DMDA.

  Collective

  Determines the locally owned region [xs, xe), the ghosted region [Xs, Xe)
  and the region actually scattered into [IXs, IXe) (ghost points clipped to
  the physical domain for non-periodic boundaries), builds the global-to-local
  VecScatter and the local-to-global mapping, and records the results in the
  DM_DA data structure.

  Input Parameter:
. da - the DMDA (dimension 1) whose sizes, dof, stencil width, boundary type
       and optional per-rank ownership array (dd->lx) have already been set
*/
PetscErrorCode DMSetUp_DA_1D(DM da)
{
  DM_DA          *dd    = (DM_DA *)da->data;
  const PetscInt  M     = dd->M;  /* global number of grid points */
  const PetscInt  dof   = dd->w;  /* degrees of freedom per point */
  const PetscInt  s     = dd->s;  /* stencil width */
  const PetscInt  sDist = s; /* stencil distance in points */
  const PetscInt *lx    = dd->lx; /* optional user-provided ownership counts */
  DMBoundaryType  bx    = dd->bx;
  MPI_Comm        comm;
  Vec             local, global;
  VecScatter      gtol;
  IS              to, from;
  PetscBool       flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt     rank, size;
  PetscInt        i, *idx, nn, left, xs, xe, x, Xs, Xe, start, m, IXs, IXe;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* a 1d DMDA uses all ranks along x and exactly one along y and z */
  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    PetscCheck(M >= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT, m, M);
    PetscCheck((M - 1) >= s || size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT, M - 1, s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* no user-provided ownership: choose a partition, then publish it in dd->lx */
    PetscCall(PetscMalloc1(m, &dd->lx));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_blockcomm", &flg1, NULL));
    PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_nodes_at_end", &flg2, NULL));
    if (flg1) { /* Block Comm type Distribution */
      xs = rank * M / m;
      x  = (rank + 1) * M / m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
      x = (M + rank) / m;
      if (M / m == x) xs = rank * x;
      else xs = rank * (x - 1) + (M + rank) % (x * m);
    } else { /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M / m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (M / m) + M % m);
      else xs = rank * (M / m) + rank;
    }
    /* gather every rank's xs, then convert the starts into per-rank counts */
    PetscCallMPI(MPI_Allgather(&xs, 1, MPIU_INT, dd->lx, 1, MPIU_INT, comm));
    for (i = 0; i < m - 1; i++) dd->lx[i] = dd->lx[i + 1] - dd->lx[i];
    dd->lx[m - 1] = M - dd->lx[m - 1];
  } else {
    /* user-provided ownership counts: derive xs by summing counts of lower ranks */
    x  = lx[rank];
    xs = 0;
    for (i = 0; i < rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i = rank; i < size; i++) left += lx[i];
    PetscCheck(left == M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT, left, M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, x, s);

  xe = xs + x; /* one past the last locally owned node */

  /* determine ghost region (Xs) and region scattered into (IXs)  */
  if (xs - sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    /* at the left physical boundary: ghosted boundaries keep the full ghost
       region (filled with -1 markers below); otherwise clip at 0 */
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe + sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    /* at the right physical boundary: same clipping logic as on the left */
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    /* periodic/mirror: every ghost point maps to a real point, nothing is clipped */
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof * x;
  PetscCall(VecCreateMPIWithArray(comm, dof, dd->Nlocal, PETSC_DECIDE, NULL, &global));
  dd->nlocal = dof * (Xe - Xs);
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dof, dd->nlocal, NULL, &local));

  PetscCall(VecGetOwnershipRange(global, &start, NULL));

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  PetscCall(ISCreateStride(comm, dof * (IXe - IXs), dof * (IXs - Xs), 1, &to));

  /* idx holds the global point index of every entry of the ghosted local region */
  PetscCall(PetscMalloc1(x + 2 * sDist, &idx));

  for (i = 0; i < IXs - Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case*/

  nn = IXs - Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i = 0; i < sDist; i++) {   /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = M + (xs - sDist + i); /* wrap around to the right end */
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < sDist; i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = (xe + i) - M; /* wrap around to the left end */
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle all cases with periodic first */
    for (i = 0; i < (sDist); i++) {      /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = sDist - i; /* reflect about the left boundary */
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < (sDist); i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = M - (i + 2); /* reflect about the right boundary */
    }
  } else { /* Now do all cases with no periodicity */
    if (0 <= xs - sDist) {
      for (i = 0; i < sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i = 0; i < xs; i++) idx[nn++] = i; /* truncated ghost region at the left boundary */
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i;

    if ((xe + sDist) <= M) {
      for (i = 0; i < sDist; i++) idx[nn++] = xe + i;
    } else {
      for (i = xe; i < M; i++) idx[nn++] = i; /* truncated ghost region at the right boundary */
    }
  }

  /* scatter pulls the in-domain part of idx (skipping the leading -1 markers) */
  PetscCall(ISCreateBlock(comm, dof, nn - IXs + Xs, &idx[IXs - Xs], PETSC_USE_POINTER, &from));
  PetscCall(VecScatterCreate(global, from, local, to, &gtol));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));

  /* record the owned and ghosted ranges; all x-direction values are in dof units */
  dd->xs = dof * xs;
  dd->xe = dof * xe;
  dd->ys = 0;
  dd->ye = 1;
  dd->zs = 0;
  dd->ze = 1;
  dd->Xs = dof * Xs;
  dd->Xe = dof * Xe;
  dd->Ys = 0;
  dd->Ye = 1;
  dd->Zs = 0;
  dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof * xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  for (i = 0; i < Xe - IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case*/

  /* ownership of idx transfers to the mapping (PETSC_OWN_POINTER) */
  PetscCall(ISLocalToGlobalMappingCreate(comm, dof, nn, idx, PETSC_OWN_POINTER, &da->ltogmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

313: /*@
314:   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
315:   regular array data that is distributed across one or more MPI processes.

317:   Collective

319:   Input Parameters:
320: + comm - MPI communicator
321: . bx   - type of ghost cells at the boundary the array should have, if any. Use
322:          `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, or `DM_BOUNDARY_PERIODIC`.
323: . M    - global dimension of the array (that is the number of grid points)
324: . dof  - number of degrees of freedom per node
325: . s    - stencil width
326: - lx   - array containing number of nodes in the X direction on each processor,
327:          or `NULL`. If non-null, must be of the same length as the number of processes in the MPI_Comm.
328:          The sum of these entries must equal `M`

330:   Output Parameter:
331: . da - the resulting distributed array object

333:   Options Database Keys:
334: + -dm_view        - Calls `DMView()` at the conclusion of `DMDACreate1d()`
335: . -da_grid_x nx   - number of grid points in the x direction
336: . -da_refine_x rx - refinement factor
337: - -da_refine n    - refine the `DMDA` `n` times before creating it

339:   Level: beginner

341:   Notes:
342:   The array data itself is NOT stored in the `DMDA`, it is stored in `Vec` objects;
343:   The appropriate vector objects can be obtained with calls to `DMCreateGlobalVector()`
344:   and `DMCreateLocalVector()` and calls to `VecDuplicate()` if more are needed.

346:   You must call `DMSetUp()` after this call before using this `DM`.

348:   If you wish to use the options database to change values in the `DMDA` call `DMSetFromOptions()` after this call
349:   but before `DMSetUp()`.

351: .seealso: [](sec_struct), `DMDA`, `DM`, `DMDestroy()`, `DMView()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDASetRefinementFactor()`,
352:           `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDAGetRefinementFactor()`,
353:           `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
354:           `DMStagCreate1d()`, `DMBoundaryType`
355: @*/
356: PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
357: {
358:   PetscMPIInt size;

360:   PetscFunctionBegin;
361:   PetscCall(DMDACreate(comm, da));
362:   PetscCall(DMSetDimension(*da, 1));
363:   PetscCall(DMDASetSizes(*da, M, 1, 1));
364:   PetscCallMPI(MPI_Comm_size(comm, &size));
365:   PetscCall(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
366:   PetscCall(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
367:   PetscCall(DMDASetDof(*da, dof));
368:   PetscCall(DMDASetStencilWidth(*da, s));
369:   PetscCall(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
370:   PetscFunctionReturn(PETSC_SUCCESS);
371: }