Actual source code: stag.c

  1: /*
  2:    Implementation of DMStag, defining dimension-independent functions in the
  3:    DM API. stag1d.c, stag2d.c, and stag3d.c may include dimension-specific
  4:    implementations of DM API functions, and other files here contain additional
  5:    DMStag-specific API functions, as well as internal functions.
  6: */
  7: #include <petsc/private/dmstagimpl.h>
  8: #include <petscsf.h>

 10: static PetscErrorCode DMCreateFieldDecomposition_Stag(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
 11: {
 12:   PetscInt       f0, f1, f2, f3, dof0, dof1, dof2, dof3, n_entries, k, d, cnt, n_fields, dim;
 13:   DMStagStencil *stencil0, *stencil1, *stencil2, *stencil3;

 15:   PetscFunctionBegin;
 16:   PetscCall(DMGetDimension(dm, &dim));
 17:   PetscCall(DMStagGetDOF(dm, &dof0, &dof1, &dof2, &dof3));
 18:   PetscCall(DMStagGetEntriesPerElement(dm, &n_entries));

 20:   f0 = 1;
 21:   f1 = f2 = f3 = 0;
 22:   if (dim == 1) {
 23:     f1 = 1;
 24:   } else if (dim == 2) {
 25:     f1 = 2;
 26:     f2 = 1;
 27:   } else if (dim == 3) {
 28:     f1 = 3;
 29:     f2 = 3;
 30:     f3 = 1;
 31:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported dimension %" PetscInt_FMT, dim);

 33:   PetscCall(PetscCalloc1(f0 * dof0, &stencil0));
 34:   PetscCall(PetscCalloc1(f1 * dof1, &stencil1));
 35:   if (dim >= 2) PetscCall(PetscCalloc1(f2 * dof2, &stencil2));
 36:   if (dim >= 3) PetscCall(PetscCalloc1(f3 * dof3, &stencil3));
 37:   for (k = 0; k < f0; ++k) {
 38:     for (d = 0; d < dof0; ++d) {
 39:       stencil0[dof0 * k + d].i = 0;
 40:       stencil0[dof0 * k + d].j = 0;
 41:       stencil0[dof0 * k + d].k = 0;
 42:     }
 43:   }
 44:   for (k = 0; k < f1; ++k) {
 45:     for (d = 0; d < dof1; ++d) {
 46:       stencil1[dof1 * k + d].i = 0;
 47:       stencil1[dof1 * k + d].j = 0;
 48:       stencil1[dof1 * k + d].k = 0;
 49:     }
 50:   }
 51:   if (dim >= 2) {
 52:     for (k = 0; k < f2; ++k) {
 53:       for (d = 0; d < dof2; ++d) {
 54:         stencil2[dof2 * k + d].i = 0;
 55:         stencil2[dof2 * k + d].j = 0;
 56:         stencil2[dof2 * k + d].k = 0;
 57:       }
 58:     }
 59:   }
 60:   if (dim >= 3) {
 61:     for (k = 0; k < f3; ++k) {
 62:       for (d = 0; d < dof3; ++d) {
 63:         stencil3[dof3 * k + d].i = 0;
 64:         stencil3[dof3 * k + d].j = 0;
 65:         stencil3[dof3 * k + d].k = 0;
 66:       }
 67:     }
 68:   }

 70:   n_fields = 0;
 71:   if (dof0 != 0) ++n_fields;
 72:   if (dof1 != 0) ++n_fields;
 73:   if (dim >= 2 && dof2 != 0) ++n_fields;
 74:   if (dim >= 3 && dof3 != 0) ++n_fields;
 75:   if (len) *len = n_fields;

 77:   if (islist) {
 78:     PetscCall(PetscMalloc1(n_fields, islist));

 80:     if (dim == 1) {
 81:       /* face, element */
 82:       for (d = 0; d < dof0; ++d) {
 83:         stencil0[d].loc = DMSTAG_LEFT;
 84:         stencil0[d].c   = d;
 85:       }
 86:       for (d = 0; d < dof1; ++d) {
 87:         stencil1[d].loc = DMSTAG_ELEMENT;
 88:         stencil1[d].c   = d;
 89:       }
 90:     } else if (dim == 2) {
 91:       /* vertex, edge(down,left), element */
 92:       for (d = 0; d < dof0; ++d) {
 93:         stencil0[d].loc = DMSTAG_DOWN_LEFT;
 94:         stencil0[d].c   = d;
 95:       }
 96:       /* edge */
 97:       cnt = 0;
 98:       for (d = 0; d < dof1; ++d) {
 99:         stencil1[cnt].loc = DMSTAG_DOWN;
100:         stencil1[cnt].c   = d;
101:         ++cnt;
102:       }
103:       for (d = 0; d < dof1; ++d) {
104:         stencil1[cnt].loc = DMSTAG_LEFT;
105:         stencil1[cnt].c   = d;
106:         ++cnt;
107:       }
108:       /* element */
109:       for (d = 0; d < dof2; ++d) {
110:         stencil2[d].loc = DMSTAG_ELEMENT;
111:         stencil2[d].c   = d;
112:       }
113:     } else if (dim == 3) {
114:       /* vertex, edge(down,left), face(down,left,back), element */
115:       for (d = 0; d < dof0; ++d) {
116:         stencil0[d].loc = DMSTAG_BACK_DOWN_LEFT;
117:         stencil0[d].c   = d;
118:       }
119:       /* edges */
120:       cnt = 0;
121:       for (d = 0; d < dof1; ++d) {
122:         stencil1[cnt].loc = DMSTAG_BACK_DOWN;
123:         stencil1[cnt].c   = d;
124:         ++cnt;
125:       }
126:       for (d = 0; d < dof1; ++d) {
127:         stencil1[cnt].loc = DMSTAG_BACK_LEFT;
128:         stencil1[cnt].c   = d;
129:         ++cnt;
130:       }
131:       for (d = 0; d < dof1; ++d) {
132:         stencil1[cnt].loc = DMSTAG_DOWN_LEFT;
133:         stencil1[cnt].c   = d;
134:         ++cnt;
135:       }
136:       /* faces */
137:       cnt = 0;
138:       for (d = 0; d < dof2; ++d) {
139:         stencil2[cnt].loc = DMSTAG_BACK;
140:         stencil2[cnt].c   = d;
141:         ++cnt;
142:       }
143:       for (d = 0; d < dof2; ++d) {
144:         stencil2[cnt].loc = DMSTAG_DOWN;
145:         stencil2[cnt].c   = d;
146:         ++cnt;
147:       }
148:       for (d = 0; d < dof2; ++d) {
149:         stencil2[cnt].loc = DMSTAG_LEFT;
150:         stencil2[cnt].c   = d;
151:         ++cnt;
152:       }
153:       /* elements */
154:       for (d = 0; d < dof3; ++d) {
155:         stencil3[d].loc = DMSTAG_ELEMENT;
156:         stencil3[d].c   = d;
157:       }
158:     } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported dimension %" PetscInt_FMT, dim);

160:     cnt = 0;
161:     if (dof0 != 0) {
162:       PetscCall(DMStagCreateISFromStencils(dm, f0 * dof0, stencil0, &(*islist)[cnt]));
163:       ++cnt;
164:     }
165:     if (dof1 != 0) {
166:       PetscCall(DMStagCreateISFromStencils(dm, f1 * dof1, stencil1, &(*islist)[cnt]));
167:       ++cnt;
168:     }
169:     if (dim >= 2 && dof2 != 0) {
170:       PetscCall(DMStagCreateISFromStencils(dm, f2 * dof2, stencil2, &(*islist)[cnt]));
171:       ++cnt;
172:     }
173:     if (dim >= 3 && dof3 != 0) {
174:       PetscCall(DMStagCreateISFromStencils(dm, f3 * dof3, stencil3, &(*islist)[cnt]));
175:       ++cnt;
176:     }
177:   }

179:   if (namelist) {
180:     PetscCall(PetscMalloc1(n_fields, namelist));
181:     cnt = 0;
182:     if (dim == 1) {
183:       if (dof0 != 0) {
184:         PetscCall(PetscStrallocpy("vertex", &(*namelist)[cnt]));
185:         ++cnt;
186:       }
187:       if (dof1 != 0) {
188:         PetscCall(PetscStrallocpy("element", &(*namelist)[cnt]));
189:         ++cnt;
190:       }
191:     } else if (dim == 2) {
192:       if (dof0 != 0) {
193:         PetscCall(PetscStrallocpy("vertex", &(*namelist)[cnt]));
194:         ++cnt;
195:       }
196:       if (dof1 != 0) {
197:         PetscCall(PetscStrallocpy("face", &(*namelist)[cnt]));
198:         ++cnt;
199:       }
200:       if (dof2 != 0) {
201:         PetscCall(PetscStrallocpy("element", &(*namelist)[cnt]));
202:         ++cnt;
203:       }
204:     } else if (dim == 3) {
205:       if (dof0 != 0) {
206:         PetscCall(PetscStrallocpy("vertex", &(*namelist)[cnt]));
207:         ++cnt;
208:       }
209:       if (dof1 != 0) {
210:         PetscCall(PetscStrallocpy("edge", &(*namelist)[cnt]));
211:         ++cnt;
212:       }
213:       if (dof2 != 0) {
214:         PetscCall(PetscStrallocpy("face", &(*namelist)[cnt]));
215:         ++cnt;
216:       }
217:       if (dof3 != 0) {
218:         PetscCall(PetscStrallocpy("element", &(*namelist)[cnt]));
219:         ++cnt;
220:       }
221:     } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported dimension %" PetscInt_FMT, dim);
222:   }
223:   if (dmlist) {
224:     PetscCall(PetscMalloc1(n_fields, dmlist));
225:     cnt = 0;
226:     if (dof0 != 0) {
227:       PetscCall(DMStagCreateCompatibleDMStag(dm, dof0, 0, 0, 0, &(*dmlist)[cnt]));
228:       ++cnt;
229:     }
230:     if (dof1 != 0) {
231:       PetscCall(DMStagCreateCompatibleDMStag(dm, 0, dof1, 0, 0, &(*dmlist)[cnt]));
232:       ++cnt;
233:     }
234:     if (dim >= 2 && dof2 != 0) {
235:       PetscCall(DMStagCreateCompatibleDMStag(dm, 0, 0, dof2, 0, &(*dmlist)[cnt]));
236:       ++cnt;
237:     }
238:     if (dim >= 3 && dof3 != 0) {
239:       PetscCall(DMStagCreateCompatibleDMStag(dm, 0, 0, 0, dof3, &(*dmlist)[cnt]));
240:       ++cnt;
241:     }
242:   }
243:   PetscCall(PetscFree(stencil0));
244:   PetscCall(PetscFree(stencil1));
245:   if (dim >= 2) PetscCall(PetscFree(stencil2));
246:   if (dim >= 3) PetscCall(PetscFree(stencil3));
247:   PetscFunctionReturn(PETSC_SUCCESS);
248: }
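
/* Illustrative sketch (not part of the implementation above): the decomposition is normally
   obtained through the generic interface, and the caller is responsible for cleanup. The
   variable names below are arbitrary.

     PetscInt  n_fields;
     char    **names;
     IS       *fields;
     DM       *sub_dms;

     PetscCall(DMCreateFieldDecomposition(dm, &n_fields, &names, &fields, &sub_dms));
     for (PetscInt f = 0; f < n_fields; ++f) {
       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Field %" PetscInt_FMT ": %s\n", f, names[f]));
       PetscCall(PetscFree(names[f]));
       PetscCall(ISDestroy(&fields[f]));
       PetscCall(DMDestroy(&sub_dms[f]));
     }
     PetscCall(PetscFree(names));
     PetscCall(PetscFree(fields));
     PetscCall(PetscFree(sub_dms));
*/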

250: static PetscErrorCode DMClone_Stag(DM dm, DM *newdm)
251: {
252:   PetscFunctionBegin;
253:   /* Destroy the DM created by generic logic in DMClone() */
254:   if (*newdm) PetscCall(DMDestroy(newdm));
255:   PetscCall(DMStagDuplicateWithoutSetup(dm, PetscObjectComm((PetscObject)dm), newdm));
256:   PetscCall(DMSetUp(*newdm));
257:   PetscFunctionReturn(PETSC_SUCCESS);
258: }

260: static PetscErrorCode DMCoarsen_Stag(DM dm, MPI_Comm comm, DM *dmc)
261: {
262:   const DM_Stag *const stag = (DM_Stag *)dm->data;
263:   PetscInt             dim, i, d;
264:   PetscInt            *l[DMSTAG_MAX_DIM];

266:   PetscFunctionBegin;
267:   PetscCall(DMStagDuplicateWithoutSetup(dm, comm, dmc));
268:   PetscCall(DMSetOptionsPrefix(*dmc, ((PetscObject)dm)->prefix));
269:   PetscCall(DMGetDimension(dm, &dim));
270:   for (d = 0; d < dim; ++d) PetscCheck(stag->N[d] % stag->refineFactor[d] == 0, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "coarsening not supported except when the number of elements in each dimension is a multiple of the refinement factor");
271:   PetscCall(DMStagSetGlobalSizes(*dmc, stag->N[0] / stag->refineFactor[0], stag->N[1] / stag->refineFactor[1], stag->N[2] / stag->refineFactor[2]));
272:   for (d = 0; d < dim; ++d) {
273:     PetscCall(PetscMalloc1(stag->nRanks[d], &l[d]));
274:     for (i = 0; i < stag->nRanks[d]; ++i) {
275:       PetscCheck(stag->l[d][i] % stag->refineFactor[d] == 0, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "coarsening not supported except when the number of elements in each direction on each rank is a multiple of the refinement factor");
276:       l[d][i] = stag->l[d][i] / stag->refineFactor[d]; /* Just divide everything */
277:     }
278:   }
279:   PetscCall(DMStagSetOwnershipRanges(*dmc, l[0], l[1], l[2]));
280:   for (d = 0; d < dim; ++d) PetscCall(PetscFree(l[d]));
281:   PetscCall(DMSetUp(*dmc));

283:   if (dm->coordinates[0].dm) { /* Note that with product coordinates the global coordinate vector dm->coordinates[0].x is NULL, so we check for a coordinate DM instead */
284:     DM        coordinate_dm, coordinate_dmc;
285:     PetscBool isstag, isprod;

287:     PetscCall(DMGetCoordinateDM(dm, &coordinate_dm));
288:     PetscCall(PetscObjectTypeCompare((PetscObject)coordinate_dm, DMSTAG, &isstag));
289:     PetscCall(PetscObjectTypeCompare((PetscObject)coordinate_dm, DMPRODUCT, &isprod));
290:     if (isstag) {
291:       PetscCall(DMStagSetUniformCoordinatesExplicit(*dmc, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)); /* Coordinates will be overwritten */
292:       PetscCall(DMGetCoordinateDM(*dmc, &coordinate_dmc));
293:       PetscCall(DMStagRestrictSimple(coordinate_dm, dm->coordinates[0].x, coordinate_dmc, (*dmc)->coordinates[0].x));
294:     } else if (isprod) {
295:       PetscCall(DMStagSetUniformCoordinatesProduct(*dmc, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)); /* Coordinates will be overwritten */
296:       PetscCall(DMGetCoordinateDM(*dmc, &coordinate_dmc));
297:       for (d = 0; d < dim; ++d) {
298:         DM subdm_coarse, subdm_coord_coarse, subdm_fine, subdm_coord_fine;

300:         PetscCall(DMProductGetDM(coordinate_dm, d, &subdm_fine));
301:         PetscCall(DMGetCoordinateDM(subdm_fine, &subdm_coord_fine));
302:         PetscCall(DMProductGetDM(coordinate_dmc, d, &subdm_coarse));
303:         PetscCall(DMGetCoordinateDM(subdm_coarse, &subdm_coord_coarse));
304:         PetscCall(DMStagRestrictSimple(subdm_coord_fine, subdm_fine->coordinates[0].xl, subdm_coord_coarse, subdm_coarse->coordinates[0].xl));
305:       }
306:     } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unknown coordinate DM type");
307:   }
308:   PetscFunctionReturn(PETSC_SUCCESS);
309: }

311: static PetscErrorCode DMRefine_Stag(DM dm, MPI_Comm comm, DM *dmf)
312: {
313:   const DM_Stag *const stag = (DM_Stag *)dm->data;
314:   PetscInt             dim, i, d;
315:   PetscInt            *l[DMSTAG_MAX_DIM];

317:   PetscFunctionBegin;
318:   PetscCall(DMStagDuplicateWithoutSetup(dm, comm, dmf));
319:   PetscCall(DMSetOptionsPrefix(*dmf, ((PetscObject)dm)->prefix));
320:   PetscCall(DMStagSetGlobalSizes(*dmf, stag->N[0] * stag->refineFactor[0], stag->N[1] * stag->refineFactor[1], stag->N[2] * stag->refineFactor[2]));
321:   PetscCall(DMGetDimension(dm, &dim));
322:   for (d = 0; d < dim; ++d) {
323:     PetscCall(PetscMalloc1(stag->nRanks[d], &l[d]));
324:     for (i = 0; i < stag->nRanks[d]; ++i) l[d][i] = stag->l[d][i] * stag->refineFactor[d]; /* Just multiply everything */
325:   }
326:   PetscCall(DMStagSetOwnershipRanges(*dmf, l[0], l[1], l[2]));
327:   for (d = 0; d < dim; ++d) PetscCall(PetscFree(l[d]));
328:   PetscCall(DMSetUp(*dmf));
329:   /* Note: For now, we do not refine coordinates */
330:   PetscFunctionReturn(PETSC_SUCCESS);
331: }
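
/* Illustrative sketch (not part of this file): coarsening and refinement are normally reached
   through the generic DM interface, e.g.

     DM dm_coarse, dm_fine;

     PetscCall(DMStagSetRefinementFactor(dm, 2, 2, 2));
     PetscCall(DMCoarsen(dm, MPI_COMM_NULL, &dm_coarse));
     PetscCall(DMRefine(dm, MPI_COMM_NULL, &dm_fine));
     PetscCall(DMDestroy(&dm_coarse));
     PetscCall(DMDestroy(&dm_fine));

   Note that DMCoarsen_Stag() above requires the global and per-rank element counts to be
   divisible by the refinement factor. */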

333: static PetscErrorCode DMDestroy_Stag(DM dm)
334: {
335:   DM_Stag *stag;
336:   PetscInt i;

338:   PetscFunctionBegin;
339:   stag = (DM_Stag *)dm->data;
340:   for (i = 0; i < DMSTAG_MAX_DIM; ++i) PetscCall(PetscFree(stag->l[i]));
341:   PetscCall(VecScatterDestroy(&stag->gtol));
342:   PetscCall(VecScatterDestroy(&stag->ltog_injective));
343:   PetscCall(VecScatterDestroy(&stag->ltol));
344:   PetscCall(PetscFree(stag->neighbors));
345:   PetscCall(PetscFree(stag->locationOffsets));
346:   PetscCall(PetscFree(stag->coordinateDMType));
347:   PetscCall(PetscFree(stag));
348:   PetscFunctionReturn(PETSC_SUCCESS);
349: }

351: static PetscErrorCode DMCreateGlobalVector_Stag(DM dm, Vec *vec)
352: {
353:   DM_Stag *const stag = (DM_Stag *)dm->data;

355:   PetscFunctionBegin;
356:   PetscCheck(dm->setupcalled, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "This function must be called after DMSetUp()");
357:   PetscCall(VecCreate(PetscObjectComm((PetscObject)dm), vec));
358:   PetscCall(VecSetSizes(*vec, stag->entries, PETSC_DETERMINE));
359:   PetscCall(VecSetType(*vec, dm->vectype));
360:   PetscCall(VecSetDM(*vec, dm));
361:   /* Could set some ops, as DMDA does */
362:   PetscCall(VecSetLocalToGlobalMapping(*vec, dm->ltogmap));
363:   PetscFunctionReturn(PETSC_SUCCESS);
364: }

366: static PetscErrorCode DMCreateLocalVector_Stag(DM dm, Vec *vec)
367: {
368:   DM_Stag *const stag = (DM_Stag *)dm->data;

370:   PetscFunctionBegin;
371:   PetscCheck(dm->setupcalled, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "This function must be called after DMSetUp()");
372:   PetscCall(VecCreate(PETSC_COMM_SELF, vec));
373:   PetscCall(VecSetSizes(*vec, stag->entriesGhost, PETSC_DETERMINE));
374:   PetscCall(VecSetType(*vec, dm->vectype));
375:   PetscCall(VecSetBlockSize(*vec, stag->entriesPerElement));
376:   PetscCall(VecSetDM(*vec, dm));
377:   PetscFunctionReturn(PETSC_SUCCESS);
378: }

380: /* Helper function to check for the limited situations for which interpolation
381:    and restriction functions are implemented */
382: static PetscErrorCode CheckTransferOperatorRequirements_Private(DM dmc, DM dmf)
383: {
384:   PetscInt dim, stencilWidthc, stencilWidthf, nf[DMSTAG_MAX_DIM], nc[DMSTAG_MAX_DIM], doff[DMSTAG_MAX_STRATA], dofc[DMSTAG_MAX_STRATA];

386:   PetscFunctionBegin;
387:   PetscCall(DMGetDimension(dmc, &dim));
388:   PetscCall(DMStagGetStencilWidth(dmc, &stencilWidthc));
389:   PetscCheck(stencilWidthc >= 1, PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "DMCreateRestriction not implemented for coarse grid stencil width < 1");
390:   PetscCall(DMStagGetStencilWidth(dmf, &stencilWidthf));
391:   PetscCheck(stencilWidthf >= 1, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "DMCreateRestriction not implemented for fine grid stencil width < 1");
392:   PetscCall(DMStagGetLocalSizes(dmf, &nf[0], &nf[1], &nf[2]));
393:   PetscCall(DMStagGetLocalSizes(dmc, &nc[0], &nc[1], &nc[2]));
394:   for (PetscInt d = 0; d < dim; ++d) PetscCheck(nf[d] % nc[d] == 0, PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "DMCreateRestriction not implemented for non-integer refinement factor");
395:   PetscCall(DMStagGetDOF(dmc, &dofc[0], &dofc[1], &dofc[2], &dofc[3]));
396:   PetscCall(DMStagGetDOF(dmf, &doff[0], &doff[1], &doff[2], &doff[3]));
397:   for (PetscInt d = 0; d < dim + 1; ++d)
398:     PetscCheck(dofc[d] == doff[d], PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "No support for different numbers of dof per stratum between coarse and fine DMStag objects: dof%" PetscInt_FMT " is %" PetscInt_FMT " (fine) but %" PetscInt_FMT " (coarse)", d, doff[d], dofc[d]);
399:   PetscFunctionReturn(PETSC_SUCCESS);
400: }

402: /* Since the interpolation uses MATMAIJ for dof > 0, we convert requests for non-MATAIJ-based matrices to MATAIJ.
403:    This is a bit of a hack; the reason for it is partially that -dm_mat_type defines the
404:    matrix type for both the operator matrices and the interpolation matrices, so that users
405:    can select matrix types with base type MATAIJ (e.g. for accelerators).

407:    Note: The ConvertToAIJ() code below *has been copied from dainterp.c*! ConvertToAIJ() should perhaps be placed somewhere
408:    in mat/utils to avoid code duplication, but then the DMStag and DMDA code would need to include the private Mat headers.
409:    Since it is only used in two places, I have simply duplicated the code to avoid the need to expose the private
410:    Mat routines in parts of DM. If we find a need for ConvertToAIJ() elsewhere, then we should consolidate it to one
411:    place in mat/utils.
412: */
413: static PetscErrorCode ConvertToAIJ(MatType intype, MatType *outtype)
414: {
415:   PetscInt    i;
416:   char const *types[3] = {MATAIJ, MATSEQAIJ, MATMPIAIJ};
417:   PetscBool   flg;

419:   PetscFunctionBegin;
420:   *outtype = MATAIJ;
421:   for (i = 0; i < 3; i++) {
422:     PetscCall(PetscStrbeginswith(intype, types[i], &flg));
423:     if (flg) {
424:       *outtype = intype;
425:       break;
426:     }
427:   }
428:   PetscFunctionReturn(PETSC_SUCCESS);
429: }

431: static PetscErrorCode DMCreateInterpolation_Stag(DM dmc, DM dmf, Mat *A, Vec *vec)
432: {
433:   PetscInt               dim, entriesf, entriesc;
434:   ISLocalToGlobalMapping ltogmf, ltogmc;
435:   MatType                mattype;

437:   PetscFunctionBegin;
438:   PetscCall(CheckTransferOperatorRequirements_Private(dmc, dmf));

440:   PetscCall(DMStagGetEntries(dmf, &entriesf));
441:   PetscCall(DMStagGetEntries(dmc, &entriesc));
442:   PetscCall(DMGetLocalToGlobalMapping(dmf, &ltogmf));
443:   PetscCall(DMGetLocalToGlobalMapping(dmc, &ltogmc));

445:   PetscCall(MatCreate(PetscObjectComm((PetscObject)dmc), A));
446:   PetscCall(MatSetSizes(*A, entriesf, entriesc, PETSC_DECIDE, PETSC_DECIDE));
447:   PetscCall(ConvertToAIJ(dmc->mattype, &mattype));
448:   PetscCall(MatSetType(*A, mattype));
449:   PetscCall(MatSetLocalToGlobalMapping(*A, ltogmf, ltogmc));

451:   PetscCall(DMGetDimension(dmc, &dim));
452:   if (dim == 1) PetscCall(DMStagPopulateInterpolation1d_Internal(dmc, dmf, *A));
453:   else if (dim == 2) PetscCall(DMStagPopulateInterpolation2d_Internal(dmc, dmf, *A));
454:   else if (dim == 3) PetscCall(DMStagPopulateInterpolation3d_Internal(dmc, dmf, *A));
455:   else SETERRQ(PetscObjectComm((PetscObject)dmc), PETSC_ERR_ARG_OUTOFRANGE, "Unsupported dimension %" PetscInt_FMT, dim);
456:   PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
457:   PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));

459:   if (vec) *vec = NULL;
460:   PetscFunctionReturn(PETSC_SUCCESS);
461: }

463: static PetscErrorCode DMCreateRestriction_Stag(DM dmc, DM dmf, Mat *A)
464: {
465:   PetscInt               dim, entriesf, entriesc, doff[DMSTAG_MAX_STRATA];
466:   ISLocalToGlobalMapping ltogmf, ltogmc;
467:   MatType                mattype;

469:   PetscFunctionBegin;
470:   PetscCall(CheckTransferOperatorRequirements_Private(dmc, dmf));

472:   PetscCall(DMStagGetEntries(dmf, &entriesf));
473:   PetscCall(DMStagGetEntries(dmc, &entriesc));
474:   PetscCall(DMGetLocalToGlobalMapping(dmf, &ltogmf));
475:   PetscCall(DMGetLocalToGlobalMapping(dmc, &ltogmc));

477:   PetscCall(MatCreate(PetscObjectComm((PetscObject)dmc), A));
478:   PetscCall(MatSetSizes(*A, entriesc, entriesf, PETSC_DECIDE, PETSC_DECIDE)); /* Note transpose wrt interpolation */
479:   PetscCall(ConvertToAIJ(dmc->mattype, &mattype));
480:   PetscCall(MatSetType(*A, mattype));
481:   PetscCall(MatSetLocalToGlobalMapping(*A, ltogmc, ltogmf)); /* Note transpose wrt interpolation */

483:   PetscCall(DMGetDimension(dmc, &dim));
484:   PetscCall(DMStagGetDOF(dmf, &doff[0], &doff[1], &doff[2], &doff[3]));
485:   if (dim == 1) PetscCall(DMStagPopulateRestriction1d_Internal(dmc, dmf, *A));
486:   else if (dim == 2) PetscCall(DMStagPopulateRestriction2d_Internal(dmc, dmf, *A));
487:   else if (dim == 3) PetscCall(DMStagPopulateRestriction3d_Internal(dmc, dmf, *A));
488:   else SETERRQ(PetscObjectComm((PetscObject)dmc), PETSC_ERR_ARG_OUTOFRANGE, "Unsupported dimension %" PetscInt_FMT, dim);

490:   PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
491:   PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
492:   PetscFunctionReturn(PETSC_SUCCESS);
493: }
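
/* Illustrative sketch (not part of this file): transfer operators between two compatible DMStag
   objects (e.g. dmc obtained by coarsening dmf) are requested through the generic interface:

     Mat P, R;

     PetscCall(DMCreateInterpolation(dmc, dmf, &P, NULL));
     PetscCall(DMCreateRestriction(dmc, dmf, &R));
     PetscCall(MatDestroy(&P));
     PetscCall(MatDestroy(&R));
*/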

495: static PetscErrorCode DMCreateMatrix_Stag(DM dm, Mat *mat)
496: {
497:   MatType                mat_type;
498:   PetscBool              is_shell, is_aij;
499:   PetscInt               dim, entries;
500:   ISLocalToGlobalMapping ltogmap;

502:   PetscFunctionBegin;
503:   PetscCheck(dm->setupcalled, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "This function must be called after DMSetUp()");
504:   PetscCall(DMGetDimension(dm, &dim));
505:   PetscCall(DMGetMatType(dm, &mat_type));
506:   PetscCall(DMStagGetEntries(dm, &entries));
507:   PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), mat));
508:   PetscCall(MatSetSizes(*mat, entries, entries, PETSC_DETERMINE, PETSC_DETERMINE));
509:   PetscCall(MatSetType(*mat, mat_type));
510:   PetscCall(MatSetUp(*mat));
511:   PetscCall(DMGetLocalToGlobalMapping(dm, &ltogmap));
512:   PetscCall(MatSetLocalToGlobalMapping(*mat, ltogmap, ltogmap));
513:   PetscCall(MatSetDM(*mat, dm));

515:   /* Compare to similar and perhaps superior logic in DMCreateMatrix_DA, which creates
516:      the matrix first and then performs this logic by checking for preallocation functions */
517:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)*mat, MATAIJ, &is_aij));
518:   if (!is_aij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)*mat, MATSEQAIJ, &is_aij));
519:   if (!is_aij) PetscCall(PetscObjectBaseTypeCompare((PetscObject)*mat, MATMPIAIJ, &is_aij));
520:   PetscCall(PetscStrcmp(mat_type, MATSHELL, &is_shell));
521:   if (is_aij) {
522:     Mat             preallocator;
523:     PetscInt        m, n;
524:     const PetscBool fill_with_zeros = PETSC_FALSE;

526:     PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), &preallocator));
527:     PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
528:     PetscCall(MatGetLocalSize(*mat, &m, &n));
529:     PetscCall(MatSetSizes(preallocator, m, n, PETSC_DECIDE, PETSC_DECIDE));
530:     PetscCall(MatSetLocalToGlobalMapping(preallocator, ltogmap, ltogmap));
531:     PetscCall(MatSetUp(preallocator));
532:     switch (dim) {
533:     case 1:
534:       PetscCall(DMCreateMatrix_Stag_1D_AIJ_Assemble(dm, preallocator));
535:       break;
536:     case 2:
537:       PetscCall(DMCreateMatrix_Stag_2D_AIJ_Assemble(dm, preallocator));
538:       break;
539:     case 3:
540:       PetscCall(DMCreateMatrix_Stag_3D_AIJ_Assemble(dm, preallocator));
541:       break;
542:     default:
543:       SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Unsupported dimension %" PetscInt_FMT, dim);
544:     }
545:     PetscCall(MatPreallocatorPreallocate(preallocator, fill_with_zeros, *mat));
546:     PetscCall(MatDestroy(&preallocator));

548:     if (!dm->prealloc_only) {
549:       /* Bind to CPU before assembly, to prevent unnecessary copies of zero entries from CPU to GPU */
550:       PetscCall(MatBindToCPU(*mat, PETSC_TRUE));
551:       switch (dim) {
552:       case 1:
553:         PetscCall(DMCreateMatrix_Stag_1D_AIJ_Assemble(dm, *mat));
554:         break;
555:       case 2:
556:         PetscCall(DMCreateMatrix_Stag_2D_AIJ_Assemble(dm, *mat));
557:         break;
558:       case 3:
559:         PetscCall(DMCreateMatrix_Stag_3D_AIJ_Assemble(dm, *mat));
560:         break;
561:       default:
562:         SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Unsupported dimension %" PetscInt_FMT, dim);
563:       }
564:       PetscCall(MatBindToCPU(*mat, PETSC_FALSE));
565:     }
566:   } else if (is_shell) {
567:     /* nothing more to do */
568:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Not implemented for MatType %s", mat_type);
569:   PetscFunctionReturn(PETSC_SUCCESS);
570: }
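
/* Illustrative sketch (not part of this file): a matrix created by the routine above is typically
   assembled with DMStagStencil-based insertion. The element indices and value below are arbitrary
   (2D case shown):

     Mat           A;
     DMStagStencil row, col;
     PetscScalar   v = 1.0;

     PetscCall(DMCreateMatrix(dm, &A));
     row.i = 1; row.j = 2; row.loc = DMSTAG_ELEMENT; row.c = 0;
     col = row;
     PetscCall(DMStagMatSetValuesStencil(dm, A, 1, &row, 1, &col, &v, INSERT_VALUES));
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatDestroy(&A));
*/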

572: static PetscErrorCode DMGetCompatibility_Stag(DM dm, DM dm2, PetscBool *compatible, PetscBool *set)
573: {
574:   const DM_Stag *const stag  = (DM_Stag *)dm->data;
575:   const DM_Stag *const stag2 = (DM_Stag *)dm2->data;
576:   PetscInt             dim, dim2, i;
577:   MPI_Comm             comm;
578:   PetscMPIInt          sameComm;
579:   DMType               type2;
580:   PetscBool            sameType;

582:   PetscFunctionBegin;
583:   PetscCall(DMGetType(dm2, &type2));
584:   PetscCall(PetscStrcmp(DMSTAG, type2, &sameType));
585:   if (!sameType) {
586:     PetscCall(PetscInfo(dm, "DMStag compatibility check not implemented with DM of type %s\n", type2));
587:     *set = PETSC_FALSE;
588:     PetscFunctionReturn(PETSC_SUCCESS);
589:   }

591:   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
592:   PetscCallMPI(MPI_Comm_compare(comm, PetscObjectComm((PetscObject)dm2), &sameComm));
593:   if (sameComm != MPI_IDENT) {
594:     PetscCall(PetscInfo(dm, "DMStag objects have different communicators: %" PETSC_INTPTR_T_FMT " != %" PETSC_INTPTR_T_FMT "\n", (PETSC_INTPTR_T)comm, (PETSC_INTPTR_T)PetscObjectComm((PetscObject)dm2)));
595:     *set = PETSC_FALSE;
596:     PetscFunctionReturn(PETSC_SUCCESS);
597:   }
598:   PetscCall(DMGetDimension(dm, &dim));
599:   PetscCall(DMGetDimension(dm2, &dim2));
600:   if (dim != dim2) {
601:     PetscCall(PetscInfo(dm, "DMStag objects have different dimensions\n"));
602:     *set        = PETSC_TRUE;
603:     *compatible = PETSC_FALSE;
604:     PetscFunctionReturn(PETSC_SUCCESS);
605:   }
606:   for (i = 0; i < dim; ++i) {
607:     if (stag->N[i] != stag2->N[i]) {
608:       PetscCall(PetscInfo(dm, "DMStag objects have different global numbers of elements in dimension %" PetscInt_FMT ": %" PetscInt_FMT " != %" PetscInt_FMT "\n", i, stag->N[i], stag2->N[i]));
609:       *set        = PETSC_TRUE;
610:       *compatible = PETSC_FALSE;
611:       PetscFunctionReturn(PETSC_SUCCESS);
612:     }
613:     if (stag->n[i] != stag2->n[i]) {
614:       PetscCall(PetscInfo(dm, "DMStag objects have different local numbers of elements in dimension %" PetscInt_FMT ": %" PetscInt_FMT " != %" PetscInt_FMT "\n", i, stag->n[i], stag2->n[i]));
615:       *set        = PETSC_TRUE;
616:       *compatible = PETSC_FALSE;
617:       PetscFunctionReturn(PETSC_SUCCESS);
618:     }
619:     if (stag->boundaryType[i] != stag2->boundaryType[i]) {
620:       PetscCall(PetscInfo(dm, "DMStag objects have different boundary types in dimension %" PetscInt_FMT ": %s != %s\n", i, DMBoundaryTypes[stag->boundaryType[i]], DMBoundaryTypes[stag2->boundaryType[i]]));
621:       *set        = PETSC_TRUE;
622:       *compatible = PETSC_FALSE;
623:       PetscFunctionReturn(PETSC_SUCCESS);
624:     }
625:   }
626:   /* Note: we include stencil type and width in the notion of compatibility, as this affects
627:      the "atlas" (local subdomains). This might be irritating in legitimate cases
628:      of wanting to transfer between two otherwise compatible DMs with different
629:      stencil characteristics. */
630:   if (stag->stencilType != stag2->stencilType) {
631:     PetscCall(PetscInfo(dm, "DMStag objects have different ghost stencil types: %s != %s\n", DMStagStencilTypes[stag->stencilType], DMStagStencilTypes[stag2->stencilType]));
632:     *set        = PETSC_TRUE;
633:     *compatible = PETSC_FALSE;
634:     PetscFunctionReturn(PETSC_SUCCESS);
635:   }
636:   if (stag->stencilWidth != stag2->stencilWidth) {
637:     PetscCall(PetscInfo(dm, "DMStag objects have different ghost stencil widths: %" PetscInt_FMT " != %" PetscInt_FMT "\n", stag->stencilWidth, stag2->stencilWidth));
638:     *set        = PETSC_TRUE;
639:     *compatible = PETSC_FALSE;
640:     PetscFunctionReturn(PETSC_SUCCESS);
641:   }
642:   *set        = PETSC_TRUE;
643:   *compatible = PETSC_TRUE;
644:   PetscFunctionReturn(PETSC_SUCCESS);
645: }

647: static PetscErrorCode DMHasCreateInjection_Stag(DM dm, PetscBool *flg)
648: {
649:   PetscFunctionBegin;
651:   PetscAssertPointer(flg, 2);
652:   *flg = PETSC_FALSE;
653:   PetscFunctionReturn(PETSC_SUCCESS);
654: }

656: /*
657: Note there are several orderings in play here.
658: In all cases, non-element dof are associated with the element that they are below/left/behind, and the order in 2D proceeds vertex/bottom edge/left edge/element (with all dof on each together).
659: Also in all cases, only subdomains which are the last in their dimension have partial elements.

661: 1) "Natural" Ordering (not used). Number each full or partial (on the right or top) element, starting at the bottom left (i=0,j=0) and proceeding across the entire domain, row by row, to get a global numbering.
662: 2) Global ("PETSc") ordering. The same as natural, but restricted to each domain. So, traverse all elements (again starting at the bottom left and going row-by-row) on rank 0, then continue numbering with rank 1, and so on.
663: 3) Local ordering. Including ghost elements (both interior and on the right/top/front to complete partial elements), use the same convention to create a local numbering.
664: */
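
/*
As a concrete illustration (a sketch, not used by the code below): consider a 1D DMStag with
3 elements, one dof per vertex and one per element, split over 2 ranks as 2 + 1 elements.
Writing v_i and e_i for the dof on vertex i and element i:

  Natural:        v0 e0 v1 e1 v2 e2 v3
  Global (PETSc): rank 0 owns v0 e0 v1 e1; rank 1 owns v2 e2 v3 (identical to natural here,
                  since in 1D each rank's elements are already contiguous in the natural ordering)
  Local (rank 0, stencil width 1): v0 e0 v1 e1 v2 e2, i.e. the ghost element from rank 1 is
                  traversed in the same left-to-right order.

The right-most vertex v3 belongs to the partial element owned by the last rank.
*/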

666: static PetscErrorCode DMLocalToGlobalBegin_Stag(DM dm, Vec l, InsertMode mode, Vec g)
667: {
668:   DM_Stag *const stag = (DM_Stag *)dm->data;

670:   PetscFunctionBegin;
671:   if (mode == ADD_VALUES) {
672:     PetscCall(VecScatterBegin(stag->gtol, l, g, mode, SCATTER_REVERSE));
673:   } else if (mode == INSERT_VALUES) {
674:     if (stag->ltog_injective) {
675:       PetscCall(VecScatterBegin(stag->ltog_injective, l, g, mode, SCATTER_FORWARD));
676:     } else {
677:       PetscCall(VecScatterBegin(stag->gtol, l, g, mode, SCATTER_REVERSE_LOCAL));
678:     }
679:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported InsertMode");
680:   PetscFunctionReturn(PETSC_SUCCESS);
681: }

683: static PetscErrorCode DMLocalToGlobalEnd_Stag(DM dm, Vec l, InsertMode mode, Vec g)
684: {
685:   DM_Stag *const stag = (DM_Stag *)dm->data;

687:   PetscFunctionBegin;
688:   if (mode == ADD_VALUES) {
689:     PetscCall(VecScatterEnd(stag->gtol, l, g, mode, SCATTER_REVERSE));
690:   } else if (mode == INSERT_VALUES) {
691:     if (stag->ltog_injective) {
692:       PetscCall(VecScatterEnd(stag->ltog_injective, l, g, mode, SCATTER_FORWARD));
693:     } else {
694:       PetscCall(VecScatterEnd(stag->gtol, l, g, mode, SCATTER_REVERSE_LOCAL));
695:     }
696:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported InsertMode");
697:   PetscFunctionReturn(PETSC_SUCCESS);
698: }

700: static PetscErrorCode DMGlobalToLocalBegin_Stag(DM dm, Vec g, InsertMode mode, Vec l)
701: {
702:   DM_Stag *const stag = (DM_Stag *)dm->data;

704:   PetscFunctionBegin;
705:   PetscCall(VecScatterBegin(stag->gtol, g, l, mode, SCATTER_FORWARD));
706:   PetscFunctionReturn(PETSC_SUCCESS);
707: }

709: static PetscErrorCode DMGlobalToLocalEnd_Stag(DM dm, Vec g, InsertMode mode, Vec l)
710: {
711:   DM_Stag *const stag = (DM_Stag *)dm->data;

713:   PetscFunctionBegin;
714:   PetscCall(VecScatterEnd(stag->gtol, g, l, mode, SCATTER_FORWARD));
715:   PetscFunctionReturn(PETSC_SUCCESS);
716: }
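
/* Illustrative sketch (not part of this file): these scatters are driven through the generic DM
   interface, e.g. to update ghost values before accessing a local vector:

     Vec g, l;

     PetscCall(DMCreateGlobalVector(dm, &g));
     PetscCall(DMCreateLocalVector(dm, &l));
     PetscCall(DMGlobalToLocalBegin(dm, g, INSERT_VALUES, l));
     PetscCall(DMGlobalToLocalEnd(dm, g, INSERT_VALUES, l));
     PetscCall(VecDestroy(&l));
     PetscCall(VecDestroy(&g));
*/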

718: static PetscErrorCode DMLocalToLocalBegin_Stag(DM dm, Vec g, InsertMode mode, Vec l)
719: {
720:   DM_Stag *const stag = (DM_Stag *)dm->data;

722:   PetscFunctionBegin;
723:   if (!stag->ltol) {
724:     PetscInt dim;
725:     PetscCall(DMGetDimension(dm, &dim));
726:     switch (dim) {
727:     case 1:
728:       PetscCall(DMStagPopulateLocalToLocal1d_Internal(dm));
729:       break;
730:     case 2:
731:       PetscCall(DMStagPopulateLocalToLocal2d_Internal(dm));
732:       break;
733:     case 3:
734:       PetscCall(DMStagPopulateLocalToLocal3d_Internal(dm));
735:       break;
736:     default:
737:       SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported dimension %" PetscInt_FMT, dim);
738:     }
739:   }
740:   PetscCall(VecScatterBegin(stag->ltol, g, l, mode, SCATTER_FORWARD));
741:   PetscFunctionReturn(PETSC_SUCCESS);
742: }

744: static PetscErrorCode DMLocalToLocalEnd_Stag(DM dm, Vec g, InsertMode mode, Vec l)
745: {
746:   DM_Stag *const stag = (DM_Stag *)dm->data;

748:   PetscFunctionBegin;
749:   PetscCall(VecScatterEnd(stag->ltol, g, l, mode, SCATTER_FORWARD));
750:   PetscFunctionReturn(PETSC_SUCCESS);
751: }

753: /*
754: If a stratum is active (non-zero dof), make it active in the coordinate DM.
755: */
756: static PetscErrorCode DMCreateCoordinateDM_Stag(DM dm, DM *dmc)
757: {
758:   DM_Stag *const stag = (DM_Stag *)dm->data;
759:   PetscInt       dim;
760:   PetscBool      isstag, isproduct;
761:   const char    *prefix;

763:   PetscFunctionBegin;
764:   PetscCheck(stag->coordinateDMType, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Before creating a coordinate DM, a type must be specified with DMStagSetCoordinateDMType()");

766:   PetscCall(DMGetDimension(dm, &dim));
767:   PetscCall(PetscStrcmp(stag->coordinateDMType, DMSTAG, &isstag));
768:   PetscCall(PetscStrcmp(stag->coordinateDMType, DMPRODUCT, &isproduct));
769:   if (isstag) {
770:     PetscCall(DMStagCreateCompatibleDMStag(dm, stag->dof[0] > 0 ? dim : 0, stag->dof[1] > 0 ? dim : 0, stag->dof[2] > 0 ? dim : 0, stag->dof[3] > 0 ? dim : 0, dmc));
771:   } else if (isproduct) {
772:     PetscCall(DMCreate(PETSC_COMM_WORLD, dmc));
773:     PetscCall(DMSetType(*dmc, DMPRODUCT));
774:     PetscCall(DMSetDimension(*dmc, dim));
775:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported coordinate DM type %s", stag->coordinateDMType);
776:   PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
777:   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dmc, prefix));
778:   PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)*dmc, "cdm_"));
779:   PetscFunctionReturn(PETSC_SUCCESS);
780: }
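
/* Illustrative sketch (not part of this file): the coordinate DM is usually created implicitly
   when coordinates are set, e.g.

     PetscCall(DMStagSetCoordinateDMType(dm, DMPRODUCT));
     PetscCall(DMStagSetUniformCoordinatesProduct(dm, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));

   or, for explicit (DMSTAG-type) coordinates,

     PetscCall(DMStagSetCoordinateDMType(dm, DMSTAG));
     PetscCall(DMStagSetUniformCoordinatesExplicit(dm, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
*/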

782: static PetscErrorCode DMGetNeighbors_Stag(DM dm, PetscInt *nRanks, const PetscMPIInt *ranks[])
783: {
784:   DM_Stag *const stag = (DM_Stag *)dm->data;
785:   PetscInt       dim;

787:   PetscFunctionBegin;
788:   PetscCall(DMGetDimension(dm, &dim));
789:   switch (dim) {
790:   case 1:
791:     *nRanks = 3;
792:     break;
793:   case 2:
794:     *nRanks = 9;
795:     break;
796:   case 3:
797:     *nRanks = 27;
798:     break;
799:   default:
800:     SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Get neighbors not implemented for dim = %" PetscInt_FMT, dim);
801:   }
802:   *ranks = stag->neighbors;
803:   PetscFunctionReturn(PETSC_SUCCESS);
804: }

806: static PetscErrorCode DMView_Stag(DM dm, PetscViewer viewer)
807: {
808:   DM_Stag *const stag = (DM_Stag *)dm->data;
809:   PetscBool      isascii, viewAllRanks;
810:   PetscMPIInt    rank, size;
811:   PetscInt       dim, maxRanksToView, i;

813:   PetscFunctionBegin;
814:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
815:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
816:   PetscCall(DMGetDimension(dm, &dim));
817:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
818:   if (isascii) {
819:     PetscCall(PetscViewerASCIIPrintf(viewer, "Dimension: %" PetscInt_FMT "\n", dim));
820:     switch (dim) {
821:     case 1:
822:       PetscCall(PetscViewerASCIIPrintf(viewer, "Global size: %" PetscInt_FMT "\n", stag->N[0]));
823:       break;
824:     case 2:
825:       PetscCall(PetscViewerASCIIPrintf(viewer, "Global sizes: %" PetscInt_FMT " x %" PetscInt_FMT "\n", stag->N[0], stag->N[1]));
826:       PetscCall(PetscViewerASCIIPrintf(viewer, "Parallel decomposition: %d x %d ranks\n", stag->nRanks[0], stag->nRanks[1]));
827:       break;
828:     case 3:
829:       PetscCall(PetscViewerASCIIPrintf(viewer, "Global sizes: %" PetscInt_FMT " x %" PetscInt_FMT " x %" PetscInt_FMT "\n", stag->N[0], stag->N[1], stag->N[2]));
830:       PetscCall(PetscViewerASCIIPrintf(viewer, "Parallel decomposition: %d x %d x %d ranks\n", stag->nRanks[0], stag->nRanks[1], stag->nRanks[2]));
831:       break;
832:     default:
833:       SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "not implemented for dim==%" PetscInt_FMT, dim);
834:     }
835:     PetscCall(PetscViewerASCIIPrintf(viewer, "Boundary ghosting:"));
836:     for (i = 0; i < dim; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, " %s", DMBoundaryTypes[stag->boundaryType[i]]));
837:     PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
838:     PetscCall(PetscViewerASCIIPrintf(viewer, "Elementwise ghost stencil: %s", DMStagStencilTypes[stag->stencilType]));
839:     if (stag->stencilType != DMSTAG_STENCIL_NONE) {
840:       PetscCall(PetscViewerASCIIPrintf(viewer, ", width %" PetscInt_FMT "\n", stag->stencilWidth));
841:     } else {
842:       PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
843:     }
844:     PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " DOF per vertex (0D)\n", stag->dof[0]));
845:     if (dim == 3) PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " DOF per edge (1D)\n", stag->dof[1]));
846:     if (dim > 1) PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " DOF per face (%" PetscInt_FMT "D)\n", stag->dof[dim - 1], dim - 1));
847:     PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " DOF per element (%" PetscInt_FMT "D)\n", stag->dof[dim], dim));
848:     if (dm->coordinates[0].dm) PetscCall(PetscViewerASCIIPrintf(viewer, "Has coordinate DM\n"));
849:     maxRanksToView = 16;
850:     viewAllRanks   = (PetscBool)(size <= maxRanksToView);
851:     if (viewAllRanks) {
852:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
853:       switch (dim) {
854:       case 1:
855:         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local elements : %" PetscInt_FMT " (%" PetscInt_FMT " with ghosts)\n", rank, stag->n[0], stag->nGhost[0]));
856:         break;
857:       case 2:
858:         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Rank coordinates (%d,%d)\n", rank, stag->rank[0], stag->rank[1]));
859:         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local elements : %" PetscInt_FMT " x %" PetscInt_FMT " (%" PetscInt_FMT " x %" PetscInt_FMT " with ghosts)\n", rank, stag->n[0], stag->n[1], stag->nGhost[0], stag->nGhost[1]));
860:         break;
861:       case 3:
862:         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Rank coordinates (%d,%d,%d)\n", rank, stag->rank[0], stag->rank[1], stag->rank[2]));
863:         PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local elements : %" PetscInt_FMT " x %" PetscInt_FMT " x %" PetscInt_FMT " (%" PetscInt_FMT " x %" PetscInt_FMT " x %" PetscInt_FMT " with ghosts)\n", rank, stag->n[0], stag->n[1],
864:                                                      stag->n[2], stag->nGhost[0], stag->nGhost[1], stag->nGhost[2]));
865:         break;
866:       default:
867:         SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "not implemented for dim==%" PetscInt_FMT, dim);
868:       }
869:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local native entries: %" PetscInt_FMT "\n", rank, stag->entries));
870:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local entries total : %" PetscInt_FMT "\n", rank, stag->entriesGhost));
871:       PetscCall(PetscViewerFlush(viewer));
872:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
873:     } else {
874:       PetscCall(PetscViewerASCIIPrintf(viewer, "(Per-rank information omitted since >%" PetscInt_FMT " ranks used)\n", maxRanksToView));
875:     }
876:   }
877:   PetscFunctionReturn(PETSC_SUCCESS);
878: }

880: static PetscErrorCode DMSetFromOptions_Stag(DM dm, PetscOptionItems *PetscOptionsObject)
881: {
882:   DM_Stag *const stag = (DM_Stag *)dm->data;
883:   PetscInt       dim, nRefine = 0, refineFactorTotal[DMSTAG_MAX_DIM], i, d;

885:   PetscFunctionBegin;
886:   PetscCall(DMGetDimension(dm, &dim));
887:   PetscOptionsHeadBegin(PetscOptionsObject, "DMStag Options");
888:   PetscCall(PetscOptionsInt("-stag_grid_x", "Number of grid points in x direction", "DMStagSetGlobalSizes", stag->N[0], &stag->N[0], NULL));
889:   if (dim > 1) PetscCall(PetscOptionsInt("-stag_grid_y", "Number of grid points in y direction", "DMStagSetGlobalSizes", stag->N[1], &stag->N[1], NULL));
890:   if (dim > 2) PetscCall(PetscOptionsInt("-stag_grid_z", "Number of grid points in z direction", "DMStagSetGlobalSizes", stag->N[2], &stag->N[2], NULL));
891:   PetscCall(PetscOptionsMPIInt("-stag_ranks_x", "Number of ranks in x direction", "DMStagSetNumRanks", stag->nRanks[0], &stag->nRanks[0], NULL));
892:   if (dim > 1) PetscCall(PetscOptionsMPIInt("-stag_ranks_y", "Number of ranks in y direction", "DMStagSetNumRanks", stag->nRanks[1], &stag->nRanks[1], NULL));
893:   if (dim > 2) PetscCall(PetscOptionsMPIInt("-stag_ranks_z", "Number of ranks in z direction", "DMStagSetNumRanks", stag->nRanks[2], &stag->nRanks[2], NULL));
894:   PetscCall(PetscOptionsInt("-stag_stencil_width", "Elementwise stencil width", "DMStagSetStencilWidth", stag->stencilWidth, &stag->stencilWidth, NULL));
895:   PetscCall(PetscOptionsEnum("-stag_stencil_type", "Elementwise stencil type", "DMStagSetStencilType", DMStagStencilTypes, (PetscEnum)stag->stencilType, (PetscEnum *)&stag->stencilType, NULL));
896:   PetscCall(PetscOptionsEnum("-stag_boundary_type_x", "Treatment of (physical) boundaries in x direction", "DMStagSetBoundaryTypes", DMBoundaryTypes, (PetscEnum)stag->boundaryType[0], (PetscEnum *)&stag->boundaryType[0], NULL));
897:   PetscCall(PetscOptionsEnum("-stag_boundary_type_y", "Treatment of (physical) boundaries in y direction", "DMStagSetBoundaryTypes", DMBoundaryTypes, (PetscEnum)stag->boundaryType[1], (PetscEnum *)&stag->boundaryType[1], NULL));
898:   PetscCall(PetscOptionsEnum("-stag_boundary_type_z", "Treatment of (physical) boundaries in z direction", "DMStagSetBoundaryTypes", DMBoundaryTypes, (PetscEnum)stag->boundaryType[2], (PetscEnum *)&stag->boundaryType[2], NULL));
899:   PetscCall(PetscOptionsInt("-stag_dof_0", "Number of dof per 0-cell (vertex)", "DMStagSetDOF", stag->dof[0], &stag->dof[0], NULL));
900:   PetscCall(PetscOptionsInt("-stag_dof_1", "Number of dof per 1-cell (element in 1D, face in 2D, edge in 3D)", "DMStagSetDOF", stag->dof[1], &stag->dof[1], NULL));
901:   PetscCall(PetscOptionsInt("-stag_dof_2", "Number of dof per 2-cell (element in 2D, face in 3D)", "DMStagSetDOF", stag->dof[2], &stag->dof[2], NULL));
902:   PetscCall(PetscOptionsInt("-stag_dof_3", "Number of dof per 3-cell (element in 3D)", "DMStagSetDOF", stag->dof[3], &stag->dof[3], NULL));
903:   PetscCall(PetscOptionsBoundedInt("-stag_refine_x", "Refinement factor in x-direction", "DMStagSetRefinementFactor", stag->refineFactor[0], &stag->refineFactor[0], NULL, 1));
904:   if (dim > 1) PetscCall(PetscOptionsBoundedInt("-stag_refine_y", "Refinement factor in y-direction", "DMStagSetRefinementFactor", stag->refineFactor[1], &stag->refineFactor[1], NULL, 1));
905:   if (dim > 2) PetscCall(PetscOptionsBoundedInt("-stag_refine_z", "Refinement factor in z-direction", "DMStagSetRefinementFactor", stag->refineFactor[2], &stag->refineFactor[2], NULL, 1));
906:   PetscCall(PetscOptionsBoundedInt("-stag_refine", "Refine grid one or more times", "None", nRefine, &nRefine, NULL, 0));
907:   PetscOptionsHeadEnd();

909:   for (d = 0; d < dim; ++d) refineFactorTotal[d] = 1;
910:   while (nRefine--)
911:     for (d = 0; d < dim; ++d) refineFactorTotal[d] *= stag->refineFactor[d];
912:   for (d = 0; d < dim; ++d) {
913:     stag->N[d] *= refineFactorTotal[d];
914:     if (stag->l[d])
915:       for (i = 0; i < stag->nRanks[d]; ++i) stag->l[d][i] *= refineFactorTotal[d];
916:   }
917:   PetscFunctionReturn(PETSC_SUCCESS);
918: }
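
/* Illustrative sketch (hypothetical executable and values): typical command-line usage of the
   options registered above, e.g. for a 2D problem:

     ./my_app -stag_grid_x 32 -stag_grid_y 32 -stag_stencil_width 1 -stag_boundary_type_x periodic -stag_refine 1
*/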

920: /*MC
921:   DMSTAG - `"stag"` - A `DM` object representing a "staggered grid" or a structured cell complex.

923:   Level: beginner

925:   Notes:
926:   This implementation parallels the `DMDA` implementation in many ways, but allows degrees of freedom
927:   to be associated with all "strata" in a logically-rectangular grid.

929:   Each stratum can be characterized by the dimension of the entities ("points", to borrow the `DMPLEX`
930:   terminology), from 0- to 3-dimensional.

932:   In some cases this numbering is used directly, for example with `DMStagGetDOF()`.
933:   To allow easier reading, and to some extent more similar code between different-dimensional implementations
934:   of the same problem, we associate canonical names with each type of point, for each dimension of DMStag.

936:   * 1-dimensional `DMSTAG` objects have vertices (0D) and elements (1D).
937:   * 2-dimensional `DMSTAG` objects have vertices (0D), faces (1D), and elements (2D).
938:   * 3-dimensional `DMSTAG` objects have vertices (0D), edges (1D), faces (2D), and elements (3D).

940:   This naming is reflected when viewing a `DMSTAG` object with `DMView()`, and in forming
941:   convenient options prefixes when creating a decomposition with `DMCreateFieldDecomposition()`.
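
  A minimal creation sequence (an illustrative sketch; the grid sizes, dof counts, and stencil
  parameters are arbitrary) might look like
.vb
  DM dm;
  PetscCall(DMStagCreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, 4, 5, PETSC_DECIDE, PETSC_DECIDE, 0, 1, 1, DMSTAG_STENCIL_BOX, 1, NULL, NULL, &dm));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMSetUp(dm));
  PetscCall(DMDestroy(&dm));
.ve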

943: .seealso: [](ch_stag), `DM`, `DMPRODUCT`, `DMDA`, `DMPLEX`, `DMStagCreate1d()`, `DMStagCreate2d()`, `DMStagCreate3d()`, `DMType`, `DMCreate()`,
944:           `DMSetType()`, `DMStagVecSplitToDMDA()`
945: M*/

947: PETSC_EXTERN PetscErrorCode DMCreate_Stag(DM dm)
948: {
949:   DM_Stag *stag;
950:   PetscInt i, dim;

952:   PetscFunctionBegin;
953:   PetscAssertPointer(dm, 1);
954:   PetscCall(PetscNew(&stag));
955:   dm->data = stag;

957:   stag->gtol           = NULL;
958:   stag->ltog_injective = NULL;
959:   stag->ltol           = NULL;
960:   for (i = 0; i < DMSTAG_MAX_STRATA; ++i) stag->dof[i] = 0;
961:   for (i = 0; i < DMSTAG_MAX_DIM; ++i) stag->l[i] = NULL;
962:   stag->stencilType  = DMSTAG_STENCIL_NONE;
963:   stag->stencilWidth = 0;
964:   for (i = 0; i < DMSTAG_MAX_DIM; ++i) stag->nRanks[i] = -1;
965:   stag->coordinateDMType = NULL;
966:   for (i = 0; i < DMSTAG_MAX_DIM; ++i) stag->refineFactor[i] = 2;

968:   PetscCall(DMGetDimension(dm, &dim));
969:   PetscCheck(dim == 1 || dim == 2 || dim == 3, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DMSetDimension() must be called to set a dimension with value 1, 2, or 3");

971:   PetscCall(PetscMemzero(dm->ops, sizeof(*dm->ops)));
972:   dm->ops->createcoordinatedm  = DMCreateCoordinateDM_Stag;
973:   dm->ops->createglobalvector  = DMCreateGlobalVector_Stag;
974:   dm->ops->createlocalvector   = DMCreateLocalVector_Stag;
975:   dm->ops->creatematrix        = DMCreateMatrix_Stag;
976:   dm->ops->hascreateinjection  = DMHasCreateInjection_Stag;
977:   dm->ops->refine              = DMRefine_Stag;
978:   dm->ops->coarsen             = DMCoarsen_Stag;
979:   dm->ops->createinterpolation = DMCreateInterpolation_Stag;
980:   dm->ops->createrestriction   = DMCreateRestriction_Stag;
981:   dm->ops->destroy             = DMDestroy_Stag;
982:   dm->ops->getneighbors        = DMGetNeighbors_Stag;
983:   dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Stag;
984:   dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Stag;
985:   dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Stag;
986:   dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Stag;
987:   dm->ops->localtolocalbegin   = DMLocalToLocalBegin_Stag;
988:   dm->ops->localtolocalend     = DMLocalToLocalEnd_Stag;
989:   dm->ops->setfromoptions      = DMSetFromOptions_Stag;
990:   switch (dim) {
991:   case 1:
992:     dm->ops->setup = DMSetUp_Stag_1d;
993:     break;
994:   case 2:
995:     dm->ops->setup = DMSetUp_Stag_2d;
996:     break;
997:   case 3:
998:     dm->ops->setup = DMSetUp_Stag_3d;
999:     break;
1000:   default:
1001:     SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Unsupported dimension %" PetscInt_FMT, dim);
1002:   }
1003:   dm->ops->clone                    = DMClone_Stag;
1004:   dm->ops->view                     = DMView_Stag;
1005:   dm->ops->getcompatibility         = DMGetCompatibility_Stag;
1006:   dm->ops->createfielddecomposition = DMCreateFieldDecomposition_Stag;
1007:   PetscFunctionReturn(PETSC_SUCCESS);
1008: }