/* da.c — DMDA parameter get/set routines (actual source code) */

  1: #include <petsc/private/dmdaimpl.h>

  3: /*@
  4:   DMDASetSizes - Sets the number of grid points in the three dimensional directions

  6:   Logically Collective

  8:   Input Parameters:
  9: + da - the `DMDA`
 10: . M  - the global X size
 11: . N  - the global Y size
 12: - P  - the global Z size

 14:   Level: intermediate

 16:   Developer Note:
 17:   Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points

 19: .seealso: [](sec_struct), `DM`, `DMDA`, `PetscSplitOwnership()`
 20: @*/
PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Sizes feed the parallel layout computed in DMSetUp(), so they are frozen afterwards */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  /* NOTE(review): the guards accept 0 while the messages say "positive" — presumably 0 is tolerated
     because the dimension may not be set yet (see Developer Note above); confirm before tightening */
  PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be positive");
  PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be positive");
  PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be positive");

  /* Stash the requested global sizes; DMSetUp() consumes them */
  dd->M = M;
  dd->N = N;
  dd->P = P;
  PetscFunctionReturn(PETSC_SUCCESS);
}

 41: /*@
 42:   DMDASetNumProcs - Sets the number of processes in each dimension

 44:   Logically Collective

 46:   Input Parameters:
 47: + da - the `DMDA`
 48: . m  - the number of X processes (or `PETSC_DECIDE`)
 49: . n  - the number of Y processes (or `PETSC_DECIDE`)
 50: - p  - the number of Z processes (or `PETSC_DECIDE`)

 52:   Level: intermediate

 54: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetSizes()`, `PetscSplitOwnership()`
 55: @*/
PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* The process grid is baked into the layout at DMSetUp() time */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->m = m;
  dd->n = n;
  dd->p = p;
  /* In 2d, when exactly one of m/n is given (the other PETSC_DECIDE, i.e. negative),
     derive the missing factor from the communicator size and insist it divides evenly */
  if (da->dim == 2) {
    PetscMPIInt size;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
    if ((dd->m > 0) && (dd->n < 0)) {
      dd->n = size / dd->m;
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction not divisible into comm size %d", m, size);
    }
    if ((dd->n > 0) && (dd->m < 0)) {
      dd->m = size / dd->n;
      PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction not divisible into comm size %d", n, size);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

 84: /*@
 85:   DMDAGetBoundaryType - Gets the type of ghost nodes on domain boundaries.

 87:   Not Collective

 89:   Input Parameter:
 90: . da - The `DMDA`

 92:   Output Parameters:
 93: + bx - x boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
 94: . by - y boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
 95: - bz - z boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`

 97:   Level: intermediate

 99: .seealso: [](sec_struct), `DMDASetBoundaryType()`, `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
100: @*/
PetscErrorCode DMDAGetBoundaryType(DM da, DMBoundaryType *bx, DMBoundaryType *by, DMBoundaryType *bz)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* All three outputs are optional; validate only the pointers the caller supplied */
  if (bx) PetscAssertPointer(bx, 2);
  if (by) PetscAssertPointer(by, 3);
  if (bz) PetscAssertPointer(bz, 4);
  if (bx) *bx = dd->bx;
  if (by) *by = dd->by;
  if (bz) *bz = dd->bz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

116: /*@
117:   DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

119:   Not Collective

121:   Input Parameters:
122: + da - The `DMDA`
123: . bx - x boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
124: . by - y boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
125: - bz - z boundary type, one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`

127:   Level: intermediate

129: .seealso: [](sec_struct), `DMDAGetBoundaryType()`, `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`, `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, `DM_BOUNDARY_PERIODIC`
130: @*/
PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Boundary types shape the ghost-point communication built in DMSetUp() */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->bx = bx;
  dd->by = by;
  dd->bz = bz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

147: /*@
148:   DMDASetDof - Sets the number of degrees of freedom per vertex

150:   Not Collective

152:   Input Parameters:
153: + da  - The `DMDA`
154: - dof - Number of degrees of freedom per vertex

156:   Level: intermediate

158: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`
159: @*/
PetscErrorCode DMDASetDof(DM da, PetscInt dof)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  /* Keep the DMDA-private dof count and the DM-level block size in sync */
  dd->w  = dof;
  da->bs = dof;
  PetscFunctionReturn(PETSC_SUCCESS);
}

173: /*@
174:   DMDAGetDof - Gets the number of degrees of freedom per vertex

176:   Not Collective

178:   Input Parameter:
179: . da - The `DMDA`

181:   Output Parameter:
182: . dof - Number of degrees of freedom per vertex

184:   Level: intermediate

186: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`
187: @*/
PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscAssertPointer(dof, 2);
  /* dd->w holds the degrees of freedom per vertex set by DMDASetDof() */
  *dof = dd->w;
  PetscFunctionReturn(PETSC_SUCCESS);
}

199: /*@
200:   DMDAGetOverlap - Gets the size of the per-processor overlap.

202:   Not Collective

204:   Input Parameter:
205: . da - The `DMDA`

207:   Output Parameters:
208: + x - Overlap in the x direction
209: . y - Overlap in the y direction
210: - z - Overlap in the z direction

212:   Level: intermediate

214: .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetOverlap()`
215: @*/
PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Each output is optional; return only what the caller asked for */
  if (x) *x = dd->xol;
  if (y) *y = dd->yol;
  if (z) *z = dd->zol;
  PetscFunctionReturn(PETSC_SUCCESS);
}

228: /*@
229:   DMDASetOverlap - Sets the size of the per-processor overlap.

231:   Not Collective

233:   Input Parameters:
234: + da - The `DMDA`
235: . x  - Overlap in the x direction
236: . y  - Overlap in the y direction
237: - z  - Overlap in the z direction

239:   Level: intermediate

241: .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`
242: @*/
PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Overlap widths are consumed by DMCreateDomainDecomposition() */
  dd->xol = x;
  dd->yol = y;
  dd->zol = z;
  PetscFunctionReturn(PETSC_SUCCESS);
}

258: /*@
259:   DMDAGetNumLocalSubDomains - Gets the number of local subdomains that would be created upon decomposition.

261:   Not Collective

263:   Input Parameter:
264: . da - The `DMDA`

266:   Output Parameter:
267: . Nsub - Number of local subdomains created upon decomposition

269:   Level: intermediate

271: .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`
272: @*/
PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Output is optional */
  if (Nsub) *Nsub = dd->Nsub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

283: /*@
284:   DMDASetNumLocalSubDomains - Sets the number of local subdomains to create when decomposing with `DMCreateDomainDecomposition()`

286:   Not Collective

288:   Input Parameters:
289: + da   - The `DMDA`
290: - Nsub - The number of local subdomains requested

292:   Level: intermediate

294: .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`
295: @*/
PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Requested local subdomain count, read back by DMCreateDomainDecomposition() */
  dd->Nsub = Nsub;
  PetscFunctionReturn(PETSC_SUCCESS);
}

307: /*@
308:   DMDASetOffset - Sets the index offset of the `DMDA`.

310:   Collective

312:   Input Parameters:
313: + da - The `DMDA`
314: . xo - The offset in the x direction
315: . yo - The offset in the y direction
316: . zo - The offset in the z direction
317: . Mo - The problem offset in the x direction
318: . No - The problem offset in the y direction
319: - Po - The problem offset in the z direction

321:   Level: developer

323:   Note:
324:   This is used primarily to overlap a computation on a local `DMDA` with that on a global `DMDA` without
325:   changing boundary conditions or subdomain features that depend upon the global offsets.

327: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
328: @*/
PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  dd->xo = xo;
  dd->yo = yo;
  dd->zo = zo;
  dd->Mo = Mo;
  dd->No = No;
  dd->Po = Po;

  /* Keep the coordinate DM's indexing aligned with this DMDA, if one exists */
  if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po));
  PetscFunctionReturn(PETSC_SUCCESS);
}

352: /*@
353:   DMDAGetOffset - Gets the index offset of the `DMDA`.

355:   Not Collective

357:   Input Parameter:
358: . da - The `DMDA`

360:   Output Parameters:
361: + xo - The offset in the x direction
362: . yo - The offset in the y direction
363: . zo - The offset in the z direction
364: . Mo - The global size in the x direction
365: . No - The global size in the y direction
366: - Po - The global size in the z direction

368:   Level: developer

370: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDASetOffset()`, `DMDAVecGetArray()`
371: @*/
PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Every output is optional; copy out only what was requested */
  if (xo) *xo = dd->xo;
  if (yo) *yo = dd->yo;
  if (zo) *zo = dd->zo;
  if (Mo) *Mo = dd->Mo;
  if (No) *No = dd->No;
  if (Po) *Po = dd->Po;
  PetscFunctionReturn(PETSC_SUCCESS);
}

387: /*@
388:   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain `DMDA`.

390:   Not Collective

392:   Input Parameter:
393: . da - The `DMDA`

395:   Output Parameters:
396: + xs - The start of the region in x
397: . ys - The start of the region in y
398: . zs - The start of the region in z
399: . xm - The size of the region in x
400: . ym - The size of the region in y
401: - zm - The size of the region in z

403:   Level: intermediate

405: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
406: @*/
PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Starts and sizes of the nonoverlapping region; each output is optional */
  if (xs) *xs = dd->nonxs;
  if (ys) *ys = dd->nonys;
  if (zs) *zs = dd->nonzs;
  if (xm) *xm = dd->nonxm;
  if (ym) *ym = dd->nonym;
  if (zm) *zm = dd->nonzm;
  PetscFunctionReturn(PETSC_SUCCESS);
}

422: /*@
423:   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain `DMDA`.

425:   Collective

427:   Input Parameters:
428: + da - The `DMDA`
429: . xs - The start of the region in x
430: . ys - The start of the region in y
431: . zs - The start of the region in z
432: . xm - The size of the region in x
433: . ym - The size of the region in y
434: - zm - The size of the region in z

436:   Level: intermediate

438: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
439: @*/
PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Record starts (xs,ys,zs) and sizes (xm,ym,zm) of this subdomain's nonoverlapping region */
  dd->nonxs = xs;
  dd->nonys = ys;
  dd->nonzs = zs;
  dd->nonxm = xm;
  dd->nonym = ym;
  dd->nonzm = zm;
  PetscFunctionReturn(PETSC_SUCCESS);
}

461: /*@
462:   DMDASetStencilType - Sets the type of the communication stencil

464:   Logically Collective

466:   Input Parameters:
467: + da    - The `DMDA`
468: - stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.

470:   Level: intermediate

472: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
473: @*/
PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Stencil type determines the ghost exchange pattern assembled during DMSetUp() */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->stencil_type = stype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

486: /*@
487:   DMDAGetStencilType - Gets the type of the communication stencil

489:   Not Collective

491:   Input Parameter:
492: . da - The `DMDA`

494:   Output Parameter:
495: . stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.

497:   Level: intermediate

499: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
500: @*/
PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscAssertPointer(stype, 2);
  *stype = dd->stencil_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

512: /*@
513:   DMDASetStencilWidth - Sets the width of the communication stencil

515:   Logically Collective

517:   Input Parameters:
518: + da    - The `DMDA`
519: - width - The stencil width

521:   Level: intermediate

523: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
524: @*/
PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Width fixes the ghost-region depth used when DMSetUp() builds the communication */
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  dd->s = width;
  PetscFunctionReturn(PETSC_SUCCESS);
}

537: /*@
538:   DMDAGetStencilWidth - Gets the width of the communication stencil

540:   Not Collective

542:   Input Parameter:
543: . da - The `DMDA`

545:   Output Parameter:
546: . width - The stencil width

548:   Level: intermediate

550: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMDAStencilType`, `DMDA_STENCIL_BOX`, `DMDA_STENCIL_STAR.`
551: @*/
PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscAssertPointer(width, 2);
  *width = dd->s;
  PetscFunctionReturn(PETSC_SUCCESS);
}

563: static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[])
564: {
565:   PetscInt i, sum;

567:   PetscFunctionBegin;
568:   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set");
569:   for (i = sum = 0; i < m; i++) sum += lx[i];
570:   PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M);
571:   PetscFunctionReturn(PETSC_SUCCESS);
572: }

574: /*@
575:   DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

577:   Logically Collective

579:   Input Parameters:
580: + da - The `DMDA`
581: . lx - array containing number of nodes in the X direction on each process, or `NULL`. If non-null, must be of length da->m
582: . ly - array containing number of nodes in the Y direction on each process, or `NULL`. If non-null, must be of length da->n
583: - lz - array containing number of nodes in the Z direction on each process, or `NULL`. If non-null, must be of length da->p.

585:   Level: intermediate

587:   Note:
588:   These numbers are NOT multiplied by the number of dof per node.

590: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
591: @*/
PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
  /* For each provided direction: the process count must already be known (dd->m < 0 means
     PETSC_DECIDE, i.e. unset), the counts must sum to the global size, and the values are
     copied into a lazily allocated private array of length dd->m/n/p */
  if (lx) {
    PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx));
    if (!dd->lx) PetscCall(PetscMalloc1(dd->m, &dd->lx));
    PetscCall(PetscArraycpy(dd->lx, lx, dd->m));
  }
  if (ly) {
    PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly));
    if (!dd->ly) PetscCall(PetscMalloc1(dd->n, &dd->ly));
    PetscCall(PetscArraycpy(dd->ly, ly, dd->n));
  }
  if (lz) {
    PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of processes");
    PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz));
    if (!dd->lz) PetscCall(PetscMalloc1(dd->p, &dd->lz));
    PetscCall(PetscArraycpy(dd->lz, lz, dd->p));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

620: /*@
621:   DMDASetInterpolationType - Sets the type of interpolation that will be
622:   returned by `DMCreateInterpolation()`

624:   Logically Collective

626:   Input Parameters:
627: + da    - initial distributed array
628: - ctype - `DMDA_Q1` and `DMDA_Q0` are currently the only supported forms

630:   Level: intermediate

632:   Note:
633:   You should call this on the coarser of the two `DMDA` you pass to `DMCreateInterpolation()`

635: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDAInterpolationType`,
636:           `DMDA_Q1`, `DMDA_Q0`
637: @*/
PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Read back by DMCreateInterpolation() on the coarse DMDA */
  dd->interptype = ctype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

649: /*@
650:   DMDAGetInterpolationType - Gets the type of interpolation that will be
651:   used by `DMCreateInterpolation()`

653:   Not Collective

655:   Input Parameter:
656: . da - distributed array

658:   Output Parameter:
659: . ctype - interpolation type (`DMDA_Q1` and `DMDA_Q0` are currently the only supported forms)

661:   Level: intermediate

663: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()`,
664:           `DMDA_Q1`, `DMDA_Q0`
665: @*/
PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscAssertPointer(ctype, 2);
  *ctype = dd->interptype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

677: /*@C
678:   DMDAGetNeighbors - Gets an array containing the MPI rank of all the current
679:   processes neighbors.

681:   Not Collective

683:   Input Parameter:
684: . da - the `DMDA` object

686:   Output Parameter:
687: . ranks - the neighbors ranks, stored with the x index increasing most rapidly. The process itself is in the list

689:   Level: intermediate

691:   Notes:
692:   In 2d the array is of length 9, in 3d of length 27

694:   Not supported in 1d

696:   Do not free the array, it is freed when the `DMDA` is destroyed.

698:   Fortran Note:
699:   Pass in an array of the appropriate length to contain the values

701: .seealso: [](sec_struct), `DMDA`, `DM`
702: @*/
PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[])
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Borrowed reference: the array is owned (and later freed) by the DMDA */
  *ranks = dd->neighbors;
  PetscFunctionReturn(PETSC_SUCCESS);
}

713: /*@C
714:   DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process

716:   Not Collective

718:   Input Parameter:
719: . da - the `DMDA` object

721:   Output Parameters:
722: + lx - ownership along x direction (optional)
723: . ly - ownership along y direction (optional)
724: - lz - ownership along z direction (optional)

726:   Level: intermediate

728:   Note:
729:   These correspond to the optional final arguments passed to `DMDACreate()`, `DMDACreate2d()`, `DMDACreate3d()`

731:   In C you should not free these arrays, nor change the values in them. They will only have valid values while the
732:   `DMDA` they came from still exists (has not been destroyed).

734:   These numbers are NOT multiplied by the number of dof per node.

736:   Fortran Note:
737:   Pass in arrays `lx`, `ly`, and `lz` of the appropriate length to hold the values; the sixth, seventh and
738:   eighth arguments from `DMDAGetInfo()`

740: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()`
741: @*/
PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[])
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Borrowed references into the DMDA's internal arrays; outputs are optional */
  if (lx) *lx = dd->lx;
  if (ly) *ly = dd->ly;
  if (lz) *lz = dd->lz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

754: /*@
755:   DMDASetRefinementFactor - Set the ratios that the `DMDA` grid is refined

757:   Logically Collective

759:   Input Parameters:
760: + da       - the `DMDA` object
761: . refine_x - ratio of fine grid to coarse in x direction (2 by default)
762: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
763: - refine_z - ratio of fine grid to coarse in z direction (2 by default)

765:   Options Database Keys:
766: + -da_refine_x refine_x - refinement ratio in x direction
767: . -da_refine_y rafine_y - refinement ratio in y direction
768: . -da_refine_z refine_z - refinement ratio in z direction
769: - -da_refine <n>        - refine the `DMDA` object n times when it is created.

771:   Level: intermediate

773:   Note:
774:   Pass `PETSC_IGNORE` to leave a value unchanged

776: .seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDAGetRefinementFactor()`
777: @*/
PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;

  /* Non-positive values (e.g. PETSC_IGNORE) leave the corresponding ratio unchanged */
  if (refine_x > 0) dd->refine_x = refine_x;
  if (refine_y > 0) dd->refine_y = refine_y;
  if (refine_z > 0) dd->refine_z = refine_z;
  PetscFunctionReturn(PETSC_SUCCESS);
}

794: /*@C
795:   DMDAGetRefinementFactor - Gets the ratios that the `DMDA` grid is refined

797:   Not Collective

799:   Input Parameter:
800: . da - the `DMDA` object

802:   Output Parameters:
803: + refine_x - ratio of fine grid to coarse in x direction (2 by default)
804: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
805: - refine_z - ratio of fine grid to coarse in z direction (2 by default)

807:   Level: intermediate

809:   Note:
810:   Pass `NULL` for values you do not need

812: .seealso: [](sec_struct), `DM`, `DMDA`, `DMRefine()`, `DMDASetRefinementFactor()`
813: @*/
PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* Pass NULL for any ratio you do not need */
  if (refine_x) *refine_x = dd->refine_x;
  if (refine_y) *refine_y = dd->refine_y;
  if (refine_z) *refine_z = dd->refine_z;
  PetscFunctionReturn(PETSC_SUCCESS);
}

826: /*@C
827:   DMDASetGetMatrix - Sets the routine used by the `DMDA` to allocate a matrix.

829:   Logically Collective; No Fortran Support

831:   Input Parameters:
832: + da - the `DMDA` object
833: - f  - the function that allocates the matrix for that specific `DMDA`

835:   Calling sequence of `f`:
836: + da - the `DMDA` object
837: - A  - the created matrix

839:   Level: developer

841:   Notes:
842:   If the function is not provided a default function is used that uses the `DMDAStencilType`, `DMBoundaryType`, and value of `DMDASetStencilWidth()`
843:   to construct the matrix.

845:   See `DMDASetBlockFills()` that provides a simple way to provide the nonzero structure for
846:   the diagonal and off-diagonal blocks of the matrix without providing a custom function

848:   Developer Note:
849:   This should be called `DMDASetCreateMatrix()`

851: .seealso: [](sec_struct), `DM`, `DMDA`, `DMCreateMatrix()`, `DMDASetBlockFills()`
852: @*/
PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM da, Mat *A))
{
  PetscFunctionBegin;
  /* Install f as the DM's matrix-creation callback, used by DMCreateMatrix() */
  da->ops->creatematrix = f;
  PetscFunctionReturn(PETSC_SUCCESS);
}

861: /*@
862:   DMDAMapMatStencilToGlobal - Map a list of `MatStencil` on a grid to global indices.

864:   Not Collective

866:   Input Parameters:
867: + da   - the `DMDA` object
868: . m    - number of `MatStencil` to map
869: - idxm - grid points (and component number when dof > 1)

871:   Output Parameter:
872: . gidxm - global row indices

874:   Level: intermediate

876: .seealso: [](sec_struct), `DM`, `DMDA`, `MatStencil`
877: @*/
PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[])
{
  const DM_DA           *dd  = (const DM_DA *)da->data;
  const PetscInt        *dxm = (const PetscInt *)idxm; /* view MatStencil structs as a flat {k,j,i,c} integer stream */
  PetscInt               i, j, sdim, tmp, dim;
  PetscInt               dims[4], starts[4], dims2[3], starts2[3], dof = dd->w;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  if (m <= 0) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to map */

  /* Code adapted from DMDAGetGhostCorners() */
  /* NOTE(review): Xs/Xe appear to be stored scaled by dof (hence the division), while Ys/Zs are not — confirm against dmdaimpl.h */
  starts2[0] = dd->Xs / dof + dd->xo;
  starts2[1] = dd->Ys + dd->yo;
  starts2[2] = dd->Zs + dd->zo;
  dims2[0]   = (dd->Xe - dd->Xs) / dof;
  dims2[1]   = (dd->Ye - dd->Ys);
  dims2[2]   = (dd->Ze - dd->Zs);

  /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */
  dim  = da->dim;                   /* DA dim: 1 to 3 */
  sdim = dim + (dof > 1 ? 1 : 0);   /* Dimensions in MatStencil's (k,j,i,c) view */
  for (i = 0; i < dim; i++) {       /* Reverse the order and also skip the unused dimensions */
    dims[i]   = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */
    starts[i] = starts2[dim - i - 1];
  }
  starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */
  dims[dim]   = dof;

  /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */
  for (i = 0; i < m; i++) {
    dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */
    tmp = 0;
    for (j = 0; j < sdim; j++) {                                                      /* Iter over, ex. j,i or j,i,c in 2D */
      if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */
      else tmp = tmp * dims[j] + (dxm[j] - starts[j]);
    }
    gidxm[i] = tmp; /* local (ghosted) index, or -1 for out-of-range points */
    /* Move to the next MatStencil point */
    if (dof > 1) dxm += sdim; /* c is already counted in sdim */
    else dxm += sdim + 1;     /* skip the unused c */
  }

  /* Map local indices to global indices */
  PetscCall(DMGetLocalToGlobalMapping(da, &ltog));
  PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

927: /*
928:   Creates "balanced" ownership ranges after refinement, constrained by the need for the
929:   fine grid boundaries to fall within one stencil width of the coarse partition.

931:   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
932: */
933: static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[])
934: {
935:   PetscInt i, totalc = 0, remaining, startc = 0, startf = 0;

937:   PetscFunctionBegin;
938:   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
939:   if (ratio == 1) {
940:     PetscCall(PetscArraycpy(lf, lc, m));
941:     PetscFunctionReturn(PETSC_SUCCESS);
942:   }
943:   for (i = 0; i < m; i++) totalc += lc[i];
944:   remaining = (!periodic) + ratio * (totalc - (!periodic));
945:   for (i = 0; i < m; i++) {
946:     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
947:     if (i == m - 1) lf[i] = want;
948:     else {
949:       const PetscInt nextc = startc + lc[i];
950:       /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
951:        * coarse stencil width of the first coarse node in the next subdomain. */
952:       while ((startf + want) / ratio < nextc - stencil_width) want++;
953:       /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
954:        * coarse stencil width of the last coarse node in the current subdomain. */
955:       while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--;
956:       /* Make sure all constraints are satisfied */
957:       if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width))
958:         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range");
959:     }
960:     lf[i] = want;
961:     startc += lc[i];
962:     startf += lf[i];
963:     remaining -= lf[i];
964:   }
965:   PetscFunctionReturn(PETSC_SUCCESS);
966: }

968: /*
969:   Creates "balanced" ownership ranges after coarsening, constrained by the need for the
970:   fine grid boundaries to fall within one stencil width of the coarse partition.

972:   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
973: */
974: static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[])
975: {
976:   PetscInt i, totalf, remaining, startc, startf;

978:   PetscFunctionBegin;
979:   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
980:   if (ratio == 1) {
981:     PetscCall(PetscArraycpy(lc, lf, m));
982:     PetscFunctionReturn(PETSC_SUCCESS);
983:   }
984:   for (i = 0, totalf = 0; i < m; i++) totalf += lf[i];
985:   remaining = (!periodic) + (totalf - (!periodic)) / ratio;
986:   for (i = 0, startc = 0, startf = 0; i < m; i++) {
987:     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
988:     if (i == m - 1) lc[i] = want;
989:     else {
990:       const PetscInt nextf = startf + lf[i];
991:       /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
992:        * node is within one stencil width. */
993:       while (nextf / ratio < startc + want - stencil_width) want--;
994:       /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
995:        * fine node is within one stencil width. */
996:       while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++;
997:       if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width))
998:         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range");
999:     }
1000:     lc[i] = want;
1001:     startc += lc[i];
1002:     startf += lf[i];
1003:     remaining -= lc[i];
1004:   }
1005:   PetscFunctionReturn(PETSC_SUCCESS);
1006: }

/*
  DMRefine_DA - Implements DMRefine() for DMDA: builds a refined DMDA with the same process layout,
  dof, stencil type/width, and boundary types as the input, with global sizes scaled by the
  refinement factors, and (when coordinates are set on the coarse grid) interpolated coordinates.

  NOTE(review): the `comm` argument is unused; the refined DM is created on the communicator of `da`.
*/
1008: PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref)
1009: {
1010:   PetscInt M, N, P, i, dim;
1011:   Vec      coordsc, coordsf;
1012:   DM       da2;
1013:   DM_DA   *dd = (DM_DA *)da->data, *dd2;

1015:   PetscFunctionBegin;
1017:   PetscAssertPointer(daref, 3);

1019:   PetscCall(DMGetDimension(da, &dim));
        /* Refined global sizes: with periodic boundaries (or piecewise-constant DMDA_Q0 interpolation)
           every cell is refined, giving refine*M points; otherwise the grid endpoints coincide on both
           levels, giving 1 + refine*(M - 1). Unused dimensions get size 1. */
1020:   if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1021:     M = dd->refine_x * dd->M;
1022:   } else {
1023:     M = 1 + dd->refine_x * (dd->M - 1);
1024:   }
1025:   if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1026:     if (dim > 1) {
1027:       N = dd->refine_y * dd->N;
1028:     } else {
1029:       N = 1;
1030:     }
1031:   } else {
1032:     N = 1 + dd->refine_y * (dd->N - 1);
1033:   }
1034:   if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1035:     if (dim > 2) {
1036:       P = dd->refine_z * dd->P;
1037:     } else {
1038:       P = 1;
1039:     }
1040:   } else {
1041:     P = 1 + dd->refine_z * (dd->P - 1);
1042:   }
1043:   PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2));
1044:   PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix));
1045:   PetscCall(DMSetDimension(da2, dim));
1046:   PetscCall(DMDASetSizes(da2, M, N, P));
1047:   PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p));
1048:   PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz));
1049:   PetscCall(DMDASetDof(da2, dd->w));
1050:   PetscCall(DMDASetStencilType(da2, dd->stencil_type));
1051:   PetscCall(DMDASetStencilWidth(da2, dd->s));
        /* Build per-rank ownership ranges for the refined grid that remain compatible with the
           coarse partition (see DMDARefineOwnershipRanges) */
1052:   if (dim == 3) {
1053:     PetscInt *lx, *ly, *lz;
1054:     PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
1055:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1056:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
1057:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz));
1058:     PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz));
1059:     PetscCall(PetscFree3(lx, ly, lz));
1060:   } else if (dim == 2) {
1061:     PetscInt *lx, *ly;
1062:     PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
1063:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1064:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
1065:     PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL));
1066:     PetscCall(PetscFree2(lx, ly));
1067:   } else if (dim == 1) {
1068:     PetscInt *lx;
1069:     PetscCall(PetscMalloc1(dd->m, &lx));
1070:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1071:     PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL));
1072:     PetscCall(PetscFree(lx));
1073:   }
1074:   dd2 = (DM_DA *)da2->data;

1076:   /* allow overloaded (user replaced) operations to be inherited by refinement clones */
1077:   da2->ops->creatematrix = da->ops->creatematrix;
1078:   /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
1079:   da2->ops->getcoloring = da->ops->getcoloring;
1080:   dd2->interptype       = dd->interptype;

1082:   /* copy fill information if given */
1083:   if (dd->dfill) {
1084:     PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
1085:     PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
1086:   }
1087:   if (dd->ofill) {
1088:     PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
1089:     PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
1090:   }
1091:   /* copy the refine information */
1092:   dd2->coarsen_x = dd2->refine_x = dd->refine_x;
1093:   dd2->coarsen_y = dd2->refine_y = dd->refine_y;
1094:   dd2->coarsen_z = dd2->refine_z = dd->refine_z;

        /* If per-level hierarchies of refinement factors were given, pick the factor for the new grid's
           level. NOTE(review): the index levelup - leveldown (+1 for the refined grid) is taken as the
           current multigrid level — confirm against how DMRefine tracks levels. */
1096:   if (dd->refine_z_hier) {
1097:     if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
1098:     if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
1099:     dd2->refine_z_hier_n = dd->refine_z_hier_n;
1100:     PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
1101:     PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
1102:   }
1103:   if (dd->refine_y_hier) {
1104:     if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
1105:     if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
1106:     dd2->refine_y_hier_n = dd->refine_y_hier_n;
1107:     PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
1108:     PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
1109:   }
1110:   if (dd->refine_x_hier) {
1111:     if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
1112:     if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
1113:     dd2->refine_x_hier_n = dd->refine_x_hier_n;
1114:     PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
1115:     PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
1116:   }

1118:   /* copy vector type information */
1119:   PetscCall(DMSetVecType(da2, da->vectype));

        /* NOTE(review): lf/lj are shared with the coarse DM, not duplicated — presumably plain
           callback pointers with no ownership transfer; confirm against DM_DA's definition. */
1121:   dd2->lf = dd->lf;
1122:   dd2->lj = dd->lj;

1124:   da2->leveldown = da->leveldown;
1125:   da2->levelup   = da->levelup + 1;

1127:   PetscCall(DMSetUp(da2));

1129:   /* interpolate coordinates if they are set on the coarse grid */
1130:   PetscCall(DMGetCoordinates(da, &coordsc));
1131:   if (coordsc) {
1132:     DM  cdaf, cdac;
1133:     Mat II;

1135:     PetscCall(DMGetCoordinateDM(da, &cdac));
1136:     PetscCall(DMGetCoordinateDM(da2, &cdaf));
1137:     /* force creation of the coordinate vector */
1138:     PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
1139:     PetscCall(DMGetCoordinates(da2, &coordsf));
1140:     PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL));
1141:     PetscCall(MatInterpolate(II, coordsc, coordsf));
1142:     PetscCall(MatDestroy(&II));
1143:   }

        /* propagate per-field names to the refined DM */
1145:   for (i = 0; i < da->bs; i++) {
1146:     const char *fieldname;
1147:     PetscCall(DMDAGetFieldName(da, i, &fieldname));
1148:     PetscCall(DMDASetFieldName(da2, i, fieldname));
1149:   }

1151:   *daref = da2;
1152:   PetscFunctionReturn(PETSC_SUCCESS);
1153: }

/*
  DMCoarsen_DA - Implements DMCoarsen() for DMDA: builds a coarsened DMDA with the same process
  layout, dof, stencil type/width, and boundary types as the fine input, with global sizes divided
  by the coarsening factors, and (when coordinates are set on the fine grid) injected coordinates.

  NOTE(review): the `comm` argument is unused; the coarse DM is created on the communicator of `dmf`.
*/
1155: PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc)
1156: {
1157:   PetscInt M, N, P, i, dim;
1158:   Vec      coordsc, coordsf;
1159:   DM       dmc2;
1160:   DM_DA   *dd = (DM_DA *)dmf->data, *dd2;

1162:   PetscFunctionBegin;
1164:   PetscAssertPointer(dmc, 3);

1166:   PetscCall(DMGetDimension(dmf, &dim));
        /* Coarse global sizes: inverse of the formulas in DMRefine_DA — periodic boundaries (or
           piecewise-constant DMDA_Q0 interpolation) give M/coarsen, otherwise 1 + (M - 1)/coarsen
           so the endpoints coincide. Unused dimensions get size 1. */
1167:   if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1168:     M = dd->M / dd->coarsen_x;
1169:   } else {
1170:     M = 1 + (dd->M - 1) / dd->coarsen_x;
1171:   }
1172:   if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1173:     if (dim > 1) {
1174:       N = dd->N / dd->coarsen_y;
1175:     } else {
1176:       N = 1;
1177:     }
1178:   } else {
1179:     N = 1 + (dd->N - 1) / dd->coarsen_y;
1180:   }
1181:   if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1182:     if (dim > 2) {
1183:       P = dd->P / dd->coarsen_z;
1184:     } else {
1185:       P = 1;
1186:     }
1187:   } else {
1188:     P = 1 + (dd->P - 1) / dd->coarsen_z;
1189:   }
1190:   PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2));
1191:   PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix));
1192:   PetscCall(DMSetDimension(dmc2, dim));
1193:   PetscCall(DMDASetSizes(dmc2, M, N, P));
1194:   PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p));
1195:   PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz));
1196:   PetscCall(DMDASetDof(dmc2, dd->w));
1197:   PetscCall(DMDASetStencilType(dmc2, dd->stencil_type));
1198:   PetscCall(DMDASetStencilWidth(dmc2, dd->s));
        /* Build per-rank ownership ranges for the coarse grid that remain compatible with the fine
           partition (see DMDACoarsenOwnershipRanges) */
1199:   if (dim == 3) {
1200:     PetscInt *lx, *ly, *lz;
1201:     PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
1202:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1203:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
1204:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz));
1205:     PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz));
1206:     PetscCall(PetscFree3(lx, ly, lz));
1207:   } else if (dim == 2) {
1208:     PetscInt *lx, *ly;
1209:     PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
1210:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1211:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
1212:     PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL));
1213:     PetscCall(PetscFree2(lx, ly));
1214:   } else if (dim == 1) {
1215:     PetscInt *lx;
1216:     PetscCall(PetscMalloc1(dd->m, &lx));
1217:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1218:     PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL));
1219:     PetscCall(PetscFree(lx));
1220:   }
1221:   dd2 = (DM_DA *)dmc2->data;

1223:   /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
1224:   /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
1225:   dmc2->ops->creatematrix = dmf->ops->creatematrix;
1226:   dmc2->ops->getcoloring  = dmf->ops->getcoloring;
1227:   dd2->interptype         = dd->interptype;

1229:   /* copy fill information if given */
1230:   if (dd->dfill) {
1231:     PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
1232:     PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
1233:   }
1234:   if (dd->ofill) {
1235:     PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
1236:     PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
1237:   }
1238:   /* copy the refine information */
1239:   dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
1240:   dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
1241:   dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

        /* If per-level hierarchies of refinement factors were given, pick the factors for the new
           (one level coarser) grid. NOTE(review): the index levelup - leveldown - 1/-2 mirrors the
           +1/+0 indexing in DMRefine_DA — confirm against how DMCoarsen tracks levels. */
1243:   if (dd->refine_z_hier) {
1244:     if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
1245:     if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
1246:     dd2->refine_z_hier_n = dd->refine_z_hier_n;
1247:     PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
1248:     PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
1249:   }
1250:   if (dd->refine_y_hier) {
1251:     if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
1252:     if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
1253:     dd2->refine_y_hier_n = dd->refine_y_hier_n;
1254:     PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
1255:     PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
1256:   }
1257:   if (dd->refine_x_hier) {
1258:     if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
1259:     if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
1260:     dd2->refine_x_hier_n = dd->refine_x_hier_n;
1261:     PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
1262:     PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
1263:   }

1265:   /* copy vector type information */
1266:   PetscCall(DMSetVecType(dmc2, dmf->vectype));

        /* NOTE(review): lf/lj are shared with the fine DM, not duplicated — presumably plain
           callback pointers with no ownership transfer; confirm against DM_DA's definition. */
1268:   dd2->lf = dd->lf;
1269:   dd2->lj = dd->lj;

1271:   dmc2->leveldown = dmf->leveldown + 1;
1272:   dmc2->levelup   = dmf->levelup;

1274:   PetscCall(DMSetUp(dmc2));

1276:   /* inject coordinates if they are set on the fine grid */
1277:   PetscCall(DMGetCoordinates(dmf, &coordsf));
1278:   if (coordsf) {
1279:     DM         cdaf, cdac;
1280:     Mat        inject;
1281:     VecScatter vscat;

1283:     PetscCall(DMGetCoordinateDM(dmf, &cdaf));
1284:     PetscCall(DMGetCoordinateDM(dmc2, &cdac));
1285:     /* force creation of the coordinate vector */
1286:     PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
1287:     PetscCall(DMGetCoordinates(dmc2, &coordsc));

1289:     PetscCall(DMCreateInjection(cdac, cdaf, &inject));
1290:     PetscCall(MatScatterGetVecScatter(inject, &vscat));
1291:     PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
1292:     PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
1293:     PetscCall(MatDestroy(&inject));
1294:   }

        /* propagate per-field names to the coarse DM */
1296:   for (i = 0; i < dmf->bs; i++) {
1297:     const char *fieldname;
1298:     PetscCall(DMDAGetFieldName(dmf, i, &fieldname));
1299:     PetscCall(DMDASetFieldName(dmc2, i, fieldname));
1300:   }

1302:   *dmc = dmc2;
1303:   PetscFunctionReturn(PETSC_SUCCESS);
1304: }

1306: PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[])
1307: {
1308:   PetscInt i, n, *refx, *refy, *refz;

1310:   PetscFunctionBegin;
1312:   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1313:   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1314:   PetscAssertPointer(daf, 3);

1316:   /* Get refinement factors, defaults taken from the coarse DMDA */
1317:   PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz));
1318:   for (i = 0; i < nlevels; i++) PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i]));
1319:   n = nlevels;
1320:   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL));
1321:   n = nlevels;
1322:   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL));
1323:   n = nlevels;
1324:   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL));

1326:   PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0]));
1327:   PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &daf[0]));
1328:   for (i = 1; i < nlevels; i++) {
1329:     PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i]));
1330:     PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i]));
1331:   }
1332:   PetscCall(PetscFree3(refx, refy, refz));
1333:   PetscFunctionReturn(PETSC_SUCCESS);
1334: }

1336: PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[])
1337: {
1338:   PetscInt i;

1340:   PetscFunctionBegin;
1342:   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1343:   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1344:   PetscAssertPointer(dac, 3);
1345:   PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0]));
1346:   for (i = 1; i < nlevels; i++) PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i]));
1347:   PetscFunctionReturn(PETSC_SUCCESS);
1348: }

1350: static PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes)
1351: {
1352:   PetscInt     i, j, xs, xn, q;
1353:   PetscScalar *xx;
1354:   PetscReal    h;
1355:   Vec          x;
1356:   DM_DA       *da = (DM_DA *)dm->data;

1358:   PetscFunctionBegin;
1359:   if (da->bx != DM_BOUNDARY_PERIODIC) {
1360:     PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1361:     q = (q - 1) / (n - 1); /* number of spectral elements */
1362:     h = 2.0 / q;
1363:     PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL));
1364:     xs = xs / (n - 1);
1365:     xn = xn / (n - 1);
1366:     PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.));
1367:     PetscCall(DMGetCoordinates(dm, &x));
1368:     PetscCall(DMDAVecGetArray(dm, x, &xx));

1370:     /* loop over local spectral elements */
1371:     for (j = xs; j < xs + xn; j++) {
1372:       /*
1373:        Except for the first process, each process starts on the second GLL point of the first element on that process
1374:        */
1375:       for (i = (j == xs && xs > 0) ? 1 : 0; i < n; i++) xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.;
1376:     }
1377:     PetscCall(DMDAVecRestoreArray(dm, x, &xx));
1378:   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic");
1379:   PetscFunctionReturn(PETSC_SUCCESS);
1380: }

1382: /*@

1384:   DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points

1386:   Collective

1388:   Input Parameters:
1389: + da    - the `DMDA` object
1390: . n     - the number of GLL nodes
1391: - nodes - the GLL nodes

1393:   Level: advanced

1395:   Note:
1396:   The parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
1397:   on each process much be divisible by the number of GLL elements needed per process. This depends on whether the `DMDA` is
1398:   periodic or not.

1400: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
1401: @*/
1402: PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes)
1403: {
1404:   PetscFunctionBegin;
1405:   if (da->dim == 1) {
1406:     PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes));
1407:   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d");
1408:   PetscFunctionReturn(PETSC_SUCCESS);
1409: }

/*
  DMGetCompatibility_DA - Implements DMGetCompatibility() for DMDA: two DMDAs are compatible when
  they agree on dimension, stencil width, and (per active dimension) global size, ranks, boundary
  type, and per-rank ownership ranges. Declines (*set = PETSC_FALSE) when dm2 is not a DMDA.

  NOTE(review): the result is this rank's local determination (`compatibleLocal`); presumably any
  needed reduction across ranks is the caller's responsibility — confirm with DMGetCompatibility().
*/
1411: PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set)
1412: {
1413:   DM_DA    *dd1 = (DM_DA *)da1->data, *dd2;
1414:   DM        da2;
1415:   DMType    dmtype2;
1416:   PetscBool isda, compatibleLocal;
1417:   PetscInt  i;

1419:   PetscFunctionBegin;
1420:   PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()");
1421:   PetscCall(DMGetType(dm2, &dmtype2));
1422:   PetscCall(PetscStrcmp(dmtype2, DMDA, &isda));
1423:   if (isda) {
1424:     da2 = dm2;
1425:     dd2 = (DM_DA *)da2->data;
1426:     PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()");
1427:     compatibleLocal = (PetscBool)(da1->dim == da2->dim);
1428:     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
1429:     /*                                                                           Global size              ranks               Boundary type */
1430:     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
1431:     if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
1432:     if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
        /* Ownership ranges are only compared once the global layout matched, so dd1->m/n/p == dd2->m/n/p
           here and the lx/ly/lz arrays have equal length */
1433:     if (compatibleLocal) {
1434:       for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size     */ }
1435:     }
1436:     if (compatibleLocal && da1->dim > 1) {
1437:       for (i = 0; i < dd1->n; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
1438:     }
1439:     if (compatibleLocal && da1->dim > 2) {
1440:       for (i = 0; i < dd1->p; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
1441:     }
1442:     *compatible = compatibleLocal;
1443:     *set        = PETSC_TRUE;
1444:   } else {
1445:     /* Decline to determine compatibility with other DM types */
1446:     *set = PETSC_FALSE;
1447:   }
1448:   PetscFunctionReturn(PETSC_SUCCESS);
1449: }