Actual source code: plexcgns2.c

  1: #include <petsc/private/dmpleximpl.h>
  2: #include <petsc/private/sfimpl.h>
  3: #include <petsc/private/viewercgnsimpl.h>
  4: #include <petsc/private/hashseti.h>

  6: #include <pcgnslib.h>
  7: #include <cgns_io.h>

  9: #if !defined(CGNS_ENUMT)
 10:   #define CGNS_ENUMT(a) a
 11: #endif
 12: #if !defined(CGNS_ENUMV)
 13:   #define CGNS_ENUMV(a) a
 14: #endif
// Permute plex closure ordering to CGNS
//
// Given a Plex cell type and the number of closure nodes, return the matching CGNS
// ElementType_t and a permutation table. Judging by the group comments below
// (vertices / edges / faces / center follow the CGNS node grouping), perm[i] is the
// Plex closure offset of CGNS node i — TODO(review): confirm direction against callers.
// Node numbering reference: https://cgns.github.io/CGNS_docs_current/sids/conv.html#unstructgrid
//
// Input parameters:
//   cell_type    - Plex polytope type of the cell
//   closure_size - number of nodes in the cell's closure (selects the element order)
// Output parameters:
//   element_type - (optional, may be NULL) corresponding CGNS element type
//   perm         - permutation table (static storage; caller must not free)
//
// Errors with PETSC_ERR_SUP for any (cell_type, closure_size) pair not tabulated here.
static PetscErrorCode DMPlexCGNSGetPermutation_Internal(DMPolytopeType cell_type, PetscInt closure_size, CGNS_ENUMT(ElementType_t) * element_type, const int **perm)
{
  CGNS_ENUMT(ElementType_t) element_type_tmp;

  // https://cgns.github.io/CGNS_docs_current/sids/conv.html#unstructgrid
  static const int bar_2[2]   = {0, 1};
  static const int bar_3[3]   = {1, 2, 0};
  static const int bar_4[4]   = {2, 3, 0, 1};
  static const int bar_5[5]   = {3, 4, 0, 1, 2};
  static const int tri_3[3]   = {0, 1, 2};
  static const int tri_6[6]   = {3, 4, 5, 0, 1, 2};
  static const int tri_10[10] = {7, 8, 9, 1, 2, 3, 4, 5, 6, 0};
  static const int quad_4[4]  = {0, 1, 2, 3};
  static const int quad_9[9]  = {
    5, 6, 7, 8, // vertices
    1, 2, 3, 4, // edges
    0,          // center
  };
  static const int quad_16[] = {
    12, 13, 14, 15,               // vertices
    4,  5,  6,  7,  8, 9, 10, 11, // edges
    0,  1,  3,  2,                // centers
  };
  static const int quad_25[] = {
    21, 22, 23, 24,                                 // vertices
    9,  10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, // edges
    0,  1,  2,  5,  8,  7,  6,  3,  4,              // centers
  };
  static const int tetra_4[4]   = {0, 2, 1, 3};
  static const int tetra_10[10] = {6, 8, 7, 9, 2, 1, 0, 3, 5, 4};
  static const int tetra_20[20] = {
    16, 18, 17, 19,         // vertices
    9,  8,  7,  6,  5,  4,  // bottom edges
    10, 11, 14, 15, 13, 12, // side edges
    0,  2,  3,  1,          // faces
  };
  static const int hexa_8[8]   = {0, 3, 2, 1, 4, 5, 6, 7};
  static const int hexa_27[27] = {
    19, 22, 21, 20, 23, 24, 25, 26, // vertices
    10, 9,  8,  7,                  // bottom edges
    16, 15, 18, 17,                 // mid edges
    11, 12, 13, 14,                 // top edges
    1,  3,  5,  4,  6,  2,          // faces
    0,                              // center
  };
  static const int hexa_64[64] = {
    // debug with $PETSC_ARCH/tests/dm/impls/plex/tests/ex49 -dm_plex_simplex 0 -dm_plex_dim 3 -dm_plex_box_faces 1,1,1 -dm_coord_petscspace_degree 3
    56, 59, 58, 57, 60, 61, 62, 63, // vertices
    39, 38, 37, 36, 35, 34, 33, 32, // bottom edges
    51, 50, 48, 49, 52, 53, 55, 54, // mid edges; Paraview needs edge 21-22 swapped with 23-24
    40, 41, 42, 43, 44, 45, 46, 47, // top edges
    8,  10, 11, 9,                  // z-minus face
    16, 17, 19, 18,                 // y-minus face
    24, 25, 27, 26,                 // x-plus face
    20, 21, 23, 22,                 // y-plus face
    30, 28, 29, 31,                 // x-minus face
    12, 13, 15, 14,                 // z-plus face
    0,  1,  3,  2,  4,  5,  7,  6,  // center
  };

  PetscFunctionBegin;
  element_type_tmp = CGNS_ENUMV(ElementTypeNull);
  *perm            = NULL;
  // Outer switch selects the polytope; inner switch selects the polynomial order via closure size
  switch (cell_type) {
  case DM_POLYTOPE_SEGMENT:
    switch (closure_size) {
    case 2:
      element_type_tmp = CGNS_ENUMV(BAR_2);
      *perm            = bar_2;
      break;
    case 3:
      element_type_tmp = CGNS_ENUMV(BAR_3);
      *perm            = bar_3;
      break;
    case 4:
      element_type_tmp = CGNS_ENUMV(BAR_4);
      *perm            = bar_4;
      break;
    case 5:
      element_type_tmp = CGNS_ENUMV(BAR_5);
      *perm            = bar_5;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
    }
    break;
  case DM_POLYTOPE_TRIANGLE:
    switch (closure_size) {
    case 3:
      element_type_tmp = CGNS_ENUMV(TRI_3);
      *perm            = tri_3;
      break;
    case 6:
      element_type_tmp = CGNS_ENUMV(TRI_6);
      *perm            = tri_6;
      break;
    case 10:
      element_type_tmp = CGNS_ENUMV(TRI_10);
      *perm            = tri_10;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
    }
    break;
  case DM_POLYTOPE_QUADRILATERAL:
    switch (closure_size) {
    case 4:
      element_type_tmp = CGNS_ENUMV(QUAD_4);
      *perm            = quad_4;
      break;
    case 9:
      element_type_tmp = CGNS_ENUMV(QUAD_9);
      *perm            = quad_9;
      break;
    case 16:
      element_type_tmp = CGNS_ENUMV(QUAD_16);
      *perm            = quad_16;
      break;
    case 25:
      element_type_tmp = CGNS_ENUMV(QUAD_25);
      *perm            = quad_25;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
    }
    break;
  case DM_POLYTOPE_TETRAHEDRON:
    switch (closure_size) {
    case 4:
      element_type_tmp = CGNS_ENUMV(TETRA_4);
      *perm            = tetra_4;
      break;
    case 10:
      element_type_tmp = CGNS_ENUMV(TETRA_10);
      *perm            = tetra_10;
      break;
    case 20:
      element_type_tmp = CGNS_ENUMV(TETRA_20);
      *perm            = tetra_20;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
    }
    break;
  case DM_POLYTOPE_HEXAHEDRON:
    switch (closure_size) {
    case 8:
      element_type_tmp = CGNS_ENUMV(HEXA_8);
      *perm            = hexa_8;
      break;
    case 27:
      element_type_tmp = CGNS_ENUMV(HEXA_27);
      *perm            = hexa_27;
      break;
    case 64:
      element_type_tmp = CGNS_ENUMV(HEXA_64);
      *perm            = hexa_64;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
    }
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
  }
  if (element_type) *element_type = element_type_tmp;
  PetscFunctionReturn(PETSC_SUCCESS);
}

185: /*
186:   Input Parameters:
187: + cellType  - The CGNS-defined element type

189:   Output Parameters:
190: + dmcelltype  - The equivalent DMPolytopeType for the cellType
191: . numCorners - Number of corners of the polytope
192: - dim - The topological dimension of the polytope

194: CGNS elements defined in: https://cgns.github.io/CGNS_docs_current/sids/conv.html#unstructgrid
195: */
// Map a CGNS element type to its Plex polytope topology (type, corner count, dimension).
// Only topology is reported here, so both "full" and serendipity CGNS variants of a
// shape map to the same polytope. All outputs are optional (pass NULL to skip).
// Errors with PETSC_ERR_ARG_WRONG for MIXED (caller must dispatch per element) and
// for any unrecognized element type.
static inline PetscErrorCode CGNSElementTypeGetTopologyInfo(CGNS_ENUMT(ElementType_t) cellType, DMPolytopeType *dmcelltype, PetscInt *numCorners, PetscInt *dim)
{
  DMPolytopeType _dmcelltype;

  PetscFunctionBegin;
  switch (cellType) {
  case CGNS_ENUMV(BAR_2):
  case CGNS_ENUMV(BAR_3):
  case CGNS_ENUMV(BAR_4):
  case CGNS_ENUMV(BAR_5):
    _dmcelltype = DM_POLYTOPE_SEGMENT;
    break;
  case CGNS_ENUMV(TRI_3):
  case CGNS_ENUMV(TRI_6):
  case CGNS_ENUMV(TRI_9):
  case CGNS_ENUMV(TRI_10):
  case CGNS_ENUMV(TRI_12):
  case CGNS_ENUMV(TRI_15):
    _dmcelltype = DM_POLYTOPE_TRIANGLE;
    break;
  case CGNS_ENUMV(QUAD_4):
  case CGNS_ENUMV(QUAD_8):
  case CGNS_ENUMV(QUAD_9):
  case CGNS_ENUMV(QUAD_12):
  case CGNS_ENUMV(QUAD_16):
  case CGNS_ENUMV(QUAD_P4_16):
  case CGNS_ENUMV(QUAD_25):
    _dmcelltype = DM_POLYTOPE_QUADRILATERAL;
    break;
  case CGNS_ENUMV(TETRA_4):
  case CGNS_ENUMV(TETRA_10):
  case CGNS_ENUMV(TETRA_16):
  case CGNS_ENUMV(TETRA_20):
  case CGNS_ENUMV(TETRA_22):
  case CGNS_ENUMV(TETRA_34):
  case CGNS_ENUMV(TETRA_35):
    _dmcelltype = DM_POLYTOPE_TETRAHEDRON;
    break;
  case CGNS_ENUMV(PYRA_5):
  case CGNS_ENUMV(PYRA_13):
  case CGNS_ENUMV(PYRA_14):
  case CGNS_ENUMV(PYRA_21):
  case CGNS_ENUMV(PYRA_29):
  case CGNS_ENUMV(PYRA_P4_29):
  case CGNS_ENUMV(PYRA_30):
  case CGNS_ENUMV(PYRA_50):
  case CGNS_ENUMV(PYRA_55):
    _dmcelltype = DM_POLYTOPE_PYRAMID;
    break;
  case CGNS_ENUMV(PENTA_6):
  case CGNS_ENUMV(PENTA_15):
  case CGNS_ENUMV(PENTA_18):
  case CGNS_ENUMV(PENTA_24):
  case CGNS_ENUMV(PENTA_33):
  case CGNS_ENUMV(PENTA_38):
  case CGNS_ENUMV(PENTA_40):
  case CGNS_ENUMV(PENTA_66):
  case CGNS_ENUMV(PENTA_75):
    _dmcelltype = DM_POLYTOPE_TRI_PRISM;
    break;
  case CGNS_ENUMV(HEXA_8):
  case CGNS_ENUMV(HEXA_20):
  case CGNS_ENUMV(HEXA_27):
  case CGNS_ENUMV(HEXA_32):
  case CGNS_ENUMV(HEXA_44):
  case CGNS_ENUMV(HEXA_56):
  case CGNS_ENUMV(HEXA_64):
  case CGNS_ENUMV(HEXA_98):
  case CGNS_ENUMV(HEXA_125):
    _dmcelltype = DM_POLYTOPE_HEXAHEDRON;
    break;
  case CGNS_ENUMV(MIXED):
    // MIXED sections carry a per-element type tag; resolve each element individually
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid CGNS ElementType_t: MIXED");
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid CGNS ElementType_t: %d", (int)cellType);
  }

  // Corner count and dimension are derived from the polytope, not tabulated per element type
  if (dmcelltype) *dmcelltype = _dmcelltype;
  if (numCorners) *numCorners = DMPolytopeTypeGetNumVertices(_dmcelltype);
  if (dim) *dim = DMPolytopeTypeGetDim(_dmcelltype);
  PetscFunctionReturn(PETSC_SUCCESS);
}

279: /*
280:   Input Parameters:
281: + cellType  - The CGNS-defined cell type

283:   Output Parameters:
284: + numClosure - Number of nodes that define the function space on the cell
285: - pOrder - The polynomial order of the cell

287: CGNS elements defined in: https://cgns.github.io/CGNS_docs_current/sids/conv.html#unstructgrid

Note: we only support "full" elements, i.e. not serendipity elements
290: */
// Map a CGNS element type to its discretization: closure node count and polynomial order.
// Only "full" (tensor/complete) elements are tabulated; serendipity variants (TRI_9,
// QUAD_8, TETRA_16, ...) fall through to the default error. Both outputs are optional.
static inline PetscErrorCode CGNSElementTypeGetDiscretizationInfo(CGNS_ENUMT(ElementType_t) cellType, PetscInt *numClosure, PetscInt *pOrder)
{
  PetscInt _numClosure, _pOrder;

  PetscFunctionBegin;
  switch (cellType) {
  // Segments: orders 1-4
  case CGNS_ENUMV(BAR_2):
    _numClosure = 2;
    _pOrder     = 1;
    break;
  case CGNS_ENUMV(BAR_3):
    _numClosure = 3;
    _pOrder     = 2;
    break;
  case CGNS_ENUMV(BAR_4):
    _numClosure = 4;
    _pOrder     = 3;
    break;
  case CGNS_ENUMV(BAR_5):
    _numClosure = 5;
    _pOrder     = 4;
    break;
  // Triangles: orders 1-4
  case CGNS_ENUMV(TRI_3):
    _numClosure = 3;
    _pOrder     = 1;
    break;
  case CGNS_ENUMV(TRI_6):
    _numClosure = 6;
    _pOrder     = 2;
    break;
  case CGNS_ENUMV(TRI_10):
    _numClosure = 10;
    _pOrder     = 3;
    break;
  case CGNS_ENUMV(TRI_15):
    _numClosure = 15;
    _pOrder     = 4;
    break;
  // Quadrilaterals: orders 1-4
  case CGNS_ENUMV(QUAD_4):
    _numClosure = 4;
    _pOrder     = 1;
    break;
  case CGNS_ENUMV(QUAD_9):
    _numClosure = 9;
    _pOrder     = 2;
    break;
  case CGNS_ENUMV(QUAD_16):
    _numClosure = 16;
    _pOrder     = 3;
    break;
  case CGNS_ENUMV(QUAD_25):
    _numClosure = 25;
    _pOrder     = 4;
    break;
  // Tetrahedra: orders 1-4
  case CGNS_ENUMV(TETRA_4):
    _numClosure = 4;
    _pOrder     = 1;
    break;
  case CGNS_ENUMV(TETRA_10):
    _numClosure = 10;
    _pOrder     = 2;
    break;
  case CGNS_ENUMV(TETRA_20):
    _numClosure = 20;
    _pOrder     = 3;
    break;
  case CGNS_ENUMV(TETRA_35):
    _numClosure = 35;
    _pOrder     = 4;
    break;
  // Pyramids: orders 1-4
  case CGNS_ENUMV(PYRA_5):
    _numClosure = 5;
    _pOrder     = 1;
    break;
  case CGNS_ENUMV(PYRA_14):
    _numClosure = 14;
    _pOrder     = 2;
    break;
  case CGNS_ENUMV(PYRA_30):
    _numClosure = 30;
    _pOrder     = 3;
    break;
  case CGNS_ENUMV(PYRA_55):
    _numClosure = 55;
    _pOrder     = 4;
    break;
  // Triangular prisms: orders 1-4
  case CGNS_ENUMV(PENTA_6):
    _numClosure = 6;
    _pOrder     = 1;
    break;
  case CGNS_ENUMV(PENTA_18):
    _numClosure = 18;
    _pOrder     = 2;
    break;
  case CGNS_ENUMV(PENTA_40):
    _numClosure = 40;
    _pOrder     = 3;
    break;
  case CGNS_ENUMV(PENTA_75):
    _numClosure = 75;
    _pOrder     = 4;
    break;
  // Hexahedra: orders 1-4
  case CGNS_ENUMV(HEXA_8):
    _numClosure = 8;
    _pOrder     = 1;
    break;
  case CGNS_ENUMV(HEXA_27):
    _numClosure = 27;
    _pOrder     = 2;
    break;
  case CGNS_ENUMV(HEXA_64):
    _numClosure = 64;
    _pOrder     = 3;
    break;
  case CGNS_ENUMV(HEXA_125):
    _numClosure = 125;
    _pOrder     = 4;
    break;
  case CGNS_ENUMV(MIXED):
    // MIXED sections carry a per-element type tag; resolve each element individually
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid CGNS ElementType_t: MIXED");
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unsupported or Invalid cell type %d", (int)cellType);
  }
  if (numClosure) *numClosure = _numClosure;
  if (pOrder) *pOrder = _pOrder;
  PetscFunctionReturn(PETSC_SUCCESS);
}

419: static PetscErrorCode PetscCGNSDataType(PetscDataType pd, CGNS_ENUMT(DataType_t) * cd)
420: {
421:   PetscFunctionBegin;
422:   switch (pd) {
423:   case PETSC_FLOAT:
424:     *cd = CGNS_ENUMV(RealSingle);
425:     break;
426:   case PETSC_DOUBLE:
427:     *cd = CGNS_ENUMV(RealDouble);
428:     break;
429:   case PETSC_COMPLEX:
430:     *cd = CGNS_ENUMV(ComplexDouble);
431:     break;
432:   default:
433:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Data type %s", PetscDataTypes[pd]);
434:   }
435:   PetscFunctionReturn(PETSC_SUCCESS);
436: }

438: PetscErrorCode DMPlexCreateCGNSFromFile_Internal(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm)
439: {
440:   int       cgid                = -1;
441:   PetscBool use_parallel_viewer = PETSC_FALSE;

443:   PetscFunctionBegin;
444:   PetscAssertPointer(filename, 2);
445:   PetscCall(PetscViewerCGNSRegisterLogEvents_Internal());
446:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_parallel", &use_parallel_viewer, NULL));

448:   if (use_parallel_viewer) {
449:     PetscCallCGNS(cgp_mpi_comm(comm));
450:     PetscCallCGNSOpen(cgp_open(filename, CG_MODE_READ, &cgid), 0, 0);
451:     PetscCheck(cgid > 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "cgp_open(\"%s\",...) did not return a valid file ID", filename);
452:     PetscCall(DMPlexCreateCGNS(comm, cgid, interpolate, dm));
453:     PetscCallCGNSClose(cgp_close(cgid), 0, 0);
454:   } else {
455:     PetscCallCGNSOpen(cg_open(filename, CG_MODE_READ, &cgid), 0, 0);
456:     PetscCheck(cgid > 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "cg_open(\"%s\",...) did not return a valid file ID", filename);
457:     PetscCall(DMPlexCreateCGNS(comm, cgid, interpolate, dm));
458:     PetscCallCGNSClose(cg_close(cgid), 0, 0);
459:   }
460:   PetscFunctionReturn(PETSC_SUCCESS);
461: }

463: PetscErrorCode DMPlexCreateCGNS_Internal_Serial(MPI_Comm comm, PetscInt cgid, PetscBool interpolate, DM *dm)
464: {
465:   PetscMPIInt  num_proc, rank;
466:   DM           cdm;
467:   DMLabel      label;
468:   PetscSection coordSection;
469:   Vec          coordinates;
470:   PetscScalar *coords;
471:   PetscInt    *cellStart, *vertStart;
472:   PetscInt     labelIdRange[2];
473:   /* Read from file */
474:   char      basename[CGIO_MAX_NAME_LENGTH + 1];
475:   char      buffer[CGIO_MAX_NAME_LENGTH + 1];
476:   int       dim = 0, physDim = 0, coordDim = 0;
477:   int       nzones      = 0;
478:   PetscInt  numVertices = 0, numCells = 0;
479:   const int B = 1; // Only support single base

481:   PetscFunctionBegin;
482:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
483:   PetscCallMPI(MPI_Comm_size(comm, &num_proc));
484:   PetscCall(DMCreate(comm, dm));
485:   PetscCall(DMSetType(*dm, DMPLEX));

487:   /* Open CGNS II file and read basic information on rank 0, then broadcast to all processors */
488:   {
489:     cgsize_t nverts = 0, ncells = 0;

491:     if (rank == 0) {
492:       int nbases;

494:       PetscCallCGNSRead(cg_nbases(cgid, &nbases), *dm, 0);
495:       PetscCheck(nbases <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single base, not %d", nbases);
496:       PetscCallCGNSRead(cg_base_read(cgid, B, basename, &dim, &physDim), *dm, 0);
497:       PetscCallCGNSRead(cg_nzones(cgid, B, &nzones), *dm, 0);
498:       PetscCall(PetscCalloc2(nzones + 1, &cellStart, nzones + 1, &vertStart));
499:       for (int z = 1; z <= nzones; z++) {
500:         cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */

502:         PetscCallCGNSRead(cg_zone_read(cgid, B, z, buffer, sizes), *dm, 0);
503:         nverts += sizes[0];
504:         ncells += sizes[1];
505:         cellStart[z] += sizes[1] + cellStart[z - 1];
506:         vertStart[z] += sizes[0] + vertStart[z - 1];
507:       }
508:       for (int z = 1; z <= nzones; z++) vertStart[z] += ncells;
509:       coordDim = dim;
510:     }
511:     PetscCallMPI(MPI_Bcast(basename, CGIO_MAX_NAME_LENGTH + 1, MPI_CHAR, 0, comm));
512:     PetscCallMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm));
513:     PetscCallMPI(MPI_Bcast(&coordDim, 1, MPI_INT, 0, comm));
514:     PetscCallMPI(MPI_Bcast(&nzones, 1, MPI_INT, 0, comm));

516:     PetscCheck(ncells + nverts < PETSC_INT_MAX, comm, PETSC_ERR_INT_OVERFLOW, "Mesh chart size too large for PetscInt type, reconfigure with --with-64-bit-indices");
517:     numCells    = (PetscInt)ncells;
518:     numVertices = (PetscInt)nverts;
519:   }

521:   PetscCall(PetscObjectSetName((PetscObject)*dm, basename));
522:   PetscCall(DMSetDimension(*dm, dim));
523:   PetscCall(DMCreateLabel(*dm, "celltype"));
524:   PetscCall(DMPlexSetChart(*dm, 0, numCells + numVertices));

526:   /* Read zone information */
527:   if (rank == 0) {
528:     /* Read the cell set connectivity table and build mesh topology
529:        CGNS standard requires that cells in a zone be numbered sequentially and be pairwise disjoint. */
530:     /* First set sizes */
531:     for (int z = 1, c = 0; z <= nzones; z++) {
532:       CGNS_ENUMT(ZoneType_t) zonetype;
533:       int nsections;
534:       CGNS_ENUMT(ElementType_t) cellType;
535:       cgsize_t       start, end;
536:       int            nbndry, parentFlag;
537:       PetscInt       numCorners, pOrder;
538:       DMPolytopeType ctype;
539:       const int      S = 1; // Only support single section

541:       PetscCallCGNSRead(cg_zone_type(cgid, B, z, &zonetype), *dm, 0);
542:       PetscCheck(zonetype != CGNS_ENUMV(Structured), PETSC_COMM_SELF, PETSC_ERR_LIB, "Can only handle Unstructured zones for CGNS");
543:       PetscCallCGNSRead(cg_nsections(cgid, B, z, &nsections), *dm, 0);
544:       PetscCheck(nsections <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single section, not %d", nsections);
545:       PetscCallCGNSRead(cg_section_read(cgid, B, z, S, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);
546:       if (cellType == CGNS_ENUMV(MIXED)) {
547:         cgsize_t elementDataSize, *elements;

549:         PetscCallCGNSRead(cg_ElementDataSize(cgid, B, z, S, &elementDataSize), *dm, 0);
550:         PetscCall(PetscMalloc1(elementDataSize, &elements));
551:         PetscCallCGNSReadData(cg_poly_elements_read(cgid, B, z, S, elements, NULL, NULL), *dm, 0);
552:         for (cgsize_t c_loc = start, off = 0; c_loc <= end; c_loc++, c++) {
553:           PetscCall(CGNSElementTypeGetTopologyInfo((CGNS_ENUMT(ElementType_t))elements[off], &ctype, &numCorners, NULL));
554:           PetscCall(CGNSElementTypeGetDiscretizationInfo((CGNS_ENUMT(ElementType_t))elements[off], NULL, &pOrder));
555:           PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
556:           PetscCall(DMPlexSetConeSize(*dm, c, numCorners));
557:           PetscCall(DMPlexSetCellType(*dm, c, ctype));
558:           off += numCorners + 1;
559:         }
560:         PetscCall(PetscFree(elements));
561:       } else {
562:         PetscCall(CGNSElementTypeGetTopologyInfo(cellType, &ctype, &numCorners, NULL));
563:         PetscCall(CGNSElementTypeGetDiscretizationInfo(cellType, NULL, &pOrder));
564:         PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
565:         for (cgsize_t c_loc = start; c_loc <= end; c_loc++, c++) {
566:           PetscCall(DMPlexSetConeSize(*dm, c, numCorners));
567:           PetscCall(DMPlexSetCellType(*dm, c, ctype));
568:         }
569:       }
570:     }
571:     for (PetscInt v = numCells; v < numCells + numVertices; v++) PetscCall(DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT));
572:   }

574:   PetscCall(DMSetUp(*dm));

576:   PetscCall(DMCreateLabel(*dm, "zone"));
577:   if (rank == 0) {
578:     PetscCall(DMGetLabel(*dm, "zone", &label));
579:     for (int z = 1, c = 0; z <= nzones; z++) {
580:       CGNS_ENUMT(ElementType_t) cellType;
581:       cgsize_t  elementDataSize, *elements, start, end, numc;
582:       int       nbndry, parentFlag;
583:       PetscInt *cone, numCorners, maxCorners = 27, pOrder, v = 0;
584:       const int S = 1; // Only support single section

586:       PetscCallCGNSRead(cg_section_read(cgid, B, z, S, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);
587:       numc = end - start;
588:       PetscCallCGNSRead(cg_ElementDataSize(cgid, B, z, S, &elementDataSize), *dm, 0);
589:       PetscCall(PetscMalloc2(elementDataSize, &elements, maxCorners, &cone));
590:       PetscCallCGNSReadData(cg_poly_elements_read(cgid, B, z, S, elements, NULL, NULL), *dm, 0);
591:       if (cellType == CGNS_ENUMV(MIXED)) {
592:         /* CGNS uses Fortran-based indexing, DMPlex uses C-style and numbers cell first then vertices. */
593:         for (cgsize_t c_loc = 0; c_loc <= numc; c_loc++, c++) {
594:           PetscCall(CGNSElementTypeGetTopologyInfo((CGNS_ENUMT(ElementType_t))elements[v], NULL, &numCorners, NULL));
595:           PetscCall(CGNSElementTypeGetDiscretizationInfo((CGNS_ENUMT(ElementType_t))elements[v], NULL, &pOrder));
596:           PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
597:           v++;
598:           for (PetscInt v_loc = 0; v_loc < numCorners; ++v_loc, v++) cone[v_loc] = (PetscInt)elements[v] + numCells - 1;
599:           PetscCall(DMPlexReorderCell(*dm, c, cone));
600:           PetscCall(DMPlexSetCone(*dm, c, cone));
601:           PetscCall(DMLabelSetValue(label, c, z));
602:         }
603:       } else {
604:         PetscCall(CGNSElementTypeGetTopologyInfo(cellType, NULL, &numCorners, NULL));
605:         PetscCall(CGNSElementTypeGetDiscretizationInfo(cellType, NULL, &pOrder));
606:         PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
607:         /* CGNS uses Fortran-based indexing, DMPlex uses C-style and numbers cell first then vertices. */
608:         for (cgsize_t c_loc = 0; c_loc <= numc; c_loc++, c++) {
609:           for (PetscInt v_loc = 0; v_loc < numCorners; ++v_loc, v++) cone[v_loc] = (PetscInt)elements[v] + numCells - 1;
610:           PetscCall(DMPlexReorderCell(*dm, c, cone));
611:           PetscCall(DMPlexSetCone(*dm, c, cone));
612:           PetscCall(DMLabelSetValue(label, c, z));
613:         }
614:       }
615:       PetscCall(PetscFree2(elements, cone));
616:     }
617:   }

619:   PetscCall(DMPlexSymmetrize(*dm));
620:   PetscCall(DMPlexStratify(*dm));
621:   if (interpolate) PetscCall(DMPlexInterpolateInPlace_Internal(*dm));

623:   /* Read coordinates */
624:   PetscCall(DMSetCoordinateDim(*dm, coordDim));
625:   PetscCall(DMGetCoordinateDM(*dm, &cdm));
626:   PetscCall(DMGetLocalSection(cdm, &coordSection));
627:   PetscCall(PetscSectionSetNumFields(coordSection, 1));
628:   PetscCall(PetscSectionSetFieldComponents(coordSection, 0, coordDim));
629:   PetscCall(PetscSectionSetChart(coordSection, numCells, numCells + numVertices));
630:   for (PetscInt v = numCells; v < numCells + numVertices; v++) {
631:     PetscCall(PetscSectionSetDof(coordSection, v, dim));
632:     PetscCall(PetscSectionSetFieldDof(coordSection, v, 0, coordDim));
633:   }
634:   PetscCall(PetscSectionSetUp(coordSection));

636:   PetscCall(DMCreateLocalVector(cdm, &coordinates));
637:   PetscCall(VecGetArray(coordinates, &coords));
638:   if (rank == 0) {
639:     cgsize_t off = 0;
640:     float   *x[3];

642:     PetscCall(PetscMalloc3(numVertices, &x[0], numVertices, &x[1], numVertices, &x[2]));
643:     for (int z = 1; z <= nzones; z++) {
644:       CGNS_ENUMT(DataType_t) datatype;
645:       cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */
646:       cgsize_t range_min[3] = {1, 1, 1};
647:       cgsize_t range_max[3] = {1, 1, 1};
648:       int      ngrids, ncoords;

650:       PetscCallCGNSRead(cg_zone_read(cgid, B, z, buffer, sizes), *dm, 0);
651:       range_max[0] = sizes[0];
652:       PetscCallCGNSRead(cg_ngrids(cgid, B, z, &ngrids), *dm, 0);
653:       PetscCheck(ngrids <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single grid, not %d", ngrids);
654:       PetscCallCGNSRead(cg_ncoords(cgid, B, z, &ncoords), *dm, 0);
655:       PetscCheck(ncoords == coordDim, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a coordinate array for each dimension, not %d", ncoords);
656:       for (int d = 0; d < coordDim; ++d) {
657:         PetscCallCGNSRead(cg_coord_info(cgid, B, z, 1 + d, &datatype, buffer), *dm, 0);
658:         PetscCallCGNSReadData(cg_coord_read(cgid, B, z, buffer, CGNS_ENUMV(RealSingle), range_min, range_max, x[d]), *dm, 0);
659:       }
660:       if (coordDim >= 1) {
661:         for (cgsize_t v = 0; v < sizes[0]; v++) coords[(v + off) * coordDim + 0] = x[0][v];
662:       }
663:       if (coordDim >= 2) {
664:         for (cgsize_t v = 0; v < sizes[0]; v++) coords[(v + off) * coordDim + 1] = x[1][v];
665:       }
666:       if (coordDim >= 3) {
667:         for (cgsize_t v = 0; v < sizes[0]; v++) coords[(v + off) * coordDim + 2] = x[2][v];
668:       }
669:       off += sizes[0];
670:     }
671:     PetscCall(PetscFree3(x[0], x[1], x[2]));
672:   }
673:   PetscCall(VecRestoreArray(coordinates, &coords));

675:   PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
676:   PetscCall(VecSetBlockSize(coordinates, coordDim));
677:   PetscCall(DMSetCoordinatesLocal(*dm, coordinates));
678:   PetscCall(VecDestroy(&coordinates));

680:   /* Read boundary conditions */
681:   PetscCall(DMGetNumLabels(*dm, &labelIdRange[0]));
682:   if (rank == 0) {
683:     CGNS_ENUMT(BCType_t) bctype;
684:     CGNS_ENUMT(DataType_t) datatype;
685:     CGNS_ENUMT(PointSetType_t) pointtype;
686:     cgsize_t  *points;
687:     PetscReal *normals;
688:     int        normal[3];
689:     char      *bcname = buffer;
690:     cgsize_t   npoints, nnormals;
691:     int        nbc, ndatasets;

693:     for (int z = 1; z <= nzones; z++) {
694:       PetscCallCGNSRead(cg_nbocos(cgid, B, z, &nbc), *dm, 0);
695:       for (int bc = 1; bc <= nbc; bc++) {
696:         PetscCallCGNSRead(cg_boco_info(cgid, B, z, bc, bcname, &bctype, &pointtype, &npoints, normal, &nnormals, &datatype, &ndatasets), *dm, 0);
697:         PetscCheck(npoints < PETSC_INT_MAX, comm, PETSC_ERR_INT_OVERFLOW, "Number of mesh points too large for PetscInt type, reconfigure with --with-64-bit-indices");
698:         PetscCall(DMCreateLabel(*dm, bcname));
699:         PetscCall(DMGetLabel(*dm, bcname, &label));
700:         PetscCall(PetscMalloc2(npoints, &points, nnormals, &normals));
701:         PetscCallCGNSReadData(cg_boco_read(cgid, B, z, bc, points, (void *)normals), *dm, 0);
702:         if (pointtype == CGNS_ENUMV(ElementRange)) {
703:           // Range of cells: assuming half-open interval
704:           for (PetscInt c = (PetscInt)points[0]; c < (PetscInt)points[1]; c++) PetscCall(DMLabelSetValue(label, c - cellStart[z - 1], 1));
705:         } else if (pointtype == CGNS_ENUMV(ElementList)) {
706:           // List of cells
707:           for (PetscInt c = 0; c < npoints; c++) PetscCall(DMLabelSetValue(label, (PetscInt)points[c] - cellStart[z - 1], 1));
708:         } else if (pointtype == CGNS_ENUMV(PointRange)) {
709:           CGNS_ENUMT(GridLocation_t) gridloc;

711:           // List of points:
712:           PetscCallCGNS(cg_goto(cgid, 1, "Zone_t", z, "BC_t", bc, "end"));
713:           PetscCallCGNSRead(cg_gridlocation_read(&gridloc), *dm, 0);
714:           // Range of points: assuming half-open interval
715:           for (PetscInt c = (PetscInt)points[0]; c < (PetscInt)points[1]; c++) {
716:             if (gridloc == CGNS_ENUMV(Vertex)) PetscCall(DMLabelSetValue(label, c - vertStart[z - 1], 1));
717:             else PetscCall(DMLabelSetValue(label, c - cellStart[z - 1], 1));
718:           }
719:         } else if (pointtype == CGNS_ENUMV(PointList)) {
720:           CGNS_ENUMT(GridLocation_t) gridloc;

722:           // List of points:
723:           PetscCallCGNS(cg_goto(cgid, 1, "Zone_t", z, "BC_t", bc, "end"));
724:           PetscCallCGNSRead(cg_gridlocation_read(&gridloc), *dm, 0);
725:           for (PetscInt c = 0; c < npoints; c++) {
726:             if (gridloc == CGNS_ENUMV(Vertex)) PetscCall(DMLabelSetValue(label, (PetscInt)points[c] - vertStart[z - 1], 1));
727:             else PetscCall(DMLabelSetValue(label, (PetscInt)points[c] - cellStart[z - 1], 1));
728:           }
729:         } else SETERRQ(comm, PETSC_ERR_SUP, "Unsupported point set type %d", (int)pointtype);
730:         PetscCall(PetscFree2(points, normals));
731:       }
732:     }
733:     PetscCall(PetscFree2(cellStart, vertStart));
734:   }
735:   PetscCall(DMGetNumLabels(*dm, &labelIdRange[1]));
736:   PetscCallMPI(MPI_Bcast(labelIdRange, 2, MPIU_INT, 0, comm));

738:   /* Create BC labels at all processes */
739:   for (PetscInt labelId = labelIdRange[0]; labelId < labelIdRange[1]; labelId++) {
740:     char       *labelName = buffer;
741:     size_t      len       = sizeof(buffer);
742:     const char *locName;

744:     if (rank == 0) {
745:       PetscCall(DMGetLabelByNum(*dm, labelId, &label));
746:       PetscCall(PetscObjectGetName((PetscObject)label, &locName));
747:       PetscCall(PetscStrncpy(labelName, locName, len));
748:     }
749:     PetscCallMPI(MPI_Bcast(labelName, (PetscMPIInt)len, MPIU_INT, 0, comm));
750:     PetscCallMPI(DMCreateLabel(*dm, labelName));
751:   }
752:   PetscFunctionReturn(PETSC_SUCCESS);
753: }

// Inclusive CGNS element-ID range [start, end] as returned by cg_section_read();
// the section holds end - start + 1 elements
typedef struct {
  cgsize_t start, end;
} CGRange;

759: // Creates a PetscLayout from the given sizes, but adjusts the ranges by the offset. So the first rank ranges will be [offset, offset + local_size) rather than [0, local_size)
760: static PetscErrorCode PetscLayoutCreateFromSizesAndOffset(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt bs, PetscInt offset, PetscLayout *map)
761: {
762:   PetscLayout     init;
763:   const PetscInt *ranges;
764:   PetscInt       *new_ranges;
765:   PetscMPIInt     num_ranks;

767:   PetscFunctionBegin;
768:   PetscCall(PetscLayoutCreateFromSizes(comm, n, N, bs, &init));
769:   PetscCall(PetscLayoutGetRanges(init, &ranges));
770:   PetscCallMPI(MPI_Comm_size(comm, &num_ranks));
771:   PetscCall(PetscMalloc1(num_ranks + 1, &new_ranges));
772:   PetscCall(PetscArraycpy(new_ranges, ranges, num_ranks + 1));
773:   for (PetscInt r = 0; r < num_ranks + 1; r++) new_ranges[r] += offset;
774:   PetscCall(PetscLayoutCreateFromRanges(comm, new_ranges, PETSC_OWN_POINTER, bs, map));
775:   PetscCall(PetscLayoutDestroy(&init));
776:   PetscFunctionReturn(PETSC_SUCCESS);
777: }

779: /**
780:   @brief Creates connectivity array of CGNS elements' corners in Plex ordering, with a `PetscSection` to describe the data layout

782:   @param[in]  dm           `DM`
783:   @param[in]  cgid         CGNS file ID
784:   @param[in]  base         CGNS base ID
785:   @param[in]  zone         CGNS zone ID
786:   @param[in]  num_sections Number of sections to put in connectivity
787:   @param[in]  section_ids  CGNS section IDs to obtain connectivity from
788:   @param[out] section      Section describing the connectivity for each element
789:   @param[out] cellTypes    Array specifying the CGNS ElementType_t for each element
790:   @param[out] cell_ids     CGNS IDs of the cells
791:   @param[out] layouts      Array of `PetscLayout` that describes the distributed ownership of the cells in each `section_ids`
792:   @param[out] connectivity Array of the cell connectivity, described by `section`. The vertices are the local Plex IDs

794:   @description

796:   Each point in `section` corresponds to the `cellTypes` and `cell_ids` arrays.
797:   The dof and offset of `section` maps into the `connectivity` array.

799:   The `layouts` array is intended to be used with `PetscLayoutFindOwnerIndex_CGNSSectionLayouts()`
800: **/
801: static PetscErrorCode DMPlexCGNS_CreateCornersConnectivitySection(DM dm, PetscInt cgid, int base, int zone, PetscInt num_sections, const int section_ids[], PetscSection *section, CGNS_ENUMT(ElementType_t) * cellTypes[], PetscInt *cell_ids[], PetscLayout *layouts[], PetscInt *connectivity[])
802: {
803:   MPI_Comm     comm = PetscObjectComm((PetscObject)dm);
804:   PetscSection section_;
805:   char         buffer[CGIO_MAX_NAME_LENGTH + 1];
806:   CGNS_ENUMT(ElementType_t) * sectionCellTypes, *cellTypes_;
807:   CGRange       *ranges;
808:   PetscInt       nlocal_cells = 0, global_cell_dim = -1;
809:   PetscSegBuffer conn_sb;
810:   PetscLayout   *layouts_;

812:   PetscFunctionBegin;
813:   PetscCall(PetscMalloc2(num_sections, &ranges, num_sections, &sectionCellTypes));
814:   PetscCall(PetscMalloc1(num_sections, &layouts_));
815:   for (PetscInt s = 0; s < num_sections; s++) {
816:     int      nbndry, parentFlag;
817:     PetscInt local_size;

819:     PetscCallCGNSRead(cg_section_read(cgid, base, zone, section_ids[s], buffer, &sectionCellTypes[s], &ranges[s].start, &ranges[s].end, &nbndry, &parentFlag), dm, 0);
820:     PetscCheck(sectionCellTypes[s] != CGNS_ENUMV(NGON_n) && sectionCellTypes[s] != CGNS_ENUMV(NFACE_n), comm, PETSC_ERR_SUP, "CGNS reader does not support elements of type NGON_n or NFACE_n");
821:     PetscInt num_section_cells = (PetscInt)(ranges[s].end - ranges[s].start) + 1;
822:     PetscCall(PetscLayoutCreateFromSizesAndOffset(comm, PETSC_DECIDE, num_section_cells, 1, (PetscInt)ranges[s].start, &layouts_[s]));
823:     PetscCall(PetscLayoutGetLocalSize(layouts_[s], &local_size));
824:     nlocal_cells += local_size;
825:   }
826:   PetscCall(PetscSectionCreate(comm, &section_));
827:   PetscCall(PetscSectionSetChart(section_, 0, nlocal_cells));

829:   PetscCall(PetscMalloc1(nlocal_cells, cell_ids));
830:   PetscCall(PetscMalloc1(nlocal_cells, &cellTypes_));
831:   PetscCall(PetscSegBufferCreate(sizeof(PetscInt), nlocal_cells * 2, &conn_sb));
832:   for (PetscInt s = 0, c = 0; s < num_sections; s++) {
833:     PetscInt mystart, myend, myowned;

835:     PetscCall(PetscLayoutGetRange(layouts_[s], &mystart, &myend));
836:     PetscCall(PetscLayoutGetLocalSize(layouts_[s], &myowned));
837:     if (sectionCellTypes[s] == CGNS_ENUMV(MIXED)) {
838:       cgsize_t *offsets, *conn_cg;

840:       PetscCall(PetscMalloc1(myowned + 1, &offsets)); // The last element in the array is the total size of the connectivity for the given [start,end] range
841:       PetscCallCGNSRead(cgp_poly_elements_read_data_offsets(cgid, base, zone, section_ids[s], mystart, myend - 1, offsets), dm, 0);
842:       PetscCall(PetscMalloc1(offsets[myowned + 1], &conn_cg));
843:       PetscCallCGNSRead(cgp_poly_elements_read_data_elements(cgid, base, zone, section_ids[s], mystart, myend - 1, offsets, conn_cg), dm, 0);
844:       for (PetscInt i = 0; i < myowned; i++) {
845:         DMPolytopeType dm_cell_type = DM_POLYTOPE_UNKNOWN;
846:         PetscInt       numCorners, cell_dim, *conn_sb_seg;
847:         const int     *perm;

849:         (*cell_ids)[c] = mystart + i;

851:         cellTypes_[c] = (CGNS_ENUMT(ElementType_t))conn_cg[offsets[i]];
852:         PetscCall(CGNSElementTypeGetTopologyInfo(cellTypes_[c], &dm_cell_type, &numCorners, &cell_dim));
853:         if (global_cell_dim == -1) global_cell_dim = cell_dim;
854:         else
855:           PetscCheck(cell_dim == global_cell_dim, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Can only combine cells of the same dimension. Global cell dimension detected as %" PetscInt_FMT ", but CGNS element %" PetscInt_FMT " is dimension %" PetscInt_FMT, global_cell_dim, (*cell_ids)[c], cell_dim);
856:         PetscCall(PetscSegBufferGetInts(conn_sb, numCorners, &conn_sb_seg));

858:         PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numCorners, NULL, &perm));
859:         for (PetscInt v = 0; v < numCorners; v++) conn_sb_seg[perm[v]] = (PetscInt)conn_cg[offsets[i] + 1 + v];
860:         PetscCall(PetscSectionSetDof(section_, c, numCorners));
861:         c++;
862:       }
863:       PetscCall(PetscFree(offsets));
864:       PetscCall(PetscFree(conn_cg));
865:     } else {
866:       PetscInt       numCorners, cell_dim;
867:       PetscInt      *conn_sb_seg;
868:       DMPolytopeType dm_cell_type = DM_POLYTOPE_UNKNOWN;
869:       int            npe; // nodes per element
870:       const int     *perm;
871:       cgsize_t      *conn_cg;

873:       PetscCall(CGNSElementTypeGetTopologyInfo(sectionCellTypes[s], &dm_cell_type, &numCorners, &cell_dim));
874:       if (global_cell_dim == -1) global_cell_dim = cell_dim;
875:       else
876:         PetscCheck(cell_dim == global_cell_dim, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Can only combine cells of the same dimension. Global cell dimension detected as %" PetscInt_FMT ", but CGNS element %" PetscInt_FMT " is dimension %" PetscInt_FMT, global_cell_dim, (*cell_ids)[c], cell_dim);

878:       PetscCallCGNSRead(cg_npe(sectionCellTypes[s], &npe), dm, 0);
879:       PetscCall(PetscMalloc1(myowned * npe, &conn_cg));
880:       PetscCallCGNSRead(cgp_elements_read_data(cgid, base, zone, section_ids[s], mystart, myend - 1, conn_cg), dm, 0);
881:       PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numCorners, NULL, &perm));
882:       PetscCall(PetscSegBufferGetInts(conn_sb, numCorners * myowned, &conn_sb_seg));
883:       for (PetscInt i = 0; i < myowned; i++) {
884:         (*cell_ids)[c] = mystart + i;
885:         cellTypes_[c]  = sectionCellTypes[s];
886:         for (PetscInt v = 0; v < numCorners; v++) conn_sb_seg[i * numCorners + perm[v]] = (PetscInt)conn_cg[i * npe + v];
887:         PetscCall(PetscSectionSetDof(section_, c, numCorners));
888:         c++;
889:       }
890:       PetscCall(PetscFree(conn_cg));
891:     }
892:   }

894:   PetscCall(PetscSectionSetUp(section_));
895:   *section = section_;
896:   PetscCall(PetscSegBufferExtractAlloc(conn_sb, connectivity));
897:   PetscInt connSize;
898:   PetscCall(PetscSectionGetStorageSize(section_, &connSize));
899:   for (PetscInt i = 0; i < connSize; i++) (*connectivity)[i] -= 1; // vertices should be 0-based indexing for consistency with DMPlexBuildFromCellListParallel()
900:   *layouts = layouts_;
901:   if (cellTypes) *cellTypes = cellTypes_;
902:   else PetscCall(PetscFree(cellTypes_));

904:   PetscCall(PetscSegBufferDestroy(&conn_sb));
905:   PetscCall(PetscFree2(ranges, sectionCellTypes));
906:   PetscFunctionReturn(PETSC_SUCCESS);
907: }

909: static inline PetscErrorCode PetscFindIntUnsorted(PetscInt key, PetscInt size, const PetscInt array[], PetscInt *loc)
910: {
911:   PetscFunctionBegin;
912:   *loc = -1;
913:   for (PetscInt i = 0; i < size; i++) {
914:     if (array[i] == key) {
915:       *loc = i;
916:       break;
917:     }
918:   }
919:   PetscFunctionReturn(PETSC_SUCCESS);
920: }

922: // Same as `PetscLayoutFindOwnerIndex()`, but does not fail if owner not in PetscLayout
923: static PetscErrorCode PetscLayoutFindOwnerIndex_Internal(PetscLayout map, PetscInt idx, PetscMPIInt *owner, PetscInt *lidx, PetscBool *found_owner)
924: {
925:   PetscMPIInt lo = 0, hi, t;

927:   PetscFunctionBegin;
928:   PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first");
929:   if (owner) *owner = -1;
930:   if (lidx) *lidx = -1;
931:   if (idx < map->range[0] && idx >= map->range[map->size + 1]) {
932:     *found_owner = PETSC_FALSE;
933:     PetscFunctionReturn(PETSC_SUCCESS);
934:   }
935:   hi = map->size;
936:   while (hi - lo > 1) {
937:     t = lo + (hi - lo) / 2;
938:     if (idx < map->range[t]) hi = t;
939:     else lo = t;
940:   }
941:   if (owner) *owner = lo;
942:   if (lidx) *lidx = idx - map->range[lo];
943:   if (found_owner) *found_owner = PETSC_TRUE;
944:   PetscFunctionReturn(PETSC_SUCCESS);
945: }

947: // This function assumes that there is an array that `maps` describes the layout too. Locally, the range of each map is concatenated onto each other.
948: // So [maps[0].start, ..., maps[0].end - 1, maps[1].start, ..., maps[1].end - 1, ...]
949: // The returned index is the index into this array
950: static PetscErrorCode PetscLayoutFindOwnerIndex_CGNSSectionLayouts(PetscLayout maps[], PetscInt nmaps, PetscInt idx, PetscMPIInt *owner, PetscInt *lidx, PetscInt *mapidx)
951: {
952:   PetscFunctionBegin;
953:   for (PetscInt m = 0; m < nmaps; m++) {
954:     PetscBool found_owner = PETSC_FALSE;
955:     PetscCall(PetscLayoutFindOwnerIndex_Internal(maps[m], idx, owner, lidx, &found_owner));
956:     if (found_owner) {
957:       // Now loop back through the previous maps to get the local offset for the containing index
958:       for (PetscInt mm = m - 1; mm >= 0; mm--) {
959:         PetscInt size = maps[mm]->range[*owner + 1] - maps[mm]->range[*owner];
960:         *lidx += size;
961:       }
962:       if (mapidx) *mapidx = m;
963:       PetscFunctionReturn(PETSC_SUCCESS);
964:     }
965:   }
966:   SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "CGNS id %" PetscInt_FMT " not found in layouts", idx);
967: }

969: /*
970:   @brief Matching root and leaf indices across processes, allowing multiple roots per leaf

972:   Collective

974:   Input Parameters:
975: + layout           - `PetscLayout` defining the global index space and the MPI rank that brokers each index
976: . numRootIndices   - size of `rootIndices`
977: . rootIndices      - array of global indices of which this process requests ownership
978: . rootLocalIndices - root local index permutation (`NULL` if no permutation)
979: . rootLocalOffset  - offset to be added to `rootLocalIndices`
980: . numLeafIndices   - size of `leafIndices`
981: . leafIndices      - array of global indices with which this process requires data associated
982: . leafLocalIndices - leaf local index permutation (`NULL` if no permutation)
983: - leafLocalOffset  - offset to be added to `leafLocalIndices`

985:   Output Parameters:
986: + matchSection - The `PetscSection` describing the layout of `matches` with respect to the `leafIndices` (the points of the section indexes into `leafIndices`)
987: - matches      - Array of `PetscSFNode` denoting the location (rank and index) of the root matching the leaf.

989:   Example 1:
990: .vb
991:   rank             : 0            1            2
992:   rootIndices      : [1 0 2]      [3]          [3]
993:   rootLocalOffset  : 100          200          300
994:   layout           : [0 1]        [2]          [3]
995:   leafIndices      : [0]          [2]          [0 3]
996:   leafLocalOffset  : 400          500          600

998: would build the following result

1000:   rank 0: [0] 400 <- [0] (0,101)
1001:   ------------------------------
1002:   rank 1: [0] 500 <- [0] (0,102)
1003:   ------------------------------
1004:   rank 2: [0] 600 <- [0] (0,101)
1005:           [1] 601 <- [1] (1,200)
1006:           [1] 601 <- [2] (2,300)
1007:            |   |      |     |
1008:            |   |      |     + `matches`, the rank and index of the respective match with the leaf index
1009:            |   |      + index into `matches` array
1010:            |   + the leaves for the respective root
1011:            + The point in `matchSection` (indexes into `leafIndices`)

1013:   For rank 2, the `matchSection` would be:

1015:   [0]: (1, 0)
1016:   [1]: (2, 1)
1017:    |    |  |
1018:    |    |  + offset
1019:    |    + ndof
1020:    + point
1021: .ve

1023:   Notes:
1024:   This function is identical to `PetscSFCreateByMatchingIndices()` except it includes *all* matching indices instead of designating a single rank as the "owner".
1025:   Attempting to create an SF with all matching indices would create an invalid SF, thus we give an array of `matches` and `matchSection` to describe the layout
1026:   Compare the examples in this document to those in `PetscSFCreateByMatchingIndices()`.

1028: .seealso: [](sec_petscsf), `PetscSF`, `PetscSFCreate()`, `PetscSFCreateByMatchingIndices()`
1029: */
1030: static PetscErrorCode PetscSFFindMatchingIndices(PetscLayout layout, PetscInt numRootIndices, const PetscInt rootIndices[], const PetscInt rootLocalIndices[], PetscInt rootLocalOffset, PetscInt numLeafIndices, const PetscInt leafIndices[], const PetscInt leafLocalIndices[], PetscInt leafLocalOffset, PetscSection *matchSection, PetscSFNode *matches[])
1031: {
1032:   MPI_Comm     comm = layout->comm;
1033:   PetscMPIInt  rank;
1034:   PetscSF      sf1;
1035:   PetscSection sectionBuffer, matchSection_;
1036:   PetscInt     numMatches;
1037:   PetscSFNode *roots, *buffer, *matches_;
1038:   PetscInt     N, n, pStart, pEnd;
1039:   PetscBool    areIndicesSame;

1041:   PetscFunctionBegin;
1042:   if (rootIndices) PetscAssertPointer(rootIndices, 3);
1043:   if (rootLocalIndices) PetscAssertPointer(rootLocalIndices, 4);
1044:   if (leafIndices) PetscAssertPointer(leafIndices, 7);
1045:   if (leafLocalIndices) PetscAssertPointer(leafLocalIndices, 8);
1046:   PetscCheck(numRootIndices >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "numRootIndices (%" PetscInt_FMT ") must be non-negative", numRootIndices);
1047:   PetscCheck(numLeafIndices >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "numLeafIndices (%" PetscInt_FMT ") must be non-negative", numLeafIndices);
1048:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1049:   PetscCall(PetscLayoutSetUp(layout));
1050:   PetscCall(PetscLayoutGetSize(layout, &N));
1051:   PetscCall(PetscLayoutGetLocalSize(layout, &n));
1052:   areIndicesSame = (PetscBool)(leafIndices == rootIndices);
1053:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &areIndicesSame, 1, MPI_C_BOOL, MPI_LAND, comm));
1054:   PetscCheck(!areIndicesSame || numLeafIndices == numRootIndices, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "leafIndices == rootIndices, but numLeafIndices (%" PetscInt_FMT ") != numRootIndices(%" PetscInt_FMT ")", numLeafIndices, numRootIndices);
1055:   if (PetscDefined(USE_DEBUG)) {
1056:     PetscInt N1 = PETSC_INT_MIN;
1057:     for (PetscInt i = 0; i < numRootIndices; i++)
1058:       if (rootIndices[i] > N1) N1 = rootIndices[i];
1059:     PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &N1, 1, MPIU_INT, MPI_MAX, comm));
1060:     PetscCheck(N1 < N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Max. root index (%" PetscInt_FMT ") out of layout range [0,%" PetscInt_FMT ")", N1, N);
1061:     if (!areIndicesSame) {
1062:       N1 = PETSC_INT_MIN;
1063:       for (PetscInt i = 0; i < numLeafIndices; i++)
1064:         if (leafIndices[i] > N1) N1 = leafIndices[i];
1065:       PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &N1, 1, MPIU_INT, MPI_MAX, comm));
1066:       PetscCheck(N1 < N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Max. leaf index (%" PetscInt_FMT ") out of layout range [0,%" PetscInt_FMT ")", N1, N);
1067:     }
1068:   }

1070:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1071:   { /* Reduce: roots -> buffer */
1072:     // Data in buffer described by section_buffer. The chart of `section_buffer` maps onto the local portion of `layout`, with dofs denoting how many matches there are.
1073:     PetscInt        bufsize;
1074:     const PetscInt *root_degree;

1076:     PetscCall(PetscSFCreate(comm, &sf1));
1077:     PetscCall(PetscSFSetFromOptions(sf1));
1078:     PetscCall(PetscSFSetGraphLayout(sf1, layout, numRootIndices, NULL, PETSC_OWN_POINTER, rootIndices));

1080:     PetscCall(PetscSFComputeDegreeBegin(sf1, &root_degree));
1081:     PetscCall(PetscSFComputeDegreeEnd(sf1, &root_degree));
1082:     PetscCall(PetscSectionCreate(comm, &sectionBuffer));
1083:     PetscCall(PetscSectionSetChart(sectionBuffer, 0, n));
1084:     PetscCall(PetscMalloc1(numRootIndices, &roots));
1085:     for (PetscInt i = 0; i < numRootIndices; ++i) {
1086:       roots[i].rank  = rank;
1087:       roots[i].index = rootLocalOffset + (rootLocalIndices ? rootLocalIndices[i] : i);
1088:     }
1089:     for (PetscInt i = 0; i < n; i++) PetscCall(PetscSectionSetDof(sectionBuffer, i, root_degree[i]));
1090:     PetscCall(PetscSectionSetUp(sectionBuffer));
1091:     PetscCall(PetscSectionGetStorageSize(sectionBuffer, &bufsize));
1092:     PetscCall(PetscMalloc1(bufsize, &buffer));
1093:     for (PetscInt i = 0; i < bufsize; ++i) {
1094:       buffer[i].index = -1;
1095:       buffer[i].rank  = -1;
1096:     }
1097:     PetscCall(PetscSFGatherBegin(sf1, MPIU_SF_NODE, roots, buffer));
1098:     PetscCall(PetscSFGatherEnd(sf1, MPIU_SF_NODE, roots, buffer));
1099:     PetscCall(PetscFree(roots));
1100:   }

1102:   // Distribute data in buffers to the leaf locations. The chart of `sectionMatches` maps to `leafIndices`, with dofs denoting how many matches there are for each leaf.
1103:   if (!areIndicesSame) PetscCall(PetscSFSetGraphLayout(sf1, layout, numLeafIndices, NULL, PETSC_OWN_POINTER, leafIndices));
1104:   PetscCall(PetscSectionCreate(comm, &matchSection_));
1105:   PetscCall(PetscSectionMigrateData(sf1, MPIU_SF_NODE, sectionBuffer, buffer, matchSection_, (void **)&matches_, NULL));
1106:   PetscCall(PetscSectionGetChart(matchSection_, &pStart, &pEnd));
1107:   PetscCheck(pEnd - pStart == numLeafIndices, comm, PETSC_ERR_PLIB, "Section of matches has different chart size (%" PetscInt_FMT ")  than number of leaf indices %" PetscInt_FMT ". Section chart is [%" PetscInt_FMT ", %" PetscInt_FMT ")", pEnd - pStart, numLeafIndices, pStart, pEnd);
1108:   PetscCall(PetscSectionGetStorageSize(matchSection_, &numMatches));
1109:   for (PetscInt p = pStart; p < pEnd; p++) {
1110:     PetscInt ndofs;
1111:     PetscCall(PetscSectionGetDof(matchSection_, p, &ndofs));
1112:     PetscCheck(ndofs > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No match found for index %" PetscInt_FMT, leafIndices[p]);
1113:   }

1115:   *matchSection = matchSection_;
1116:   *matches      = matches_;

1118:   PetscCall(PetscFree(buffer));
1119:   PetscCall(PetscSectionDestroy(&sectionBuffer));
1120:   PetscCall(PetscSFDestroy(&sf1));
1121:   PetscFunctionReturn(PETSC_SUCCESS);
1122: }

1124: /**
1125:   @brief Match CGNS faces to their Plex equivalents

1127:   @param[in]  dm                 DM that holds the Plex to match against
1128:   @param[in]  nuniq_verts        Number of unique CGNS vertices on this rank
1129:   @param[in]  uniq_verts         Unique CGNS vertices on this rank
  @param[in]  plex_vertex_offset Index offset to match `uniq_verts` to their respective Plex vertices
1131:   @param[in]  NVertices          Number of vertices for Layout for `PetscSFFindMatchingIndices()`
1132:   @param[in]  connSection        PetscSection describing the CGNS face connectivity
1133:   @param[in]  face_ids           Array of the CGNS face IDs
1134:   @param[in]  conn               Array of the CGNS face connectivity
1135:   @param[out] cg2plexSF          PetscSF describing the mapping from owned CGNS faces to remote `plexFaces`
1136:   @param[out] plexFaces          Matching Plex face IDs

1138:   @description

1140:    `cg2plexSF` is a mapping from the owned CGNS faces to the rank whose local Plex has that face.
1141:    `plexFaces` holds the actual mesh point in the local Plex that corresponds to the owned CGNS face (which is the root)

1143:          cg2plexSF
1144:    __________|__________
1145:    |                   |

1147:    [F0_11] -----> [P0_0]  [38]
1148:    [F0_12] --                        Rank 0
1149:    ~~~~~~~~~ \ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1150:               --> [P1_0]  [59]       Rank 1
1151:    [F1_13] -----> [P1_1]  [98]
1152:       |              |     |
1153:       |              |     + plexFaces, maps the leaves of cg2plexSF to local Plex face mesh points
1154:       |              + Leaves of cg2plexSF. P(rank)_(root_index)
1155:       + Roots of cg2plexSF, F(rank)_(CGNS face ID)

1157:    Note that, unlike a pointSF, the leaves of `cg2plexSF` do not map onto chart of the local Plex, but just onto an array.
1158:    The plexFaces array is then what maps the leaves to the actual local Plex mesh points.

  `plex_vertex_offset` is used to map the CGNS vertices in `uniq_verts` to their respective Plex vertices.
   From `DMPlexBuildFromCellListParallel()`, the mapping of CGNS vertices to Plex vertices is uniq_verts[i] -> i + plex_vertex_offset, where the right side is the Plex point ID.
   So with `plex_vertex_offset = 5`,
   uniq_verts:     [19, 52, 1, 89]
   plex_point_ids: [5,   6, 7, 8]
1165: **/
1166: static PetscErrorCode DMPlexCGNS_MatchCGNSFacesToPlexFaces(DM dm, PetscInt nuniq_verts, const PetscInt uniq_verts[], PetscInt plex_vertex_offset, PetscInt NVertices, PetscSection connSection, const PetscInt face_ids[], const PetscInt conn[], PetscSF *cg2plexSF, PetscInt *plexFaces[])
1167: {
1168:   MPI_Comm    comm = PetscObjectComm((PetscObject)dm);
1169:   PetscMPIInt myrank, nranks;
1170:   PetscInt    fownedStart, fownedEnd, fownedSize;

1172:   PetscFunctionBegin;
1173:   PetscCallMPI(MPI_Comm_rank(comm, &myrank));
1174:   PetscCallMPI(MPI_Comm_size(comm, &nranks));

1176:   { // -- Create cg2plexSF
1177:     PetscInt     nuniq_face_verts, *uniq_face_verts;
1178:     PetscSection fvert2mvertSection;
1179:     PetscSFNode *fvert2mvert = NULL;

1181:     { // -- Create fvert2mvert, which map CGNS vertices in the owned-face connectivity to the CGNS vertices in the global mesh
1182:       PetscLayout layout;

1184:       PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NVertices, 1, &layout));
1185:       { // Count locally unique vertices in the face connectivity
1186:         PetscHSetI vhash;
1187:         PetscInt   off = 0, conn_size;

1189:         PetscCall(PetscHSetICreate(&vhash));
1190:         PetscCall(PetscSectionGetStorageSize(connSection, &conn_size));
1191:         for (PetscInt v = 0; v < conn_size; v++) PetscCall(PetscHSetIAdd(vhash, conn[v]));
1192:         PetscCall(PetscHSetIGetSize(vhash, &nuniq_face_verts));
1193:         PetscCall(PetscMalloc1(nuniq_face_verts, &uniq_face_verts));
1194:         PetscCall(PetscHSetIGetElems(vhash, &off, uniq_face_verts));
1195:         PetscCall(PetscHSetIDestroy(&vhash));
1196:         PetscCheck(off == nuniq_face_verts, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid number of local vertices %" PetscInt_FMT " should be %" PetscInt_FMT, off, nuniq_face_verts);
1197:       }
1198:       PetscCall(PetscSortInt(nuniq_face_verts, uniq_face_verts));
1199:       PetscCall(PetscSFFindMatchingIndices(layout, nuniq_verts, uniq_verts, NULL, 0, nuniq_face_verts, uniq_face_verts, NULL, 0, &fvert2mvertSection, &fvert2mvert));

1201:       PetscCall(PetscLayoutDestroy(&layout));
1202:     }

1204:     PetscSFNode *plexFaceRemotes, *ownedFaceRemotes;
1205:     PetscCount   nPlexFaceRemotes;
1206:     PetscInt    *local_rank_count;

1208:     PetscCall(PetscSectionGetChart(connSection, &fownedStart, &fownedEnd));
1209:     fownedSize = fownedEnd - fownedStart;
1210:     PetscCall(PetscCalloc1(nranks, &local_rank_count));

1212:     { // Find the rank(s) whose local Plex has the owned CGNS face
1213:       // We determine ownership by determining which ranks contain all the vertices in a face's connectivity
1214:       PetscInt       maxRanksPerVert;
1215:       PetscInt      *face_ranks;
1216:       PetscSegBuffer plexFaceRemotes_SB, ownedFaceRemotes_SB;

1218:       PetscCall(PetscSegBufferCreate(sizeof(PetscSFNode), fownedSize, &plexFaceRemotes_SB));
1219:       PetscCall(PetscSegBufferCreate(sizeof(PetscSFNode), fownedSize, &ownedFaceRemotes_SB));
1220:       PetscCall(PetscSectionGetMaxDof(fvert2mvertSection, &maxRanksPerVert));
1221:       PetscCall(PetscMalloc1(maxRanksPerVert, &face_ranks));
1222:       for (PetscInt f = fownedStart, f_i = 0; f < fownedEnd; f++, f_i++) {
1223:         PetscInt fndof, foffset, lndof, loffset, idx, nface_ranks = 0;

1225:         PetscCall(PetscSectionGetDof(connSection, f, &fndof));
1226:         PetscCall(PetscSectionGetOffset(connSection, f, &foffset));
1227:         PetscCall(PetscFindInt(conn[foffset + 0], nuniq_face_verts, uniq_face_verts, &idx));
1228:         PetscCall(PetscSectionGetDof(fvert2mvertSection, idx, &lndof));
1229:         PetscCall(PetscSectionGetOffset(fvert2mvertSection, idx, &loffset));
1230:         // Loop over ranks of the first vertex in the face connectivity
1231:         for (PetscInt l = 0; l < lndof; l++) {
1232:           PetscInt rank = fvert2mvert[loffset + l].rank;
1233:           // Loop over vertices of face (except for the first) to see if those vertices have the same candidate rank
1234:           for (PetscInt v = 1; v < fndof; v++) {
1235:             PetscInt  ldndof, ldoffset, idx;
1236:             PetscBool vert_has_rank = PETSC_FALSE;

1238:             PetscCall(PetscFindInt(conn[foffset + v], nuniq_face_verts, uniq_face_verts, &idx));
1239:             PetscCall(PetscSectionGetDof(fvert2mvertSection, idx, &ldndof));
1240:             PetscCall(PetscSectionGetOffset(fvert2mvertSection, idx, &ldoffset));
1241:             // Loop over ranks of the vth vertex to see if it has the candidate rank
1242:             for (PetscInt ld = 0; ld < ldndof; ld++) vert_has_rank = (fvert2mvert[ldoffset + ld].rank == rank) || vert_has_rank;
1243:             if (vert_has_rank) continue; // This vertex has the candidate rank, proceed to the next vertex
1244:             else goto next_candidate_rank;
1245:           }
1246:           face_ranks[nface_ranks++] = rank;
1247:         next_candidate_rank:
1248:           continue;
1249:         }
1250:         PetscCheck(nface_ranks > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Rank containing CGNS face %" PetscInt_FMT " could not be found", face_ids[f_i]);

1252:         // Once we have found the rank(s) whose Plex has the vertices of CGNS face `f`, we can begin to build the information for the SF.
1253:         // We want the roots to be the CGNS faces and the leaves to be the corresponding Plex faces, but we have the opposite; for the owned face on myrank, we know the rank that has the corresponding Plex face.
1254:         // To get the inverse, we assign each SF edge with a tuple of PetscSFNodes; one in `plexFaceRemotes` and the other in `ownedFaceRemotes`.
1255:         // `plexFaceRemotes` has the rank and index of the Plex face.
1256:         // `ownedFaceRemotes` has the rank and index of the owned CGNS face.
1257:         // Note that `ownedFaceRemotes` is all on my rank (e.g. rank == myrank).
1258:         //
1259:         // Then, to build the `cg2plexSF`, we communicate the `ownedFaceRemotes` to the `plexFaceRemotes` (via SFReduce).
1260:         // Those `ownedFaceRemotes` then act as the leaves to the roots on this process.
1261:         //
1262:         // Conceptually, this is the same as calling `PetscSFCreateInverseSF` on an SF with `iremotes = plexFaceRemotes` and the `ilocal = ownedFaceRemotes[:].index`.
1263:         // However, we cannot use this much simpler way because when there are multiple matching Plex faces, `PetscSFCreateInverseSF()` will be invalid due to `ownedFaceRemotes[:].index` having repeated values (only root vertices of the SF graph may have degree > 1)
1264:         PetscSFNode *plexFaceRemotes_buffer, *ownedFaceRemotes_buffer;
1265:         PetscCall(PetscSegBufferGet(plexFaceRemotes_SB, nface_ranks, &plexFaceRemotes_buffer));
1266:         PetscCall(PetscSegBufferGet(ownedFaceRemotes_SB, nface_ranks, &ownedFaceRemotes_buffer));
1267:         for (PetscInt n = 0; n < nface_ranks; n++) {
1268:           plexFaceRemotes_buffer[n].rank = face_ranks[n];
1269:           local_rank_count[face_ranks[n]]++;
1270:           ownedFaceRemotes_buffer[n] = (PetscSFNode){.rank = myrank, .index = f_i};
1271:         }
1272:       }
1273:       PetscCall(PetscFree(face_ranks));

1275:       PetscCall(PetscSegBufferGetSize(plexFaceRemotes_SB, &nPlexFaceRemotes));
1276:       PetscCall(PetscSegBufferExtractAlloc(plexFaceRemotes_SB, &plexFaceRemotes));
1277:       PetscCall(PetscSegBufferDestroy(&plexFaceRemotes_SB));
1278:       PetscCall(PetscSegBufferExtractAlloc(ownedFaceRemotes_SB, &ownedFaceRemotes));
1279:       PetscCall(PetscSegBufferDestroy(&ownedFaceRemotes_SB));

1281:       // To get the index for plexFaceRemotes, we partition the leaves on each rank (e.g. the array that will hold the local Plex face mesh points) by each rank that has the CGNS owned rank.
1282:       // For r in [0,numranks), local_rank_count[r] holds the number plexFaces that myrank holds.
1283:       // This determines how large a partition the leaves on rank r need to create for myrank.
1284:       // To get the offset into the leaves, we use Exscan to get rank_start.
1285:       // For r in [0, numranks), rank_start[r] holds the offset into rank r's leaves that myrank will index into.

1287:       // Below is an example:
1288:       //
1289:       // myrank:             | 0           1           2
1290:       // local_rank_count:   | [3, 2, 0]   [1, 0, 2]   [2, 2, 1]
1291:       // myrank_total_count: | 6           4           3
1292:       // rank_start:         | [0, 0, 0]   [3, 2, 0]   [4, 2, 2]
1293:       //                     |
1294:       // plexFaceRemotes: 0  | (0, 0)      (2, 0)      (2, 2)        <-- (rank, index) tuples (e.g. PetscSFNode)
1295:       //                  1  | (1, 0)      (2, 1)      (0, 4)
1296:       //                  2  | (0, 1)      (0, 3)      (1, 2)
1297:       //                  3  | (0, 2)                  (0, 5)
1298:       //                  4  | (1, 1)                  (1, 3)
1299:       //
1300:       // leaves:          0  | (0, 0)      (0, 1)      (1, 0)    (rank and index into plexFaceRemotes)
1301:       //                  1  | (0, 2)      (0, 4)      (1, 1)
1302:       //                  2  | (0, 3)      (2, 2)      (2, 0)
1303:       //                  3  | (1, 2)      (2, 4)
1304:       //                  4  | (2, 1)
1305:       //                  5  | (2, 3)
1306:       //
1307:       // Note how at the leaves, the ranks are contiguous and in order
1308:       PetscInt myrank_total_count;
1309:       {
1310:         PetscInt *rank_start, *rank_offset;

1312:         PetscCall(PetscCalloc2(nranks, &rank_start, nranks, &rank_offset));
1313:         PetscCallMPI(MPIU_Allreduce(local_rank_count, rank_start, nranks, MPIU_INT, MPI_SUM, comm));
1314:         myrank_total_count = rank_start[myrank];
1315:         PetscCall(PetscArrayzero(rank_start, nranks));
1316:         PetscCallMPI(MPI_Exscan(local_rank_count, rank_start, nranks, MPIU_INT, MPI_SUM, comm));

1318:         for (PetscInt r = 0; r < nPlexFaceRemotes; r++) {
1319:           PetscInt rank            = plexFaceRemotes[r].rank;
1320:           plexFaceRemotes[r].index = rank_start[rank] + rank_offset[rank];
1321:           rank_offset[rank]++;
1322:         }
1323:         PetscCall(PetscFree2(rank_start, rank_offset));
1324:       }

1326:       { // Communicate the leaves to roots and build cg2plexSF
1327:         PetscSF      plexRemotes2ownedRemotesSF;
1328:         PetscSFNode *iremote_cg2plexSF;

1330:         PetscCall(PetscSFCreate(comm, &plexRemotes2ownedRemotesSF));
1331:         PetscCall(PetscSFSetGraph(plexRemotes2ownedRemotesSF, myrank_total_count, (PetscInt)nPlexFaceRemotes, NULL, PETSC_COPY_VALUES, plexFaceRemotes, PETSC_OWN_POINTER));
1332:         PetscCall(PetscMalloc1(myrank_total_count, &iremote_cg2plexSF));
1333:         PetscCall(PetscSFViewFromOptions(plexRemotes2ownedRemotesSF, NULL, "-plex2ownedremotes_sf_view"));
1334:         for (PetscInt i = 0; i < myrank_total_count; i++) iremote_cg2plexSF[i] = (PetscSFNode){.rank = -1, .index = -1};
1335:         PetscCall(PetscSFReduceBegin(plexRemotes2ownedRemotesSF, MPIU_SF_NODE, ownedFaceRemotes, iremote_cg2plexSF, MPI_REPLACE));
1336:         PetscCall(PetscSFReduceEnd(plexRemotes2ownedRemotesSF, MPIU_SF_NODE, ownedFaceRemotes, iremote_cg2plexSF, MPI_REPLACE));
1337:         PetscCall(PetscSFDestroy(&plexRemotes2ownedRemotesSF));
1338:         for (PetscInt i = 0; i < myrank_total_count; i++) PetscCheck(iremote_cg2plexSF[i].rank >= 0 && iremote_cg2plexSF[i].index != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Owned face SFNode was not reduced properly");

1340:         PetscCall(PetscSFCreate(comm, cg2plexSF));
1341:         PetscCall(PetscSFSetGraph(*cg2plexSF, fownedSize, myrank_total_count, NULL, PETSC_COPY_VALUES, iremote_cg2plexSF, PETSC_OWN_POINTER));

1343:         PetscCall(PetscFree(ownedFaceRemotes));
1344:       }

1346:       PetscCall(PetscFree(local_rank_count));
1347:     }

1349:     PetscCall(PetscSectionDestroy(&fvert2mvertSection));
1350:     PetscCall(PetscFree(uniq_face_verts));
1351:     PetscCall(PetscFree(fvert2mvert));
1352:   }

1354:   { // -- Find plexFaces
1355:     // Distribute owned-CGNS-face connectivity to the ranks which have corresponding Plex faces, and then find the corresponding Plex faces
1356:     PetscSection connDistSection;
1357:     PetscInt    *connDist;

1359:     // Distribute the face connectivity to the rank that has that face
1360:     PetscCall(PetscSectionCreate(comm, &connDistSection));
1361:     PetscCall(PetscSectionMigrateData(*cg2plexSF, MPIU_INT, connSection, conn, connDistSection, (void **)&connDist, NULL));

1363:     { // Translate CGNS vertex numbering to local Plex numbering
1364:       PetscInt *dmplex_verts, *uniq_verts_sorted;
1365:       PetscInt  connDistSize;

1367:       PetscCall(PetscMalloc2(nuniq_verts, &dmplex_verts, nuniq_verts, &uniq_verts_sorted));
1368:       PetscCall(PetscArraycpy(uniq_verts_sorted, uniq_verts, nuniq_verts));
1369:       // uniq_verts are one-to-one with the DMPlex vertices with an offset, see DMPlexBuildFromCellListParallel()
1370:       for (PetscInt v = 0; v < nuniq_verts; v++) dmplex_verts[v] = v + plex_vertex_offset;
1371:       PetscCall(PetscSortIntWithArray(nuniq_verts, uniq_verts_sorted, dmplex_verts));

1373:       PetscCall(PetscSectionGetStorageSize(connDistSection, &connDistSize));
1374:       for (PetscInt v = 0; v < connDistSize; v++) {
1375:         PetscInt idx;
1376:         PetscCall(PetscFindInt(connDist[v], nuniq_verts, uniq_verts_sorted, &idx));
1377:         PetscCheck(idx >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Could not find CGNS vertex id (from face connectivity) in local plex");
1378:         connDist[v] = dmplex_verts[idx];
1379:       }
1380:       PetscCall(PetscFree2(dmplex_verts, uniq_verts_sorted));
1381:     }

1383:     // Debugging info
1384:     PetscBool view_connectivity = PETSC_FALSE;
1385:     PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_view_face_connectivity", &view_connectivity, NULL));
1386:     if (view_connectivity) {
1387:       PetscSection conesSection;
1388:       PetscInt    *cones;

1390:       PetscCall(PetscPrintf(comm, "Distributed CGNS Face Connectivity (in Plex vertex numbering):\n"));
1391:       PetscCall(PetscSectionArrayView(connDistSection, connDist, PETSC_INT, NULL));
1392:       PetscCall(DMPlexGetCones(dm, &cones));
1393:       PetscCall(DMPlexGetConeSection(dm, &conesSection));
1394:       PetscCall(PetscPrintf(comm, "Plex Cones:\n"));
1395:       PetscCall(PetscSectionArrayView(conesSection, cones, PETSC_INT, NULL));
1396:     }

1398:     // For every face in connDistSection, find the transitive support of a vertex in that face connectivity.
1399:     // Loop through the faces of the transitive support and find the matching face
1400:     PetscBT  plex_face_found;
1401:     PetscInt fplexStart, fplexEnd, vplexStart, vplexEnd;
1402:     PetscInt fdistStart, fdistEnd, numfdist;
1403:     PetscCall(DMPlexGetHeightStratum(dm, 1, &fplexStart, &fplexEnd));
1404:     PetscCall(DMPlexGetDepthStratum(dm, 0, &vplexStart, &vplexEnd));
1405:     PetscCall(PetscSectionGetChart(connDistSection, &fdistStart, &fdistEnd));
1406:     numfdist = fdistEnd - fdistStart;
1407:     PetscCall(PetscMalloc1(numfdist, plexFaces));
1408:     PetscCall(PetscBTCreate(numfdist, &plex_face_found));
1409:     for (PetscInt i = 0; i < numfdist; i++) (*plexFaces)[i] = -1;

1411:     for (PetscInt f = fdistStart, f_i = 0; f < fdistEnd; f++, f_i++) {
1412:       PetscInt  ndof, offset, support_size;
1413:       PetscInt *support = NULL;

1415:       PetscCall(PetscSectionGetDof(connDistSection, f, &ndof));
1416:       PetscCall(PetscSectionGetOffset(connDistSection, f, &offset));

1418:       // Loop through transitive support of a vertex in the CGNS face connectivity
1419:       PetscCall(DMPlexGetTransitiveClosure(dm, connDist[offset + 0], PETSC_FALSE, &support_size, &support));
1420:       for (PetscInt s = 0; s < support_size; s++) {
1421:         PetscInt face_point = support[s * 2]; // closure stores points and orientations, [p_0, o_0, p_1, o_1, ...]
1422:         PetscInt trans_cone_size, *trans_cone = NULL;

1424:         if (face_point < fplexStart || face_point >= fplexEnd) continue; // Skip non-face points
1425:         // See if face_point has the same vertices
1426:         PetscCall(DMPlexGetTransitiveClosure(dm, face_point, PETSC_TRUE, &trans_cone_size, &trans_cone));
1427:         for (PetscInt c = 0; c < trans_cone_size; c++) {
1428:           PetscInt vertex_point = trans_cone[c * 2], conn_has_vertex;
1429:           if (vertex_point < vplexStart || vertex_point >= vplexEnd) continue; // Skip non-vertex points
1430:           PetscCall(PetscFindIntUnsorted(vertex_point, ndof, &connDist[offset], &conn_has_vertex));
1431:           if (conn_has_vertex < 0) goto check_next_face;
1432:         }
1433:         (*plexFaces)[f_i] = face_point;
1434:         PetscCall(DMPlexRestoreTransitiveClosure(dm, face_point, PETSC_TRUE, &trans_cone_size, &trans_cone));
1435:         break;
1436:       check_next_face:
1437:         PetscCall(DMPlexRestoreTransitiveClosure(dm, face_point, PETSC_TRUE, &trans_cone_size, &trans_cone));
1438:         continue;
1439:       }
1440:       PetscCall(DMPlexRestoreTransitiveClosure(dm, connDist[offset + 0], PETSC_FALSE, &support_size, &support));
1441:       if ((*plexFaces)[f_i] != -1) PetscCall(PetscBTSet(plex_face_found, f_i));
1442:     }

1444:     // Some distributed CGNS faces did not find a matching plex face
1445:     // This can happen if a partition has all the faces surrounding a distributed CGNS face, but does not have the face itself (its parent element is owned by a different partition).
1446:     // Thus, the partition has the vertices associated with the CGNS face, but doesn't actually have the face itself.
1447:     // For example, take the following quad mesh, where the numbers represent the owning rank and CGNS face ID.
1448:     //
1449:     //    2     3     4     <-- face ID
1450:     //  ----- ----- -----
1451:     // |  0  |  1  |  0  |  <-- rank
1452:     //  ----- ----- -----
1453:     //    5     6     7     <-- face ID
1454:     //
1455:     // In this case, rank 0 will have all the vertices of face 3 and 6 in its Plex, but does not actually have either face.
1456:     //
1457:     // To address this, we remove the leaves associated with these missing faces from cg2plexSF and then verify that all owned faces did find a matching plex face (e.g. root degree == 1)
1458:     PetscInt  num_plex_faces_found = (PetscInt)PetscBTCountSet(plex_face_found, numfdist);
1459:     PetscBool some_faces_not_found = num_plex_faces_found < numfdist;
1460:     PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &some_faces_not_found, 1, MPI_C_BOOL, MPI_LOR, comm));
1461:     if (some_faces_not_found) {
1462:       PetscSFNode    *iremote_cg2plex_new;
1463:       const PetscInt *root_degree;
1464:       PetscInt        num_roots, *plexFacesNew;

1466:       PetscCall(PetscMalloc1(num_plex_faces_found, &iremote_cg2plex_new));
1467:       PetscCall(PetscCalloc1(num_plex_faces_found, &plexFacesNew));
1468:       { // Get SFNodes with matching faces
1469:         const PetscSFNode *iremote_cg2plex_old;
1470:         PetscInt           num_leaves_old, n = 0;
1471:         PetscCall(PetscSFGetGraph(*cg2plexSF, &num_roots, &num_leaves_old, NULL, &iremote_cg2plex_old));
1472:         PetscAssert(num_roots == fownedSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent roots and owned faces.");
1473:         PetscAssert(num_leaves_old == numfdist, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent leaves and distributed faces.");
1474:         for (PetscInt o = 0; o < num_leaves_old; o++) {
1475:           if (PetscBTLookupSet(plex_face_found, o)) {
1476:             iremote_cg2plex_new[n] = iremote_cg2plex_old[o];
1477:             plexFacesNew[n]        = (*plexFaces)[o];
1478:             n++;
1479:           }
1480:         }
1481:         PetscAssert(n == num_plex_faces_found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Found %" PetscInt_FMT " matching plex faces, but only set %" PetscInt_FMT " SFNodes", num_plex_faces_found, n);
1482:       }
1483:       PetscCall(PetscSFSetGraph(*cg2plexSF, num_roots, num_plex_faces_found, NULL, PETSC_COPY_VALUES, iremote_cg2plex_new, PETSC_OWN_POINTER));

1485:       // Verify that all CGNS faces have a matching Plex face on any rank
1486:       PetscCall(PetscSFComputeDegreeBegin(*cg2plexSF, &root_degree));
1487:       PetscCall(PetscSFComputeDegreeEnd(*cg2plexSF, &root_degree));
1488:       for (PetscInt r = 0; r < num_roots; r++) {
1489:         PetscCheck(root_degree[r] > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Could not find plex face for the CGNS face %" PetscInt_FMT, face_ids[r]);
1490:         PetscCheck(root_degree[r] == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Found more than one plex face for the CGNS face %" PetscInt_FMT ". Face may be internal rather than at mesh domain boundary", face_ids[r]);
1491:       }

1493:       if (PetscDefined(USE_DEBUG)) {
1494:         for (PetscInt i = 0; i < num_plex_faces_found; i++)
1495:           PetscCheck(plexFacesNew[i] >= fplexStart && plexFacesNew[i] < fplexEnd, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Plex face ID %" PetscInt_FMT "outside of face stratum [%" PetscInt_FMT ", %" PetscInt_FMT ")", plexFacesNew[i], fplexStart, fplexEnd);
1496:       }

1498:       PetscCall(PetscFree(*plexFaces));
1499:       *plexFaces = plexFacesNew;
1500:     }

1502:     PetscCall(PetscBTDestroy(&plex_face_found));
1503:     PetscCall(PetscSectionDestroy(&connDistSection));
1504:     PetscCall(PetscFree(connDist));
1505:   }
1506:   PetscFunctionReturn(PETSC_SUCCESS);
1507: }

1509: // Parse the decimal integer in name[] into *a; errors on empty or non-integer strings. (Copied from PetscOptionsStringToInt)
1510: static inline PetscErrorCode PetscStrtoInt(const char name[], PetscInt *a)
1511: {
1512:   size_t name_len;
1513:   char  *end_ptr;
1514:   long   parsed;

1516:   PetscFunctionBegin;
1517:   PetscCall(PetscStrlen(name, &name_len));
1518:   PetscCheck(name_len, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "character string of length zero has no numerical value");

1520:   parsed = strtol(name, &end_ptr, 10);
1521:   PetscCheck((size_t)(end_ptr - name) == name_len, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Input string \"%s\" has no integer value (do not include . in it)", name);

1523: #if defined(PETSC_USE_64BIT_INDICES) && defined(PETSC_HAVE_ATOLL)
1524:   (void)parsed; // strtol() was only for validation; reparse so the full 64-bit range is captured
1525:   *a = atoll(name);
1526: #elif defined(PETSC_USE_64BIT_INDICES) && defined(PETSC_HAVE___INT64)
1527:   (void)parsed; // strtol() was only for validation; reparse so the full 64-bit range is captured
1528:   *a = _atoi64(name);
1529: #else
1530:   *a = (PetscInt)parsed;
1531: #endif
1532:   PetscFunctionReturn(PETSC_SUCCESS);
1533: }

// Metadata for one CGNS boundary condition (boco); fields correspond one-to-one
// with the output arguments of cg_boco_info() — see its use in
// DMPlexCreateCGNS_Internal_Parallel below.
1535: typedef struct {
1536:   char     name[CGIO_MAX_NAME_LENGTH + 1]; // boco name; "FaceSetN" names select the "Face Sets" label with value N
1537:   int      normal[3], ndatasets;           // NormalIndex and number of boundary-condition datasets
1538:   cgsize_t npoints, nnormals;              // number of points in the point set, and NormalListSize
1539:   CGNS_ENUMT(BCType_t) bctype;             // CGNS boundary-condition type
1540:   CGNS_ENUMT(DataType_t) normal_datatype;  // data type of the normal list
1541:   CGNS_ENUMT(PointSetType_t) pointtype;    // how the point set is given (e.g. PointList)
1542: } CGBCInfo;

1544: PetscErrorCode DMPlexCreateCGNS_Internal_Parallel(MPI_Comm comm, PetscInt cgid, PetscBool interpolate, DM *dm)
1545: {
1546:   PetscMPIInt num_proc, rank;
1547:   /* Read from file */
1548:   char     basename[CGIO_MAX_NAME_LENGTH + 1];
1549:   char     buffer[CGIO_MAX_NAME_LENGTH + 1];
1550:   int      dim = 0, physDim = 0, coordDim = 0;
1551:   PetscInt NVertices = 0, NCells = 0;
1552:   int      nzones = 0, nbases;
1553:   int      zone   = 1; // Only supports single zone files
1554:   int      base   = 1; // Only supports single base

1556:   PetscFunctionBegin;
1557:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1558:   PetscCallMPI(MPI_Comm_size(comm, &num_proc));
1559:   PetscCall(DMCreate(comm, dm));
1560:   PetscCall(DMSetType(*dm, DMPLEX));

1562:   PetscCallCGNSRead(cg_nbases(cgid, &nbases), *dm, 0);
1563:   PetscCheck(nbases <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single base, not %d", nbases);
1564:   //  From the CGNS web page                 cell_dim  phys_dim (embedding space in PETSc) CGNS defines as length of spatial vectors/components)
1565:   PetscCallCGNSRead(cg_base_read(cgid, base, basename, &dim, &physDim), *dm, 0);
1566:   PetscCallCGNSRead(cg_nzones(cgid, base, &nzones), *dm, 0);
1567:   PetscCheck(nzones == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Parallel reader limited to one zone, not %d", nzones);
1568:   {
1569:     cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */

1571:     PetscCallCGNSRead(cg_zone_read(cgid, base, zone, buffer, sizes), *dm, 0);
1572:     PetscCheck(sizes[0] + sizes[1] < PETSC_INT_MAX, comm, PETSC_ERR_INT_OVERFLOW, "Number of mesh cells and vertices too large for PetscInt type, reconfigure with --with-64-bit-indices");
1573:     NVertices = (PetscInt)sizes[0];
1574:     NCells    = (PetscInt)sizes[1];
1575:   }

1577:   PetscCall(PetscObjectSetName((PetscObject)*dm, basename));
1578:   PetscCall(DMSetDimension(*dm, dim));
1579:   coordDim = dim;

1581:   // This is going to be a headache for mixed-topology and multiple sections. We may have to restore reading the data twice (once before  the SetChart
1582:   // call to get this right but continuing for now with single section, single topology, one zone.
1583:   // establish element ranges for my rank
1584:   PetscInt    mystarte, myende, mystartv, myendv, myownede, myownedv;
1585:   PetscLayout elem_map, vtx_map;
1586:   PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NCells, 1, &elem_map));
1587:   PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NVertices, 1, &vtx_map));
1588:   PetscCall(PetscLayoutGetRange(elem_map, &mystarte, &myende));
1589:   PetscCall(PetscLayoutGetLocalSize(elem_map, &myownede));
1590:   PetscCall(PetscLayoutGetRange(vtx_map, &mystartv, &myendv));
1591:   PetscCall(PetscLayoutGetLocalSize(vtx_map, &myownedv));

1593:   // -- Build Plex in parallel
1594:   DMPolytopeType dm_cell_type = DM_POLYTOPE_UNKNOWN;
1595:   PetscInt       pOrder = 1, numClosure = -1;
1596:   cgsize_t      *elements = NULL;
1597:   int           *face_section_ids, *cell_section_ids, num_face_sections = 0, num_cell_sections = 0;
1598:   PetscInt      *uniq_verts, nuniq_verts;
1599:   {
1600:     int        nsections;
1601:     PetscInt  *elementsQ1, numCorners = -1;
1602:     const int *perm;
1603:     cgsize_t   start, end; // Throwaway

1605:     cg_nsections(cgid, base, zone, &nsections);
1606:     PetscCall(PetscMalloc2(nsections, &face_section_ids, nsections, &cell_section_ids));
1607:     // Read element connectivity
1608:     for (int index_sect = 1; index_sect <= nsections; index_sect++) {
1609:       int      nbndry, parentFlag;
1610:       PetscInt cell_dim;
1611:       CGNS_ENUMT(ElementType_t) cellType;

1613:       PetscCallCGNSRead(cg_section_read(cgid, base, zone, index_sect, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);

1615:       PetscCall(CGNSElementTypeGetTopologyInfo(cellType, &dm_cell_type, &numCorners, &cell_dim));
1616:       // Skip over element that are not max dimension (ie. boundary elements)
1617:       if (cell_dim == dim) cell_section_ids[num_cell_sections++] = index_sect;
1618:       else if (cell_dim == dim - 1) face_section_ids[num_face_sections++] = index_sect;
1619:     }
1620:     PetscCheck(num_cell_sections == 1, comm, PETSC_ERR_SUP, "CGNS Reader does not support more than 1 full-dimension cell section");

1622:     {
1623:       int index_sect = cell_section_ids[0], nbndry, parentFlag;
1624:       CGNS_ENUMT(ElementType_t) cellType;

1626:       PetscCallCGNSRead(cg_section_read(cgid, base, zone, index_sect, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);
1627:       PetscCall(CGNSElementTypeGetDiscretizationInfo(cellType, &numClosure, &pOrder));
1628:       PetscCall(PetscMalloc1(myownede * numClosure, &elements));
1629:       PetscCallCGNSReadData(cgp_elements_read_data(cgid, base, zone, index_sect, mystarte + 1, myende, elements), *dm, 0);
1630:       for (PetscInt v = 0; v < myownede * numClosure; v++) elements[v] -= 1; // 0 based

1632:       // Create corners-only connectivity
1633:       PetscCall(CGNSElementTypeGetTopologyInfo(cellType, &dm_cell_type, &numCorners, NULL));
1634:       PetscCall(PetscMalloc1(myownede * numCorners, &elementsQ1));
1635:       PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numCorners, NULL, &perm));
1636:       for (PetscInt e = 0; e < myownede; ++e) {
1637:         for (PetscInt v = 0; v < numCorners; v++) elementsQ1[e * numCorners + perm[v]] = (PetscInt)elements[e * numClosure + v];
1638:       }
1639:     }

1641:     // Build cell-vertex Plex
1642:     PetscCall(DMPlexBuildFromCellListParallel(*dm, myownede, myownedv, NVertices, numCorners, elementsQ1, NULL, &uniq_verts));
1643:     PetscCall(DMViewFromOptions(*dm, NULL, "-corner_dm_view"));
1644:     {
1645:       PetscInt pStart, pEnd;
1646:       PetscCall(DMPlexGetChart(*dm, &pStart, &pEnd));
1647:       nuniq_verts = (pEnd - pStart) - myownede;
1648:     }
1649:     PetscCall(PetscFree(elementsQ1));
1650:   }

1652:   if (interpolate) PetscCall(DMPlexInterpolateInPlace_Internal(*dm));

1654:   // -- Create SF for naive nodal-data read to elements
1655:   PetscSF plex_to_cgns_sf;
1656:   {
1657:     PetscInt     nleaves, num_comp;
1658:     PetscInt    *leaf, num_leaves = 0;
1659:     PetscInt     cStart, cEnd;
1660:     const int   *perm;
1661:     PetscSF      cgns_to_local_sf;
1662:     PetscSection local_section;
1663:     PetscFE      fe;

1665:     // sfNatural requires PetscSection to handle DMDistribute, so we use PetscFE to define the section
1666:     // Use number of components = 1 to work with just the nodes themselves
1667:     PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, 1, dm_cell_type, pOrder, PETSC_DETERMINE, &fe));
1668:     PetscCall(PetscObjectSetName((PetscObject)fe, "FE for sfNatural"));
1669:     PetscCall(DMAddField(*dm, NULL, (PetscObject)fe));
1670:     PetscCall(DMCreateDS(*dm));
1671:     PetscCall(PetscFEDestroy(&fe));

1673:     PetscCall(DMGetLocalSection(*dm, &local_section));
1674:     PetscCall(PetscSectionViewFromOptions(local_section, NULL, "-fe_natural_section_view"));
1675:     PetscCall(PetscSectionGetFieldComponents(local_section, 0, &num_comp));
1676:     PetscCall(PetscSectionGetStorageSize(local_section, &nleaves));
1677:     nleaves /= num_comp;
1678:     PetscCall(PetscMalloc1(nleaves, &leaf));
1679:     for (PetscInt i = 0; i < nleaves; i++) leaf[i] = -1;

1681:     // Get the permutation from CGNS closure numbering to PLEX closure numbering
1682:     PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numClosure, NULL, &perm));
1683:     PetscCall(DMPlexGetHeightStratum(*dm, 0, &cStart, &cEnd));
1684:     for (PetscInt cell = cStart; cell < cEnd; ++cell) {
1685:       PetscInt num_closure_dof, *closure_idx = NULL;

1687:       PetscCall(DMPlexGetClosureIndices(*dm, local_section, local_section, cell, PETSC_FALSE, &num_closure_dof, &closure_idx, NULL, NULL));
1688:       PetscAssert(numClosure * num_comp == num_closure_dof, comm, PETSC_ERR_PLIB, "Closure dof size does not match polytope");
1689:       for (PetscInt i = 0; i < numClosure; i++) {
1690:         PetscInt li = closure_idx[perm[i] * num_comp] / num_comp;
1691:         if (li < 0) continue;

1693:         PetscInt cgns_idx = (PetscInt)elements[cell * numClosure + i];
1694:         if (leaf[li] == -1) {
1695:           leaf[li] = cgns_idx;
1696:           num_leaves++;
1697:         } else PetscAssert(leaf[li] == cgns_idx, PETSC_COMM_SELF, PETSC_ERR_PLIB, "leaf does not match previously set");
1698:       }
1699:       PetscCall(DMPlexRestoreClosureIndices(*dm, local_section, local_section, cell, PETSC_FALSE, &num_closure_dof, &closure_idx, NULL, NULL));
1700:     }
1701:     PetscAssert(num_leaves == nleaves, PETSC_COMM_SELF, PETSC_ERR_PLIB, "leaf count in closure does not match nleaves");
1702:     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)*dm), &cgns_to_local_sf));
1703:     PetscCall(PetscSFSetGraphLayout(cgns_to_local_sf, vtx_map, nleaves, NULL, PETSC_USE_POINTER, leaf));
1704:     PetscCall(PetscObjectSetName((PetscObject)cgns_to_local_sf, "CGNS to Plex SF"));
1705:     PetscCall(PetscSFViewFromOptions(cgns_to_local_sf, NULL, "-CGNStoPlex_sf_view"));
1706:     PetscCall(PetscFree(leaf));
1707:     PetscCall(PetscFree(elements));

1709:     { // Convert cgns_to_local to global_to_cgns
1710:       PetscSF sectionsf, cgns_to_global_sf;

1712:       PetscCall(DMGetSectionSF(*dm, &sectionsf));
1713:       PetscCall(PetscSFComposeInverse(cgns_to_local_sf, sectionsf, &cgns_to_global_sf));
1714:       PetscCall(PetscSFDestroy(&cgns_to_local_sf));
1715:       PetscCall(PetscSFCreateInverseSF(cgns_to_global_sf, &plex_to_cgns_sf));
1716:       PetscCall(PetscObjectSetName((PetscObject)plex_to_cgns_sf, "Global Plex to CGNS"));
1717:       PetscCall(PetscSFDestroy(&cgns_to_global_sf));
1718:     }
1719:   }

1721:   { // -- Set coordinates for DM
1722:     PetscScalar *coords;
1723:     float       *x[3];
1724:     double      *xd[3];
1725:     PetscBool    read_with_double;
1726:     PetscFE      cfe;

1728:     // Setup coordinate space first. Use pOrder here for isoparametric; revisit with CPEX-0045 High Order.
1729:     PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, coordDim, dm_cell_type, pOrder, PETSC_DETERMINE, &cfe));
1730:     PetscCall(DMSetCoordinateDisc(*dm, cfe, PETSC_FALSE, PETSC_FALSE));
1731:     PetscCall(PetscFEDestroy(&cfe));

1733:     { // Determine if coords are written in single or double precision
1734:       CGNS_ENUMT(DataType_t) datatype;

1736:       PetscCallCGNSRead(cg_coord_info(cgid, base, zone, 1, &datatype, buffer), *dm, 0);
1737:       read_with_double = datatype == CGNS_ENUMV(RealDouble) ? PETSC_TRUE : PETSC_FALSE;
1738:     }

1740:     // Read coords from file and set into component-major ordering
1741:     if (read_with_double) PetscCall(PetscMalloc3(myownedv, &xd[0], myownedv, &xd[1], myownedv, &xd[2]));
1742:     else PetscCall(PetscMalloc3(myownedv, &x[0], myownedv, &x[1], myownedv, &x[2]));
1743:     PetscCall(PetscMalloc1(myownedv * coordDim, &coords));
1744:     {
1745:       cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */
1746:       cgsize_t range_min[3] = {mystartv + 1, 1, 1};
1747:       cgsize_t range_max[3] = {myendv, 1, 1};
1748:       int      ngrids, ncoords;
1749:       // We assume that the coordinates array are in the CGNS file in the expected order (X,Y,Z, or whatever other coordinate system). This could be verified using `cg_coord_info()`, but ignoring for now
1750:       int coord_indices[] = {1, 2, 3};

1752:       PetscCallCGNSRead(cg_zone_read(cgid, base, zone, buffer, sizes), *dm, 0);
1753:       PetscCallCGNSRead(cg_ngrids(cgid, base, zone, &ngrids), *dm, 0);
1754:       PetscCheck(ngrids <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single grid, not %d", ngrids);
1755:       PetscCallCGNSRead(cg_ncoords(cgid, base, zone, &ncoords), *dm, 0);
1756:       PetscCheck(ncoords == coordDim, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a coordinate array for each dimension, not %d", ncoords);
1757:       if (read_with_double) {
1758:         PetscCallCGNSReadData(cgp_coord_multi_read_data(cgid, base, zone, coord_indices, range_min, range_max, coordDim, (void **)xd), *dm, 0);
1759:         if (coordDim >= 1) {
1760:           for (PetscInt v = 0; v < myownedv; v++) coords[v * coordDim + 0] = xd[0][v];
1761:         }
1762:         if (coordDim >= 2) {
1763:           for (PetscInt v = 0; v < myownedv; v++) coords[v * coordDim + 1] = xd[1][v];
1764:         }
1765:         if (coordDim >= 3) {
1766:           for (PetscInt v = 0; v < myownedv; v++) coords[v * coordDim + 2] = xd[2][v];
1767:         }
1768:       } else {
1769:         PetscCallCGNSReadData(cgp_coord_multi_read_data(cgid, base, zone, coord_indices, range_min, range_max, coordDim, (void **)x), *dm, 0);
1770:         if (coordDim >= 1) {
1771:           for (PetscInt v = 0; v < myownedv; v++) coords[v * coordDim + 0] = x[0][v];
1772:         }
1773:         if (coordDim >= 2) {
1774:           for (PetscInt v = 0; v < myownedv; v++) coords[v * coordDim + 1] = x[1][v];
1775:         }
1776:         if (coordDim >= 3) {
1777:           for (PetscInt v = 0; v < myownedv; v++) coords[v * coordDim + 2] = x[2][v];
1778:         }
1779:       }
1780:     }
1781:     if (read_with_double) PetscCall(PetscFree3(xd[0], xd[1], xd[2]));
1782:     else PetscCall(PetscFree3(x[0], x[1], x[2]));

1784:     { // Reduce CGNS-ordered coordinate nodes to Plex ordering, and set DM's coordinates
1785:       Vec          coord_global;
1786:       MPI_Datatype unit;
1787:       PetscScalar *coord_global_array;
1788:       DM           cdm;

1790:       PetscCall(DMGetCoordinateDM(*dm, &cdm));
1791:       PetscCall(DMCreateGlobalVector(cdm, &coord_global));
1792:       PetscCall(VecGetArrayWrite(coord_global, &coord_global_array));
1793:       PetscCallMPI(MPI_Type_contiguous(coordDim, MPIU_SCALAR, &unit));
1794:       PetscCallMPI(MPI_Type_commit(&unit));
1795:       PetscCall(PetscSFReduceBegin(plex_to_cgns_sf, unit, coords, coord_global_array, MPI_REPLACE));
1796:       PetscCall(PetscSFReduceEnd(plex_to_cgns_sf, unit, coords, coord_global_array, MPI_REPLACE));
1797:       PetscCall(VecRestoreArrayWrite(coord_global, &coord_global_array));
1798:       PetscCallMPI(MPI_Type_free(&unit));
1799:       PetscCall(DMSetCoordinates(*dm, coord_global));
1800:       PetscCall(VecDestroy(&coord_global));
1801:     }
1802:     PetscCall(PetscFree(coords));
1803:   }

1805:   PetscCall(DMViewFromOptions(*dm, NULL, "-corner_interpolated_dm_view"));

1807:   int nbocos;
1808:   PetscCallCGNSRead(cg_nbocos(cgid, base, zone, &nbocos), *dm, 0);
1809:   // In order to extract boundary condition (boco) information into DMLabels, each rank holds:
1810:   // - The local Plex
1811:   // - Naively read CGNS face connectivity
1812:   // - Naively read list of CGNS faces for each boco
1813:   //
1814:   // First, we need to build a mapping from the CGNS faces to the (probably off-rank) Plex face.
1815:   // The CGNS faces that each rank owns is known globally via cgnsLayouts.
1816:   // The cg2plexSF maps these CGNS face IDs to their (probably off-rank) Plex face.
1817:   // The plexFaces array maps the (contiguous) leaves of cg2plexSF to the local Plex face point.
1818:   //
1819:   // Next, we read the list of CGNS faces for each boco and find the location of that face's owner using cgnsLayouts.
1820:   // Then, we can communicate the label value to the local Plex which corresponds to the CGNS face.
1821:   if (interpolate && num_face_sections != 0 && nbocos != 0) {
1822:     PetscSection connSection;
1823:     PetscInt     nCgFaces, nPlexFaces;
1824:     PetscInt    *face_ids, *conn, *plexFaces;
1825:     PetscSF      cg2plexSF;
1826:     PetscLayout *cgnsLayouts;

1828:     PetscCall(DMPlexCGNS_CreateCornersConnectivitySection(*dm, cgid, base, zone, num_face_sections, face_section_ids, &connSection, NULL, &face_ids, &cgnsLayouts, &conn));
1829:     {
1830:       PetscBool view_connectivity = PETSC_FALSE;
1831:       PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_view_face_connectivity", &view_connectivity, NULL));
1832:       if (view_connectivity) PetscCall(PetscSectionArrayView(connSection, conn, PETSC_INT, NULL));
1833:     }
1834:     PetscCall(DMPlexCGNS_MatchCGNSFacesToPlexFaces(*dm, nuniq_verts, uniq_verts, myownede, NVertices, connSection, face_ids, conn, &cg2plexSF, &plexFaces));
1835:     PetscCall(PetscSFGetGraph(cg2plexSF, NULL, &nPlexFaces, NULL, NULL));
1836:     {
1837:       PetscInt start, end;
1838:       PetscCall(PetscSectionGetChart(connSection, &start, &end));
1839:       nCgFaces = end - start;
1840:     }

1842:     PetscInt *plexFaceValues, *cgFaceValues;
1843:     PetscCall(PetscMalloc2(nPlexFaces, &plexFaceValues, nCgFaces, &cgFaceValues));
1844:     for (PetscInt BC = 1; BC <= nbocos; BC++) {
1845:       cgsize_t *points;
1846:       CGBCInfo  bcinfo;
1847:       PetscBool is_faceset  = PETSC_FALSE;
1848:       PetscInt  label_value = 1;

1850:       PetscCallCGNSRead(cg_boco_info(cgid, base, zone, BC, bcinfo.name, &bcinfo.bctype, &bcinfo.pointtype, &bcinfo.npoints, bcinfo.normal, &bcinfo.nnormals, &bcinfo.normal_datatype, &bcinfo.ndatasets), *dm, 0);
1851:       PetscCheck(bcinfo.npoints < PETSC_INT_MAX, comm, PETSC_ERR_INT_OVERFLOW, "Number of mesh points too large for PetscInt type, reconfigure with --with-64-bit-indices");

1853:       PetscCall(PetscStrbeginswith(bcinfo.name, "FaceSet", &is_faceset));
1854:       if (is_faceset) {
1855:         size_t faceset_len;
1856:         PetscCall(PetscStrlen("FaceSet", &faceset_len));
1857:         PetscCall(PetscStrtoInt(bcinfo.name + faceset_len, &label_value));
1858:       }
1859:       const char *label_name = is_faceset ? "Face Sets" : bcinfo.name;

1861:       if (bcinfo.npoints < 1) continue;

1863:       PetscLayout bc_layout;
1864:       PetscInt    bcStart, bcEnd, bcSize;
1865:       PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, (PetscInt)bcinfo.npoints, 1, &bc_layout));
1866:       PetscCall(PetscLayoutGetRange(bc_layout, &bcStart, &bcEnd));
1867:       PetscCall(PetscLayoutGetLocalSize(bc_layout, &bcSize));
1868:       PetscCall(PetscLayoutDestroy(&bc_layout));
1869:       PetscCall(DMGetWorkArray(*dm, bcSize, MPIU_CGSIZE, &points));

1871:       const char *labels[] = {"Zone_t", "ZoneBC_t", "BC_t", "PointList"};
1872:       PetscCallCGNSRead(cg_golist(cgid, base, 4, (char **)labels, (int[]){zone, 1, BC, 0}), *dm, 0);
1873:       PetscCallCGNSReadData(cgp_ptlist_read_data(cgid, bcStart + 1, bcEnd, points), *dm, 0);

1875:       PetscInt    *label_values;
1876:       PetscSFNode *remotes;
1877:       PetscCall(PetscMalloc2(bcSize, &remotes, bcSize, &label_values));
1878:       for (PetscInt p = 0; p < bcSize; p++) {
1879:         PetscMPIInt bcrank;
1880:         PetscInt    bcidx;

1882:         PetscCall(PetscLayoutFindOwnerIndex_CGNSSectionLayouts(cgnsLayouts, num_face_sections, (PetscInt)points[p], &bcrank, &bcidx, NULL));
1883:         remotes[p].rank  = bcrank;
1884:         remotes[p].index = bcidx;
1885:         label_values[p]  = label_value;
1886:       }
1887:       PetscCall(DMRestoreWorkArray(*dm, bcSize, MPIU_CGSIZE, &points));

1889:       { // Communicate the BC values to their Plex-face owners
1890:         PetscSF cg2bcSF;
1891:         DMLabel label;

1893:         for (PetscInt i = 0; i < nCgFaces; i++) cgFaceValues[i] = -1;
1894:         for (PetscInt i = 0; i < nPlexFaces; i++) plexFaceValues[i] = -1;

1896:         PetscCall(PetscSFCreate(comm, &cg2bcSF));
1897:         PetscCall(PetscSFSetGraph(cg2bcSF, nCgFaces, bcSize, NULL, PETSC_COPY_VALUES, remotes, PETSC_USE_POINTER));

1899:         PetscCall(PetscSFReduceBegin(cg2bcSF, MPIU_INT, label_values, cgFaceValues, MPI_REPLACE));
1900:         PetscCall(PetscSFReduceEnd(cg2bcSF, MPIU_INT, label_values, cgFaceValues, MPI_REPLACE));
1901:         PetscCall(PetscSFBcastBegin(cg2plexSF, MPIU_INT, cgFaceValues, plexFaceValues, MPI_REPLACE));
1902:         PetscCall(PetscSFBcastEnd(cg2plexSF, MPIU_INT, cgFaceValues, plexFaceValues, MPI_REPLACE));
1903:         PetscCall(PetscSFDestroy(&cg2bcSF));
1904:         PetscCall(PetscFree2(remotes, label_values));

1906:         // Set the label values for the communicated faces
1907:         PetscCall(DMGetLabel(*dm, label_name, &label));
1908:         if (label == NULL) {
1909:           PetscCall(DMCreateLabel(*dm, label_name));
1910:           PetscCall(DMGetLabel(*dm, label_name, &label));
1911:         }
1912:         for (PetscInt i = 0; i < nPlexFaces; i++) {
1913:           if (plexFaceValues[i] == -1) continue;
1914:           PetscCall(DMLabelSetValue(label, plexFaces[i], plexFaceValues[i]));
1915:         }
1916:       }
1917:     }
1918:     PetscCall(PetscFree2(plexFaceValues, cgFaceValues));
1919:     PetscCall(PetscFree(plexFaces));
1920:     PetscCall(PetscSFDestroy(&cg2plexSF));
1921:     PetscCall(PetscFree(conn));
1922:     for (PetscInt s = 0; s < num_face_sections; s++) PetscCall(PetscLayoutDestroy(&cgnsLayouts[s]));
1923:     PetscCall(PetscSectionDestroy(&connSection));
1924:     PetscCall(PetscFree(cgnsLayouts));
1925:     PetscCall(PetscFree(face_ids));
1926:   }
1927:   PetscCall(PetscFree(uniq_verts));
1928:   PetscCall(PetscFree2(face_section_ids, cell_section_ids));

1930:   // -- Set sfNatural for solution vectors in CGNS file
1931:   // NOTE: We set sfNatural to be the map between the original CGNS ordering of nodes and the Plex ordering of nodes.
1932:   PetscCall(PetscSFViewFromOptions(plex_to_cgns_sf, NULL, "-sfNatural_init_view"));
1933:   PetscCall(DMSetNaturalSF(*dm, plex_to_cgns_sf));
1934:   PetscCall(DMSetUseNatural(*dm, PETSC_TRUE));
1935:   PetscCall(PetscSFDestroy(&plex_to_cgns_sf));

1937:   PetscCall(PetscLayoutDestroy(&elem_map));
1938:   PetscCall(PetscLayoutDestroy(&vtx_map));
1939:   PetscFunctionReturn(PETSC_SUCCESS);
1940: }

1942: // node_l2g must be freed
1943: static PetscErrorCode DMPlexCreateNodeNumbering(DM dm, PetscInt *num_local_nodes, PetscInt *num_global_nodes, PetscInt *nStart, PetscInt *nEnd, const PetscInt **node_l2g)
1944: {
1945:   PetscSection    local_section;
1946:   PetscSF         point_sf;
1947:   PetscInt        pStart, pEnd, spStart, spEnd, *points, nleaves, ncomp, *nodes;
1948:   PetscMPIInt     comm_size;
1949:   const PetscInt *ilocal, field = 0;

1951:   PetscFunctionBegin;
1952:   *num_local_nodes  = -1;
1953:   *num_global_nodes = -1;
1954:   *nStart           = -1;
1955:   *nEnd             = -1;
1956:   *node_l2g         = NULL;

1958:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &comm_size));
1959:   PetscCall(DMGetLocalSection(dm, &local_section));
1960:   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
1961:   PetscCall(PetscSectionGetChart(local_section, &spStart, &spEnd));
1962:   PetscCall(DMGetPointSF(dm, &point_sf));
1963:   if (comm_size == 1) nleaves = 0;
1964:   else PetscCall(PetscSFGetGraph(point_sf, NULL, &nleaves, &ilocal, NULL));
1965:   PetscCall(PetscSectionGetFieldComponents(local_section, field, &ncomp));

1967:   PetscInt local_node = 0, owned_node = 0, owned_start = 0;
1968:   for (PetscInt p = spStart, leaf = 0; p < spEnd; p++) {
1969:     PetscInt dof;
1970:     PetscCall(PetscSectionGetFieldDof(local_section, p, field, &dof));
1971:     PetscAssert(dof % ncomp == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Field dof %" PetscInt_FMT " must be divisible by components %" PetscInt_FMT, dof, ncomp);
1972:     local_node += dof / ncomp;
1973:     if (leaf < nleaves && p == ilocal[leaf]) { // skip points owned by a different process
1974:       leaf++;
1975:     } else {
1976:       owned_node += dof / ncomp;
1977:     }
1978:   }
1979:   PetscCallMPI(MPI_Exscan(&owned_node, &owned_start, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
1980:   PetscCall(PetscMalloc1(pEnd - pStart, &points));
1981:   owned_node = 0;
1982:   for (PetscInt p = spStart, leaf = 0; p < spEnd; p++) {
1983:     if (leaf < nleaves && p == ilocal[leaf]) { // skip points owned by a different process
1984:       points[p - pStart] = -1;
1985:       leaf++;
1986:       continue;
1987:     }
1988:     PetscInt dof, offset;
1989:     PetscCall(PetscSectionGetFieldDof(local_section, p, field, &dof));
1990:     PetscCall(PetscSectionGetFieldOffset(local_section, p, field, &offset));
1991:     PetscAssert(offset % ncomp == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Field offset %" PetscInt_FMT " must be divisible by components %" PetscInt_FMT, offset, ncomp);
1992:     points[p - pStart] = owned_start + owned_node;
1993:     owned_node += dof / ncomp;
1994:   }
1995:   if (comm_size > 1) {
1996:     PetscCall(PetscSFBcastBegin(point_sf, MPIU_INT, points, points, MPI_REPLACE));
1997:     PetscCall(PetscSFBcastEnd(point_sf, MPIU_INT, points, points, MPI_REPLACE));
1998:   }

2000:   // Set up global indices for each local node
2001:   PetscCall(PetscMalloc1(local_node, &nodes));
2002:   for (PetscInt p = spStart; p < spEnd; p++) {
2003:     PetscInt dof, offset;
2004:     PetscCall(PetscSectionGetFieldDof(local_section, p, field, &dof));
2005:     PetscCall(PetscSectionGetFieldOffset(local_section, p, field, &offset));
2006:     for (PetscInt n = 0; n < dof / ncomp; n++) nodes[offset / ncomp + n] = points[p - pStart] + n;
2007:   }
2008:   PetscCall(PetscFree(points));
2009:   *num_local_nodes = local_node;
2010:   *nStart          = owned_start;
2011:   *nEnd            = owned_start + owned_node;
2012:   PetscCallMPI(MPIU_Allreduce(&owned_node, num_global_nodes, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
2013:   *node_l2g = nodes;
2014:   PetscFunctionReturn(PETSC_SUCCESS);
2015: }

2017: PetscErrorCode DMView_PlexCGNS(DM dm, PetscViewer viewer)
2018: {
2019:   MPI_Comm          comm = PetscObjectComm((PetscObject)dm);
2020:   PetscViewer_CGNS *cgv  = (PetscViewer_CGNS *)viewer->data;
2021:   PetscInt          fvGhostStart;
2022:   PetscInt          topo_dim, coord_dim, num_global_elems;
2023:   PetscInt          cStart, cEnd, num_local_nodes, num_global_nodes, nStart, nEnd, fStart, fEnd;
2024:   const PetscInt   *node_l2g;
2025:   Vec               coord;
2026:   DM                colloc_dm, cdm;
2027:   PetscMPIInt       size;
2028:   const char       *dm_name;
2029:   int               base, zone;
2030:   cgsize_t          isize[3], elem_offset = 0;

2032:   PetscFunctionBegin;
2033:   if (!cgv->file_num) {
2034:     PetscInt time_step;
2035:     PetscCall(DMGetOutputSequenceNumber(dm, &time_step, NULL));
2036:     PetscCall(PetscViewerCGNSFileOpen_Internal(viewer, time_step));
2037:   }
2038:   PetscCallMPI(MPI_Comm_size(comm, &size));
2039:   PetscCall(DMGetDimension(dm, &topo_dim));
2040:   PetscCall(DMGetCoordinateDim(dm, &coord_dim));
2041:   PetscCall(PetscObjectGetName((PetscObject)dm, &dm_name));
2042:   PetscCallCGNSWrite(cg_base_write(cgv->file_num, dm_name, topo_dim, coord_dim, &base), dm, viewer);
2043:   PetscCallCGNS(cg_goto(cgv->file_num, base, NULL));
2044:   PetscCallCGNSWrite(cg_dataclass_write(CGNS_ENUMV(NormalizedByDimensional)), dm, viewer);

2046:   {
2047:     PetscFE        fe, fe_coord;
2048:     PetscClassId   ds_id;
2049:     PetscDualSpace dual_space, dual_space_coord;
2050:     PetscInt       num_fields, field_order = -1, field_order_coord;
2051:     PetscBool      is_simplex;
2052:     PetscCall(DMGetNumFields(dm, &num_fields));
2053:     if (num_fields > 0) {
2054:       PetscCall(DMGetField(dm, 0, NULL, (PetscObject *)&fe));
2055:       PetscCall(PetscObjectGetClassId((PetscObject)fe, &ds_id));
2056:       if (ds_id != PETSCFE_CLASSID) {
2057:         fe = NULL;
2058:         if (ds_id == PETSCFV_CLASSID) field_order = -1; // use whatever is present for coords; field will be CellCenter
2059:         else field_order = 1;                           // assume vertex-based linear elements
2060:       }
2061:     } else fe = NULL;
2062:     if (fe) {
2063:       PetscCall(PetscFEGetDualSpace(fe, &dual_space));
2064:       PetscCall(PetscDualSpaceGetOrder(dual_space, &field_order));
2065:     }
2066:     PetscCall(DMGetCoordinateDM(dm, &cdm));
2067:     PetscCall(DMGetField(cdm, 0, NULL, (PetscObject *)&fe_coord));
2068:     {
2069:       PetscClassId id;
2070:       PetscCall(PetscObjectGetClassId((PetscObject)fe_coord, &id));
2071:       if (id != PETSCFE_CLASSID) fe_coord = NULL;
2072:     }
2073:     if (fe_coord) {
2074:       PetscCall(PetscFEGetDualSpace(fe_coord, &dual_space_coord));
2075:       PetscCall(PetscDualSpaceGetOrder(dual_space_coord, &field_order_coord));
2076:     } else field_order_coord = 1;
2077:     if (field_order > 0 && field_order != field_order_coord) {
2078:       PetscInt quadrature_order = field_order;
2079:       PetscCall(DMClone(dm, &colloc_dm));
2080:       { // Inform the new colloc_dm that it is a coordinate DM so isoperiodic affine corrections can be applied
2081:         const PetscSF *face_sfs;
2082:         PetscInt       num_face_sfs;
2083:         PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &num_face_sfs, &face_sfs));
2084:         PetscCall(DMPlexSetIsoperiodicFaceSF(colloc_dm, num_face_sfs, (PetscSF *)face_sfs));
2085:         if (face_sfs) colloc_dm->periodic.setup = DMPeriodicCoordinateSetUp_Internal;
2086:       }
2087:       PetscCall(DMPlexIsSimplex(dm, &is_simplex));
2088:       PetscCall(PetscFECreateLagrange(comm, topo_dim, coord_dim, is_simplex, field_order, quadrature_order, &fe));
2089:       PetscCall(DMSetCoordinateDisc(colloc_dm, fe, PETSC_FALSE, PETSC_TRUE));
2090:       PetscCall(PetscFEDestroy(&fe));
2091:     } else {
2092:       PetscCall(PetscObjectReference((PetscObject)dm));
2093:       colloc_dm = dm;
2094:     }
2095:   }
2096:   PetscCall(DMGetCoordinateDM(colloc_dm, &cdm));
2097:   PetscCall(DMPlexCreateNodeNumbering(cdm, &num_local_nodes, &num_global_nodes, &nStart, &nEnd, &node_l2g));
2098:   PetscCall(DMGetCoordinatesLocal(colloc_dm, &coord));
2099:   PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
2100:   PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd));
2101:   PetscCall(DMPlexGetCellTypeStratum(dm, DM_POLYTOPE_FV_GHOST, &fvGhostStart, NULL));
2102:   if (fvGhostStart >= 0) cEnd = fvGhostStart;
2103:   num_global_elems = cEnd - cStart;
2104:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &num_global_elems, 1, MPIU_INT, MPI_SUM, comm));
2105:   isize[0] = num_global_nodes;
2106:   isize[1] = num_global_elems;
2107:   isize[2] = 0;
2108:   PetscCallCGNSWrite(cg_zone_write(cgv->file_num, base, "Zone", isize, CGNS_ENUMV(Unstructured), &zone), dm, viewer);

2110:   { // Write the coordinate data to file
2111:     const PetscScalar *X;
2112:     PetscScalar       *x[3];
2113:     cgsize_t           start = nStart + 1, end = nEnd; // CGNS nodes use 1-based indexing
2114:     int                coord_ids[3];

2116:     for (PetscInt d = 0; d < coord_dim; d++) {
2117:       const double exponents[] = {0, 1, 0, 0, 0};
2118:       char         coord_name[64];
2119:       PetscCall(PetscSNPrintf(coord_name, sizeof coord_name, "Coordinate%c", 'X' + (int)d));
2120:       PetscCallCGNSWrite(cgp_coord_write(cgv->file_num, base, zone, CGNS_ENUMV(RealDouble), coord_name, &coord_ids[d]), dm, viewer);
2121:       PetscCallCGNS(cg_goto(cgv->file_num, base, "Zone_t", zone, "GridCoordinates", 0, coord_name, 0, NULL));
2122:       PetscCallCGNSWrite(cg_exponents_write(CGNS_ENUMV(RealDouble), exponents), dm, viewer);
2123:     }

2125:     PetscCall(VecGetArrayRead(coord, &X));
2126:     PetscCall(PetscMalloc3(nEnd - nStart, &x[0], nEnd - nStart, &x[1], nEnd - nStart, &x[2]));
2127:     for (PetscInt d = 0; d < coord_dim; d++) {
2128:       for (PetscInt n = 0; n < num_local_nodes; n++) {
2129:         PetscInt gn = node_l2g[n];
2130:         if (gn < nStart || nEnd <= gn) continue;
2131:         x[d][gn - nStart] = X[n * coord_dim + d];
2132:       }
2133:     }
2134:     PetscCallCGNSWriteData(cgp_coord_multi_write_data(cgv->file_num, base, zone, coord_ids, &start, &end, coord_dim, (const void **)x), dm, viewer);
2135:     PetscCall(PetscFree3(x[0], x[1], x[2]));
2136:     PetscCall(VecRestoreArrayRead(coord, &X));
2137:   }

2139:   { // Write element connectivity and element type
2140:     int        section;
2141:     cgsize_t   e_owned = cEnd - cStart, e_global, e_start;
2142:     cgsize_t  *conn    = NULL;
2143:     const int *perm;
2144:     CGNS_ENUMT(ElementType_t) element_type = CGNS_ENUMV(ElementTypeNull);
2145:     if (e_owned > 0) {
2146:       DMPolytopeType cell_type;

2148:       PetscCall(DMPlexGetCellType(dm, cStart, &cell_type));
2149:       for (PetscInt i = cStart, c = 0; i < cEnd; i++) {
2150:         PetscInt closure_dof, *closure_indices, elem_size;

2152:         PetscCall(DMPlexGetClosureIndices(cdm, cdm->localSection, cdm->localSection, i, PETSC_FALSE, &closure_dof, &closure_indices, NULL, NULL));
2153:         elem_size = closure_dof / coord_dim;
2154:         if (!conn) PetscCall(PetscMalloc1(e_owned * elem_size, &conn));
2155:         PetscCall(DMPlexCGNSGetPermutation_Internal(cell_type, closure_dof / coord_dim, &element_type, &perm));
2156:         for (PetscInt j = 0; j < elem_size; j++) conn[c++] = node_l2g[closure_indices[perm[j] * coord_dim] / coord_dim] + 1;
2157:         PetscCall(DMPlexRestoreClosureIndices(cdm, cdm->localSection, cdm->localSection, i, PETSC_FALSE, &closure_dof, &closure_indices, NULL, NULL));
2158:       }
2159:     }

2161:     { // Get global element_type (for ranks that do not have owned elements)
2162:       PetscInt local_element_type, global_element_type;

2164:       local_element_type = e_owned > 0 ? (PetscInt)element_type : -1;
2165:       PetscCallMPI(MPIU_Allreduce(&local_element_type, &global_element_type, 1, MPIU_INT, MPI_MAX, comm));
2166:       if (local_element_type != -1)
2167:         PetscCheck(local_element_type == global_element_type, PETSC_COMM_SELF, PETSC_ERR_SUP, "Ranks with different element types not supported. Local element type is %s, but global is %s", cg_ElementTypeName(local_element_type), cg_ElementTypeName(global_element_type));
2168:       element_type = (CGNS_ENUMT(ElementType_t))global_element_type;
2169:     }
2170:     PetscCallMPI(MPIU_Allreduce(&e_owned, &e_global, 1, MPIU_CGSIZE, MPI_SUM, comm));
2171:     PetscCheck(e_global == num_global_elems, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected number of elements %" PRIdCGSIZE " vs %" PetscInt_FMT, e_global, num_global_elems);
2172:     e_start = 0;
2173:     PetscCallMPI(MPI_Exscan(&e_owned, &e_start, 1, MPIU_CGSIZE, MPI_SUM, comm));
2174:     e_start += elem_offset;
2175:     PetscCallCGNSWrite(cgp_section_write(cgv->file_num, base, zone, "Elem", element_type, 1, e_global, 0, &section), dm, viewer);
2176:     PetscCallCGNSWriteData(cgp_elements_write_data(cgv->file_num, base, zone, section, e_start + 1, e_start + e_owned, conn), dm, viewer);
2177:     elem_offset = e_global;
2178:     PetscCall(PetscFree(conn));

2180:     { // Write CellInfo for Rank and CellSet information
2181:       PetscMPIInt rank;
2182:       int        *efield;
2183:       int         sol, field;
2184:       DMLabel     label;
2185:       PetscCallMPI(MPI_Comm_rank(comm, &rank));
2186:       PetscCall(PetscMalloc1(e_owned, &efield));
2187:       for (PetscInt i = 0; i < e_owned; i++) efield[i] = rank;
2188:       PetscCallCGNSWrite(cg_sol_write(cgv->file_num, base, zone, "CellInfo", CGNS_ENUMV(CellCenter), &sol), dm, viewer);
2189:       PetscCallCGNSWrite(cgp_field_write(cgv->file_num, base, zone, sol, CGNS_ENUMV(Integer), "Rank", &field), dm, viewer);
2190:       cgsize_t start = e_start + 1, end = e_start + e_owned;
2191:       PetscCallCGNSWriteData(cgp_field_write_data(cgv->file_num, base, zone, sol, field, &start, &end, efield), dm, viewer);
2192:       PetscCall(DMGetLabel(dm, "Cell Sets", &label));
2193:       if (label) {
2194:         for (PetscInt c = cStart; c < cEnd; c++) {
2195:           PetscInt value;
2196:           PetscCall(DMLabelGetValue(label, c, &value));
2197:           efield[c - cStart] = value;
2198:         }
2199:         PetscCallCGNSWrite(cgp_field_write(cgv->file_num, base, zone, sol, CGNS_ENUMV(Integer), "CellSet", &field), dm, viewer);
2200:         PetscCallCGNSWriteData(cgp_field_write_data(cgv->file_num, base, zone, sol, field, &start, &end, efield), dm, viewer);
2201:       }
2202:       PetscCall(PetscFree(efield));
2203:     }

2205:     cgv->base            = base;
2206:     cgv->zone            = zone;
2207:     cgv->node_l2g        = node_l2g;
2208:     cgv->num_local_nodes = num_local_nodes;
2209:     cgv->nStart          = nStart;
2210:     cgv->nEnd            = nEnd;
2211:     cgv->eStart          = (PetscInt)e_start;
2212:     cgv->eEnd            = (PetscInt)(e_start + e_owned);
2213:   }

2215:   DMLabel  fsLabel;
2216:   PetscInt num_fs_global;
2217:   IS       fsValuesGlobalIS;
2218:   PetscCall(DMGetLabel(dm, "Face Sets", &fsLabel));
2219:   PetscCall(DMLabelGetValueISGlobal(comm, fsLabel, PETSC_TRUE, &fsValuesGlobalIS));
2220:   PetscCall(ISGetSize(fsValuesGlobalIS, &num_fs_global));

2222:   if (num_fs_global > 0) {
2223:     CGNS_ENUMT(ElementType_t) element_type = CGNS_ENUMV(ElementTypeNull);
2224:     const PetscInt *fsValuesLocal;
2225:     IS              stratumIS, fsFacesAll;
2226:     int             section;
2227:     const int      *perm;
2228:     cgsize_t        f_owned = 0, f_global, f_start;
2229:     cgsize_t       *parents, *conn = NULL;
2230:     PetscInt        fStart, fEnd;

2232:     PetscInt num_fs_local;
2233:     IS       fsValuesLocalIS;

2235:     if (fsLabel) {
2236:       PetscCall(DMLabelGetNonEmptyStratumValuesIS(fsLabel, &fsValuesLocalIS));
2237:       PetscCall(ISGetSize(fsValuesLocalIS, &num_fs_local));
2238:       PetscCall(ISGetIndices(fsValuesLocalIS, &fsValuesLocal));
2239:     } else num_fs_local = 0;

2241:     PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd));
2242:     { // Get single IS without duplicates of the local face IDs in the FaceSets
2243:       IS *fsPoints = NULL;

2245:       PetscCall(PetscMalloc1(num_fs_local, &fsPoints));
2246:       for (PetscInt fs = 0; fs < num_fs_local; ++fs) PetscCall(DMLabelGetStratumIS(fsLabel, fsValuesLocal[fs], &fsPoints[fs]));
2247:       PetscCall(ISConcatenate(PETSC_COMM_SELF, num_fs_local, fsPoints, &fsFacesAll));
2248:       PetscCall(ISSortRemoveDups(fsFacesAll));
2249:       PetscCall(ISGeneralFilter(fsFacesAll, fStart, fEnd)); // Remove non-face mesh points from the IS
2250:       {
2251:         PetscInt f_owned_int;
2252:         PetscCall(ISGetSize(fsFacesAll, &f_owned_int));
2253:         f_owned = f_owned_int;
2254:       }
2255:       for (PetscInt fs = 0; fs < num_fs_local; ++fs) PetscCall(ISDestroy(&fsPoints[fs]));
2256:       PetscCall(PetscFree(fsPoints));
2257:     }
2258:     PetscCall(ISRestoreIndices(fsValuesLocalIS, &fsValuesLocal));
2259:     PetscCall(ISDestroy(&fsValuesLocalIS));

2261:     {
2262:       const PetscInt *faces;
2263:       DMPolytopeType  cell_type, cell_type_f;
2264:       PetscInt        closure_dof = -1, closure_dof_f;

2266:       PetscCall(ISGetIndices(fsFacesAll, &faces));
2267:       if (f_owned) PetscCall(DMPlexGetCellType(dm, faces[0], &cell_type));
2268:       PetscCall(PetscCalloc1(f_owned * 2, &parents));
2269:       for (PetscInt f = 0, c = 0; f < f_owned; f++) {
2270:         PetscInt      *closure_indices, elem_size;
2271:         const PetscInt face = faces[f];

2273:         PetscCall(DMPlexGetCellType(dm, face, &cell_type_f));
2274:         PetscCheck(cell_type_f == cell_type, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Only mono-topology face sets are supported currently. Face %" PetscInt_FMT " is %s, which is different than the previous type %s", face, DMPolytopeTypes[cell_type_f], DMPolytopeTypes[cell_type]);

2276:         // Get connectivity of the face
2277:         PetscCall(DMPlexGetClosureIndices(cdm, cdm->localSection, cdm->localSection, face, PETSC_FALSE, &closure_dof_f, &closure_indices, NULL, NULL));
2278:         elem_size = closure_dof_f / coord_dim;
2279:         if (!conn) {
2280:           PetscCall(PetscMalloc1(f_owned * elem_size, &conn));
2281:           closure_dof = closure_dof_f;
2282:         }
2283:         PetscCheck(closure_dof_f == closure_dof, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Closure of face %" PetscInt_FMT " has %" PetscInt_FMT " dofs instead of the previously written %" PetscInt_FMT " dofs. Only mono-topology face sets are supported currently", face, closure_dof_f, closure_dof);
2284:         PetscCall(DMPlexCGNSGetPermutation_Internal(cell_type, elem_size, &element_type, &perm));
2285:         for (PetscInt j = 0; j < elem_size; j++) conn[c++] = node_l2g[closure_indices[perm[j] * coord_dim] / coord_dim] + 1;
2286:         PetscCall(DMPlexRestoreClosureIndices(cdm, cdm->localSection, cdm->localSection, face, PETSC_FALSE, &closure_dof_f, &closure_indices, NULL, NULL));
2287:       }
2288:       PetscCall(ISRestoreIndices(fsFacesAll, &faces));
2289:     }

2291:     {   // Write connectivity for face sets
2292:       { // Get global element type
2293:         PetscInt local_element_type, global_element_type;

2295:         local_element_type = f_owned > 0 ? (PetscInt)element_type : -1;
2296:         PetscCallMPI(MPIU_Allreduce(&local_element_type, &global_element_type, 1, MPIU_INT, MPI_MAX, comm));
2297:         if (local_element_type != -1)
2298:           PetscCheck(local_element_type == global_element_type, PETSC_COMM_SELF, PETSC_ERR_SUP, "Ranks with different element types not supported. Local element type is %s, but global is %s", cg_ElementTypeName(local_element_type), cg_ElementTypeName(global_element_type));
2299:         element_type = (CGNS_ENUMT(ElementType_t))global_element_type;
2300:       }
2301:       PetscCallMPI(MPIU_Allreduce(&f_owned, &f_global, 1, MPIU_CGSIZE, MPI_SUM, comm));
2302:       f_start = 0;
2303:       PetscCallMPI(MPI_Exscan(&f_owned, &f_start, 1, MPIU_CGSIZE, MPI_SUM, comm));
2304:       f_start += elem_offset;
2305:       PetscCallCGNSWrite(cgp_section_write(cgv->file_num, base, zone, "Faces", element_type, elem_offset + 1, elem_offset + f_global, 0, &section), dm, viewer);
2306:       PetscCallCGNSWriteData(cgp_elements_write_data(cgv->file_num, base, zone, section, f_start + 1, f_start + f_owned, conn), dm, viewer);

2308:       PetscCall(PetscFree(conn));
2309:       PetscCall(PetscFree(parents));
2310:     }

2312:     const PetscInt *fsValuesGlobal = NULL;
2313:     PetscCall(ISGetIndices(fsValuesGlobalIS, &fsValuesGlobal));
2314:     for (PetscInt fs = 0; fs < num_fs_global; ++fs) {
2315:       int            BC;
2316:       const PetscInt fsID    = fsValuesGlobal[fs];
2317:       PetscInt      *fs_pnts = NULL;
2318:       char           bc_name[33];
2319:       cgsize_t       fs_start, fs_owned, fs_global;
2320:       cgsize_t      *fs_pnts_cg;

2322:       PetscCall(DMLabelGetStratumIS(fsLabel, fsID, &stratumIS));
2323:       if (stratumIS) { // Get list of only face points
2324:         PetscSegBuffer  fs_pntsSB;
2325:         PetscCount      fs_owned_count;
2326:         PetscInt        nstratumPnts;
2327:         const PetscInt *stratumPnts;

2329:         PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 16, &fs_pntsSB));
2330:         PetscCall(ISGetIndices(stratumIS, &stratumPnts));
2331:         PetscCall(ISGetSize(stratumIS, &nstratumPnts));
2332:         for (PetscInt i = 0; i < nstratumPnts; i++) {
2333:           PetscInt *fs_pnts_buffer, stratumPnt = stratumPnts[i];
2334:           if (stratumPnt < fStart || stratumPnt >= fEnd) continue; // Skip non-face points
2335:           PetscCall(PetscSegBufferGetInts(fs_pntsSB, 1, &fs_pnts_buffer));
2336:           *fs_pnts_buffer = stratumPnt;
2337:         }
2338:         PetscCall(PetscSegBufferGetSize(fs_pntsSB, &fs_owned_count));
2339:         fs_owned = fs_owned_count;
2340:         PetscCall(PetscSegBufferExtractAlloc(fs_pntsSB, &fs_pnts));

2342:         PetscCall(PetscSegBufferDestroy(&fs_pntsSB));
2343:         PetscCall(ISRestoreIndices(stratumIS, &stratumPnts));
2344:         PetscCall(ISDestroy(&stratumIS));
2345:       } else fs_owned = 0;

2347:       PetscCallMPI(MPIU_Allreduce(&fs_owned, &fs_global, 1, MPIU_CGSIZE, MPI_SUM, comm));
2348:       fs_start = 0;
2349:       PetscCallMPI(MPI_Exscan(&fs_owned, &fs_start, 1, MPIU_CGSIZE, MPI_SUM, comm));
2350:       PetscCheck(fs_start + fs_owned <= fs_global, PETSC_COMM_SELF, PETSC_ERR_PLIB, "End range of point set (%" PRIdCGSIZE ") greater than global point set size (%" PRIdCGSIZE ")", fs_start + fs_owned, fs_global);

2352:       PetscCall(PetscSNPrintf(bc_name, sizeof bc_name, "FaceSet%" PetscInt_FMT, fsID));
2353:       PetscCallCGNSWrite(cg_boco_write(cgv->file_num, base, zone, bc_name, CGNS_ENUMV(BCTypeNull), CGNS_ENUMV(PointList), fs_global, NULL, &BC), dm, viewer);

2355:       PetscCall(PetscMalloc1(fs_owned, &fs_pnts_cg));
2356:       for (PetscInt i = 0; i < fs_owned; i++) {
2357:         PetscInt is_idx;

2359:         PetscCall(ISLocate(fsFacesAll, fs_pnts[i], &is_idx));
2360:         PetscCheck(is_idx >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Could not find face %" PetscInt_FMT " in list of all local face points", fs_pnts[i]);
2361:         fs_pnts_cg[i] = is_idx + f_start + 1;
2362:       }

2364:       const char *labels[] = {"Zone_t", "ZoneBC_t", "BC_t", "PointList"};
2365:       PetscCallCGNSWrite(cg_golist(cgv->file_num, base, 4, (char **)labels, (int[]){zone, 1, BC, 0}), dm, 0);
2366:       PetscCallCGNSWriteData(cgp_ptlist_write_data(cgv->file_num, fs_start + 1, fs_start + fs_owned, fs_pnts_cg), dm, viewer);

2368:       CGNS_ENUMT(GridLocation_t) grid_loc = CGNS_ENUMV(GridLocationNull);
2369:       if (topo_dim == 3) grid_loc = CGNS_ENUMV(FaceCenter);
2370:       else if (topo_dim == 2) grid_loc = CGNS_ENUMV(EdgeCenter);
2371:       else if (topo_dim == 1) grid_loc = CGNS_ENUMV(Vertex);
2372:       PetscCallCGNSWriteData(cg_boco_gridlocation_write(cgv->file_num, base, zone, BC, grid_loc), dm, viewer);

2374:       PetscCall(PetscFree(fs_pnts_cg));
2375:       PetscCall(PetscFree(fs_pnts));
2376:     }
2377:     PetscCall(ISDestroy(&fsFacesAll));
2378:     PetscCall(ISRestoreIndices(fsValuesGlobalIS, &fsValuesGlobal));
2379:     elem_offset += f_global;
2380:   }
2381:   PetscCall(ISDestroy(&fsValuesGlobalIS));

2383:   PetscCall(DMDestroy(&colloc_dm));
2384:   PetscFunctionReturn(PETSC_SUCCESS);
2385: }

2387: PetscErrorCode VecView_Plex_Local_CGNS(Vec V, PetscViewer viewer)
2388: {
2389:   PetscViewer_CGNS  *cgv = (PetscViewer_CGNS *)viewer->data;
2390:   DM                 dm;
2391:   PetscSection       section;
2392:   PetscInt           time_step, num_fields, pStart, pEnd, fvGhostStart, ncomp;
2393:   PetscReal          time, *time_slot;
2394:   size_t            *step_slot;
2395:   const PetscScalar *v;
2396:   char               solution_name[PETSC_MAX_PATH_LEN];
2397:   int                sol;

2399:   PetscFunctionBegin;
2400:   PetscCall(VecGetDM(V, &dm));
2401:   PetscCall(DMGetLocalSection(dm, &section));
2402:   PetscCall(PetscSectionGetChart(section, &pStart, &pEnd));
2403:   PetscCall(DMPlexGetCellTypeStratum(dm, DM_POLYTOPE_FV_GHOST, &fvGhostStart, NULL));
2404:   if (fvGhostStart >= 0) pEnd = fvGhostStart;

2406:   if (!cgv->node_l2g) PetscCall(DMView(dm, viewer));
2407:   if (!cgv->grid_loc) { // Determine if writing to cell-centers or to nodes
2408:     PetscInt cStart, cEnd;
2409:     PetscInt local_grid_loc, global_grid_loc;

2411:     PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
2412:     if (fvGhostStart >= 0) cEnd = fvGhostStart;
2413:     if (cgv->num_local_nodes == 0) local_grid_loc = -1;
2414:     else if (cStart == pStart && cEnd == pEnd) local_grid_loc = CGNS_ENUMV(CellCenter);
2415:     else local_grid_loc = CGNS_ENUMV(Vertex);

2417:     PetscCallMPI(MPIU_Allreduce(&local_grid_loc, &global_grid_loc, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)viewer)));
2418:     if (local_grid_loc != -1)
2419:       PetscCheck(local_grid_loc == global_grid_loc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Ranks with different grid locations not supported. Local has %" PetscInt_FMT ", allreduce returned %" PetscInt_FMT, local_grid_loc, global_grid_loc);
2420:     PetscCheck((global_grid_loc == CGNS_ENUMV(CellCenter)) || (global_grid_loc == CGNS_ENUMV(Vertex)), PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Grid location should only be CellCenter (%d) or Vertex(%d), but have %" PetscInt_FMT, CGNS_ENUMV(CellCenter), CGNS_ENUMV(Vertex), global_grid_loc);
2421:     cgv->grid_loc = (CGNS_ENUMT(GridLocation_t))global_grid_loc;
2422:   }
2423:   if (!cgv->output_times) PetscCall(PetscSegBufferCreate(sizeof(PetscReal), 20, &cgv->output_times));
2424:   if (!cgv->output_steps) PetscCall(PetscSegBufferCreate(sizeof(size_t), 20, &cgv->output_steps));

2426:   PetscCall(DMGetOutputSequenceNumber(dm, &time_step, &time));
2427:   if (time_step < 0) {
2428:     time_step = 0;
2429:     time      = 0.;
2430:   }
2431:   // Avoid "Duplicate child name found" error by not writing an already-written solution.
2432:   // This usually occurs when a solution is written and then diverges on the very next timestep.
2433:   if (time_step == cgv->previous_output_step) PetscFunctionReturn(PETSC_SUCCESS);

2435:   PetscCall(PetscSegBufferGet(cgv->output_times, 1, &time_slot));
2436:   *time_slot = time;
2437:   PetscCall(PetscSegBufferGet(cgv->output_steps, 1, &step_slot));
2438:   *step_slot = cgv->previous_output_step = time_step;
2439:   PetscCall(PetscSNPrintf(solution_name, sizeof solution_name, "FlowSolution%" PetscInt_FMT, time_step));
2440:   PetscCallCGNSWrite(cg_sol_write(cgv->file_num, cgv->base, cgv->zone, solution_name, cgv->grid_loc, &sol), V, viewer);
2441:   PetscCall(VecGetArrayRead(V, &v));
2442:   PetscCall(PetscSectionGetNumFields(section, &num_fields));

2444:   int *cgfield_ids;
2445:   if (cgv->num_nodal_fields == 0) {
2446:     for (PetscInt field = 0; field < num_fields; field++) {
2447:       PetscCall(PetscSectionGetFieldComponents(section, field, &ncomp));
2448:       cgv->num_nodal_fields += ncomp;
2449:     }
2450:   }
2451:   if (!cgv->nodal_fields) PetscCall(PetscCalloc1(cgv->num_nodal_fields, &cgv->nodal_fields));
2452:   PetscCall(PetscMalloc1(cgv->num_nodal_fields, &cgfield_ids));
2453:   for (PetscInt field = 0, nodal_field_idx = 0; field < num_fields; field++) {
2454:     const char *field_name;

2456:     PetscCall(PetscSectionGetFieldName(section, field, &field_name));
2457:     PetscCall(PetscSectionGetFieldComponents(section, field, &ncomp));
2458:     for (PetscInt comp = 0; comp < ncomp; comp++, nodal_field_idx++) {
2459:       const char *comp_name;
2460:       char        cgns_field_name[32]; // CGNS max field name is 32
2461:       CGNS_ENUMT(DataType_t) datatype;

2463:       PetscCall(PetscSectionGetComponentName(section, field, comp, &comp_name));
2464:       if (ncomp == 1 && comp_name[0] == '0' && comp_name[1] == '\0' && field_name[0] != '\0') PetscCall(PetscStrncpy(cgns_field_name, field_name, sizeof cgns_field_name));
2465:       else if (field_name[0] == '\0') PetscCall(PetscStrncpy(cgns_field_name, comp_name, sizeof cgns_field_name));
2466:       else PetscCall(PetscSNPrintf(cgns_field_name, sizeof cgns_field_name, "%s.%s", field_name, comp_name));
2467:       PetscCall(PetscCGNSDataType(PETSC_SCALAR, &datatype));
2468:       PetscCallCGNSWrite(cgp_field_write(cgv->file_num, cgv->base, cgv->zone, sol, datatype, cgns_field_name, &cgfield_ids[nodal_field_idx]), V, viewer);

2470:       if (!cgv->nodal_fields[nodal_field_idx]) {
2471:         switch (cgv->grid_loc) {
2472:         case CGNS_ENUMV(Vertex): {
2473:           PetscCall(PetscMalloc1(cgv->nEnd - cgv->nStart, &cgv->nodal_fields[nodal_field_idx]));
2474:         } break;
2475:         case CGNS_ENUMV(CellCenter): {
2476:           PetscCall(PetscMalloc1(cgv->eEnd - cgv->eStart, &cgv->nodal_fields[nodal_field_idx]));
2477:         } break;
2478:         default:
2479:           SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Can only write for Vertex and CellCenter grid locations");
2480:         }
2481:       }
2482:       for (PetscInt p = pStart, n = 0; p < pEnd; p++) {
2483:         PetscInt off, dof;
2484:         PetscCall(PetscSectionGetFieldDof(section, p, field, &dof));
2485:         if (dof == 0) continue;
2486:         PetscCall(PetscSectionGetFieldOffset(section, p, field, &off));
2487:         for (PetscInt c = comp; c < dof; c += ncomp, n++) {
2488:           switch (cgv->grid_loc) {
2489:           case CGNS_ENUMV(Vertex): {
2490:             PetscInt gn = cgv->node_l2g[n];
2491:             if (gn < cgv->nStart || cgv->nEnd <= gn) continue;
2492:             cgv->nodal_fields[nodal_field_idx][gn - cgv->nStart] = v[off + c];
2493:           } break;
2494:           case CGNS_ENUMV(CellCenter): {
2495:             cgv->nodal_fields[nodal_field_idx][n] = v[off + c];
2496:           } break;
2497:           default:
2498:             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Can only pack for Vertex and CellCenter grid locations");
2499:           }
2500:         }
2501:       }
2502:     }
2503:   }

2505:   cgsize_t start, end;
2506:   // CGNS nodes use 1-based indexing
2507:   switch (cgv->grid_loc) {
2508:   case CGNS_ENUMV(Vertex): {
2509:     start = cgv->nStart + 1;
2510:     end   = cgv->nEnd;
2511:   } break;
2512:   case CGNS_ENUMV(CellCenter): {
2513:     start = cgv->eStart + 1;
2514:     end   = cgv->eEnd;
2515:   } break;
2516:   default:
2517:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Can only write for Vertex and CellCenter grid locations");
2518:   }
2519:   PetscCallCGNSWriteData(cgp_field_multi_write_data(cgv->file_num, cgv->base, cgv->zone, sol, cgfield_ids, &start, &end, cgv->num_nodal_fields, (const void **)cgv->nodal_fields), V, viewer);

2521:   PetscCall(PetscFree(cgfield_ids));
2522:   PetscCall(VecRestoreArrayRead(V, &v));
2523:   PetscCall(PetscViewerCGNSCheckBatch_Internal(viewer));
2524:   PetscFunctionReturn(PETSC_SUCCESS);
2525: }

// Load a Vec from a CGNS FlowSolution_t node using the parallel CGNS reader.
// Requires the -dm_plex_cgns_parallel option; the file must contain exactly one
// base and one zone. Each rank reads a contiguous slab of the file's vertex data
// (defined by an even PetscLayout split of the global vertex count), then the data
// is scattered from natural (file) ordering into the Vec's PETSc ordering through
// the DM's natural SF. Collective on the Vec's communicator.
PetscErrorCode VecLoad_Plex_CGNS_Internal(Vec V, PetscViewer viewer)
{
  MPI_Comm          comm;
  char              buffer[CGIO_MAX_NAME_LENGTH + 1]; // scratch for CGNS zone/field names
  PetscViewer_CGNS *cgv                 = (PetscViewer_CGNS *)viewer->data;
  int               cgid                = cgv->file_num;
  PetscBool         use_parallel_viewer = PETSC_FALSE;
  int               z                   = 1; // Only support one zone
  int               B                   = 1; // Only support one base
  int               numComp;                 // number of field components found in the file's solution node
  PetscInt          V_numComps, mystartv, myendv, myownedv; // components per node in V; [mystartv,myendv) and count of file vertices this rank reads

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)V, &comm));

  // The serial CGNS reader has no VecLoad support; insist on the parallel path
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_parallel", &use_parallel_viewer, NULL));
  PetscCheck(use_parallel_viewer, comm, PETSC_ERR_USER_INPUT, "Cannot use VecLoad with CGNS file in serial reader; use -dm_plex_cgns_parallel to enable parallel reader");

  { // Get CGNS node ownership information
    int         nbases, nzones;
    PetscInt    NVertices;
    PetscLayout vtx_map;
    cgsize_t    sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */

    PetscCallCGNSRead(cg_nbases(cgid, &nbases), V, viewer);
    PetscCheck(nbases <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single base, not %d", nbases);
    PetscCallCGNSRead(cg_nzones(cgid, B, &nzones), V, viewer);
    PetscCheck(nzones == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "limited to one zone %d", (int)nzones);

    PetscCallCGNSRead(cg_zone_read(cgid, B, z, buffer, sizes), V, viewer);
    PetscCheck(sizes[0] < PETSC_INT_MAX, comm, PETSC_ERR_INT_OVERFLOW, "Number of read vertices too large for PetscInt type, reconfigure with --with-64-bit-indices");
    NVertices = (PetscInt)sizes[0];

    // Evenly partition the file's vertices across ranks; this layout decides which
    // slab of each CGNS field array this rank reads below
    PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NVertices, 1, &vtx_map));
    PetscCall(PetscLayoutGetRange(vtx_map, &mystartv, &myendv));
    PetscCall(PetscLayoutGetLocalSize(vtx_map, &myownedv));
    PetscCall(PetscLayoutDestroy(&vtx_map));
  }

  { // -- Read data from file into Vec
    PetscScalar *fields = NULL; // interleaved (vertex-major) staging buffer, myownedv * numComp entries
    PetscSF      sfNatural;

    { // Check compatibility between sfNatural and the data source and sink
      DM       dm;
      PetscInt nleaves, nroots, V_local_size;

      PetscCall(VecGetDM(V, &dm));
      PetscCall(DMGetNaturalSF(dm, &sfNatural));
      PetscCheck(sfNatural, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "DM of Vec must have sfNatural");
      PetscCall(PetscSFGetGraph(sfNatural, &nroots, &nleaves, NULL, NULL));
      PetscCall(VecGetLocalSize(V, &V_local_size));
      PetscCheck(nleaves == myownedv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Number of locally owned vertices (% " PetscInt_FMT ") must match number of leaves in sfNatural (% " PetscInt_FMT ")", myownedv, nleaves);
      if (nroots == 0) {
        // Rank holds no Vec entries; components per node cannot be inferred locally
        PetscCheck(V_local_size == nroots, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Local Vec size (% " PetscInt_FMT ") must be zero if number of roots in sfNatural is zero", V_local_size);
        V_numComps = 0;
      } else {
        PetscCheck(V_local_size % nroots == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Local Vec size (% " PetscInt_FMT ") not evenly divisible by number of roots in sfNatural (% " PetscInt_FMT ")", V_local_size, nroots);
        V_numComps = V_local_size / nroots;
      }
    }

    { // Read data into component-major ordering
      CGNS_ENUMT(DataType_t) datatype;
      int      isol, numSols;
      double **fields_CGNS; // one contiguous per-component buffer, as CGNS stores fields

      PetscCallCGNSRead(cg_nsols(cgid, B, z, &numSols), V, viewer);
      PetscCall(PetscViewerCGNSGetSolutionFileIndex_Internal(viewer, &isol));
      PetscCallCGNSRead(cg_nfields(cgid, B, z, isol, &numComp), V, viewer);
      // V_numComps == 0 (empty rank) is acceptable; otherwise the Vec blocking must match the file
      PetscCheck(V_numComps == numComp || V_numComps == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Vec sized for  % " PetscInt_FMT " components per node, but file has %d components per node", V_numComps, numComp);

      // CGNS ranges are 1-based and inclusive: read rows [mystartv+1, myendv]
      cgsize_t range_min[3] = {mystartv + 1, 1, 1};
      cgsize_t range_max[3] = {myendv, 1, 1};
      int     *field_ids;
      PetscCall(PetscMalloc2(numComp, &fields_CGNS, numComp, &field_ids));
      for (PetscInt i = 0; i < numComp; i++) {
        PetscCall(PetscMalloc1(myownedv, &fields_CGNS[i]));
        field_ids[i] = i + 1; // CGNS field indices are 1-based
      }
      PetscCall(PetscMalloc1(myownedv * numComp, &fields));
      // Only RealDouble fields are supported since fields_CGNS buffers are double
      for (int d = 0; d < numComp; ++d) {
        PetscCallCGNSRead(cg_field_info(cgid, B, z, isol, (d + 1), &datatype, buffer), V, viewer);
        PetscCheck(datatype == CGNS_ENUMV(RealDouble), PETSC_COMM_SELF, PETSC_ERR_ARG_NOTSAMETYPE, "Field %s in file is not of type double", buffer);
      }
      // Collective read of all components of this rank's vertex slab in one call
      PetscCallCGNSReadData(cgp_field_multi_read_data(cgid, B, z, isol, field_ids, range_min, range_max, numComp, (void **)fields_CGNS), V, viewer);
      // Transpose component-major (file) -> vertex-major (interleaved, as the SF expects)
      for (int d = 0; d < numComp; ++d) {
        for (PetscInt v = 0; v < myownedv; ++v) fields[v * numComp + d] = fields_CGNS[d][v];
      }
      for (PetscInt i = 0; i < numComp; i++) PetscCall(PetscFree(fields_CGNS[i]));
      PetscCall(PetscFree2(fields_CGNS, field_ids));
    }

    { // Reduce fields into Vec array
      PetscScalar *V_array;
      MPI_Datatype fieldtype;

      PetscCall(VecGetArrayWrite(V, &V_array));
      // Move all numComp components of a vertex as one MPI unit through the SF
      PetscCallMPI(MPI_Type_contiguous(numComp, MPIU_SCALAR, &fieldtype));
      PetscCallMPI(MPI_Type_commit(&fieldtype));
      // Leaves hold file (natural) ordering; reduce with MPI_REPLACE to scatter into PETSc ordering
      PetscCall(PetscSFReduceBegin(sfNatural, fieldtype, fields, V_array, MPI_REPLACE));
      PetscCall(PetscSFReduceEnd(sfNatural, fieldtype, fields, V_array, MPI_REPLACE));
      PetscCallMPI(MPI_Type_free(&fieldtype));
      PetscCall(VecRestoreArrayWrite(V, &V_array));
    }
    PetscCall(PetscFree(fields));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}