Actual source code: plexcgns2.c

  1: #include <petsc/private/dmpleximpl.h>
  2: #include <petsc/private/sfimpl.h>
  3: #include <petsc/private/viewercgnsimpl.h>
  4: #include <petsc/private/hashseti.h>

  6: #include <pcgnslib.h>
  7: #include <cgns_io.h>

  9: #if !defined(CGNS_ENUMT)
 10:   #define CGNS_ENUMT(a) a
 11: #endif
 12: #if !defined(CGNS_ENUMV)
 13:   #define CGNS_ENUMV(a) a
 14: #endif
 15: // Permute plex closure ordering to CGNS
 16: static PetscErrorCode DMPlexCGNSGetPermutation_Internal(DMPolytopeType cell_type, PetscInt closure_size, CGNS_ENUMT(ElementType_t) * element_type, const int **perm)
 17: {
 18:   CGNS_ENUMT(ElementType_t) element_type_tmp;

 20:   // https://cgns.github.io/CGNS_docs_current/sids/conv.html#unstructgrid
 21:   static const int bar_2[2]   = {0, 1};
 22:   static const int bar_3[3]   = {1, 2, 0};
 23:   static const int bar_4[4]   = {2, 3, 0, 1};
 24:   static const int bar_5[5]   = {3, 4, 0, 1, 2};
 25:   static const int tri_3[3]   = {0, 1, 2};
 26:   static const int tri_6[6]   = {3, 4, 5, 0, 1, 2};
 27:   static const int tri_10[10] = {7, 8, 9, 1, 2, 3, 4, 5, 6, 0};
 28:   static const int quad_4[4]  = {0, 1, 2, 3};
 29:   static const int quad_9[9]  = {
 30:     5, 6, 7, 8, // vertices
 31:     1, 2, 3, 4, // edges
 32:     0,          // center
 33:   };
 34:   static const int quad_16[] = {
 35:     12, 13, 14, 15,               // vertices
 36:     4,  5,  6,  7,  8, 9, 10, 11, // edges
 37:     0,  1,  3,  2,                // centers
 38:   };
 39:   static const int quad_25[] = {
 40:     21, 22, 23, 24,                                 // vertices
 41:     9,  10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, // edges
 42:     0,  1,  2,  5,  8,  7,  6,  3,  4,              // centers
 43:   };
 44:   static const int tetra_4[4]   = {0, 2, 1, 3};
 45:   static const int tetra_10[10] = {6, 8, 7, 9, 2, 1, 0, 3, 5, 4};
 46:   static const int tetra_20[20] = {
 47:     16, 18, 17, 19,         // vertices
 48:     9,  8,  7,  6,  5,  4,  // bottom edges
 49:     10, 11, 14, 15, 13, 12, // side edges
 50:     0,  2,  3,  1,          // faces
 51:   };
 52:   static const int hexa_8[8]   = {0, 3, 2, 1, 4, 5, 6, 7};
 53:   static const int hexa_27[27] = {
 54:     19, 22, 21, 20, 23, 24, 25, 26, // vertices
 55:     10, 9,  8,  7,                  // bottom edges
 56:     16, 15, 18, 17,                 // mid edges
 57:     11, 12, 13, 14,                 // top edges
 58:     1,  3,  5,  4,  6,  2,          // faces
 59:     0,                              // center
 60:   };
 61:   static const int hexa_64[64] = {
 62:     // debug with $PETSC_ARCH/tests/dm/impls/plex/tests/ex49 -dm_plex_simplex 0 -dm_plex_dim 3 -dm_plex_box_faces 1,1,1 -dm_coord_petscspace_degree 3
 63:     56, 59, 58, 57, 60, 61, 62, 63, // vertices
 64:     39, 38, 37, 36, 35, 34, 33, 32, // bottom edges
 65:     51, 50, 48, 49, 52, 53, 55, 54, // mid edges; ParaView needs edge 21-22 swapped with 23-24
 66:     40, 41, 42, 43, 44, 45, 46, 47, // top edges
 67:     8,  10, 11, 9,                  // z-minus face
 68:     16, 17, 19, 18,                 // y-minus face
 69:     24, 25, 27, 26,                 // x-plus face
 70:     20, 21, 23, 22,                 // y-plus face
 71:     30, 28, 29, 31,                 // x-minus face
 72:     12, 13, 15, 14,                 // z-plus face
 73:     0,  1,  3,  2,  4,  5,  7,  6,  // center
 74:   };

 76:   PetscFunctionBegin;
 77:   element_type_tmp = CGNS_ENUMV(ElementTypeNull);
 78:   *perm            = NULL;
 79:   switch (cell_type) {
 80:   case DM_POLYTOPE_SEGMENT:
 81:     switch (closure_size) {
 82:     case 2:
 83:       element_type_tmp = CGNS_ENUMV(BAR_2);
 84:       *perm            = bar_2;
 85:       break;
 86:     case 3:
 87:       element_type_tmp = CGNS_ENUMV(BAR_3);
 88:       *perm            = bar_3;
 89:       break;
 90:     case 4:
 91:       element_type_tmp = CGNS_ENUMV(BAR_4);
 92:       *perm            = bar_4;
 93:       break;
 94:     case 5:
 95:       element_type_tmp = CGNS_ENUMV(BAR_5);
 96:       *perm            = bar_5;
 97:       break;
 98:     default:
 99:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
100:     }
101:     break;
102:   case DM_POLYTOPE_TRIANGLE:
103:     switch (closure_size) {
104:     case 3:
105:       element_type_tmp = CGNS_ENUMV(TRI_3);
106:       *perm            = tri_3;
107:       break;
108:     case 6:
109:       element_type_tmp = CGNS_ENUMV(TRI_6);
110:       *perm            = tri_6;
111:       break;
112:     case 10:
113:       element_type_tmp = CGNS_ENUMV(TRI_10);
114:       *perm            = tri_10;
115:       break;
116:     default:
117:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
118:     }
119:     break;
120:   case DM_POLYTOPE_QUADRILATERAL:
121:     switch (closure_size) {
122:     case 4:
123:       element_type_tmp = CGNS_ENUMV(QUAD_4);
124:       *perm            = quad_4;
125:       break;
126:     case 9:
127:       element_type_tmp = CGNS_ENUMV(QUAD_9);
128:       *perm            = quad_9;
129:       break;
130:     case 16:
131:       element_type_tmp = CGNS_ENUMV(QUAD_16);
132:       *perm            = quad_16;
133:       break;
134:     case 25:
135:       element_type_tmp = CGNS_ENUMV(QUAD_25);
136:       *perm            = quad_25;
137:       break;
138:     default:
139:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
140:     }
141:     break;
142:   case DM_POLYTOPE_TETRAHEDRON:
143:     switch (closure_size) {
144:     case 4:
145:       element_type_tmp = CGNS_ENUMV(TETRA_4);
146:       *perm            = tetra_4;
147:       break;
148:     case 10:
149:       element_type_tmp = CGNS_ENUMV(TETRA_10);
150:       *perm            = tetra_10;
151:       break;
152:     case 20:
153:       element_type_tmp = CGNS_ENUMV(TETRA_20);
154:       *perm            = tetra_20;
155:       break;
156:     default:
157:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
158:     }
159:     break;
160:   case DM_POLYTOPE_HEXAHEDRON:
161:     switch (closure_size) {
162:     case 8:
163:       element_type_tmp = CGNS_ENUMV(HEXA_8);
164:       *perm            = hexa_8;
165:       break;
166:     case 27:
167:       element_type_tmp = CGNS_ENUMV(HEXA_27);
168:       *perm            = hexa_27;
169:       break;
170:     case 64:
171:       element_type_tmp = CGNS_ENUMV(HEXA_64);
172:       *perm            = hexa_64;
173:       break;
174:     default:
175:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
176:     }
177:     break;
178:   default:
179:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Cell type %s with closure size %" PetscInt_FMT, DMPolytopeTypes[cell_type], closure_size);
180:   }
181:   if (element_type) *element_type = element_type_tmp;
182:   PetscFunctionReturn(PETSC_SUCCESS);
183: }
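
/* Example (illustrative sketch, not called in this file): query the permutation for a linear
   hexahedron and scatter CGNS-ordered corner nodes into a Plex-ordered buffer, mirroring the
   loops in DMPlexCGNS_CreateCornersConnectivitySection() below. The arrays cgns_conn and
   plex_conn are placeholders of length 8 supplied by the caller.
.vb
  CGNS_ENUMT(ElementType_t) element_type;
  const int                *perm;

  PetscCall(DMPlexCGNSGetPermutation_Internal(DM_POLYTOPE_HEXAHEDRON, 8, &element_type, &perm)); // element_type == HEXA_8
  for (PetscInt v = 0; v < 8; ++v) plex_conn[perm[v]] = cgns_conn[v];
.ve
*/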

185: /*
186:   Input Parameters:
187: + cellType  - The CGNS-defined element type

189:   Output Parameters:
190: + dmcelltype  - The equivalent DMPolytopeType for the cellType
191: . numCorners - Number of corners of the polytope
192: - dim - The topological dimension of the polytope

194: CGNS elements defined in: https://cgns.github.io/CGNS_docs_current/sids/conv.html#unstructgrid
195: */
196: static inline PetscErrorCode CGNSElementTypeGetTopologyInfo(CGNS_ENUMT(ElementType_t) cellType, DMPolytopeType *dmcelltype, PetscInt *numCorners, PetscInt *dim)
197: {
198:   DMPolytopeType _dmcelltype;

200:   PetscFunctionBegin;
201:   switch (cellType) {
202:   case CGNS_ENUMV(BAR_2):
203:   case CGNS_ENUMV(BAR_3):
204:   case CGNS_ENUMV(BAR_4):
205:   case CGNS_ENUMV(BAR_5):
206:     _dmcelltype = DM_POLYTOPE_SEGMENT;
207:     break;
208:   case CGNS_ENUMV(TRI_3):
209:   case CGNS_ENUMV(TRI_6):
210:   case CGNS_ENUMV(TRI_9):
211:   case CGNS_ENUMV(TRI_10):
212:   case CGNS_ENUMV(TRI_12):
213:   case CGNS_ENUMV(TRI_15):
214:     _dmcelltype = DM_POLYTOPE_TRIANGLE;
215:     break;
216:   case CGNS_ENUMV(QUAD_4):
217:   case CGNS_ENUMV(QUAD_8):
218:   case CGNS_ENUMV(QUAD_9):
219:   case CGNS_ENUMV(QUAD_12):
220:   case CGNS_ENUMV(QUAD_16):
221:   case CGNS_ENUMV(QUAD_P4_16):
222:   case CGNS_ENUMV(QUAD_25):
223:     _dmcelltype = DM_POLYTOPE_QUADRILATERAL;
224:     break;
225:   case CGNS_ENUMV(TETRA_4):
226:   case CGNS_ENUMV(TETRA_10):
227:   case CGNS_ENUMV(TETRA_16):
228:   case CGNS_ENUMV(TETRA_20):
229:   case CGNS_ENUMV(TETRA_22):
230:   case CGNS_ENUMV(TETRA_34):
231:   case CGNS_ENUMV(TETRA_35):
232:     _dmcelltype = DM_POLYTOPE_TETRAHEDRON;
233:     break;
234:   case CGNS_ENUMV(PYRA_5):
235:   case CGNS_ENUMV(PYRA_13):
236:   case CGNS_ENUMV(PYRA_14):
237:   case CGNS_ENUMV(PYRA_21):
238:   case CGNS_ENUMV(PYRA_29):
239:   case CGNS_ENUMV(PYRA_P4_29):
240:   case CGNS_ENUMV(PYRA_30):
241:   case CGNS_ENUMV(PYRA_50):
242:   case CGNS_ENUMV(PYRA_55):
243:     _dmcelltype = DM_POLYTOPE_PYRAMID;
244:     break;
245:   case CGNS_ENUMV(PENTA_6):
246:   case CGNS_ENUMV(PENTA_15):
247:   case CGNS_ENUMV(PENTA_18):
248:   case CGNS_ENUMV(PENTA_24):
249:   case CGNS_ENUMV(PENTA_33):
250:   case CGNS_ENUMV(PENTA_38):
251:   case CGNS_ENUMV(PENTA_40):
252:   case CGNS_ENUMV(PENTA_66):
253:   case CGNS_ENUMV(PENTA_75):
254:     _dmcelltype = DM_POLYTOPE_TRI_PRISM;
255:     break;
256:   case CGNS_ENUMV(HEXA_8):
257:   case CGNS_ENUMV(HEXA_20):
258:   case CGNS_ENUMV(HEXA_27):
259:   case CGNS_ENUMV(HEXA_32):
260:   case CGNS_ENUMV(HEXA_44):
261:   case CGNS_ENUMV(HEXA_56):
262:   case CGNS_ENUMV(HEXA_64):
263:   case CGNS_ENUMV(HEXA_98):
264:   case CGNS_ENUMV(HEXA_125):
265:     _dmcelltype = DM_POLYTOPE_HEXAHEDRON;
266:     break;
267:   case CGNS_ENUMV(MIXED):
268:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid CGNS ElementType_t: MIXED");
269:   default:
270:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid CGNS ElementType_t: %d", (int)cellType);
271:   }

273:   if (dmcelltype) *dmcelltype = _dmcelltype;
274:   if (numCorners) *numCorners = DMPolytopeTypeGetNumVertices(_dmcelltype);
275:   if (dim) *dim = DMPolytopeTypeGetDim(_dmcelltype);
276:   PetscFunctionReturn(PETSC_SUCCESS);
277: }
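
/* Example (illustrative sketch): for a quadratic tetrahedron the helper above reports the
   matching Plex cell type, its corner count, and its topological dimension.
.vb
  DMPolytopeType ctype;
  PetscInt       numCorners, dim;

  PetscCall(CGNSElementTypeGetTopologyInfo(CGNS_ENUMV(TETRA_10), &ctype, &numCorners, &dim));
  // ctype == DM_POLYTOPE_TETRAHEDRON, numCorners == 4, dim == 3
.ve
*/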

279: /*
280:   Input Parameters:
281: + cellType  - The CGNS-defined cell type

283:   Output Parameters:
284: + numClosure - Number of nodes that define the function space on the cell
285: - pOrder - The polynomial order of the cell

287: CGNS elements defined in: https://cgns.github.io/CGNS_docs_current/sids/conv.html#unstructgrid

 289: Note: we only support "full" elements, i.e. not serendipity elements
290: */
291: static inline PetscErrorCode CGNSElementTypeGetDiscretizationInfo(CGNS_ENUMT(ElementType_t) cellType, PetscInt *numClosure, PetscInt *pOrder)
292: {
293:   PetscInt _numClosure, _pOrder;

295:   PetscFunctionBegin;
296:   switch (cellType) {
297:   case CGNS_ENUMV(BAR_2):
298:     _numClosure = 2;
299:     _pOrder     = 1;
300:     break;
301:   case CGNS_ENUMV(BAR_3):
302:     _numClosure = 3;
303:     _pOrder     = 2;
304:     break;
305:   case CGNS_ENUMV(BAR_4):
306:     _numClosure = 4;
307:     _pOrder     = 3;
308:     break;
309:   case CGNS_ENUMV(BAR_5):
310:     _numClosure = 5;
311:     _pOrder     = 4;
312:     break;
313:   case CGNS_ENUMV(TRI_3):
314:     _numClosure = 3;
315:     _pOrder     = 1;
316:     break;
317:   case CGNS_ENUMV(TRI_6):
318:     _numClosure = 6;
319:     _pOrder     = 2;
320:     break;
321:   case CGNS_ENUMV(TRI_10):
322:     _numClosure = 10;
323:     _pOrder     = 3;
324:     break;
325:   case CGNS_ENUMV(TRI_15):
326:     _numClosure = 15;
327:     _pOrder     = 4;
328:     break;
329:   case CGNS_ENUMV(QUAD_4):
330:     _numClosure = 4;
331:     _pOrder     = 1;
332:     break;
333:   case CGNS_ENUMV(QUAD_9):
334:     _numClosure = 9;
335:     _pOrder     = 2;
336:     break;
337:   case CGNS_ENUMV(QUAD_16):
338:     _numClosure = 16;
339:     _pOrder     = 3;
340:     break;
341:   case CGNS_ENUMV(QUAD_25):
342:     _numClosure = 25;
343:     _pOrder     = 4;
344:     break;
345:   case CGNS_ENUMV(TETRA_4):
346:     _numClosure = 4;
347:     _pOrder     = 1;
348:     break;
349:   case CGNS_ENUMV(TETRA_10):
350:     _numClosure = 10;
351:     _pOrder     = 2;
352:     break;
353:   case CGNS_ENUMV(TETRA_20):
354:     _numClosure = 20;
355:     _pOrder     = 3;
356:     break;
357:   case CGNS_ENUMV(TETRA_35):
358:     _numClosure = 35;
359:     _pOrder     = 4;
360:     break;
361:   case CGNS_ENUMV(PYRA_5):
362:     _numClosure = 5;
363:     _pOrder     = 1;
364:     break;
365:   case CGNS_ENUMV(PYRA_14):
366:     _numClosure = 14;
367:     _pOrder     = 2;
368:     break;
369:   case CGNS_ENUMV(PYRA_30):
370:     _numClosure = 30;
371:     _pOrder     = 3;
372:     break;
373:   case CGNS_ENUMV(PYRA_55):
374:     _numClosure = 55;
375:     _pOrder     = 4;
376:     break;
377:   case CGNS_ENUMV(PENTA_6):
378:     _numClosure = 6;
379:     _pOrder     = 1;
380:     break;
381:   case CGNS_ENUMV(PENTA_18):
382:     _numClosure = 18;
383:     _pOrder     = 2;
384:     break;
385:   case CGNS_ENUMV(PENTA_40):
386:     _numClosure = 40;
387:     _pOrder     = 3;
388:     break;
389:   case CGNS_ENUMV(PENTA_75):
390:     _numClosure = 75;
391:     _pOrder     = 4;
392:     break;
393:   case CGNS_ENUMV(HEXA_8):
394:     _numClosure = 8;
395:     _pOrder     = 1;
396:     break;
397:   case CGNS_ENUMV(HEXA_27):
398:     _numClosure = 27;
399:     _pOrder     = 2;
400:     break;
401:   case CGNS_ENUMV(HEXA_64):
402:     _numClosure = 64;
403:     _pOrder     = 3;
404:     break;
405:   case CGNS_ENUMV(HEXA_125):
406:     _numClosure = 125;
407:     _pOrder     = 4;
408:     break;
409:   case CGNS_ENUMV(MIXED):
410:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid CGNS ElementType_t: MIXED");
411:   default:
412:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unsupported or Invalid cell type %d", (int)cellType);
413:   }
414:   if (numClosure) *numClosure = _numClosure;
415:   if (pOrder) *pOrder = _pOrder;
416:   PetscFunctionReturn(PETSC_SUCCESS);
417: }
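
/* Example (illustrative sketch): the same TETRA_10 element has 10 closure nodes and represents
   polynomial order 2.
.vb
  PetscInt numClosure, pOrder;

  PetscCall(CGNSElementTypeGetDiscretizationInfo(CGNS_ENUMV(TETRA_10), &numClosure, &pOrder));
  // numClosure == 10, pOrder == 2
.ve
*/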

419: static PetscErrorCode PetscCGNSDataType(PetscDataType pd, CGNS_ENUMT(DataType_t) * cd)
420: {
421:   PetscFunctionBegin;
422:   switch (pd) {
423:   case PETSC_FLOAT:
424:     *cd = CGNS_ENUMV(RealSingle);
425:     break;
426:   case PETSC_DOUBLE:
427:     *cd = CGNS_ENUMV(RealDouble);
428:     break;
429:   case PETSC_COMPLEX:
430:     *cd = CGNS_ENUMV(ComplexDouble);
431:     break;
432:   default:
433:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Data type %s", PetscDataTypes[pd]);
434:   }
435:   PetscFunctionReturn(PETSC_SUCCESS);
436: }
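
/* Example (illustrative sketch): map the PETSc scalar type to the corresponding CGNS data type.
.vb
  CGNS_ENUMT(DataType_t) cgtype;

  PetscCall(PetscCGNSDataType(PETSC_DOUBLE, &cgtype)); // cgtype == RealDouble
.ve
*/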

438: PetscErrorCode DMPlexCreateCGNSFromFile_Internal(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm)
439: {
440:   int       cgid                = -1;
441:   PetscBool use_parallel_viewer = PETSC_FALSE;

443:   PetscFunctionBegin;
444:   PetscAssertPointer(filename, 2);
445:   PetscCall(PetscViewerCGNSRegisterLogEvents_Internal());
446:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_parallel", &use_parallel_viewer, NULL));

448:   if (use_parallel_viewer) {
449:     PetscCallCGNS(cgp_mpi_comm(comm));
450:     PetscCallCGNSOpen(cgp_open(filename, CG_MODE_READ, &cgid), 0, 0);
451:     PetscCheck(cgid > 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "cgp_open(\"%s\",...) did not return a valid file ID", filename);
452:     PetscCall(DMPlexCreateCGNS(comm, cgid, interpolate, dm));
453:     PetscCallCGNSClose(cgp_close(cgid), 0, 0);
454:   } else {
455:     PetscCallCGNSOpen(cg_open(filename, CG_MODE_READ, &cgid), 0, 0);
456:     PetscCheck(cgid > 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "cg_open(\"%s\",...) did not return a valid file ID", filename);
457:     PetscCall(DMPlexCreateCGNS(comm, cgid, interpolate, dm));
458:     PetscCallCGNSClose(cg_close(cgid), 0, 0);
459:   }
460:   PetscFunctionReturn(PETSC_SUCCESS);
461: }
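
/* Usage note (illustrative): whether the parallel (cgp_*) or serial (cg_*) branch above is taken
   is controlled at runtime by the option read in this routine, e.g.
.vb
  ./myapp -dm_plex_cgns_parallel
.ve
   where myapp stands for any program that reaches DMPlexCreateCGNSFromFile_Internal(); without
   the option, the serial cg_open() branch is used.
*/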

463: PetscErrorCode DMPlexCreateCGNS_Internal_Serial(MPI_Comm comm, PetscInt cgid, PetscBool interpolate, DM *dm)
464: {
465:   PetscMPIInt  num_proc, rank;
466:   DM           cdm;
467:   DMLabel      label;
468:   PetscSection coordSection;
469:   Vec          coordinates;
470:   PetscScalar *coords;
471:   PetscInt    *cellStart, *vertStart, v;
472:   PetscInt     labelIdRange[2], labelId;
473:   /* Read from file */
474:   char      basename[CGIO_MAX_NAME_LENGTH + 1];
475:   char      buffer[CGIO_MAX_NAME_LENGTH + 1];
476:   int       dim = 0, physDim = 0, coordDim = 0, numVertices = 0, numCells = 0;
477:   int       nzones = 0;
478:   const int B      = 1; // Only support single base

480:   PetscFunctionBegin;
481:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
482:   PetscCallMPI(MPI_Comm_size(comm, &num_proc));
483:   PetscCall(DMCreate(comm, dm));
484:   PetscCall(DMSetType(*dm, DMPLEX));

486:   /* Open CGNS II file and read basic information on rank 0, then broadcast to all processors */
487:   if (rank == 0) {
488:     int nbases, z;

490:     PetscCallCGNSRead(cg_nbases(cgid, &nbases), *dm, 0);
491:     PetscCheck(nbases <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single base, not %d", nbases);
492:     PetscCallCGNSRead(cg_base_read(cgid, B, basename, &dim, &physDim), *dm, 0);
493:     PetscCallCGNSRead(cg_nzones(cgid, B, &nzones), *dm, 0);
494:     PetscCall(PetscCalloc2(nzones + 1, &cellStart, nzones + 1, &vertStart));
495:     for (z = 1; z <= nzones; ++z) {
496:       cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */

498:       PetscCallCGNSRead(cg_zone_read(cgid, B, z, buffer, sizes), *dm, 0);
499:       numVertices += sizes[0];
500:       numCells += sizes[1];
501:       cellStart[z] += sizes[1] + cellStart[z - 1];
502:       vertStart[z] += sizes[0] + vertStart[z - 1];
503:     }
504:     for (z = 1; z <= nzones; ++z) vertStart[z] += numCells;
505:     coordDim = dim;
506:   }
507:   PetscCallMPI(MPI_Bcast(basename, CGIO_MAX_NAME_LENGTH + 1, MPI_CHAR, 0, comm));
508:   PetscCallMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm));
509:   PetscCallMPI(MPI_Bcast(&coordDim, 1, MPI_INT, 0, comm));
510:   PetscCallMPI(MPI_Bcast(&nzones, 1, MPI_INT, 0, comm));

512:   PetscCall(PetscObjectSetName((PetscObject)*dm, basename));
513:   PetscCall(DMSetDimension(*dm, dim));
514:   PetscCall(DMCreateLabel(*dm, "celltype"));
515:   PetscCall(DMPlexSetChart(*dm, 0, numCells + numVertices));

517:   /* Read zone information */
518:   if (rank == 0) {
519:     int z, c, c_loc;

 521:     /* Read the cell set connectivity table and build the mesh topology.
 522:        The CGNS standard requires that cells in a zone be numbered sequentially and be pairwise disjoint. */
523:     /* First set sizes */
524:     for (z = 1, c = 0; z <= nzones; ++z) {
525:       CGNS_ENUMT(ZoneType_t) zonetype;
526:       int nsections;
527:       CGNS_ENUMT(ElementType_t) cellType;
528:       cgsize_t       start, end;
529:       int            nbndry, parentFlag;
530:       PetscInt       numCorners, pOrder;
531:       DMPolytopeType ctype;
532:       const int      S = 1; // Only support single section

534:       PetscCallCGNSRead(cg_zone_type(cgid, B, z, &zonetype), *dm, 0);
535:       PetscCheck(zonetype != CGNS_ENUMV(Structured), PETSC_COMM_SELF, PETSC_ERR_LIB, "Can only handle Unstructured zones for CGNS");
536:       PetscCallCGNSRead(cg_nsections(cgid, B, z, &nsections), *dm, 0);
537:       PetscCheck(nsections <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single section, not %d", nsections);
538:       PetscCallCGNSRead(cg_section_read(cgid, B, z, S, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);
539:       if (cellType == CGNS_ENUMV(MIXED)) {
540:         cgsize_t elementDataSize, *elements;
541:         PetscInt off;

543:         PetscCallCGNSRead(cg_ElementDataSize(cgid, B, z, S, &elementDataSize), *dm, 0);
544:         PetscCall(PetscMalloc1(elementDataSize, &elements));
545:         PetscCallCGNSReadData(cg_poly_elements_read(cgid, B, z, S, elements, NULL, NULL), *dm, 0);
546:         for (c_loc = start, off = 0; c_loc <= end; ++c_loc, ++c) {
547:           PetscCall(CGNSElementTypeGetTopologyInfo((CGNS_ENUMT(ElementType_t))elements[off], &ctype, &numCorners, NULL));
548:           PetscCall(CGNSElementTypeGetDiscretizationInfo((CGNS_ENUMT(ElementType_t))elements[off], NULL, &pOrder));
549:           PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
550:           PetscCall(DMPlexSetConeSize(*dm, c, numCorners));
551:           PetscCall(DMPlexSetCellType(*dm, c, ctype));
552:           off += numCorners + 1;
553:         }
554:         PetscCall(PetscFree(elements));
555:       } else {
556:         PetscCall(CGNSElementTypeGetTopologyInfo(cellType, &ctype, &numCorners, NULL));
557:         PetscCall(CGNSElementTypeGetDiscretizationInfo(cellType, NULL, &pOrder));
558:         PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
559:         for (c_loc = start; c_loc <= end; ++c_loc, ++c) {
560:           PetscCall(DMPlexSetConeSize(*dm, c, numCorners));
561:           PetscCall(DMPlexSetCellType(*dm, c, ctype));
562:         }
563:       }
564:     }
565:     for (v = numCells; v < numCells + numVertices; ++v) PetscCall(DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT));
566:   }

568:   PetscCall(DMSetUp(*dm));

570:   PetscCall(DMCreateLabel(*dm, "zone"));
571:   if (rank == 0) {
572:     int z, c, c_loc, v_loc;

574:     PetscCall(DMGetLabel(*dm, "zone", &label));
575:     for (z = 1, c = 0; z <= nzones; ++z) {
576:       CGNS_ENUMT(ElementType_t) cellType;
577:       cgsize_t  elementDataSize, *elements, start, end;
578:       int       nbndry, parentFlag;
579:       PetscInt *cone, numc, numCorners, maxCorners = 27, pOrder;
580:       const int S = 1; // Only support single section

582:       PetscCallCGNSRead(cg_section_read(cgid, B, z, S, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);
583:       numc = end - start;
584:       PetscCallCGNSRead(cg_ElementDataSize(cgid, B, z, S, &elementDataSize), *dm, 0);
585:       PetscCall(PetscMalloc2(elementDataSize, &elements, maxCorners, &cone));
586:       PetscCallCGNSReadData(cg_poly_elements_read(cgid, B, z, S, elements, NULL, NULL), *dm, 0);
587:       if (cellType == CGNS_ENUMV(MIXED)) {
 588:         /* CGNS uses Fortran-based indexing, DMPlex uses C-style and numbers cells first, then vertices. */
589:         for (c_loc = 0, v = 0; c_loc <= numc; ++c_loc, ++c) {
590:           PetscCall(CGNSElementTypeGetTopologyInfo((CGNS_ENUMT(ElementType_t))elements[v], NULL, &numCorners, NULL));
591:           PetscCall(CGNSElementTypeGetDiscretizationInfo((CGNS_ENUMT(ElementType_t))elements[v], NULL, &pOrder));
592:           PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
593:           ++v;
594:           for (v_loc = 0; v_loc < numCorners; ++v_loc, ++v) cone[v_loc] = elements[v] + numCells - 1;
595:           PetscCall(DMPlexReorderCell(*dm, c, cone));
596:           PetscCall(DMPlexSetCone(*dm, c, cone));
597:           PetscCall(DMLabelSetValue(label, c, z));
598:         }
599:       } else {
600:         PetscCall(CGNSElementTypeGetTopologyInfo(cellType, NULL, &numCorners, NULL));
601:         PetscCall(CGNSElementTypeGetDiscretizationInfo(cellType, NULL, &pOrder));
602:         PetscCheck(pOrder == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Serial CGNS reader only supports first order elements, not %" PetscInt_FMT " order", pOrder);
 603:         /* CGNS uses Fortran-based indexing, DMPlex uses C-style and numbers cells first, then vertices. */
604:         for (c_loc = 0, v = 0; c_loc <= numc; ++c_loc, ++c) {
605:           for (v_loc = 0; v_loc < numCorners; ++v_loc, ++v) cone[v_loc] = elements[v] + numCells - 1;
606:           PetscCall(DMPlexReorderCell(*dm, c, cone));
607:           PetscCall(DMPlexSetCone(*dm, c, cone));
608:           PetscCall(DMLabelSetValue(label, c, z));
609:         }
610:       }
611:       PetscCall(PetscFree2(elements, cone));
612:     }
613:   }

615:   PetscCall(DMPlexSymmetrize(*dm));
616:   PetscCall(DMPlexStratify(*dm));
617:   if (interpolate) PetscCall(DMPlexInterpolateInPlace_Internal(*dm));

619:   /* Read coordinates */
620:   PetscCall(DMSetCoordinateDim(*dm, coordDim));
621:   PetscCall(DMGetCoordinateDM(*dm, &cdm));
622:   PetscCall(DMGetLocalSection(cdm, &coordSection));
623:   PetscCall(PetscSectionSetNumFields(coordSection, 1));
624:   PetscCall(PetscSectionSetFieldComponents(coordSection, 0, coordDim));
625:   PetscCall(PetscSectionSetChart(coordSection, numCells, numCells + numVertices));
626:   for (v = numCells; v < numCells + numVertices; ++v) {
627:     PetscCall(PetscSectionSetDof(coordSection, v, dim));
628:     PetscCall(PetscSectionSetFieldDof(coordSection, v, 0, coordDim));
629:   }
630:   PetscCall(PetscSectionSetUp(coordSection));

632:   PetscCall(DMCreateLocalVector(cdm, &coordinates));
633:   PetscCall(VecGetArray(coordinates, &coords));
634:   if (rank == 0) {
635:     PetscInt off = 0;
636:     float   *x[3];
637:     int      z, d;

639:     PetscCall(PetscMalloc3(numVertices, &x[0], numVertices, &x[1], numVertices, &x[2]));
640:     for (z = 1; z <= nzones; ++z) {
641:       CGNS_ENUMT(DataType_t) datatype;
642:       cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */
643:       cgsize_t range_min[3] = {1, 1, 1};
644:       cgsize_t range_max[3] = {1, 1, 1};
645:       int      ngrids, ncoords;

647:       PetscCallCGNSRead(cg_zone_read(cgid, B, z, buffer, sizes), *dm, 0);
648:       range_max[0] = sizes[0];
649:       PetscCallCGNSRead(cg_ngrids(cgid, B, z, &ngrids), *dm, 0);
650:       PetscCheck(ngrids <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single grid, not %d", ngrids);
651:       PetscCallCGNSRead(cg_ncoords(cgid, B, z, &ncoords), *dm, 0);
652:       PetscCheck(ncoords == coordDim, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a coordinate array for each dimension, not %d", ncoords);
653:       for (d = 0; d < coordDim; ++d) {
654:         PetscCallCGNSRead(cg_coord_info(cgid, B, z, 1 + d, &datatype, buffer), *dm, 0);
655:         PetscCallCGNSReadData(cg_coord_read(cgid, B, z, buffer, CGNS_ENUMV(RealSingle), range_min, range_max, x[d]), *dm, 0);
656:       }
657:       if (coordDim >= 1) {
658:         for (v = 0; v < sizes[0]; ++v) coords[(v + off) * coordDim + 0] = x[0][v];
659:       }
660:       if (coordDim >= 2) {
661:         for (v = 0; v < sizes[0]; ++v) coords[(v + off) * coordDim + 1] = x[1][v];
662:       }
663:       if (coordDim >= 3) {
664:         for (v = 0; v < sizes[0]; ++v) coords[(v + off) * coordDim + 2] = x[2][v];
665:       }
666:       off += sizes[0];
667:     }
668:     PetscCall(PetscFree3(x[0], x[1], x[2]));
669:   }
670:   PetscCall(VecRestoreArray(coordinates, &coords));

672:   PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
673:   PetscCall(VecSetBlockSize(coordinates, coordDim));
674:   PetscCall(DMSetCoordinatesLocal(*dm, coordinates));
675:   PetscCall(VecDestroy(&coordinates));

677:   /* Read boundary conditions */
678:   PetscCall(DMGetNumLabels(*dm, &labelIdRange[0]));
679:   if (rank == 0) {
680:     CGNS_ENUMT(BCType_t) bctype;
681:     CGNS_ENUMT(DataType_t) datatype;
682:     CGNS_ENUMT(PointSetType_t) pointtype;
683:     cgsize_t  *points;
684:     PetscReal *normals;
685:     int        normal[3];
686:     char      *bcname = buffer;
687:     cgsize_t   npoints, nnormals;
688:     int        z, nbc, bc, c, ndatasets;

690:     for (z = 1; z <= nzones; ++z) {
691:       PetscCallCGNSRead(cg_nbocos(cgid, B, z, &nbc), *dm, 0);
692:       for (bc = 1; bc <= nbc; ++bc) {
693:         PetscCallCGNSRead(cg_boco_info(cgid, B, z, bc, bcname, &bctype, &pointtype, &npoints, normal, &nnormals, &datatype, &ndatasets), *dm, 0);
694:         PetscCall(DMCreateLabel(*dm, bcname));
695:         PetscCall(DMGetLabel(*dm, bcname, &label));
696:         PetscCall(PetscMalloc2(npoints, &points, nnormals, &normals));
697:         PetscCallCGNSReadData(cg_boco_read(cgid, B, z, bc, points, (void *)normals), *dm, 0);
698:         if (pointtype == CGNS_ENUMV(ElementRange)) {
699:           // Range of cells: assuming half-open interval
700:           for (c = points[0]; c < points[1]; ++c) PetscCall(DMLabelSetValue(label, c - cellStart[z - 1], 1));
701:         } else if (pointtype == CGNS_ENUMV(ElementList)) {
702:           // List of cells
703:           for (c = 0; c < npoints; ++c) PetscCall(DMLabelSetValue(label, points[c] - cellStart[z - 1], 1));
704:         } else if (pointtype == CGNS_ENUMV(PointRange)) {
705:           CGNS_ENUMT(GridLocation_t) gridloc;

 707:           // Range of points: determine the grid location to decide between vertex and cell offsets
708:           PetscCallCGNS(cg_goto(cgid, 1, "Zone_t", z, "BC_t", bc, "end"));
709:           PetscCallCGNSRead(cg_gridlocation_read(&gridloc), *dm, 0);
710:           // Range of points: assuming half-open interval
711:           for (c = points[0]; c < points[1]; ++c) {
712:             if (gridloc == CGNS_ENUMV(Vertex)) PetscCall(DMLabelSetValue(label, c - vertStart[z - 1], 1));
713:             else PetscCall(DMLabelSetValue(label, c - cellStart[z - 1], 1));
714:           }
715:         } else if (pointtype == CGNS_ENUMV(PointList)) {
716:           CGNS_ENUMT(GridLocation_t) gridloc;

718:           // List of points:
719:           PetscCallCGNS(cg_goto(cgid, 1, "Zone_t", z, "BC_t", bc, "end"));
720:           PetscCallCGNSRead(cg_gridlocation_read(&gridloc), *dm, 0);
721:           for (c = 0; c < npoints; ++c) {
722:             if (gridloc == CGNS_ENUMV(Vertex)) PetscCall(DMLabelSetValue(label, points[c] - vertStart[z - 1], 1));
723:             else PetscCall(DMLabelSetValue(label, points[c] - cellStart[z - 1], 1));
724:           }
725:         } else SETERRQ(comm, PETSC_ERR_SUP, "Unsupported point set type %d", (int)pointtype);
726:         PetscCall(PetscFree2(points, normals));
727:       }
728:     }
729:     PetscCall(PetscFree2(cellStart, vertStart));
730:   }
731:   PetscCall(DMGetNumLabels(*dm, &labelIdRange[1]));
732:   PetscCallMPI(MPI_Bcast(labelIdRange, 2, MPIU_INT, 0, comm));

734:   /* Create BC labels at all processes */
735:   for (labelId = labelIdRange[0]; labelId < labelIdRange[1]; ++labelId) {
736:     char       *labelName = buffer;
737:     size_t      len       = sizeof(buffer);
738:     const char *locName;

740:     if (rank == 0) {
741:       PetscCall(DMGetLabelByNum(*dm, labelId, &label));
742:       PetscCall(PetscObjectGetName((PetscObject)label, &locName));
743:       PetscCall(PetscStrncpy(labelName, locName, len));
744:     }
 745:     PetscCallMPI(MPI_Bcast(labelName, (PetscMPIInt)len, MPI_CHAR, 0, comm));
 746:     PetscCall(DMCreateLabel(*dm, labelName));
747:   }
748:   PetscFunctionReturn(PETSC_SUCCESS);
749: }

751: typedef struct {
752:   cgsize_t start, end;
753: } CGRange;

 755: // Creates a PetscLayout from the given sizes, but shifts the ranges by the offset, so the first rank's range will be [offset, offset + local_size) rather than [0, local_size)
756: static PetscErrorCode PetscLayoutCreateFromSizesAndOffset(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt bs, PetscInt offset, PetscLayout *map)
757: {
758:   PetscLayout     init;
759:   const PetscInt *ranges;
760:   PetscInt       *new_ranges;
761:   PetscMPIInt     num_ranks;

763:   PetscFunctionBegin;
764:   PetscCall(PetscLayoutCreateFromSizes(comm, n, N, bs, &init));
765:   PetscCall(PetscLayoutGetRanges(init, &ranges));
766:   PetscCallMPI(MPI_Comm_size(comm, &num_ranks));
767:   PetscCall(PetscMalloc1(num_ranks + 1, &new_ranges));
768:   PetscCall(PetscArraycpy(new_ranges, ranges, num_ranks + 1));
769:   for (PetscInt r = 0; r < num_ranks + 1; r++) new_ranges[r] += offset;
770:   PetscCall(PetscLayoutCreateFromRanges(comm, new_ranges, PETSC_OWN_POINTER, bs, map));
771:   PetscCall(PetscLayoutDestroy(&init));
772:   PetscFunctionReturn(PETSC_SUCCESS);
773: }
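
/* Example (illustrative sketch): with two ranks, N = 10, n = PETSC_DECIDE, and offset = 5,
   PetscLayoutCreateFromSizes() would typically produce the ranges [0,5) and [5,10), so the layout
   returned here has ranges [5,10) and [10,15). The exact split is up to PETSC_DECIDE, so the
   numbers are only indicative.
.vb
  PetscLayout map;

  PetscCall(PetscLayoutCreateFromSizesAndOffset(comm, PETSC_DECIDE, 10, 1, 5, &map));
  // ... use map ...
  PetscCall(PetscLayoutDestroy(&map));
.ve
*/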

775: /**
776:   @brief Creates connectivity array of CGNS elements' corners in Plex ordering, with a `PetscSection` to describe the data layout

778:   @param[in]  dm           `DM`
779:   @param[in]  cgid         CGNS file ID
780:   @param[in]  base         CGNS base ID
781:   @param[in]  zone         CGNS zone ID
782:   @param[in]  num_sections Number of sections to put in connectivity
783:   @param[in]  section_ids  CGNS section IDs to obtain connectivity from
784:   @param[out] section      Section describing the connectivity for each element
785:   @param[out] cellTypes    Array specifying the CGNS ElementType_t for each element
786:   @param[out] cell_ids     CGNS IDs of the cells
787:   @param[out] layouts      Array of `PetscLayout` that describes the distributed ownership of the cells in each `section_ids`
788:   @param[out] connectivity Array of the cell connectivity, described by `section`. The vertices are the local Plex IDs

790:   @description

792:   Each point in `section` corresponds to the `cellTypes` and `cell_ids` arrays.
793:   The dof and offset of `section` maps into the `connectivity` array.

795:   The `layouts` array is intended to be used with `PetscLayoutFindOwnerIndex_CGNSSectionLayouts()`
796: **/
797: static PetscErrorCode DMPlexCGNS_CreateCornersConnectivitySection(DM dm, PetscInt cgid, int base, int zone, PetscInt num_sections, const int section_ids[], PetscSection *section, CGNS_ENUMT(ElementType_t) * cellTypes[], PetscInt *cell_ids[], PetscLayout *layouts[], PetscInt *connectivity[])
798: {
799:   MPI_Comm     comm = PetscObjectComm((PetscObject)dm);
800:   PetscSection section_;
801:   char         buffer[CGIO_MAX_NAME_LENGTH + 1];
802:   CGNS_ENUMT(ElementType_t) * sectionCellTypes, *cellTypes_;
803:   CGRange       *ranges;
804:   PetscInt       nlocal_cells = 0, global_cell_dim = -1;
805:   PetscSegBuffer conn_sb;
806:   PetscLayout   *layouts_;

808:   PetscFunctionBegin;
809:   PetscCall(PetscMalloc2(num_sections, &ranges, num_sections, &sectionCellTypes));
810:   PetscCall(PetscMalloc1(num_sections, &layouts_));
811:   for (PetscInt s = 0; s < num_sections; s++) {
812:     int      nbndry, parentFlag;
813:     PetscInt local_size;

815:     PetscCallCGNSRead(cg_section_read(cgid, base, zone, section_ids[s], buffer, &sectionCellTypes[s], &ranges[s].start, &ranges[s].end, &nbndry, &parentFlag), dm, 0);
816:     PetscCheck(sectionCellTypes[s] != CGNS_ENUMV(NGON_n) && sectionCellTypes[s] != CGNS_ENUMV(NFACE_n), comm, PETSC_ERR_SUP, "CGNS reader does not support elements of type NGON_n or NFACE_n");
817:     PetscInt num_section_cells = ranges[s].end - ranges[s].start + 1;
818:     PetscCall(PetscLayoutCreateFromSizesAndOffset(comm, PETSC_DECIDE, num_section_cells, 1, ranges[s].start, &layouts_[s]));
819:     PetscCall(PetscLayoutGetLocalSize(layouts_[s], &local_size));
820:     nlocal_cells += local_size;
821:   }
822:   PetscCall(PetscSectionCreate(comm, &section_));
823:   PetscCall(PetscSectionSetChart(section_, 0, nlocal_cells));

825:   PetscCall(PetscMalloc1(nlocal_cells, cell_ids));
826:   PetscCall(PetscMalloc1(nlocal_cells, &cellTypes_));
827:   PetscCall(PetscSegBufferCreate(sizeof(PetscInt), nlocal_cells * 2, &conn_sb));
828:   for (PetscInt s = 0, c = 0; s < num_sections; s++) {
829:     PetscInt mystart, myend, myowned;

831:     PetscCall(PetscLayoutGetRange(layouts_[s], &mystart, &myend));
832:     PetscCall(PetscLayoutGetLocalSize(layouts_[s], &myowned));
833:     if (sectionCellTypes[s] == CGNS_ENUMV(MIXED)) {
834:       cgsize_t *offsets, *conn_cg;

836:       PetscCall(PetscMalloc1(myowned + 1, &offsets)); // The last element in the array is the total size of the connectivity for the given [start,end] range
837:       PetscCallCGNSRead(cgp_poly_elements_read_data_offsets(cgid, base, zone, section_ids[s], mystart, myend - 1, offsets), dm, 0);
 838:       PetscCall(PetscMalloc1(offsets[myowned], &conn_cg));
839:       PetscCallCGNSRead(cgp_poly_elements_read_data_elements(cgid, base, zone, section_ids[s], mystart, myend - 1, offsets, conn_cg), dm, 0);
840:       for (PetscInt i = 0; i < myowned; i++) {
841:         DMPolytopeType dm_cell_type = DM_POLYTOPE_UNKNOWN;
842:         PetscInt       numCorners, cell_dim, *conn_sb_seg;
843:         const int     *perm;

845:         (*cell_ids)[c] = mystart + i;

847:         cellTypes_[c] = (CGNS_ENUMT(ElementType_t))conn_cg[offsets[i]];
848:         PetscCall(CGNSElementTypeGetTopologyInfo(cellTypes_[c], &dm_cell_type, &numCorners, &cell_dim));
849:         if (global_cell_dim == -1) global_cell_dim = cell_dim;
850:         else
851:           PetscCheck(cell_dim == global_cell_dim, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Can only combine cells of the same dimension. Global cell dimension detected as %" PetscInt_FMT ", but CGNS element %" PetscInt_FMT " is dimension %" PetscInt_FMT, global_cell_dim, (*cell_ids)[c], cell_dim);
852:         PetscCall(PetscSegBufferGetInts(conn_sb, numCorners, &conn_sb_seg));

854:         PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numCorners, NULL, &perm));
855:         for (PetscInt v = 0; v < numCorners; ++v) conn_sb_seg[perm[v]] = conn_cg[offsets[i] + 1 + v];
856:         PetscCall(PetscSectionSetDof(section_, c, numCorners));
857:         c++;
858:       }
859:       PetscCall(PetscFree(offsets));
860:       PetscCall(PetscFree(conn_cg));
861:     } else {
862:       PetscInt       numCorners, cell_dim;
863:       PetscInt      *conn_sb_seg;
864:       DMPolytopeType dm_cell_type = DM_POLYTOPE_UNKNOWN;
865:       int            npe; // nodes per element
866:       const int     *perm;
867:       cgsize_t      *conn_cg;

869:       PetscCall(CGNSElementTypeGetTopologyInfo(sectionCellTypes[s], &dm_cell_type, &numCorners, &cell_dim));
870:       if (global_cell_dim == -1) global_cell_dim = cell_dim;
871:       else
872:         PetscCheck(cell_dim == global_cell_dim, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Can only combine cells of the same dimension. Global cell dimension detected as %" PetscInt_FMT ", but CGNS element %" PetscInt_FMT " is dimension %" PetscInt_FMT, global_cell_dim, (*cell_ids)[c], cell_dim);

874:       PetscCallCGNSRead(cg_npe(sectionCellTypes[s], &npe), dm, 0);
875:       PetscCall(PetscMalloc1(myowned * npe, &conn_cg));
876:       PetscCallCGNSRead(cgp_elements_read_data(cgid, base, zone, section_ids[s], mystart, myend - 1, conn_cg), dm, 0);
877:       PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numCorners, NULL, &perm));
878:       PetscCall(PetscSegBufferGetInts(conn_sb, numCorners * myowned, &conn_sb_seg));
879:       for (PetscInt i = 0; i < myowned; i++) {
880:         (*cell_ids)[c] = mystart + i;
881:         cellTypes_[c]  = sectionCellTypes[s];
882:         for (PetscInt v = 0; v < numCorners; ++v) conn_sb_seg[i * numCorners + perm[v]] = conn_cg[i * npe + v];
883:         PetscCall(PetscSectionSetDof(section_, c, numCorners));
884:         c++;
885:       }
886:       PetscCall(PetscFree(conn_cg));
887:     }
888:   }

890:   PetscCall(PetscSectionSetUp(section_));
891:   *section = section_;
892:   PetscCall(PetscSegBufferExtractAlloc(conn_sb, connectivity));
893:   PetscInt connSize;
894:   PetscCall(PetscSectionGetStorageSize(section_, &connSize));
895:   for (PetscInt i = 0; i < connSize; i++) (*connectivity)[i] -= 1; // vertices should be 0-based indexing for consistency with DMPlexBuildFromCellListParallel()
896:   *layouts = layouts_;
897:   if (cellTypes) *cellTypes = cellTypes_;
898:   else PetscCall(PetscFree(cellTypes_));

900:   PetscCall(PetscSegBufferDestroy(&conn_sb));
901:   PetscCall(PetscFree2(ranges, sectionCellTypes));
902:   PetscFunctionReturn(PETSC_SUCCESS);
903: }
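
/* Example (illustrative sketch): traversing the output of the routine above. The dof and offset
   of `section` select the Plex-ordered corner vertices of each local cell inside `connectivity`
   (0-based CGNS vertex numbers); `cell_ids` and `cellTypes` are indexed by the same points.
.vb
  PetscInt cStart, cEnd;

  PetscCall(PetscSectionGetChart(section, &cStart, &cEnd));
  for (PetscInt c = cStart; c < cEnd; ++c) {
    PetscInt ncorners, offset;

    PetscCall(PetscSectionGetDof(section, c, &ncorners));
    PetscCall(PetscSectionGetOffset(section, c, &offset));
    // cell_ids[c] is the CGNS element ID, cellTypes[c] its ElementType_t, and
    // connectivity[offset .. offset + ncorners - 1] are its corner vertices
  }
.ve
*/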

905: static inline PetscErrorCode PetscFindIntUnsorted(PetscInt key, PetscInt size, const PetscInt array[], PetscInt *loc)
906: {
907:   PetscFunctionBegin;
908:   *loc = -1;
909:   for (PetscInt i = 0; i < size; i++) {
910:     if (array[i] == key) {
911:       *loc = i;
912:       break;
913:     }
914:   }
915:   PetscFunctionReturn(PETSC_SUCCESS);
916: }

 918: // Same as `PetscLayoutFindOwnerIndex()`, but does not fail if the index is not owned by any rank in the PetscLayout
919: static PetscErrorCode PetscLayoutFindOwnerIndex_Internal(PetscLayout map, PetscInt idx, PetscMPIInt *owner, PetscInt *lidx, PetscBool *found_owner)
920: {
921:   PetscMPIInt lo = 0, hi, t;

923:   PetscFunctionBegin;
924:   PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first");
925:   if (owner) *owner = -1;
926:   if (lidx) *lidx = -1;
 927:   if (idx < map->range[0] || idx >= map->range[map->size]) {
928:     *found_owner = PETSC_FALSE;
929:     PetscFunctionReturn(PETSC_SUCCESS);
930:   }
931:   hi = map->size;
932:   while (hi - lo > 1) {
933:     t = lo + (hi - lo) / 2;
934:     if (idx < map->range[t]) hi = t;
935:     else lo = t;
936:   }
937:   if (owner) *owner = lo;
938:   if (lidx) *lidx = idx - map->range[lo];
939:   if (found_owner) *found_owner = PETSC_TRUE;
940:   PetscFunctionReturn(PETSC_SUCCESS);
941: }
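
/* Example (illustrative sketch): for a layout whose ranges are [5,10) and [10,15), idx = 12
   yields owner = 1, lidx = 2, and found_owner = PETSC_TRUE, whereas idx = 3 leaves owner and
   lidx at -1 and sets found_owner = PETSC_FALSE.
.vb
  PetscMPIInt owner;
  PetscInt    lidx;
  PetscBool   found;

  PetscCall(PetscLayoutFindOwnerIndex_Internal(map, 12, &owner, &lidx, &found));
.ve
*/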

 943: // This function assumes that there is an array whose layout is described by `maps`. Locally, the ranges of the maps are concatenated one after the other,
 944: // i.e. [maps[0].start, ..., maps[0].end - 1, maps[1].start, ..., maps[1].end - 1, ...]
 945: // The returned local index is the index into this concatenated array
946: static PetscErrorCode PetscLayoutFindOwnerIndex_CGNSSectionLayouts(PetscLayout maps[], PetscInt nmaps, PetscInt idx, PetscMPIInt *owner, PetscInt *lidx, PetscInt *mapidx)
947: {
948:   PetscFunctionBegin;
949:   for (PetscInt m = 0; m < nmaps; m++) {
950:     PetscBool found_owner = PETSC_FALSE;
951:     PetscCall(PetscLayoutFindOwnerIndex_Internal(maps[m], idx, owner, lidx, &found_owner));
952:     if (found_owner) {
953:       // Now loop back through the previous maps to get the local offset for the containing index
954:       for (PetscInt mm = m - 1; mm >= 0; mm--) {
955:         PetscInt size = maps[mm]->range[*owner + 1] - maps[mm]->range[*owner];
956:         *lidx += size;
957:       }
958:       if (mapidx) *mapidx = m;
959:       PetscFunctionReturn(PETSC_SUCCESS);
960:     }
961:   }
962:   SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "CGNS id %" PetscInt_FMT " not found in layouts", idx);
963: }
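
/* Example (illustrative sketch): suppose two maps have local ranges [100,104) and [200,203) on
   rank r, so the concatenated local array holds [100, 101, 102, 103, 200, 201, 202]. Looking up
   idx = 201 then gives owner = r, mapidx = 1, and lidx = 5 (1 within the second map plus the 4
   entries owned from the first map).
.vb
  PetscMPIInt owner;
  PetscInt    lidx, mapidx;

  PetscCall(PetscLayoutFindOwnerIndex_CGNSSectionLayouts(maps, 2, 201, &owner, &lidx, &mapidx));
.ve
*/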

965: /*
966:   @brief Matching root and leaf indices across processes, allowing multiple roots per leaf

968:   Collective

970:   Input Parameters:
971: + layout           - `PetscLayout` defining the global index space and the MPI rank that brokers each index
972: . numRootIndices   - size of `rootIndices`
973: . rootIndices      - array of global indices of which this process requests ownership
974: . rootLocalIndices - root local index permutation (`NULL` if no permutation)
975: . rootLocalOffset  - offset to be added to `rootLocalIndices`
976: . numLeafIndices   - size of `leafIndices`
977: . leafIndices      - array of global indices with which this process requires data associated
978: . leafLocalIndices - leaf local index permutation (`NULL` if no permutation)
979: - leafLocalOffset  - offset to be added to `leafLocalIndices`

981:   Output Parameters:
982: + matchSection - The `PetscSection` describing the layout of `matches` with respect to the `leafIndices` (the points of the section indexes into `leafIndices`)
983: - matches      - Array of `PetscSFNode` denoting the location (rank and index) of the root matching the leaf.

985:   Example 1:
986: .vb
987:   rank             : 0            1            2
988:   rootIndices      : [1 0 2]      [3]          [3]
989:   rootLocalOffset  : 100          200          300
990:   layout           : [0 1]        [2]          [3]
991:   leafIndices      : [0]          [2]          [0 3]
992:   leafLocalOffset  : 400          500          600

994: would build the following result

996:   rank 0: [0] 400 <- [0] (0,101)
997:   ------------------------------
998:   rank 1: [0] 500 <- [0] (0,102)
999:   ------------------------------
1000:   rank 2: [0] 600 <- [0] (0,101)
1001:           [1] 601 <- [1] (1,200)
1002:           [1] 601 <- [2] (2,300)
1003:            |   |      |     |
1004:            |   |      |     + `matches`, the rank and index of the respective match with the leaf index
1005:            |   |      + index into `matches` array
1006:            |   + the leaves for the respective root
1007:            + The point in `matchSection` (indexes into `leafIndices`)

1009:   For rank 2, the `matchSection` would be:

1011:   [0]: (1, 0)
1012:   [1]: (2, 1)
1013:    |    |  |
1014:    |    |  + offset
1015:    |    + ndof
1016:    + point
1017: .ve

1019:   Notes:
1020:   This function is identical to `PetscSFCreateByMatchingIndices()` except it includes *all* matching indices instead of designating a single rank as the "owner".
1021:   Attempting to create an SF with all matching indices would create an invalid SF, so instead we return an array of `matches` together with a `matchSection` describing its layout.
1022:   Compare the examples in this document to those in `PetscSFCreateByMatchingIndices()`.

1024: .seealso: [](sec_petscsf), `PetscSF`, `PetscSFCreate()`, `PetscSFCreateByMatchingIndices()`
1025: */
1026: static PetscErrorCode PetscSFFindMatchingIndices(PetscLayout layout, PetscInt numRootIndices, const PetscInt rootIndices[], const PetscInt rootLocalIndices[], PetscInt rootLocalOffset, PetscInt numLeafIndices, const PetscInt leafIndices[], const PetscInt leafLocalIndices[], PetscInt leafLocalOffset, PetscSection *matchSection, PetscSFNode *matches[])
1027: {
1028:   MPI_Comm     comm = layout->comm;
1029:   PetscMPIInt  rank;
1030:   PetscSF      sf1;
1031:   PetscSection sectionBuffer, matchSection_;
1032:   PetscInt     numMatches;
1033:   PetscSFNode *roots, *buffer, *matches_;
1034:   PetscInt     N, n, pStart, pEnd;
1035:   PetscBool    areIndicesSame;

1037:   PetscFunctionBegin;
1038:   if (rootIndices) PetscAssertPointer(rootIndices, 3);
1039:   if (rootLocalIndices) PetscAssertPointer(rootLocalIndices, 4);
1040:   if (leafIndices) PetscAssertPointer(leafIndices, 7);
1041:   if (leafLocalIndices) PetscAssertPointer(leafLocalIndices, 8);
1042:   PetscCheck(numRootIndices >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "numRootIndices (%" PetscInt_FMT ") must be non-negative", numRootIndices);
1043:   PetscCheck(numLeafIndices >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "numLeafIndices (%" PetscInt_FMT ") must be non-negative", numLeafIndices);
1044:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1045:   PetscCall(PetscLayoutSetUp(layout));
1046:   PetscCall(PetscLayoutGetSize(layout, &N));
1047:   PetscCall(PetscLayoutGetLocalSize(layout, &n));
1048:   areIndicesSame = (PetscBool)(leafIndices == rootIndices);
1049:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &areIndicesSame, 1, MPIU_BOOL, MPI_LAND, comm));
1050:   PetscCheck(!areIndicesSame || numLeafIndices == numRootIndices, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "leafIndices == rootIndices, but numLeafIndices (%" PetscInt_FMT ") != numRootIndices(%" PetscInt_FMT ")", numLeafIndices, numRootIndices);
1051:   if (PetscDefined(USE_DEBUG)) {
1052:     PetscInt N1 = PETSC_INT_MIN;
1053:     for (PetscInt i = 0; i < numRootIndices; i++)
1054:       if (rootIndices[i] > N1) N1 = rootIndices[i];
1055:     PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &N1, 1, MPIU_INT, MPI_MAX, comm));
1056:     PetscCheck(N1 < N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Max. root index (%" PetscInt_FMT ") out of layout range [0,%" PetscInt_FMT ")", N1, N);
1057:     if (!areIndicesSame) {
1058:       N1 = PETSC_INT_MIN;
1059:       for (PetscInt i = 0; i < numLeafIndices; i++)
1060:         if (leafIndices[i] > N1) N1 = leafIndices[i];
1061:       PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &N1, 1, MPIU_INT, MPI_MAX, comm));
1062:       PetscCheck(N1 < N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Max. leaf index (%" PetscInt_FMT ") out of layout range [0,%" PetscInt_FMT ")", N1, N);
1063:     }
1064:   }

1066:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1067:   { /* Reduce: roots -> buffer */
1068:     // Data in buffer is described by sectionBuffer. The chart of `sectionBuffer` maps onto the local portion of `layout`, with dofs denoting how many matches there are.
1069:     PetscInt        bufsize;
1070:     const PetscInt *root_degree;

1072:     PetscCall(PetscSFCreate(comm, &sf1));
1073:     PetscCall(PetscSFSetFromOptions(sf1));
1074:     PetscCall(PetscSFSetGraphLayout(sf1, layout, numRootIndices, NULL, PETSC_OWN_POINTER, rootIndices));

1076:     PetscCall(PetscSFComputeDegreeBegin(sf1, &root_degree));
1077:     PetscCall(PetscSFComputeDegreeEnd(sf1, &root_degree));
1078:     PetscCall(PetscSectionCreate(comm, &sectionBuffer));
1079:     PetscCall(PetscSectionSetChart(sectionBuffer, 0, n));
1080:     PetscCall(PetscMalloc1(numRootIndices, &roots));
1081:     for (PetscInt i = 0; i < numRootIndices; ++i) {
1082:       roots[i].rank  = rank;
1083:       roots[i].index = rootLocalOffset + (rootLocalIndices ? rootLocalIndices[i] : i);
1084:     }
1085:     for (PetscInt i = 0; i < n; i++) PetscCall(PetscSectionSetDof(sectionBuffer, i, root_degree[i]));
1086:     PetscCall(PetscSectionSetUp(sectionBuffer));
1087:     PetscCall(PetscSectionGetStorageSize(sectionBuffer, &bufsize));
1088:     PetscCall(PetscMalloc1(bufsize, &buffer));
1089:     for (PetscInt i = 0; i < bufsize; ++i) {
1090:       buffer[i].index = -1;
1091:       buffer[i].rank  = -1;
1092:     }
1093:     PetscCall(PetscSFGatherBegin(sf1, MPIU_SF_NODE, roots, buffer));
1094:     PetscCall(PetscSFGatherEnd(sf1, MPIU_SF_NODE, roots, buffer));
1095:     PetscCall(PetscFree(roots));
1096:   }

1098:   // Distribute data in the buffer to the leaf locations. The chart of `matchSection_` maps to `leafIndices`, with dofs denoting how many matches there are for each leaf.
1099:   if (!areIndicesSame) PetscCall(PetscSFSetGraphLayout(sf1, layout, numLeafIndices, NULL, PETSC_OWN_POINTER, leafIndices));
1100:   PetscCall(PetscSectionCreate(comm, &matchSection_));
1101:   PetscCall(PetscSectionMigrateData(sf1, MPIU_SF_NODE, sectionBuffer, buffer, matchSection_, (void **)&matches_, NULL));
1102:   PetscCall(PetscSectionGetChart(matchSection_, &pStart, &pEnd));
1103:   PetscCheck(pEnd - pStart == numLeafIndices, comm, PETSC_ERR_PLIB, "Section of matches has different chart size (%" PetscInt_FMT ")  than number of leaf indices %" PetscInt_FMT ". Section chart is [%" PetscInt_FMT ", %" PetscInt_FMT ")", pEnd - pStart, numLeafIndices, pStart, pEnd);
1104:   PetscCall(PetscSectionGetStorageSize(matchSection_, &numMatches));
1105:   for (PetscInt p = pStart; p < pEnd; p++) {
1106:     PetscInt ndofs;
1107:     PetscCall(PetscSectionGetDof(matchSection_, p, &ndofs));
1108:     PetscCheck(ndofs > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No match found for index %" PetscInt_FMT, leafIndices[p]);
1109:   }

1111:   *matchSection = matchSection_;
1112:   *matches      = matches_;

1114:   PetscCall(PetscFree(buffer));
1115:   PetscCall(PetscSectionDestroy(&sectionBuffer));
1116:   PetscCall(PetscSFDestroy(&sf1));
1117:   PetscFunctionReturn(PETSC_SUCCESS);
1118: }
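
/* Example (illustrative sketch): the call matching Example 1 in the comment above, as issued on
   rank 2, would be
.vb
  PetscSection matchSection;
  PetscSFNode *matches;

  PetscCall(PetscSFFindMatchingIndices(layout, 1, rootIndices, NULL, 300, 2, leafIndices, NULL, 600, &matchSection, &matches));
  // ... use matchSection and matches ...
  PetscCall(PetscSectionDestroy(&matchSection));
  PetscCall(PetscFree(matches));
.ve
*/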

1120: /**
1121:   @brief Match CGNS faces to their Plex equivalents

1123:   @param[in]  dm                 DM that holds the Plex to match against
1124:   @param[in]  nuniq_verts        Number of unique CGNS vertices on this rank
1125:   @param[in]  uniq_verts         Unique CGNS vertices on this rank
1126:   @param[in]  plex_vertex_offset Index offset to match `uniq_verts` to their respective Plex vertices
1127:   @param[in]  NVertices          Number of vertices for Layout for `PetscSFFindMatchingIndices()`
1128:   @param[in]  connSection        PetscSection describing the CGNS face connectivity
1129:   @param[in]  face_ids           Array of the CGNS face IDs
1130:   @param[in]  conn               Array of the CGNS face connectivity
1131:   @param[out] cg2plexSF          PetscSF describing the mapping from owned CGNS faces to remote `plexFaces`
1132:   @param[out] plexFaces          Matching Plex face IDs

1134:   @description

1136:    `cg2plexSF` is a mapping from the owned CGNS faces to the rank whose local Plex has that face.
1137:    `plexFaces` holds the actual mesh point in the local Plex that corresponds to the owned CGNS face (which is the root)

1139:          cg2plexSF
1140:    __________|__________
1141:    |                   |

1143:    [F0_11] -----> [P0_0]  [38]
1144:    [F0_12] --                        Rank 0
1145:    ~~~~~~~~~ \ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1146:               --> [P1_0]  [59]       Rank 1
1147:    [F1_13] -----> [P1_1]  [98]
1148:       |              |     |
1149:       |              |     + plexFaces, maps the leaves of cg2plexSF to local Plex face mesh points
1150:       |              + Leaves of cg2plexSF. P(rank)_(root_index)
1151:       + Roots of cg2plexSF, F(rank)_(CGNS face ID)

1153:    Note that, unlike a pointSF, the leaves of `cg2plexSF` do not map onto the chart of the local Plex, but just onto an array.
1154:    The plexFaces array is then what maps the leaves to the actual local Plex mesh points.

1156:   `plex_vertex_offset` is used to map the CGNS vertices in `uniq_verts` to their respective Plex vertices.
1157:    From `DMPlexBuildFromCellListParallel()`, the mapping of CGNS vertices to Plex vertices is uniq_verts[i] -> i + plex_vertex_offset, where the right side is the Plex point ID.
1158:    So with `plex_vertex_offset = 5`,
1159:    uniq_verts:     [19, 52, 1, 89]
1160:    plex_point_ids: [ 5,  6, 7,  8]
1161: **/
1162: static PetscErrorCode DMPlexCGNS_MatchCGNSFacesToPlexFaces(DM dm, PetscInt nuniq_verts, const PetscInt uniq_verts[], PetscInt plex_vertex_offset, PetscInt NVertices, PetscSection connSection, const PetscInt face_ids[], const PetscInt conn[], PetscSF *cg2plexSF, PetscInt *plexFaces[])
1163: {
1164:   MPI_Comm    comm = PetscObjectComm((PetscObject)dm);
1165:   PetscMPIInt myrank, nranks;
1166:   PetscInt    fownedStart, fownedEnd, fownedSize;

1168:   PetscFunctionBegin;
1169:   PetscCallMPI(MPI_Comm_rank(comm, &myrank));
1170:   PetscCallMPI(MPI_Comm_size(comm, &nranks));

1172:   { // -- Create cg2plexSF
1173:     PetscInt     nuniq_face_verts, *uniq_face_verts;
1174:     PetscSection fvert2mvertSection;
1175:     PetscSFNode *fvert2mvert = NULL;

1177:     { // -- Create fvert2mvert, which maps CGNS vertices in the owned-face connectivity to the CGNS vertices in the global mesh
1178:       PetscLayout layout;

1180:       PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NVertices, 1, &layout));
1181:       { // Count locally unique vertices in the face connectivity
1182:         PetscHSetI vhash;
1183:         PetscInt   off = 0, conn_size;

1185:         PetscCall(PetscHSetICreate(&vhash));
1186:         PetscCall(PetscSectionGetStorageSize(connSection, &conn_size));
1187:         for (PetscInt v = 0; v < conn_size; ++v) PetscCall(PetscHSetIAdd(vhash, conn[v]));
1188:         PetscCall(PetscHSetIGetSize(vhash, &nuniq_face_verts));
1189:         PetscCall(PetscMalloc1(nuniq_face_verts, &uniq_face_verts));
1190:         PetscCall(PetscHSetIGetElems(vhash, &off, uniq_face_verts));
1191:         PetscCall(PetscHSetIDestroy(&vhash));
1192:         PetscCheck(off == nuniq_face_verts, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid number of local vertices %" PetscInt_FMT " should be %" PetscInt_FMT, off, nuniq_face_verts);
1193:       }
1194:       PetscCall(PetscSortInt(nuniq_face_verts, uniq_face_verts));
1195:       PetscCall(PetscSFFindMatchingIndices(layout, nuniq_verts, uniq_verts, NULL, 0, nuniq_face_verts, uniq_face_verts, NULL, 0, &fvert2mvertSection, &fvert2mvert));

1197:       PetscCall(PetscLayoutDestroy(&layout));
1198:     }

1200:     PetscSFNode *plexFaceRemotes, *ownedFaceRemotes;
1201:     PetscCount   nPlexFaceRemotes;
1202:     PetscInt    *local_rank_count;

1204:     PetscCall(PetscSectionGetChart(connSection, &fownedStart, &fownedEnd));
1205:     fownedSize = fownedEnd - fownedStart;
1206:     PetscCall(PetscCalloc1(nranks, &local_rank_count));

1208:     { // Find the rank(s) whose local Plex has the owned CGNS face
1209:       // We determine ownership by finding which ranks contain all the vertices in a face's connectivity
1210:       PetscInt       maxRanksPerVert;
1211:       PetscInt      *face_ranks;
1212:       PetscSegBuffer plexFaceRemotes_SB, ownedFaceRemotes_SB;

1214:       PetscCall(PetscSegBufferCreate(sizeof(PetscSFNode), fownedSize, &plexFaceRemotes_SB));
1215:       PetscCall(PetscSegBufferCreate(sizeof(PetscSFNode), fownedSize, &ownedFaceRemotes_SB));
1216:       PetscCall(PetscSectionGetMaxDof(fvert2mvertSection, &maxRanksPerVert));
1217:       PetscCall(PetscMalloc1(maxRanksPerVert, &face_ranks));
1218:       for (PetscInt f = fownedStart, f_i = 0; f < fownedEnd; f++, f_i++) {
1219:         PetscInt fndof, foffset, lndof, loffset, idx, nface_ranks = 0;

1221:         PetscCall(PetscSectionGetDof(connSection, f, &fndof));
1222:         PetscCall(PetscSectionGetOffset(connSection, f, &foffset));
1223:         PetscCall(PetscFindInt(conn[foffset + 0], nuniq_face_verts, uniq_face_verts, &idx));
1224:         PetscCall(PetscSectionGetDof(fvert2mvertSection, idx, &lndof));
1225:         PetscCall(PetscSectionGetOffset(fvert2mvertSection, idx, &loffset));
1226:         // Loop over ranks of the first vertex in the face connectivity
1227:         for (PetscInt l = 0; l < lndof; l++) {
1228:           PetscInt rank = fvert2mvert[loffset + l].rank;
1229:           // Loop over vertices of face (except for the first) to see if those vertices have the same candidate rank
1230:           for (PetscInt v = 1; v < fndof; v++) {
1231:             PetscInt  ldndof, ldoffset, idx;
1232:             PetscBool vert_has_rank = PETSC_FALSE;

1234:             PetscCall(PetscFindInt(conn[foffset + v], nuniq_face_verts, uniq_face_verts, &idx));
1235:             PetscCall(PetscSectionGetDof(fvert2mvertSection, idx, &ldndof));
1236:             PetscCall(PetscSectionGetOffset(fvert2mvertSection, idx, &ldoffset));
1237:             // Loop over ranks of the vth vertex to see if it has the candidate rank
1238:             for (PetscInt ld = 0; ld < ldndof; ld++) vert_has_rank = (fvert2mvert[ldoffset + ld].rank == rank) || vert_has_rank;
1239:             if (vert_has_rank) continue; // This vertex has the candidate rank, proceed to the next vertex
1240:             else goto next_candidate_rank;
1241:           }
1242:           face_ranks[nface_ranks++] = rank;
1243:         next_candidate_rank:
1244:           continue;
1245:         }
1246:         PetscCheck(nface_ranks > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Rank containing CGNS face %" PetscInt_FMT " could not be found", face_ids[f_i]);

1248:         // Once we have found the rank(s) whose Plex has the vertices of CGNS face `f`, we can begin to build the information for the SF.
1249:         // We want the roots to be the CGNS faces and the leaves to be the corresponding Plex faces, but we have the opposite; for the owned face on myrank, we know the rank that has the corresponding Plex face.
1250:         // To get the inverse, we assign each SF edge with a tuple of PetscSFNodes; one in `plexFaceRemotes` and the other in `ownedFaceRemotes`.
1251:         // `plexFaceRemotes` has the rank and index of the Plex face.
1252:         // `ownedFaceRemotes` has the rank and index of the owned CGNS face.
1253:         // Note that `ownedFaceRemotes` is all on my rank (i.e. rank == myrank).
1254:         //
1255:         // Then, to build the `cg2plexSF`, we communicate the `ownedFaceRemotes` to the `plexFaceRemotes` (via SFReduce).
1256:         // Those `ownedFaceRemotes` then act as the leaves to the roots on this process.
1257:         //
1258:         // Conceptually, this is the same as calling `PetscSFCreateInverseSF` on an SF with `iremotes = plexFaceRemotes` and the `ilocal = ownedFaceRemotes[:].index`.
1259:         // However, we cannot use this much simpler way because when there are multiple matching Plex faces, `PetscSFCreateInverseSF()` will be invalid due to `ownedFaceRemotes[:].index` having repeated values (only root vertices of the SF graph may have degree > 1)
1260:         PetscSFNode *plexFaceRemotes_buffer, *ownedFaceRemotes_buffer;
1261:         PetscCall(PetscSegBufferGet(plexFaceRemotes_SB, nface_ranks, &plexFaceRemotes_buffer));
1262:         PetscCall(PetscSegBufferGet(ownedFaceRemotes_SB, nface_ranks, &ownedFaceRemotes_buffer));
1263:         for (PetscInt n = 0; n < nface_ranks; n++) {
1264:           plexFaceRemotes_buffer[n].rank = face_ranks[n];
1265:           local_rank_count[face_ranks[n]]++;
1266:           ownedFaceRemotes_buffer[n] = (PetscSFNode){.rank = myrank, .index = f_i};
1267:         }
1268:       }
1269:       PetscCall(PetscFree(face_ranks));

1271:       PetscCall(PetscSegBufferGetSize(plexFaceRemotes_SB, &nPlexFaceRemotes));
1272:       PetscCall(PetscSegBufferExtractAlloc(plexFaceRemotes_SB, &plexFaceRemotes));
1273:       PetscCall(PetscSegBufferDestroy(&plexFaceRemotes_SB));
1274:       PetscCall(PetscSegBufferExtractAlloc(ownedFaceRemotes_SB, &ownedFaceRemotes));
1275:       PetscCall(PetscSegBufferDestroy(&ownedFaceRemotes_SB));

1277:       // To get the index for plexFaceRemotes, we partition the leaves on each rank (i.e. the array that will hold the local Plex face mesh points) by the rank that owns the corresponding CGNS face.
1278:       // For r in [0, numranks), local_rank_count[r] holds the number of leaves on rank r that correspond to CGNS faces owned by myrank.
1279:       // This determines how large a partition rank r's leaves must reserve for myrank.
1280:       // To get the offset into those leaves, we use Exscan to get rank_start.
1281:       // For r in [0, numranks), rank_start[r] holds the offset into rank r's leaves at which myrank's partition begins.

1283:       // Below is an example:
1284:       //
1285:       // myrank:             | 0           1           2
1286:       // local_rank_count:   | [3, 2, 0]   [1, 0, 2]   [2, 2, 1]
1287:       // myrank_total_count: | 6           4           3
1288:       // rank_start:         | [0, 0, 0]   [3, 2, 0]   [4, 2, 2]
1289:       //                     |
1290:       // plexFaceRemotes: 0  | (0, 0)      (2, 0)      (2, 2)        <-- (rank, index) tuples (e.g. PetscSFNode)
1291:       //                  1  | (1, 0)      (2, 1)      (0, 4)
1292:       //                  2  | (0, 1)      (0, 3)      (1, 2)
1293:       //                  3  | (0, 2)                  (0, 5)
1294:       //                  4  | (1, 1)                  (1, 3)
1295:       //
1296:       // leaves:          0  | (0, 0)      (0, 1)      (1, 0)    (rank and index into plexFaceRemotes)
1297:       //                  1  | (0, 2)      (0, 4)      (1, 1)
1298:       //                  2  | (0, 3)      (2, 2)      (2, 0)
1299:       //                  3  | (1, 2)      (2, 4)
1300:       //                  4  | (2, 1)
1301:       //                  5  | (2, 3)
1302:       //
1303:       // Note how at the leaves, the ranks are contiguous and in order
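      // Worked through with the example values above: MPIU_Allreduce(local_rank_count, MPI_SUM) yields the total leaf
      // count per rank, [6, 4, 3], which is where myrank_total_count (6 on rank 0, 4 on rank 1, 3 on rank 2) comes from.
      // MPI_Exscan(local_rank_count, MPI_SUM) yields rank_start; e.g. on rank 2, rank_start = [4, 2, 2] means rank 2's
      // entries destined for rank 0 start at leaf 4 of rank 0 and those destined for rank 1 start at leaf 2 of rank 1.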
1304:       PetscInt myrank_total_count;
1305:       {
1306:         PetscInt *rank_start, *rank_offset;

1308:         PetscCall(PetscCalloc2(nranks, &rank_start, nranks, &rank_offset));
1309:         PetscCallMPI(MPIU_Allreduce(local_rank_count, rank_start, nranks, MPIU_INT, MPI_SUM, comm));
1310:         myrank_total_count = rank_start[myrank];
1311:         PetscCall(PetscArrayzero(rank_start, nranks));
1312:         PetscCallMPI(MPI_Exscan(local_rank_count, rank_start, nranks, MPIU_INT, MPI_SUM, comm));

1314:         for (PetscInt r = 0; r < nPlexFaceRemotes; r++) {
1315:           PetscInt rank            = plexFaceRemotes[r].rank;
1316:           plexFaceRemotes[r].index = rank_start[rank] + rank_offset[rank];
1317:           rank_offset[rank]++;
1318:         }
1319:         PetscCall(PetscFree2(rank_start, rank_offset));
1320:       }

1322:       { // Communicate the leaves to roots and build cg2plexSF
1323:         PetscSF      plexRemotes2ownedRemotesSF;
1324:         PetscSFNode *iremote_cg2plexSF;

1326:         PetscCall(PetscSFCreate(comm, &plexRemotes2ownedRemotesSF));
1327:         PetscCall(PetscSFSetGraph(plexRemotes2ownedRemotesSF, myrank_total_count, nPlexFaceRemotes, NULL, PETSC_COPY_VALUES, plexFaceRemotes, PETSC_OWN_POINTER));
1328:         PetscCall(PetscMalloc1(myrank_total_count, &iremote_cg2plexSF));
1329:         PetscCall(PetscSFViewFromOptions(plexRemotes2ownedRemotesSF, NULL, "-plex2ownedremotes_sf_view"));
1330:         for (PetscInt i = 0; i < myrank_total_count; i++) iremote_cg2plexSF[i] = (PetscSFNode){.rank = -1, .index = -1};
1331:         PetscCall(PetscSFReduceBegin(plexRemotes2ownedRemotesSF, MPIU_SF_NODE, ownedFaceRemotes, iremote_cg2plexSF, MPI_REPLACE));
1332:         PetscCall(PetscSFReduceEnd(plexRemotes2ownedRemotesSF, MPIU_SF_NODE, ownedFaceRemotes, iremote_cg2plexSF, MPI_REPLACE));
1333:         PetscCall(PetscSFDestroy(&plexRemotes2ownedRemotesSF));
1334:         for (PetscInt i = 0; i < myrank_total_count; i++) PetscCheck(iremote_cg2plexSF[i].rank >= 0 && iremote_cg2plexSF[i].index != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Owned face SFNode was not reduced properly");

1336:         PetscCall(PetscSFCreate(comm, cg2plexSF));
1337:         PetscCall(PetscSFSetGraph(*cg2plexSF, fownedSize, myrank_total_count, NULL, PETSC_COPY_VALUES, iremote_cg2plexSF, PETSC_OWN_POINTER));

1339:         PetscCall(PetscFree(ownedFaceRemotes));
1340:       }

1342:       PetscCall(PetscFree(local_rank_count));
1343:     }

1345:     PetscCall(PetscSectionDestroy(&fvert2mvertSection));
1346:     PetscCall(PetscFree(uniq_face_verts));
1347:     PetscCall(PetscFree(fvert2mvert));
1348:   }

1350:   { // -- Find plexFaces
1351:     // Distribute the owned-CGNS-face connectivity to the ranks that have the corresponding Plex faces, then locate those faces in the local Plex
1352:     PetscSection connDistSection;
1353:     PetscInt    *connDist;

1355:     // Distribute the face connectivity to the rank that has that face
1356:     PetscCall(PetscSectionCreate(comm, &connDistSection));
1357:     PetscCall(PetscSectionMigrateData(*cg2plexSF, MPIU_INT, connSection, conn, connDistSection, (void **)&connDist, NULL));

1359:     { // Translate CGNS vertex numbering to local Plex numbering
1360:       PetscInt *dmplex_verts, *uniq_verts_sorted;
1361:       PetscInt  connDistSize;

1363:       PetscCall(PetscMalloc2(nuniq_verts, &dmplex_verts, nuniq_verts, &uniq_verts_sorted));
1364:       PetscCall(PetscArraycpy(uniq_verts_sorted, uniq_verts, nuniq_verts));
1365:       // uniq_verts are one-to-one with the DMPlex vertices with an offset, see DMPlexBuildFromCellListParallel()
1366:       for (PetscInt v = 0; v < nuniq_verts; v++) dmplex_verts[v] = v + plex_vertex_offset;
1367:       PetscCall(PetscSortIntWithArray(nuniq_verts, uniq_verts_sorted, dmplex_verts));

1369:       PetscCall(PetscSectionGetStorageSize(connDistSection, &connDistSize));
1370:       for (PetscInt v = 0; v < connDistSize; v++) {
1371:         PetscInt idx;
1372:         PetscCall(PetscFindInt(connDist[v], nuniq_verts, uniq_verts_sorted, &idx));
1373:         PetscCheck(idx >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Could not find CGNS vertex id (from face connectivity) in local plex");
1374:         connDist[v] = dmplex_verts[idx];
1375:       }
1376:       PetscCall(PetscFree2(dmplex_verts, uniq_verts_sorted));
1377:     }

1379:     // Debugging info
1380:     PetscBool view_connectivity = PETSC_FALSE;
1381:     PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_view_face_connectivity", &view_connectivity, NULL));
1382:     if (view_connectivity) {
1383:       PetscSection conesSection;
1384:       PetscInt    *cones;

1386:       PetscCall(PetscPrintf(comm, "Distributed CGNS Face Connectivity (in Plex vertex numbering):\n"));
1387:       PetscCall(PetscSectionArrayView(connDistSection, connDist, PETSC_INT, NULL));
1388:       PetscCall(DMPlexGetCones(dm, &cones));
1389:       PetscCall(DMPlexGetConeSection(dm, &conesSection));
1390:       PetscCall(PetscPrintf(comm, "Plex Cones:\n"));
1391:       PetscCall(PetscSectionArrayView(conesSection, cones, PETSC_INT, NULL));
1392:     }

1394:     // For every face in connDistSection, find the transitive support of a vertex in that face connectivity.
1395:     // Loop through the faces of the transitive support and find the matching face
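    // (The transitive support, or star, of a vertex consists of every edge, face, and cell whose closure contains that vertex.)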
1396:     PetscBT  plex_face_found;
1397:     PetscInt fplexStart, fplexEnd, vplexStart, vplexEnd;
1398:     PetscInt fdistStart, fdistEnd, numfdist;
1399:     PetscCall(DMPlexGetHeightStratum(dm, 1, &fplexStart, &fplexEnd));
1400:     PetscCall(DMPlexGetDepthStratum(dm, 0, &vplexStart, &vplexEnd));
1401:     PetscCall(PetscSectionGetChart(connDistSection, &fdistStart, &fdistEnd));
1402:     numfdist = fdistEnd - fdistStart;
1403:     PetscCall(PetscMalloc1(numfdist, plexFaces));
1404:     PetscCall(PetscBTCreate(numfdist, &plex_face_found));
1405:     for (PetscInt i = 0; i < numfdist; i++) (*plexFaces)[i] = -1;

1407:     for (PetscInt f = fdistStart, f_i = 0; f < fdistEnd; f++, f_i++) {
1408:       PetscInt  ndof, offset, support_size;
1409:       PetscInt *support = NULL;

1411:       PetscCall(PetscSectionGetDof(connDistSection, f, &ndof));
1412:       PetscCall(PetscSectionGetOffset(connDistSection, f, &offset));

1414:       // Loop through transitive support of a vertex in the CGNS face connectivity
1415:       PetscCall(DMPlexGetTransitiveClosure(dm, connDist[offset + 0], PETSC_FALSE, &support_size, &support));
1416:       for (PetscInt s = 0; s < support_size; s++) {
1417:         PetscInt face_point = support[s * 2]; // closure stores points and orientations, [p_0, o_0, p_1, o_1, ...]
1418:         PetscInt trans_cone_size, *trans_cone = NULL;

1420:         if (face_point < fplexStart || face_point >= fplexEnd) continue; // Skip non-face points
1421:         // See if face_point has the same vertices
1422:         PetscCall(DMPlexGetTransitiveClosure(dm, face_point, PETSC_TRUE, &trans_cone_size, &trans_cone));
1423:         for (PetscInt c = 0; c < trans_cone_size; c++) {
1424:           PetscInt vertex_point = trans_cone[c * 2], conn_has_vertex;
1425:           if (vertex_point < vplexStart || vertex_point >= vplexEnd) continue; // Skip non-vertex points
1426:           PetscCall(PetscFindIntUnsorted(vertex_point, ndof, &connDist[offset], &conn_has_vertex));
1427:           if (conn_has_vertex < 0) goto check_next_face;
1428:         }
1429:         (*plexFaces)[f_i] = face_point;
1430:         PetscCall(DMPlexRestoreTransitiveClosure(dm, face_point, PETSC_TRUE, &trans_cone_size, &trans_cone));
1431:         break;
1432:       check_next_face:
1433:         PetscCall(DMPlexRestoreTransitiveClosure(dm, face_point, PETSC_TRUE, &trans_cone_size, &trans_cone));
1434:         continue;
1435:       }
1436:       PetscCall(DMPlexRestoreTransitiveClosure(dm, connDist[offset + 0], PETSC_FALSE, &support_size, &support));
1437:       if ((*plexFaces)[f_i] != -1) PetscCall(PetscBTSet(plex_face_found, f_i));
1438:     }

1440:     // Some distributed CGNS faces may not have found a matching plex face on this rank.
1441:     // This can happen when a partition has all the vertices of a distributed CGNS face, but does not have the face itself (its parent element is owned by a different partition).
1442:     // That is, the partition's Plex contains the vertices associated with the CGNS face, but not the face point itself.
1443:     // For example, take the following quad mesh, where the numbers represent the owning rank and CGNS face ID.
1444:     //
1445:     //    2     3     4     <-- face ID
1446:     //  ----- ----- -----
1447:     // |  0  |  1  |  0  |  <-- rank
1448:     //  ----- ----- -----
1449:     //    5     6     7     <-- face ID
1450:     //
1451:     // In this case, rank 0 will have all the vertices of faces 3 and 6 in its Plex, but does not actually have either face.
1452:     //
1453:     // To address this, we remove the leaves associated with these missing faces from cg2plexSF and then verify that every owned face found exactly one matching plex face (i.e. every root has degree 1)
1454:     PetscCount num_plex_faces_found = PetscBTCountSet(plex_face_found, numfdist);
1455:     PetscBool  some_faces_not_found = num_plex_faces_found < numfdist;
1456:     PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &some_faces_not_found, 1, MPI_C_BOOL, MPI_LOR, comm));
1457:     if (some_faces_not_found) {
1458:       PetscSFNode    *iremote_cg2plex_new;
1459:       const PetscInt *root_degree;
1460:       PetscInt        num_roots, *plexFacesNew;

1462:       PetscCall(PetscMalloc1(num_plex_faces_found, &iremote_cg2plex_new));
1463:       PetscCall(PetscCalloc1(num_plex_faces_found, &plexFacesNew));
1464:       { // Get SFNodes with matching faces
1465:         const PetscSFNode *iremote_cg2plex_old;
1466:         PetscInt           num_leaves_old, n = 0;
1467:         PetscCall(PetscSFGetGraph(*cg2plexSF, &num_roots, &num_leaves_old, NULL, &iremote_cg2plex_old));
1468:         PetscAssert(num_roots == fownedSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent roots and owned faces.");
1469:         PetscAssert(num_leaves_old == numfdist, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent leaves and distributed faces.");
1470:         for (PetscInt o = 0; o < num_leaves_old; o++) {
1471:           if (PetscBTLookupSet(plex_face_found, o)) {
1472:             iremote_cg2plex_new[n] = iremote_cg2plex_old[o];
1473:             plexFacesNew[n]        = (*plexFaces)[o];
1474:             n++;
1475:           }
1476:         }
1477:         PetscAssert(n == num_plex_faces_found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Found %" PetscCount_FMT " matching plex faces, but only set %" PetscInt_FMT " SFNodes", num_plex_faces_found, n);
1478:       }
1479:       PetscCall(PetscSFSetGraph(*cg2plexSF, num_roots, num_plex_faces_found, NULL, PETSC_COPY_VALUES, iremote_cg2plex_new, PETSC_OWN_POINTER));

1481:       // Verify that all CGNS faces have a matching Plex face on any rank
1482:       PetscCall(PetscSFComputeDegreeBegin(*cg2plexSF, &root_degree));
1483:       PetscCall(PetscSFComputeDegreeEnd(*cg2plexSF, &root_degree));
1484:       for (PetscInt r = 0; r < num_roots; r++) {
1485:         PetscCheck(root_degree[r] > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Could not find plex face for the CGNS face %" PetscInt_FMT, face_ids[r]);
1486:         PetscCheck(root_degree[r] == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Found more than one plex face for the CGNS face %" PetscInt_FMT ". Face may be internal rather than at mesh domain boundary", face_ids[r]);
1487:       }

1489:       if (PetscDefined(USE_DEBUG)) {
1490:         for (PetscInt i = 0; i < num_plex_faces_found; i++)
1491:           PetscCheck(plexFacesNew[i] >= fplexStart && plexFacesNew[i] < fplexEnd, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Plex face ID %" PetscInt_FMT " outside of face stratum [%" PetscInt_FMT ", %" PetscInt_FMT ")", plexFacesNew[i], fplexStart, fplexEnd);
1492:       }

1494:       PetscCall(PetscFree(*plexFaces));
1495:       *plexFaces = plexFacesNew;
1496:     }

1498:     PetscCall(PetscBTDestroy(&plex_face_found));
1499:     PetscCall(PetscSectionDestroy(&connDistSection));
1500:     PetscCall(PetscFree(connDist));
1501:   }
1502:   PetscFunctionReturn(PETSC_SUCCESS);
1503: }

1505: // Copied from PetscOptionsStringToInt
1506: static inline PetscErrorCode PetscStrtoInt(const char name[], PetscInt *a)
1507: {
1508:   size_t len;
1509:   char  *endptr;
1510:   long   strtolval;

1512:   PetscFunctionBegin;
1513:   PetscCall(PetscStrlen(name, &len));
1514:   PetscCheck(len, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "character string of length zero has no numerical value");

1516:   strtolval = strtol(name, &endptr, 10);
1517:   PetscCheck((size_t)(endptr - name) == len, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Input string \"%s\" has no integer value (do not include . in it)", name);

1519: #if defined(PETSC_USE_64BIT_INDICES) && defined(PETSC_HAVE_ATOLL)
1520:   (void)strtolval;
1521:   *a = atoll(name);
1522: #elif defined(PETSC_USE_64BIT_INDICES) && defined(PETSC_HAVE___INT64)
1523:   (void)strtolval;
1524:   *a = _atoi64(name);
1525: #else
1526:   *a = (PetscInt)strtolval;
1527: #endif
1528:   PetscFunctionReturn(PETSC_SUCCESS);
1529: }
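// Example of intended use (see the boundary-condition loop below): PetscStrtoInt("12", &v) sets v to 12, which is how
// the numeric suffix of a "FaceSet<N>" boundary-condition name becomes a value in the "Face Sets" label.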

1531: typedef struct {
1532:   char     name[CGIO_MAX_NAME_LENGTH + 1];
1533:   int      normal[3], ndatasets;
1534:   cgsize_t npoints, nnormals;
1535:   CGNS_ENUMT(BCType_t) bctype;
1536:   CGNS_ENUMT(DataType_t) normal_datatype;
1537:   CGNS_ENUMT(PointSetType_t) pointtype;
1538: } CGBCInfo;

1540: PetscErrorCode DMPlexCreateCGNS_Internal_Parallel(MPI_Comm comm, PetscInt cgid, PetscBool interpolate, DM *dm)
1541: {
1542:   PetscMPIInt num_proc, rank;
1543:   /* Read from file */
1544:   char     basename[CGIO_MAX_NAME_LENGTH + 1];
1545:   char     buffer[CGIO_MAX_NAME_LENGTH + 1];
1546:   int      dim = 0, physDim = 0, coordDim = 0;
1547:   PetscInt NVertices = 0, NCells = 0;
1548:   int      nzones = 0, nbases;
1549:   int      zone   = 1; // Only supports single zone files
1550:   int      base   = 1; // Only supports single base

1552:   PetscFunctionBegin;
1553:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1554:   PetscCallMPI(MPI_Comm_size(comm, &num_proc));
1555:   PetscCall(DMCreate(comm, dm));
1556:   PetscCall(DMSetType(*dm, DMPLEX));

1558:   PetscCallCGNSRead(cg_nbases(cgid, &nbases), *dm, 0);
1559:   PetscCheck(nbases <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single base, not %d", nbases);
1560:   //  From the CGNS docs: cg_base_read() returns cell_dim (the topological dimension) and phys_dim, which CGNS defines as the length of the spatial vectors/components (the embedding space dimension in PETSc)
1561:   PetscCallCGNSRead(cg_base_read(cgid, base, basename, &dim, &physDim), *dm, 0);
1562:   PetscCallCGNSRead(cg_nzones(cgid, base, &nzones), *dm, 0);
1563:   PetscCheck(nzones == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Parallel reader limited to one zone, not %d", nzones);
1564:   {
1565:     cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */

1567:     PetscCallCGNSRead(cg_zone_read(cgid, base, zone, buffer, sizes), *dm, 0);
1568:     NVertices = sizes[0];
1569:     NCells    = sizes[1];
1570:   }

1572:   PetscCall(PetscObjectSetName((PetscObject)*dm, basename));
1573:   PetscCall(DMSetDimension(*dm, dim));
1574:   coordDim = dim;

1576:   // This is going to be a headache for mixed-topology and multiple sections. We may have to restore reading the data twice (once before the SetChart
1577:   // call) to get this right, but continue for now with a single section, single topology, and one zone.
1578:   // Establish element ranges for my rank
1579:   PetscInt    mystarte, myende, mystartv, myendv, myownede, myownedv;
1580:   PetscLayout elem_map, vtx_map;
1581:   PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NCells, 1, &elem_map));
1582:   PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NVertices, 1, &vtx_map));
1583:   PetscCall(PetscLayoutGetRange(elem_map, &mystarte, &myende));
1584:   PetscCall(PetscLayoutGetLocalSize(elem_map, &myownede));
1585:   PetscCall(PetscLayoutGetRange(vtx_map, &mystartv, &myendv));
1586:   PetscCall(PetscLayoutGetLocalSize(vtx_map, &myownedv));

1588:   // -- Build Plex in parallel
1589:   DMPolytopeType dm_cell_type = DM_POLYTOPE_UNKNOWN;
1590:   PetscInt       pOrder = 1, numClosure = -1;
1591:   cgsize_t      *elements = NULL;
1592:   int           *face_section_ids, *cell_section_ids, num_face_sections = 0, num_cell_sections = 0;
1593:   PetscInt      *uniq_verts, nuniq_verts;
1594:   {
1595:     int        nsections;
1596:     PetscInt  *elementsQ1, numCorners = -1;
1597:     const int *perm;
1598:     cgsize_t   start, end; // Throwaway

1600:     PetscCallCGNSRead(cg_nsections(cgid, base, zone, &nsections), *dm, 0);
1601:     PetscCall(PetscMalloc2(nsections, &face_section_ids, nsections, &cell_section_ids));
1602:     // Read element connectivity
1603:     for (int index_sect = 1; index_sect <= nsections; index_sect++) {
1604:       int      nbndry, parentFlag;
1605:       PetscInt cell_dim;
1606:       CGNS_ENUMT(ElementType_t) cellType;

1608:       PetscCallCGNSRead(cg_section_read(cgid, base, zone, index_sect, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);

1610:       PetscCall(CGNSElementTypeGetTopologyInfo(cellType, &dm_cell_type, &numCorners, &cell_dim));
1611:       // Record full-dimension (cell) sections and codimension-1 (face, i.e. boundary) sections; sections of any other dimension are skipped
1612:       if (cell_dim == dim) cell_section_ids[num_cell_sections++] = index_sect;
1613:       else if (cell_dim == dim - 1) face_section_ids[num_face_sections++] = index_sect;
1614:     }
1615:     PetscCheck(num_cell_sections == 1, comm, PETSC_ERR_SUP, "CGNS Reader does not support more than 1 full-dimension cell section");

1617:     {
1618:       int index_sect = cell_section_ids[0], nbndry, parentFlag;
1619:       CGNS_ENUMT(ElementType_t) cellType;

1621:       PetscCallCGNSRead(cg_section_read(cgid, base, zone, index_sect, buffer, &cellType, &start, &end, &nbndry, &parentFlag), *dm, 0);
1622:       PetscCall(CGNSElementTypeGetDiscretizationInfo(cellType, &numClosure, &pOrder));
1623:       PetscCall(PetscMalloc1(myownede * numClosure, &elements));
1624:       PetscCallCGNSReadData(cgp_elements_read_data(cgid, base, zone, index_sect, mystarte + 1, myende, elements), *dm, 0);
1625:       for (PetscInt v = 0; v < myownede * numClosure; ++v) elements[v] -= 1; // 0 based

1627:       // Create corners-only connectivity
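      // elementsQ1 keeps only the cell corners, reordered into Plex closure ordering: perm[v] gives the Plex closure
      // position of CGNS node v, so elementsQ1[e * numCorners + perm[v]] receives elements[e * numClosure + v].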
1628:       PetscCall(CGNSElementTypeGetTopologyInfo(cellType, &dm_cell_type, &numCorners, NULL));
1629:       PetscCall(PetscMalloc1(myownede * numCorners, &elementsQ1));
1630:       PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numCorners, NULL, &perm));
1631:       for (PetscInt e = 0; e < myownede; ++e) {
1632:         for (PetscInt v = 0; v < numCorners; ++v) elementsQ1[e * numCorners + perm[v]] = elements[e * numClosure + v];
1633:       }
1634:     }

1636:     // Build cell-vertex Plex
1637:     PetscCall(DMPlexBuildFromCellListParallel(*dm, myownede, myownedv, NVertices, numCorners, elementsQ1, NULL, &uniq_verts));
1638:     PetscCall(DMViewFromOptions(*dm, NULL, "-corner_dm_view"));
1639:     {
1640:       PetscInt pStart, pEnd;
1641:       PetscCall(DMPlexGetChart(*dm, &pStart, &pEnd));
1642:       nuniq_verts = (pEnd - pStart) - myownede;
1643:     }
1644:     PetscCall(PetscFree(elementsQ1));
1645:   }

1647:   if (interpolate) PetscCall(DMPlexInterpolateInPlace_Internal(*dm));

1649:   // -- Create SF for naive nodal-data read to elements
1650:   PetscSF plex_to_cgns_sf;
1651:   {
1652:     PetscInt     nleaves, num_comp;
1653:     PetscInt    *leaf, num_leaves = 0;
1654:     PetscInt     cStart, cEnd;
1655:     const int   *perm;
1656:     PetscSF      cgns_to_local_sf;
1657:     PetscSection local_section;
1658:     PetscFE      fe;

1660:     // sfNatural requires PetscSection to handle DMDistribute, so we use PetscFE to define the section
1661:     // Use number of components = 1 to work with just the nodes themselves
1662:     PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, 1, dm_cell_type, pOrder, PETSC_DETERMINE, &fe));
1663:     PetscCall(PetscObjectSetName((PetscObject)fe, "FE for sfNatural"));
1664:     PetscCall(DMAddField(*dm, NULL, (PetscObject)fe));
1665:     PetscCall(DMCreateDS(*dm));
1666:     PetscCall(PetscFEDestroy(&fe));

1668:     PetscCall(DMGetLocalSection(*dm, &local_section));
1669:     PetscCall(PetscSectionViewFromOptions(local_section, NULL, "-fe_natural_section_view"));
1670:     PetscCall(PetscSectionGetFieldComponents(local_section, 0, &num_comp));
1671:     PetscCall(PetscSectionGetStorageSize(local_section, &nleaves));
1672:     nleaves /= num_comp;
1673:     PetscCall(PetscMalloc1(nleaves, &leaf));
1674:     for (PetscInt i = 0; i < nleaves; i++) leaf[i] = -1;
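    // leaf[l] will hold the 0-based global CGNS node id of local node l; cells that share a node must agree on this
    // value, which the PetscAssert inside the closure loop below verifies.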

1676:     // Get the permutation from CGNS closure numbering to PLEX closure numbering
1677:     PetscCall(DMPlexCGNSGetPermutation_Internal(dm_cell_type, numClosure, NULL, &perm));
1678:     PetscCall(DMPlexGetHeightStratum(*dm, 0, &cStart, &cEnd));
1679:     for (PetscInt cell = cStart; cell < cEnd; ++cell) {
1680:       PetscInt num_closure_dof, *closure_idx = NULL;

1682:       PetscCall(DMPlexGetClosureIndices(*dm, local_section, local_section, cell, PETSC_FALSE, &num_closure_dof, &closure_idx, NULL, NULL));
1683:       PetscAssert(numClosure * num_comp == num_closure_dof, comm, PETSC_ERR_PLIB, "Closure dof size does not match polytope");
1684:       for (PetscInt i = 0; i < numClosure; i++) {
1685:         PetscInt li = closure_idx[perm[i] * num_comp] / num_comp;
1686:         if (li < 0) continue;

1688:         PetscInt cgns_idx = elements[cell * numClosure + i];
1689:         if (leaf[li] == -1) {
1690:           leaf[li] = cgns_idx;
1691:           num_leaves++;
1692:         } else PetscAssert(leaf[li] == cgns_idx, PETSC_COMM_SELF, PETSC_ERR_PLIB, "leaf does not match previously set");
1693:       }
1694:       PetscCall(DMPlexRestoreClosureIndices(*dm, local_section, local_section, cell, PETSC_FALSE, &num_closure_dof, &closure_idx, NULL, NULL));
1695:     }
1696:     PetscAssert(num_leaves == nleaves, PETSC_COMM_SELF, PETSC_ERR_PLIB, "leaf count in closure does not match nleaves");
1697:     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)*dm), &cgns_to_local_sf));
1698:     PetscCall(PetscSFSetGraphLayout(cgns_to_local_sf, vtx_map, nleaves, NULL, PETSC_USE_POINTER, leaf));
1699:     PetscCall(PetscObjectSetName((PetscObject)cgns_to_local_sf, "CGNS to Plex SF"));
1700:     PetscCall(PetscSFViewFromOptions(cgns_to_local_sf, NULL, "-CGNStoPlex_sf_view"));
1701:     PetscCall(PetscFree(leaf));
1702:     PetscCall(PetscFree(elements));

1704:     { // Convert cgns_to_local to global_to_cgns
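      // The resulting plex_to_cgns_sf has global Plex dofs as roots and the locally owned CGNS vertices (per vtx_map)
      // as leaves, so a PetscSFReduce moves CGNS-ordered arrays into Plex global ordering. It is used for the
      // coordinates below and becomes sfNatural at the end of this function.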
1705:       PetscSF sectionsf, cgns_to_global_sf;

1707:       PetscCall(DMGetSectionSF(*dm, &sectionsf));
1708:       PetscCall(PetscSFComposeInverse(cgns_to_local_sf, sectionsf, &cgns_to_global_sf));
1709:       PetscCall(PetscSFDestroy(&cgns_to_local_sf));
1710:       PetscCall(PetscSFCreateInverseSF(cgns_to_global_sf, &plex_to_cgns_sf));
1711:       PetscCall(PetscObjectSetName((PetscObject)plex_to_cgns_sf, "Global Plex to CGNS"));
1712:       PetscCall(PetscSFDestroy(&cgns_to_global_sf));
1713:     }
1714:   }

1716:   { // -- Set coordinates for DM
1717:     PetscScalar *coords;
1718:     float       *x[3];
1719:     double      *xd[3];
1720:     PetscBool    read_with_double;
1721:     PetscFE      cfe;

1723:     // Set up the coordinate space first. Use pOrder here for isoparametric coordinates; revisit with CPEX-0045 High Order.
1724:     PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, coordDim, dm_cell_type, pOrder, PETSC_DETERMINE, &cfe));
1725:     PetscCall(DMSetCoordinateDisc(*dm, cfe, PETSC_FALSE, PETSC_FALSE));
1726:     PetscCall(PetscFEDestroy(&cfe));

1728:     { // Determine if coords are written in single or double precision
1729:       CGNS_ENUMT(DataType_t) datatype;

1731:       PetscCallCGNSRead(cg_coord_info(cgid, base, zone, 1, &datatype, buffer), *dm, 0);
1732:       read_with_double = datatype == CGNS_ENUMV(RealDouble) ? PETSC_TRUE : PETSC_FALSE;
1733:     }

1735:     // Read coords from file (stored as one array per component) and interleave them per vertex
1736:     if (read_with_double) PetscCall(PetscMalloc3(myownedv, &xd[0], myownedv, &xd[1], myownedv, &xd[2]));
1737:     else PetscCall(PetscMalloc3(myownedv, &x[0], myownedv, &x[1], myownedv, &x[2]));
1738:     PetscCall(PetscMalloc1(myownedv * coordDim, &coords));
1739:     {
1740:       cgsize_t sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */
1741:       cgsize_t range_min[3] = {mystartv + 1, 1, 1};
1742:       cgsize_t range_max[3] = {myendv, 1, 1};
1743:       int      ngrids, ncoords;

1745:       PetscCallCGNSRead(cg_zone_read(cgid, base, zone, buffer, sizes), *dm, 0);
1746:       PetscCallCGNSRead(cg_ngrids(cgid, base, zone, &ngrids), *dm, 0);
1747:       PetscCheck(ngrids <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single grid, not %d", ngrids);
1748:       PetscCallCGNSRead(cg_ncoords(cgid, base, zone, &ncoords), *dm, 0);
1749:       PetscCheck(ncoords == coordDim, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a coordinate array for each dimension, not %d", ncoords);
1750:       if (read_with_double) {
1751:         for (int d = 0; d < coordDim; ++d) PetscCallCGNSReadData(cgp_coord_read_data(cgid, base, zone, (d + 1), range_min, range_max, xd[d]), *dm, 0);
1752:         if (coordDim >= 1) {
1753:           for (PetscInt v = 0; v < myownedv; ++v) coords[v * coordDim + 0] = xd[0][v];
1754:         }
1755:         if (coordDim >= 2) {
1756:           for (PetscInt v = 0; v < myownedv; ++v) coords[v * coordDim + 1] = xd[1][v];
1757:         }
1758:         if (coordDim >= 3) {
1759:           for (PetscInt v = 0; v < myownedv; ++v) coords[v * coordDim + 2] = xd[2][v];
1760:         }
1761:       } else {
1762:         for (int d = 0; d < coordDim; ++d) PetscCallCGNSReadData(cgp_coord_read_data(cgid, base, zone, (d + 1), range_min, range_max, x[d]), *dm, 0);
1763:         if (coordDim >= 1) {
1764:           for (PetscInt v = 0; v < myownedv; ++v) coords[v * coordDim + 0] = x[0][v];
1765:         }
1766:         if (coordDim >= 2) {
1767:           for (PetscInt v = 0; v < myownedv; ++v) coords[v * coordDim + 1] = x[1][v];
1768:         }
1769:         if (coordDim >= 3) {
1770:           for (PetscInt v = 0; v < myownedv; ++v) coords[v * coordDim + 2] = x[2][v];
1771:         }
1772:       }
1773:     }
1774:     if (read_with_double) PetscCall(PetscFree3(xd[0], xd[1], xd[2]));
1775:     else PetscCall(PetscFree3(x[0], x[1], x[2]));

1777:     { // Reduce CGNS-ordered coordinate nodes to Plex ordering, and set DM's coordinates
1778:       Vec          coord_global;
1779:       MPI_Datatype unit;
1780:       PetscScalar *coord_global_array;
1781:       DM           cdm;

1783:       PetscCall(DMGetCoordinateDM(*dm, &cdm));
1784:       PetscCall(DMCreateGlobalVector(cdm, &coord_global));
1785:       PetscCall(VecGetArrayWrite(coord_global, &coord_global_array));
1786:       PetscCallMPI(MPI_Type_contiguous(coordDim, MPIU_SCALAR, &unit));
1787:       PetscCallMPI(MPI_Type_commit(&unit));
1788:       PetscCall(PetscSFReduceBegin(plex_to_cgns_sf, unit, coords, coord_global_array, MPI_REPLACE));
1789:       PetscCall(PetscSFReduceEnd(plex_to_cgns_sf, unit, coords, coord_global_array, MPI_REPLACE));
1790:       PetscCall(VecRestoreArrayWrite(coord_global, &coord_global_array));
1791:       PetscCallMPI(MPI_Type_free(&unit));
1792:       PetscCall(DMSetCoordinates(*dm, coord_global));
1793:       PetscCall(VecDestroy(&coord_global));
1794:     }
1795:     PetscCall(PetscFree(coords));
1796:   }

1798:   PetscCall(DMViewFromOptions(*dm, NULL, "-corner_interpolated_dm_view"));

1800:   int nbocos;
1801:   PetscCallCGNSRead(cg_nbocos(cgid, base, zone, &nbocos), *dm, 0);
1802:   // In order to extract boundary condition (boco) information into DMLabels, each rank holds:
1803:   // - The local Plex
1804:   // - Naively read CGNS face connectivity
1805:   // - Naively read list of CGNS faces for each boco
1806:   //
1807:   // First, we need to build a mapping from the CGNS faces to the (probably off-rank) Plex face.
1808:   // The CGNS faces that each rank owns are known globally via cgnsLayouts.
1809:   // The cg2plexSF maps these CGNS face IDs to their (probably off-rank) Plex faces.
1810:   // The plexFaces array maps the (contiguous) leaves of cg2plexSF to the local Plex face points.
1811:   //
1812:   // Next, we read the list of CGNS faces for each boco and find the location of that face's owner using cgnsLayouts.
1813:   // Then, we can communicate the label value to the rank whose local Plex face corresponds to that CGNS face.
1814:   if (interpolate && num_face_sections != 0 && nbocos != 0) {
1815:     PetscSection connSection;
1816:     PetscInt     nCgFaces, nPlexFaces;
1817:     PetscInt    *face_ids, *conn, *plexFaces;
1818:     PetscSF      cg2plexSF;
1819:     PetscLayout *cgnsLayouts;

1821:     PetscCall(DMPlexCGNS_CreateCornersConnectivitySection(*dm, cgid, base, zone, num_face_sections, face_section_ids, &connSection, NULL, &face_ids, &cgnsLayouts, &conn));
1822:     {
1823:       PetscBool view_connectivity = PETSC_FALSE;
1824:       PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_view_face_connectivity", &view_connectivity, NULL));
1825:       if (view_connectivity) PetscCall(PetscSectionArrayView(connSection, conn, PETSC_INT, NULL));
1826:     }
1827:     PetscCall(DMPlexCGNS_MatchCGNSFacesToPlexFaces(*dm, nuniq_verts, uniq_verts, myownede, NVertices, connSection, face_ids, conn, &cg2plexSF, &plexFaces));
1828:     PetscCall(PetscSFGetGraph(cg2plexSF, NULL, &nPlexFaces, NULL, NULL));
1829:     {
1830:       PetscInt start, end;
1831:       PetscCall(PetscSectionGetChart(connSection, &start, &end));
1832:       nCgFaces = end - start;
1833:     }

1835:     PetscInt *plexFaceValues, *cgFaceValues;
1836:     PetscCall(PetscMalloc2(nPlexFaces, &plexFaceValues, nCgFaces, &cgFaceValues));
1837:     for (PetscInt BC = 1; BC <= nbocos; BC++) {
1838:       cgsize_t *points;
1839:       CGBCInfo  bcinfo;
1840:       PetscBool is_faceset  = PETSC_FALSE;
1841:       PetscInt  label_value = 1;

1843:       PetscCallCGNSRead(cg_boco_info(cgid, base, zone, BC, bcinfo.name, &bcinfo.bctype, &bcinfo.pointtype, &bcinfo.npoints, bcinfo.normal, &bcinfo.nnormals, &bcinfo.normal_datatype, &bcinfo.ndatasets), *dm, 0);

1845:       PetscCall(PetscStrbeginswith(bcinfo.name, "FaceSet", &is_faceset));
1846:       if (is_faceset) {
1847:         size_t faceset_len;
1848:         PetscCall(PetscStrlen("FaceSet", &faceset_len));
1849:         PetscCall(PetscStrtoInt(bcinfo.name + faceset_len, &label_value));
1850:       }
1851:       const char *label_name = is_faceset ? "Face Sets" : bcinfo.name;
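      // For example, a boco named "FaceSet12" contributes value 12 to the "Face Sets" label, while a boco with any
      // other name (say "inlet", used here purely as an illustration) gets its own label with the default value 1.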

1853:       if (bcinfo.npoints < 1) continue;

1855:       PetscLayout bc_layout;
1856:       PetscInt    bcStart, bcEnd, bcSize;
1857:       PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, bcinfo.npoints, 1, &bc_layout));
1858:       PetscCall(PetscLayoutGetRange(bc_layout, &bcStart, &bcEnd));
1859:       PetscCall(PetscLayoutGetLocalSize(bc_layout, &bcSize));
1860:       PetscCall(PetscLayoutDestroy(&bc_layout));
1861:       PetscCall(DMGetWorkArray(*dm, bcSize, MPIU_CGSIZE, &points));

1863:       const char *labels[] = {"Zone_t", "ZoneBC_t", "BC_t", "PointList"};
1864:       PetscCallCGNSRead(cg_golist(cgid, base, 4, (char **)labels, (int[]){zone, 1, BC, 0}), *dm, 0);
1865:       PetscCallCGNSReadData(cgp_ptlist_read_data(cgid, bcStart + 1, bcEnd, points), *dm, 0);

1867:       PetscInt    *label_values;
1868:       PetscSFNode *remotes;
1869:       PetscCall(PetscMalloc2(bcSize, &remotes, bcSize, &label_values));
1870:       for (PetscInt p = 0; p < bcSize; p++) {
1871:         PetscMPIInt bcrank;
1872:         PetscInt    bcidx;

1874:         PetscCall(PetscLayoutFindOwnerIndex_CGNSSectionLayouts(cgnsLayouts, num_face_sections, points[p], &bcrank, &bcidx, NULL));
1875:         remotes[p].rank  = bcrank;
1876:         remotes[p].index = bcidx;
1877:         label_values[p]  = label_value;
1878:       }
1879:       PetscCall(DMRestoreWorkArray(*dm, bcSize, MPIU_CGSIZE, &points));

1881:       { // Communicate the BC values to their Plex-face owners
1882:         PetscSF cg2bcSF;
1883:         DMLabel label;

1885:         for (PetscInt i = 0; i < nCgFaces; i++) cgFaceValues[i] = -1;
1886:         for (PetscInt i = 0; i < nPlexFaces; i++) plexFaceValues[i] = -1;

1888:         PetscCall(PetscSFCreate(comm, &cg2bcSF));
1889:         PetscCall(PetscSFSetGraph(cg2bcSF, nCgFaces, bcSize, NULL, PETSC_COPY_VALUES, remotes, PETSC_USE_POINTER));

1891:         PetscCall(PetscSFReduceBegin(cg2bcSF, MPIU_INT, label_values, cgFaceValues, MPI_REPLACE));
1892:         PetscCall(PetscSFReduceEnd(cg2bcSF, MPIU_INT, label_values, cgFaceValues, MPI_REPLACE));
1893:         PetscCall(PetscSFBcastBegin(cg2plexSF, MPIU_INT, cgFaceValues, plexFaceValues, MPI_REPLACE));
1894:         PetscCall(PetscSFBcastEnd(cg2plexSF, MPIU_INT, cgFaceValues, plexFaceValues, MPI_REPLACE));
1895:         PetscCall(PetscSFDestroy(&cg2bcSF));
1896:         PetscCall(PetscFree2(remotes, label_values));

1898:         // Set the label values for the communicated faces
1899:         PetscCall(DMGetLabel(*dm, label_name, &label));
1900:         if (label == NULL) {
1901:           PetscCall(DMCreateLabel(*dm, label_name));
1902:           PetscCall(DMGetLabel(*dm, label_name, &label));
1903:         }
1904:         for (PetscInt i = 0; i < nPlexFaces; i++) {
1905:           if (plexFaceValues[i] == -1) continue;
1906:           PetscCall(DMLabelSetValue(label, plexFaces[i], plexFaceValues[i]));
1907:         }
1908:       }
1909:     }
1910:     PetscCall(PetscFree2(plexFaceValues, cgFaceValues));
1911:     PetscCall(PetscFree(plexFaces));
1912:     PetscCall(PetscSFDestroy(&cg2plexSF));
1913:     PetscCall(PetscFree(conn));
1914:     for (PetscInt s = 0; s < num_face_sections; s++) PetscCall(PetscLayoutDestroy(&cgnsLayouts[s]));
1915:     PetscCall(PetscSectionDestroy(&connSection));
1916:     PetscCall(PetscFree(cgnsLayouts));
1917:     PetscCall(PetscFree(face_ids));
1918:   }
1919:   PetscCall(PetscFree(uniq_verts));
1920:   PetscCall(PetscFree2(face_section_ids, cell_section_ids));

1922:   // -- Set sfNatural for solution vectors in CGNS file
1923:   // NOTE: We set sfNatural to be the map between the original CGNS ordering of nodes and the Plex ordering of nodes.
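  // With sfNatural set and DMSetUseNatural() enabled, solution data can later be moved between the Plex global
  // ordering and this CGNS node ordering (e.g. via DMPlexGlobalToNaturalBegin/End()).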
1924:   PetscCall(PetscSFViewFromOptions(plex_to_cgns_sf, NULL, "-sfNatural_init_view"));
1925:   PetscCall(DMSetNaturalSF(*dm, plex_to_cgns_sf));
1926:   PetscCall(DMSetUseNatural(*dm, PETSC_TRUE));
1927:   PetscCall(PetscSFDestroy(&plex_to_cgns_sf));

1929:   PetscCall(PetscLayoutDestroy(&elem_map));
1930:   PetscCall(PetscLayoutDestroy(&vtx_map));
1931:   PetscFunctionReturn(PETSC_SUCCESS);
1932: }

1934: // node_l2g must be freed
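// On return, node_l2g[i] is the global output node number of local node i; the nodes owned by this rank form the
// contiguous range [nStart, nEnd), and num_global_nodes is the sum of owned nodes over all ranks.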
1935: static PetscErrorCode DMPlexCreateNodeNumbering(DM dm, PetscInt *num_local_nodes, PetscInt *num_global_nodes, PetscInt *nStart, PetscInt *nEnd, const PetscInt **node_l2g)
1936: {
1937:   PetscSection    local_section;
1938:   PetscSF         point_sf;
1939:   PetscInt        pStart, pEnd, spStart, spEnd, *points, nleaves, ncomp, *nodes;
1940:   PetscMPIInt     comm_size;
1941:   const PetscInt *ilocal, field = 0;

1943:   PetscFunctionBegin;
1944:   *num_local_nodes  = -1;
1945:   *num_global_nodes = -1;
1946:   *nStart           = -1;
1947:   *nEnd             = -1;
1948:   *node_l2g         = NULL;

1950:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &comm_size));
1951:   PetscCall(DMGetLocalSection(dm, &local_section));
1952:   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
1953:   PetscCall(PetscSectionGetChart(local_section, &spStart, &spEnd));
1954:   PetscCall(DMGetPointSF(dm, &point_sf));
1955:   if (comm_size == 1) nleaves = 0;
1956:   else PetscCall(PetscSFGetGraph(point_sf, NULL, &nleaves, &ilocal, NULL));
1957:   PetscCall(PetscSectionGetFieldComponents(local_section, field, &ncomp));

1959:   PetscInt local_node = 0, owned_node = 0, owned_start = 0;
1960:   for (PetscInt p = spStart, leaf = 0; p < spEnd; p++) {
1961:     PetscInt dof;
1962:     PetscCall(PetscSectionGetFieldDof(local_section, p, field, &dof));
1963:     PetscAssert(dof % ncomp == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Field dof %" PetscInt_FMT " must be divisible by components %" PetscInt_FMT, dof, ncomp);
1964:     local_node += dof / ncomp;
1965:     if (leaf < nleaves && p == ilocal[leaf]) { // skip points owned by a different process
1966:       leaf++;
1967:     } else {
1968:       owned_node += dof / ncomp;
1969:     }
1970:   }
1971:   PetscCallMPI(MPI_Exscan(&owned_node, &owned_start, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
1972:   PetscCall(PetscMalloc1(pEnd - pStart, &points));
1973:   owned_node = 0;
1974:   for (PetscInt p = spStart, leaf = 0; p < spEnd; p++) {
1975:     if (leaf < nleaves && p == ilocal[leaf]) { // skip points owned by a different process
1976:       points[p - pStart] = -1;
1977:       leaf++;
1978:       continue;
1979:     }
1980:     PetscInt dof, offset;
1981:     PetscCall(PetscSectionGetFieldDof(local_section, p, field, &dof));
1982:     PetscCall(PetscSectionGetFieldOffset(local_section, p, field, &offset));
1983:     PetscAssert(offset % ncomp == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Field offset %" PetscInt_FMT " must be divisible by components %" PetscInt_FMT, offset, ncomp);
1984:     points[p - pStart] = owned_start + owned_node;
1985:     owned_node += dof / ncomp;
1986:   }
1987:   if (comm_size > 1) {
1988:     PetscCall(PetscSFBcastBegin(point_sf, MPIU_INT, points, points, MPI_REPLACE));
1989:     PetscCall(PetscSFBcastEnd(point_sf, MPIU_INT, points, points, MPI_REPLACE));
1990:   }

1992:   // Set up global indices for each local node
1993:   PetscCall(PetscMalloc1(local_node, &nodes));
1994:   for (PetscInt p = spStart; p < spEnd; p++) {
1995:     PetscInt dof, offset;
1996:     PetscCall(PetscSectionGetFieldDof(local_section, p, field, &dof));
1997:     PetscCall(PetscSectionGetFieldOffset(local_section, p, field, &offset));
1998:     for (PetscInt n = 0; n < dof / ncomp; n++) nodes[offset / ncomp + n] = points[p - pStart] + n;
1999:   }
2000:   PetscCall(PetscFree(points));
2001:   *num_local_nodes = local_node;
2002:   *nStart          = owned_start;
2003:   *nEnd            = owned_start + owned_node;
2004:   PetscCallMPI(MPIU_Allreduce(&owned_node, num_global_nodes, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
2005:   *node_l2g = nodes;
2006:   PetscFunctionReturn(PETSC_SUCCESS);
2007: }

2009: PetscErrorCode DMView_PlexCGNS(DM dm, PetscViewer viewer)
2010: {
2011:   MPI_Comm          comm = PetscObjectComm((PetscObject)dm);
2012:   PetscViewer_CGNS *cgv  = (PetscViewer_CGNS *)viewer->data;
2013:   PetscInt          fvGhostStart;
2014:   PetscInt          topo_dim, coord_dim, num_global_elems;
2015:   PetscInt          cStart, cEnd, num_local_nodes, num_global_nodes, nStart, nEnd, fStart, fEnd;
2016:   const PetscInt   *node_l2g;
2017:   Vec               coord;
2018:   DM                colloc_dm, cdm;
2019:   PetscMPIInt       size;
2020:   const char       *dm_name;
2021:   int               base, zone;
2022:   cgsize_t          isize[3], elem_offset = 0;

2024:   PetscFunctionBegin;
2025:   if (!cgv->file_num) {
2026:     PetscInt time_step;
2027:     PetscCall(DMGetOutputSequenceNumber(dm, &time_step, NULL));
2028:     PetscCall(PetscViewerCGNSFileOpen_Internal(viewer, time_step));
2029:   }
2030:   PetscCallMPI(MPI_Comm_size(comm, &size));
2031:   PetscCall(DMGetDimension(dm, &topo_dim));
2032:   PetscCall(DMGetCoordinateDim(dm, &coord_dim));
2033:   PetscCall(PetscObjectGetName((PetscObject)dm, &dm_name));
2034:   PetscCallCGNSWrite(cg_base_write(cgv->file_num, dm_name, topo_dim, coord_dim, &base), dm, viewer);
2035:   PetscCallCGNS(cg_goto(cgv->file_num, base, NULL));
2036:   PetscCallCGNSWrite(cg_dataclass_write(CGNS_ENUMV(NormalizedByDimensional)), dm, viewer);

2038:   {
2039:     PetscFE        fe, fe_coord;
2040:     PetscClassId   ds_id;
2041:     PetscDualSpace dual_space, dual_space_coord;
2042:     PetscInt       num_fields, field_order = -1, field_order_coord;
2043:     PetscBool      is_simplex;
2044:     PetscCall(DMGetNumFields(dm, &num_fields));
2045:     if (num_fields > 0) {
2046:       PetscCall(DMGetField(dm, 0, NULL, (PetscObject *)&fe));
2047:       PetscCall(PetscObjectGetClassId((PetscObject)fe, &ds_id));
2048:       if (ds_id != PETSCFE_CLASSID) {
2049:         fe = NULL;
2050:         if (ds_id == PETSCFV_CLASSID) field_order = -1; // use whatever is present for coords; field will be CellCenter
2051:         else field_order = 1;                           // assume vertex-based linear elements
2052:       }
2053:     } else fe = NULL;
2054:     if (fe) {
2055:       PetscCall(PetscFEGetDualSpace(fe, &dual_space));
2056:       PetscCall(PetscDualSpaceGetOrder(dual_space, &field_order));
2057:     }
2058:     PetscCall(DMGetCoordinateDM(dm, &cdm));
2059:     PetscCall(DMGetField(cdm, 0, NULL, (PetscObject *)&fe_coord));
2060:     {
2061:       PetscClassId id;
2062:       PetscCall(PetscObjectGetClassId((PetscObject)fe_coord, &id));
2063:       if (id != PETSCFE_CLASSID) fe_coord = NULL;
2064:     }
2065:     if (fe_coord) {
2066:       PetscCall(PetscFEGetDualSpace(fe_coord, &dual_space_coord));
2067:       PetscCall(PetscDualSpaceGetOrder(dual_space_coord, &field_order_coord));
2068:     } else field_order_coord = 1;
2069:     if (field_order > 0 && field_order != field_order_coord) {
2070:       PetscInt quadrature_order = field_order;
2071:       PetscCall(DMClone(dm, &colloc_dm));
2072:       { // Inform the new colloc_dm that it is a coordinate DM so isoperiodic affine corrections can be applied
2073:         const PetscSF *face_sfs;
2074:         PetscInt       num_face_sfs;
2075:         PetscCall(DMPlexGetIsoperiodicFaceSF(dm, &num_face_sfs, &face_sfs));
2076:         PetscCall(DMPlexSetIsoperiodicFaceSF(colloc_dm, num_face_sfs, (PetscSF *)face_sfs));
2077:         if (face_sfs) colloc_dm->periodic.setup = DMPeriodicCoordinateSetUp_Internal;
2078:       }
2079:       PetscCall(DMPlexIsSimplex(dm, &is_simplex));
2080:       PetscCall(PetscFECreateLagrange(comm, topo_dim, coord_dim, is_simplex, field_order, quadrature_order, &fe));
2081:       PetscCall(DMSetCoordinateDisc(colloc_dm, fe, PETSC_FALSE, PETSC_TRUE));
2082:       PetscCall(PetscFEDestroy(&fe));
2083:     } else {
2084:       PetscCall(PetscObjectReference((PetscObject)dm));
2085:       colloc_dm = dm;
2086:     }
2087:   }
2088:   PetscCall(DMGetCoordinateDM(colloc_dm, &cdm));
2089:   PetscCall(DMPlexCreateNodeNumbering(cdm, &num_local_nodes, &num_global_nodes, &nStart, &nEnd, &node_l2g));
2090:   PetscCall(DMGetCoordinatesLocal(colloc_dm, &coord));
2091:   PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
2092:   PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd));
2093:   PetscCall(DMPlexGetCellTypeStratum(dm, DM_POLYTOPE_FV_GHOST, &fvGhostStart, NULL));
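  // If finite-volume ghost cells are present, exclude them from the element range that gets written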
2094:   if (fvGhostStart >= 0) cEnd = fvGhostStart;
2095:   num_global_elems = cEnd - cStart;
2096:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &num_global_elems, 1, MPIU_INT, MPI_SUM, comm));
2097:   isize[0] = num_global_nodes;
2098:   isize[1] = num_global_elems;
2099:   isize[2] = 0;
2100:   PetscCallCGNSWrite(cg_zone_write(cgv->file_num, base, "Zone", isize, CGNS_ENUMV(Unstructured), &zone), dm, viewer);

2102:   cgsize_t e_owned, e_global, e_start;
2103:   {
2104:     const PetscScalar *X;
2105:     PetscScalar       *x;
2106:     int                coord_ids[3];

2108:     PetscCall(VecGetArrayRead(coord, &X));
2109:     for (PetscInt d = 0; d < coord_dim; d++) {
2110:       const double exponents[] = {0, 1, 0, 0, 0};
2111:       char         coord_name[64];
2112:       PetscCall(PetscSNPrintf(coord_name, sizeof coord_name, "Coordinate%c", 'X' + (int)d));
2113:       PetscCallCGNSWrite(cgp_coord_write(cgv->file_num, base, zone, CGNS_ENUMV(RealDouble), coord_name, &coord_ids[d]), dm, viewer);
2114:       PetscCallCGNS(cg_goto(cgv->file_num, base, "Zone_t", zone, "GridCoordinates", 0, coord_name, 0, NULL));
2115:       PetscCallCGNSWrite(cg_exponents_write(CGNS_ENUMV(RealDouble), exponents), dm, viewer);
2116:     }

2118:     int        section;
2119:     cgsize_t  *conn = NULL;
2120:     const int *perm;
2121:     CGNS_ENUMT(ElementType_t) element_type = CGNS_ENUMV(ElementTypeNull);
2122:     {
2123:       PetscCall(PetscMalloc1(nEnd - nStart, &x));
2124:       for (PetscInt d = 0; d < coord_dim; d++) {
2125:         for (PetscInt n = 0; n < num_local_nodes; n++) {
2126:           PetscInt gn = node_l2g[n];
2127:           if (gn < nStart || nEnd <= gn) continue;
2128:           x[gn - nStart] = X[n * coord_dim + d];
2129:         }
2130:         // CGNS nodes use 1-based indexing
2131:         cgsize_t start = nStart + 1, end = nEnd;
2132:         PetscCallCGNSWriteData(cgp_coord_write_data(cgv->file_num, base, zone, coord_ids[d], &start, &end, x), dm, viewer);
2133:       }
2134:       PetscCall(PetscFree(x));
2135:       PetscCall(VecRestoreArrayRead(coord, &X));
2136:     }

2138:     e_owned = cEnd - cStart;
2139:     if (e_owned > 0) {
2140:       DMPolytopeType cell_type;

2142:       PetscCall(DMPlexGetCellType(dm, cStart, &cell_type));
2143:       for (PetscInt i = cStart, c = 0; i < cEnd; i++) {
2144:         PetscInt closure_dof, *closure_indices, elem_size;

2146:         PetscCall(DMPlexGetClosureIndices(cdm, cdm->localSection, cdm->localSection, i, PETSC_FALSE, &closure_dof, &closure_indices, NULL, NULL));
2147:         elem_size = closure_dof / coord_dim;
2148:         if (!conn) PetscCall(PetscMalloc1(e_owned * elem_size, &conn));
2149:         PetscCall(DMPlexCGNSGetPermutation_Internal(cell_type, closure_dof / coord_dim, &element_type, &perm));
2150:         for (PetscInt j = 0; j < elem_size; j++) conn[c++] = node_l2g[closure_indices[perm[j] * coord_dim] / coord_dim] + 1;
2151:         PetscCall(DMPlexRestoreClosureIndices(cdm, cdm->localSection, cdm->localSection, i, PETSC_FALSE, &closure_dof, &closure_indices, NULL, NULL));
2152:       }
2153:     }

2155:     { // Get global element_type (for ranks that do not have owned elements)
2156:       PetscInt local_element_type, global_element_type;

2158:       local_element_type = e_owned > 0 ? (PetscInt)element_type : -1;
2159:       PetscCallMPI(MPIU_Allreduce(&local_element_type, &global_element_type, 1, MPIU_INT, MPI_MAX, comm));
2160:       if (local_element_type != -1)
2161:         PetscCheck(local_element_type == global_element_type, PETSC_COMM_SELF, PETSC_ERR_SUP, "Ranks with different element types not supported. Local element type is %s, but global is %s", cg_ElementTypeName(local_element_type), cg_ElementTypeName(global_element_type));
2162:       element_type = (CGNS_ENUMT(ElementType_t))global_element_type;
2163:     }
2164:     PetscCallMPI(MPIU_Allreduce(&e_owned, &e_global, 1, MPIU_CGSIZE, MPI_SUM, comm));
2165:     PetscCheck(e_global == num_global_elems, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected number of elements %" PRIdCGSIZE " vs %" PetscInt_FMT, e_global, num_global_elems);
2166:     e_start = 0;
2167:     PetscCallMPI(MPI_Exscan(&e_owned, &e_start, 1, MPIU_CGSIZE, MPI_SUM, comm));
2168:     e_start += elem_offset;
2169:     PetscCallCGNSWrite(cgp_section_write(cgv->file_num, base, zone, "Elem", element_type, 1, e_global, 0, &section), dm, viewer);
2170:     PetscCallCGNSWriteData(cgp_elements_write_data(cgv->file_num, base, zone, section, e_start + 1, e_start + e_owned, conn), dm, viewer);
2171:     elem_offset = e_global;
2172:     PetscCall(PetscFree(conn));

2174:     cgv->base            = base;
2175:     cgv->zone            = zone;
2176:     cgv->node_l2g        = node_l2g;
2177:     cgv->num_local_nodes = num_local_nodes;
2178:     cgv->nStart          = nStart;
2179:     cgv->nEnd            = nEnd;
2180:     cgv->eStart          = e_start;
2181:     cgv->eEnd            = e_start + e_owned;
2182:     if (1) {
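      // Always write a CellCenter solution ("CellInfo") containing each element's owning rank, plus the "Cell Sets"
      // label values when that label exists, so partition and cell-set information can be inspected in post-processing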
2183:       PetscMPIInt rank;
2184:       int        *efield;
2185:       int         sol, field;
2186:       DMLabel     label;
2187:       PetscCallMPI(MPI_Comm_rank(comm, &rank));
2188:       PetscCall(PetscMalloc1(e_owned, &efield));
2189:       for (PetscInt i = 0; i < e_owned; i++) efield[i] = rank;
2190:       PetscCallCGNSWrite(cg_sol_write(cgv->file_num, base, zone, "CellInfo", CGNS_ENUMV(CellCenter), &sol), dm, viewer);
2191:       PetscCallCGNSWrite(cgp_field_write(cgv->file_num, base, zone, sol, CGNS_ENUMV(Integer), "Rank", &field), dm, viewer);
2192:       cgsize_t start = e_start + 1, end = e_start + e_owned;
2193:       PetscCallCGNSWriteData(cgp_field_write_data(cgv->file_num, base, zone, sol, field, &start, &end, efield), dm, viewer);
2194:       PetscCall(DMGetLabel(dm, "Cell Sets", &label));
2195:       if (label) {
2196:         for (PetscInt c = cStart; c < cEnd; c++) {
2197:           PetscInt value;
2198:           PetscCall(DMLabelGetValue(label, c, &value));
2199:           efield[c - cStart] = value;
2200:         }
2201:         PetscCallCGNSWrite(cgp_field_write(cgv->file_num, base, zone, sol, CGNS_ENUMV(Integer), "CellSet", &field), dm, viewer);
2202:         PetscCallCGNSWriteData(cgp_field_write_data(cgv->file_num, base, zone, sol, field, &start, &end, efield), dm, viewer);
2203:       }
2204:       PetscCall(PetscFree(efield));
2205:     }
2206:   }

2208:   DMLabel  fsLabel;
2209:   PetscInt num_fs_global;
2210:   IS       fsValuesGlobalIS;
2211:   PetscCall(DMGetLabel(dm, "Face Sets", &fsLabel));
2212:   PetscCall(DMLabelGetValueISGlobal(comm, fsLabel, PETSC_TRUE, &fsValuesGlobalIS));
2213:   PetscCall(ISGetSize(fsValuesGlobalIS, &num_fs_global));

2215:   if (num_fs_global > 0) {
2216:     CGNS_ENUMT(ElementType_t) element_type = CGNS_ENUMV(ElementTypeNull);
2217:     const PetscInt *fsValuesLocal;
2218:     IS              stratumIS, fsFacesAll;
2219:     int             section;
2220:     const int      *perm;
2221:     cgsize_t        f_owned = 0, f_global, f_start;
2222:     cgsize_t       *parents, *conn = NULL;
2223:     PetscInt        fStart, fEnd;

2225:     PetscInt num_fs_local;
2226:     IS       fsValuesLocalIS;

2228:     if (fsLabel) {
2229:       PetscCall(DMLabelGetNonEmptyStratumValuesIS(fsLabel, &fsValuesLocalIS));
2230:       PetscCall(ISGetSize(fsValuesLocalIS, &num_fs_local));
2231:       PetscCall(ISGetIndices(fsValuesLocalIS, &fsValuesLocal));
2232:     } else num_fs_local = 0;

2234:     PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd));
2235:     { // Get single IS without duplicates of the local face IDs in the FaceSets
2236:       IS *fsPoints = NULL;

2238:       PetscCall(PetscMalloc1(num_fs_local, &fsPoints));
2239:       for (PetscInt fs = 0; fs < num_fs_local; ++fs) PetscCall(DMLabelGetStratumIS(fsLabel, fsValuesLocal[fs], &fsPoints[fs]));
2240:       PetscCall(ISConcatenate(PETSC_COMM_SELF, num_fs_local, fsPoints, &fsFacesAll));
2241:       PetscCall(ISSortRemoveDups(fsFacesAll));
2242:       PetscCall(ISGeneralFilter(fsFacesAll, fStart, fEnd)); // Remove non-face mesh points from the IS
2243:       {
2244:         PetscInt f_owned_int;
2245:         PetscCall(ISGetSize(fsFacesAll, &f_owned_int));
2246:         f_owned = f_owned_int;
2247:       }
2248:       for (PetscInt fs = 0; fs < num_fs_local; ++fs) PetscCall(ISDestroy(&fsPoints[fs]));
2249:       PetscCall(PetscFree(fsPoints));
2250:     }
2251:     PetscCall(ISRestoreIndices(fsValuesLocalIS, &fsValuesLocal));
2252:     PetscCall(ISDestroy(&fsValuesLocalIS));

2254:     {
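            // Build CGNS connectivity for every local face-set face: take each face's closure on
            // the coordinate DM, permute it from Plex closure order to CGNS node order, and map the
            // local coordinate nodes to global (1-based) CGNS node numbers.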
2255:       const PetscInt *faces;
2256:       DMPolytopeType  cell_type, cell_type_f;
2257:       PetscInt        closure_dof = -1, closure_dof_f;

2259:       PetscCall(ISGetIndices(fsFacesAll, &faces));
2260:       if (f_owned) PetscCall(DMPlexGetCellType(dm, faces[0], &cell_type));
2261:       PetscCall(PetscCalloc1(f_owned * 2, &parents));
2262:       for (PetscInt f = 0, c = 0; f < f_owned; f++) {
2263:         PetscInt      *closure_indices, elem_size;
2264:         const PetscInt face = faces[f];

2266:         PetscCall(DMPlexGetCellType(dm, face, &cell_type_f));
2267:         PetscCheck(cell_type_f == cell_type, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Only mono-topology face sets are supported currently. Face %" PetscInt_FMT " is %s, which is different from the previous type %s", face, DMPolytopeTypes[cell_type_f], DMPolytopeTypes[cell_type]);

2269:         // Get connectivity of the face
2270:         PetscCall(DMPlexGetClosureIndices(cdm, cdm->localSection, cdm->localSection, face, PETSC_FALSE, &closure_dof_f, &closure_indices, NULL, NULL));
2271:         elem_size = closure_dof_f / coord_dim;
2272:         if (!conn) {
2273:           PetscCall(PetscMalloc1(f_owned * elem_size, &conn));
2274:           closure_dof = closure_dof_f;
2275:         }
2276:         PetscCheck(closure_dof_f == closure_dof, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Closure of face %" PetscInt_FMT " has %" PetscInt_FMT " dofs instead of the previously written %" PetscInt_FMT " dofs. Only mono-topology face sets are supported currently", face, closure_dof_f, closure_dof);
2277:         PetscCall(DMPlexCGNSGetPermutation_Internal(cell_type, elem_size, &element_type, &perm));
2278:         for (PetscInt j = 0; j < elem_size; j++) conn[c++] = node_l2g[closure_indices[perm[j] * coord_dim] / coord_dim] + 1;
2279:         PetscCall(DMPlexRestoreClosureIndices(cdm, cdm->localSection, cdm->localSection, face, PETSC_FALSE, &closure_dof_f, &closure_indices, NULL, NULL));
2280:       }
2281:       PetscCall(ISRestoreIndices(fsFacesAll, &faces));
2282:     }

2284:     {   // Write connectivity for face sets
2285:       { // Get global element type
2286:         PetscInt local_element_type, global_element_type;

2288:         local_element_type = f_owned > 0 ? (PetscInt)element_type : -1;
2289:         PetscCallMPI(MPIU_Allreduce(&local_element_type, &global_element_type, 1, MPIU_INT, MPI_MAX, comm));
2290:         if (local_element_type != -1)
2291:           PetscCheck(local_element_type == global_element_type, PETSC_COMM_SELF, PETSC_ERR_SUP, "Ranks with different element types not supported. Local element type is %s, but global is %s", cg_ElementTypeName(local_element_type), cg_ElementTypeName(global_element_type));
2292:         element_type = (CGNS_ENUMT(ElementType_t))global_element_type;
2293:       }
2294:       PetscCallMPI(MPIU_Allreduce(&f_owned, &f_global, 1, MPIU_CGSIZE, MPI_SUM, comm));
2295:       f_start = 0;
2296:       PetscCallMPI(MPI_Exscan(&f_owned, &f_start, 1, MPIU_CGSIZE, MPI_SUM, comm));
2297:       f_start += elem_offset;
2298:       PetscCallCGNSWrite(cgp_section_write(cgv->file_num, base, zone, "Faces", element_type, elem_offset + 1, elem_offset + f_global, 0, &section), dm, viewer);
2299:       PetscCallCGNSWriteData(cgp_elements_write_data(cgv->file_num, base, zone, section, f_start + 1, f_start + f_owned, conn), dm, viewer);

2301:       PetscCall(PetscFree(conn));
2302:       PetscCall(PetscFree(parents));
2303:     }

2305:     const PetscInt *fsValuesGlobal = NULL;
2306:     PetscCall(ISGetIndices(fsValuesGlobalIS, &fsValuesGlobal));
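          // For each global face-set value, write a BC_t node named "FaceSet<value>" whose
          // PointList holds the 1-based indices of its faces within the "Faces" section above.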
2307:     for (PetscInt fs = 0; fs < num_fs_global; ++fs) {
2308:       int            BC;
2309:       const PetscInt fsID    = fsValuesGlobal[fs];
2310:       PetscInt      *fs_pnts = NULL;
2311:       char           bc_name[33];
2312:       cgsize_t       fs_start, fs_owned, fs_global;
2313:       cgsize_t      *fs_pnts_cg;

2315:       PetscCall(DMLabelGetStratumIS(fsLabel, fsID, &stratumIS));
2316:       if (stratumIS) { // Get list of only face points
2317:         PetscSegBuffer  fs_pntsSB;
2318:         PetscCount      fs_owned_count;
2319:         PetscInt        nstratumPnts;
2320:         const PetscInt *stratumPnts;

2322:         PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 16, &fs_pntsSB));
2323:         PetscCall(ISGetIndices(stratumIS, &stratumPnts));
2324:         PetscCall(ISGetSize(stratumIS, &nstratumPnts));
2325:         for (PetscInt i = 0; i < nstratumPnts; i++) {
2326:           PetscInt *fs_pnts_buffer, stratumPnt = stratumPnts[i];
2327:           if (stratumPnt < fStart || stratumPnt >= fEnd) continue; // Skip non-face points
2328:           PetscCall(PetscSegBufferGetInts(fs_pntsSB, 1, &fs_pnts_buffer));
2329:           *fs_pnts_buffer = stratumPnt;
2330:         }
2331:         PetscCall(PetscSegBufferGetSize(fs_pntsSB, &fs_owned_count));
2332:         fs_owned = fs_owned_count;
2333:         PetscCall(PetscSegBufferExtractAlloc(fs_pntsSB, &fs_pnts));

2335:         PetscCall(PetscSegBufferDestroy(&fs_pntsSB));
2336:         PetscCall(ISRestoreIndices(stratumIS, &stratumPnts));
2337:         PetscCall(ISDestroy(&stratumIS));
2338:       } else fs_owned = 0;

2340:       PetscCallMPI(MPIU_Allreduce(&fs_owned, &fs_global, 1, MPIU_CGSIZE, MPI_SUM, comm));
2341:       fs_start = 0;
2342:       PetscCallMPI(MPI_Exscan(&fs_owned, &fs_start, 1, MPIU_CGSIZE, MPI_SUM, comm));
2343:       PetscCheck(fs_start + fs_owned <= fs_global, PETSC_COMM_SELF, PETSC_ERR_PLIB, "End range of point set (%" PRIdCGSIZE ") greater than global point set size (%" PRIdCGSIZE ")", fs_start + fs_owned, fs_global);

2345:       PetscCall(PetscSNPrintf(bc_name, sizeof bc_name, "FaceSet%" PetscInt_FMT, fsID));
2346:       PetscCallCGNSWrite(cg_boco_write(cgv->file_num, base, zone, bc_name, CGNS_ENUMV(BCTypeNull), CGNS_ENUMV(PointList), fs_global, NULL, &BC), dm, viewer);

2348:       PetscCall(PetscMalloc1(fs_owned, &fs_pnts_cg));
2349:       for (PetscInt i = 0; i < fs_owned; i++) {
2350:         PetscInt is_idx;

2352:         PetscCall(ISLocate(fsFacesAll, fs_pnts[i], &is_idx));
2353:         PetscCheck(is_idx >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Could not find face %" PetscInt_FMT " in list of all local face points", fs_pnts[i]);
2354:         fs_pnts_cg[i] = is_idx + f_start + 1;
2355:       }

2357:       const char *labels[] = {"Zone_t", "ZoneBC_t", "BC_t", "PointList"};
2358:       PetscCallCGNSWrite(cg_golist(cgv->file_num, base, 4, (char **)labels, (int[]){zone, 1, BC, 0}), dm, 0);
2359:       PetscCallCGNSWriteData(cgp_ptlist_write_data(cgv->file_num, fs_start + 1, fs_start + fs_owned, fs_pnts_cg), dm, viewer);

2361:       CGNS_ENUMT(GridLocation_t) grid_loc = CGNS_ENUMV(GridLocationNull);
2362:       if (topo_dim == 3) grid_loc = CGNS_ENUMV(FaceCenter);
2363:       else if (topo_dim == 2) grid_loc = CGNS_ENUMV(EdgeCenter);
2364:       else if (topo_dim == 1) grid_loc = CGNS_ENUMV(Vertex);
2365:       PetscCallCGNSWriteData(cg_boco_gridlocation_write(cgv->file_num, base, zone, BC, grid_loc), dm, viewer);

2367:       PetscCall(PetscFree(fs_pnts_cg));
2368:       PetscCall(PetscFree(fs_pnts));
2369:     }
2370:     PetscCall(ISDestroy(&fsFacesAll));
2371:     PetscCall(ISRestoreIndices(fsValuesGlobalIS, &fsValuesGlobal));
2372:     elem_offset += f_global;
2373:   }
2374:   PetscCall(ISDestroy(&fsValuesGlobalIS));

2376:   PetscCall(DMDestroy(&colloc_dm));
2377:   PetscFunctionReturn(PETSC_SUCCESS);
2378: }

2380: PetscErrorCode VecView_Plex_Local_CGNS(Vec V, PetscViewer viewer)
2381: {
2382:   PetscViewer_CGNS  *cgv = (PetscViewer_CGNS *)viewer->data;
2383:   DM                 dm;
2384:   PetscSection       section;
2385:   PetscInt           time_step, num_fields, pStart, pEnd, fvGhostStart;
2386:   PetscReal          time, *time_slot;
2387:   size_t            *step_slot;
2388:   const PetscScalar *v;
2389:   char               solution_name[PETSC_MAX_PATH_LEN];
2390:   int                sol;

2392:   PetscFunctionBegin;
2393:   PetscCall(VecGetDM(V, &dm));
2394:   PetscCall(DMGetLocalSection(dm, &section));
2395:   PetscCall(PetscSectionGetChart(section, &pStart, &pEnd));
2396:   PetscCall(DMPlexGetCellTypeStratum(dm, DM_POLYTOPE_FV_GHOST, &fvGhostStart, NULL));
2397:   if (fvGhostStart >= 0) pEnd = fvGhostStart;

2399:   if (!cgv->node_l2g) PetscCall(DMView(dm, viewer));
2400:   if (!cgv->grid_loc) { // Determine if writing to cell-centers or to nodes
2401:     PetscInt cStart, cEnd;
2402:     PetscInt local_grid_loc, global_grid_loc;

2404:     PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
2405:     if (fvGhostStart >= 0) cEnd = fvGhostStart;
2406:     if (cgv->num_local_nodes == 0) local_grid_loc = -1;
2407:     else if (cStart == pStart && cEnd == pEnd) local_grid_loc = CGNS_ENUMV(CellCenter);
2408:     else local_grid_loc = CGNS_ENUMV(Vertex);

2410:     PetscCallMPI(MPIU_Allreduce(&local_grid_loc, &global_grid_loc, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)viewer)));
2411:     if (local_grid_loc != -1)
2412:       PetscCheck(local_grid_loc == global_grid_loc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Ranks with different grid locations not supported. Local has %" PetscInt_FMT ", allreduce returned %" PetscInt_FMT, local_grid_loc, global_grid_loc);
2413:     PetscCheck((global_grid_loc == CGNS_ENUMV(CellCenter)) || (global_grid_loc == CGNS_ENUMV(Vertex)), PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Grid location should only be CellCenter (%d) or Vertex (%d), but have %" PetscInt_FMT, CGNS_ENUMV(CellCenter), CGNS_ENUMV(Vertex), global_grid_loc);
2414:     cgv->grid_loc = (CGNS_ENUMT(GridLocation_t))global_grid_loc;
2415:   }
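        // Lazily allocate the scratch buffer used to pack one field component at a time over the
        // contiguous node or element range owned by this rank.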
2416:   if (!cgv->nodal_field) {
2417:     switch (cgv->grid_loc) {
2418:     case CGNS_ENUMV(Vertex): {
2419:       PetscCall(PetscMalloc1(cgv->nEnd - cgv->nStart, &cgv->nodal_field));
2420:     } break;
2421:     case CGNS_ENUMV(CellCenter): {
2422:       PetscCall(PetscMalloc1(cgv->eEnd - cgv->eStart, &cgv->nodal_field));
2423:     } break;
2424:     default:
2425:       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Can only write for Vertex and CellCenter grid locations");
2426:     }
2427:   }
2428:   if (!cgv->output_times) PetscCall(PetscSegBufferCreate(sizeof(PetscReal), 20, &cgv->output_times));
2429:   if (!cgv->output_steps) PetscCall(PetscSegBufferCreate(sizeof(size_t), 20, &cgv->output_steps));

2431:   PetscCall(DMGetOutputSequenceNumber(dm, &time_step, &time));
2432:   if (time_step < 0) {
2433:     time_step = 0;
2434:     time      = 0.;
2435:   }
2436:   // Avoid "Duplicate child name found" error by not writing an already-written solution.
2437:   // This usually occurs when a solution is written and the solver then diverges on the very next timestep.
2438:   if (time_step == cgv->previous_output_step) PetscFunctionReturn(PETSC_SUCCESS);

2440:   PetscCall(PetscSegBufferGet(cgv->output_times, 1, &time_slot));
2441:   *time_slot = time;
2442:   PetscCall(PetscSegBufferGet(cgv->output_steps, 1, &step_slot));
2443:   *step_slot = cgv->previous_output_step = time_step;
2444:   PetscCall(PetscSNPrintf(solution_name, sizeof solution_name, "FlowSolution%" PetscInt_FMT, time_step));
2445:   PetscCallCGNSWrite(cg_sol_write(cgv->file_num, cgv->base, cgv->zone, solution_name, cgv->grid_loc, &sol), V, viewer);
2446:   PetscCall(VecGetArrayRead(V, &v));
2447:   PetscCall(PetscSectionGetNumFields(section, &num_fields));
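        // Write one CGNS field per (section field, component) pair; the CGNS field name is
        // "<field>.<component>" unless either name is empty or the component name is the default "0".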
2448:   for (PetscInt field = 0; field < num_fields; field++) {
2449:     PetscInt    ncomp;
2450:     const char *field_name;
2451:     PetscCall(PetscSectionGetFieldName(section, field, &field_name));
2452:     PetscCall(PetscSectionGetFieldComponents(section, field, &ncomp));
2453:     for (PetscInt comp = 0; comp < ncomp; comp++) {
2454:       int         cgfield;
2455:       const char *comp_name;
2456:       char        cgns_field_name[32]; // CGNS max field name is 32
2457:       CGNS_ENUMT(DataType_t) datatype;
2458:       PetscCall(PetscSectionGetComponentName(section, field, comp, &comp_name));
2459:       if (ncomp == 1 && comp_name[0] == '0' && comp_name[1] == '\0' && field_name[0] != '\0') PetscCall(PetscStrncpy(cgns_field_name, field_name, sizeof cgns_field_name));
2460:       else if (field_name[0] == '\0') PetscCall(PetscStrncpy(cgns_field_name, comp_name, sizeof cgns_field_name));
2461:       else PetscCall(PetscSNPrintf(cgns_field_name, sizeof cgns_field_name, "%s.%s", field_name, comp_name));
2462:       PetscCall(PetscCGNSDataType(PETSC_SCALAR, &datatype));
2463:       PetscCallCGNSWrite(cgp_field_write(cgv->file_num, cgv->base, cgv->zone, sol, datatype, cgns_field_name, &cgfield), V, viewer);
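            // Pack this component into cgv->nodal_field: for Vertex output, scatter through
            // node_l2g and keep only nodes in this rank's [nStart, nEnd) range; for CellCenter
            // output, copy values in local element order.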
2464:       for (PetscInt p = pStart, n = 0; p < pEnd; p++) {
2465:         PetscInt off, dof;
2466:         PetscCall(PetscSectionGetFieldDof(section, p, field, &dof));
2467:         if (dof == 0) continue;
2468:         PetscCall(PetscSectionGetFieldOffset(section, p, field, &off));
2469:         for (PetscInt c = comp; c < dof; c += ncomp, n++) {
2470:           switch (cgv->grid_loc) {
2471:           case CGNS_ENUMV(Vertex): {
2472:             PetscInt gn = cgv->node_l2g[n];
2473:             if (gn < cgv->nStart || cgv->nEnd <= gn) continue;
2474:             cgv->nodal_field[gn - cgv->nStart] = v[off + c];
2475:           } break;
2476:           case CGNS_ENUMV(CellCenter): {
2477:             cgv->nodal_field[n] = v[off + c];
2478:           } break;
2479:           default:
2480:             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Can only pack for Vertex and CellCenter grid locations");
2481:           }
2482:         }
2483:       }
2484:       // CGNS nodes use 1-based indexing
2485:       cgsize_t start, end;
2486:       switch (cgv->grid_loc) {
2487:       case CGNS_ENUMV(Vertex): {
2488:         start = cgv->nStart + 1;
2489:         end   = cgv->nEnd;
2490:       } break;
2491:       case CGNS_ENUMV(CellCenter): {
2492:         start = cgv->eStart + 1;
2493:         end   = cgv->eEnd;
2494:       } break;
2495:       default:
2496:         SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Can only write for Vertex and CellCenter grid locations");
2497:       }
2498:       PetscCallCGNSWriteData(cgp_field_write_data(cgv->file_num, cgv->base, cgv->zone, sol, cgfield, &start, &end, cgv->nodal_field), V, viewer);
2499:     }
2500:   }
2501:   PetscCall(VecRestoreArrayRead(V, &v));
2502:   PetscCall(PetscViewerCGNSCheckBatch_Internal(viewer));
2503:   PetscFunctionReturn(PETSC_SUCCESS);
2504: }
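/*
  Minimal write-side usage sketch (not part of this file; the names `dm` and `V` are assumed to be
  an existing DMPlex and a solution vector attached to it). Viewing through a CGNS viewer writes the
  mesh on first use (see the DMView call above) and then one FlowSolution_t per output step; the
  exact dispatch to this routine may pass through intermediate Plex VecView wrappers.

    PetscViewer viewer;
    PetscCall(PetscViewerCGNSOpen(PETSC_COMM_WORLD, "sol.cgns", FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(V, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
*/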

2506: PetscErrorCode VecLoad_Plex_CGNS_Internal(Vec V, PetscViewer viewer)
2507: {
2508:   MPI_Comm          comm;
2509:   char              buffer[CGIO_MAX_NAME_LENGTH + 1];
2510:   PetscViewer_CGNS *cgv                 = (PetscViewer_CGNS *)viewer->data;
2511:   int               cgid                = cgv->file_num;
2512:   PetscBool         use_parallel_viewer = PETSC_FALSE;
2513:   int               z                   = 1; // Only support one zone
2514:   int               B                   = 1; // Only support one base
2515:   int               numComp;
2516:   PetscInt          V_numComps, mystartv, myendv, myownedv;

2518:   PetscFunctionBegin;
2519:   PetscCall(PetscObjectGetComm((PetscObject)V, &comm));

2521:   PetscCall(PetscOptionsGetBool(NULL, NULL, "-dm_plex_cgns_parallel", &use_parallel_viewer, NULL));
2522:   PetscCheck(use_parallel_viewer, comm, PETSC_ERR_USER_INPUT, "Cannot use VecLoad with CGNS file in serial reader; use -dm_plex_cgns_parallel to enable parallel reader");

2524:   { // Get CGNS node ownership information
2525:     int         nbases, nzones;
2526:     PetscInt    NVertices;
2527:     PetscLayout vtx_map;
2528:     cgsize_t    sizes[3]; /* Number of vertices, number of cells, number of boundary vertices */

2530:     PetscCallCGNSRead(cg_nbases(cgid, &nbases), V, viewer);
2531:     PetscCheck(nbases <= 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "CGNS file must have a single base, not %d", nbases);
2532:     PetscCallCGNSRead(cg_nzones(cgid, B, &nzones), V, viewer);
2533:     PetscCheck(nzones == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "CGNS file must have a single zone, not %d", (int)nzones);

2535:     PetscCallCGNSRead(cg_zone_read(cgid, B, z, buffer, sizes), V, viewer);
2536:     NVertices = sizes[0];

2538:     PetscCall(PetscLayoutCreateFromSizes(comm, PETSC_DECIDE, NVertices, 1, &vtx_map));
2539:     PetscCall(PetscLayoutGetRange(vtx_map, &mystartv, &myendv));
2540:     PetscCall(PetscLayoutGetLocalSize(vtx_map, &myownedv));
2541:     PetscCall(PetscLayoutDestroy(&vtx_map));
2542:   }

2544:   { // -- Read data from file into Vec
2545:     PetscScalar *fields = NULL;
2546:     PetscSF      sfNatural;

2548:     { // Check compatibility between sfNatural and the data source and sink
2549:       DM       dm;
2550:       PetscInt nleaves, nroots, V_local_size;

2552:       PetscCall(VecGetDM(V, &dm));
2553:       PetscCall(DMGetNaturalSF(dm, &sfNatural));
2554:       PetscCheck(sfNatural, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "DM of Vec must have sfNatural");
2555:       PetscCall(PetscSFGetGraph(sfNatural, &nroots, &nleaves, NULL, NULL));
2556:       PetscCall(VecGetLocalSize(V, &V_local_size));
2557:       PetscCheck(nleaves == myownedv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Number of locally owned vertices (%" PetscInt_FMT ") must match number of leaves in sfNatural (%" PetscInt_FMT ")", myownedv, nleaves);
2558:       if (nroots == 0) {
2559:         PetscCheck(V_local_size == nroots, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Local Vec size (%" PetscInt_FMT ") must be zero if number of roots in sfNatural is zero", V_local_size);
2560:         V_numComps = 0;
2561:       } else {
2562:         PetscCheck(V_local_size % nroots == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Local Vec size (%" PetscInt_FMT ") not evenly divisible by number of roots in sfNatural (%" PetscInt_FMT ")", V_local_size, nroots);
2563:         V_numComps = V_local_size / nroots;
2564:       }
2565:     }

2567:     { // Read data into component-major ordering
2568:       int isol, numSols;
2569:       CGNS_ENUMT(DataType_t) datatype;
2570:       double *fields_CGNS;

2572:       PetscCallCGNSRead(cg_nsols(cgid, B, z, &numSols), V, viewer);
2573:       PetscCall(PetscViewerCGNSGetSolutionFileIndex_Internal(viewer, &isol));
2574:       PetscCallCGNSRead(cg_nfields(cgid, B, z, isol, &numComp), V, viewer);
2575:       PetscCheck(V_numComps == numComp || V_numComps == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Vec sized for %" PetscInt_FMT " components per node, but file has %d components per node", V_numComps, numComp);

2577:       cgsize_t range_min[3] = {mystartv + 1, 1, 1};
2578:       cgsize_t range_max[3] = {myendv, 1, 1};
2579:       PetscCall(PetscMalloc1(myownedv * numComp, &fields_CGNS));
2580:       PetscCall(PetscMalloc1(myownedv * numComp, &fields));
2581:       for (int d = 0; d < numComp; ++d) {
2582:         PetscCallCGNSRead(cg_field_info(cgid, B, z, isol, (d + 1), &datatype, buffer), V, viewer);
2583:         PetscCheck(datatype == CGNS_ENUMV(RealDouble), PETSC_COMM_SELF, PETSC_ERR_ARG_NOTSAMETYPE, "Field %s in file is not of type double", buffer);
2584:         PetscCallCGNSReadData(cgp_field_read_data(cgid, B, z, isol, (d + 1), range_min, range_max, &fields_CGNS[d * myownedv]), V, viewer);
2585:       }
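            // Transpose from the CGNS component-major layout read above to the interleaved
            // (point-major) layout expected by the sfNatural reduction below.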
2586:       for (int d = 0; d < numComp; ++d) {
2587:         for (PetscInt v = 0; v < myownedv; ++v) fields[v * numComp + d] = fields_CGNS[d * myownedv + v];
2588:       }
2589:       PetscCall(PetscFree(fields_CGNS));
2590:     }

2592:     { // Reduce fields into Vec array
2593:       PetscScalar *V_array;
2594:       MPI_Datatype fieldtype;

2596:       PetscCall(VecGetArrayWrite(V, &V_array));
2597:       PetscCallMPI(MPI_Type_contiguous(numComp, MPIU_SCALAR, &fieldtype));
2598:       PetscCallMPI(MPI_Type_commit(&fieldtype));
2599:       PetscCall(PetscSFReduceBegin(sfNatural, fieldtype, fields, V_array, MPI_REPLACE));
2600:       PetscCall(PetscSFReduceEnd(sfNatural, fieldtype, fields, V_array, MPI_REPLACE));
2601:       PetscCallMPI(MPI_Type_free(&fieldtype));
2602:       PetscCall(VecRestoreArrayWrite(V, &V_array));
2603:     }
2604:     PetscCall(PetscFree(fields));
2605:   }
2606:   PetscFunctionReturn(PETSC_SUCCESS);
2607: }
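
/*
  Minimal read-side sketch (not part of this file). It assumes the DMPlex `dm` was created from the
  same CGNS file with the parallel reader (e.g. -dm_plex_cgns_parallel) so that it carries an
  sfNatural, and that the file stores RealDouble fields in a single base and zone, as checked above.

    PetscViewer viewer;
    Vec         V;
    PetscCall(PetscViewerCGNSOpen(PETSC_COMM_WORLD, "sol.cgns", FILE_MODE_READ, &viewer));
    PetscCall(DMCreateGlobalVector(dm, &V));
    PetscCall(VecLoad(V, viewer));
    PetscCall(VecDestroy(&V));
    PetscCall(PetscViewerDestroy(&viewer));
*/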