Actual source code: ex15.c
#include "petscsf.h"
static char help[] = "Demonstrate CGNS parallel load-save-reload cycle, including data\n\n";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#define EX "ex15.c"

typedef struct {
  char      infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char      outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscBool heterogeneous;               /* Test save on N / load on M */
  PetscInt  ntimes;                      /* How many times to run the save-load cycle */
} AppCtx;
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool flg;

  PetscFunctionBeginUser;
  options->infile[0]     = '\0';
  options->outfile[0]    = '\0';
  options->heterogeneous = PETSC_FALSE;
  options->ntimes        = 2;
  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsString("-infile", "The input CGNS file", EX, options->infile, options->infile, sizeof(options->infile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
  PetscCall(PetscOptionsString("-outfile", "The output CGNS file", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
  PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL));
  PetscCall(PetscOptionsInt("-ntimes", "How many times to run the save-load cycle", EX, options->ntimes, &options->ntimes, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
// @brief Create DM from CGNS file and set up PetscFE to VecLoad the solution from that file
PetscErrorCode ReadCGNSDM(MPI_Comm comm, const char filename[], DM *dm)
{
  PetscInt degree;

  PetscFunctionBeginUser;
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex15_plex", PETSC_TRUE, dm));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

  /* Redistribute */
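  /* Switching the options prefix makes the DMSetFromOptions() call below respond to
     -redistributed_dm_distribute (see the TEST args), redistributing the mesh that was loaded
     with distribution disabled above. */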
  PetscCall(DMSetOptionsPrefix(*dm, "redistributed_"));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
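
  /* The CGNS reader leaves a field (the "natural" section's FE) on the DM; grab its polynomial
     degree, then clear the field and local section so an equivalent 5-component FE can be set up
     below for VecLoad(). */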
  { // Get degree of the natural section
    PetscFE        fe_natural;
    PetscDualSpace dual_space_natural;

    PetscCall(DMGetField(*dm, 0, NULL, (PetscObject *)&fe_natural));
    PetscCall(PetscFEGetDualSpace(fe_natural, &dual_space_natural));
    PetscCall(PetscDualSpaceGetOrder(dual_space_natural, &degree));
    PetscCall(DMClearFields(*dm));
    PetscCall(DMSetLocalSection(*dm, NULL));
  }
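
  /* Recreate the solution space: five components (Pressure, VelocityX/Y/Z, Temperature, named
     below) on non-simplex (tensor-product) cells at the degree recovered above. */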
  { // Setup fe to load in the initial condition data
    PetscFE  fe;
    PetscInt dim;

    PetscCall(DMGetDimension(*dm, &dim));
    PetscCall(PetscFECreateLagrange(PETSC_COMM_SELF, dim, 5, PETSC_FALSE, degree, PETSC_DETERMINE, &fe));
    PetscCall(PetscObjectSetName((PetscObject)fe, "FE for VecLoad"));
    PetscCall(DMAddField(*dm, NULL, (PetscObject)fe));
    PetscCall(DMCreateDS(*dm));
    PetscCall(PetscFEDestroy(&fe));
  }

  // Set section component names, used when writing out CGNS files
  PetscSection section;
  PetscCall(DMGetLocalSection(*dm, &section));
  PetscCall(PetscSectionSetFieldName(section, 0, ""));
  PetscCall(PetscSectionSetComponentName(section, 0, 0, "Pressure"));
  PetscCall(PetscSectionSetComponentName(section, 0, 1, "VelocityX"));
  PetscCall(PetscSectionSetComponentName(section, 0, 2, "VelocityY"));
  PetscCall(PetscSectionSetComponentName(section, 0, 3, "VelocityZ"));
  PetscCall(PetscSectionSetComponentName(section, 0, 4, "Temperature"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Verify that V_load is equivalent to V_serial, even if V_load is distributed
PetscErrorCode VerifyLoadedSolution(DM dm_serial, Vec V_serial, DM dm_load, Vec V_load, PetscScalar tol)
{
  MPI_Comm     comm = PetscObjectComm((PetscObject)dm_load);
  PetscSF      load_to_serial_sf;
  PetscScalar *array_load_bcast = NULL;
  PetscInt     num_comps        = 5;

  PetscFunctionBeginUser;
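  /* dm_serial/V_serial are non-NULL only on the rank that did the serial read; all ranks of the
     loaded vector's communicator build an SF that maps loaded (distributed) nodes onto the serial
     node ordering by matching node coordinates within a tolerance. */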
  { // Create SF to broadcast loaded vec nodes to serial vec nodes
    PetscInt           dim, num_local_serial = 0, num_local_load;
    Vec                coord_Vec_serial, coord_Vec_load;
    const PetscScalar *coord_serial = NULL, *coord_load;

    PetscCall(DMGetCoordinateDim(dm_load, &dim));
    PetscCall(DMGetCoordinates(dm_load, &coord_Vec_load));
    PetscCall(VecGetLocalSize(coord_Vec_load, &num_local_load));
    num_local_load /= dim;

    PetscCall(VecGetArrayRead(coord_Vec_load, &coord_load));

    if (dm_serial) {
      PetscCall(DMGetCoordinates(dm_serial, &coord_Vec_serial));
      PetscCall(VecGetLocalSize(coord_Vec_serial, &num_local_serial));
      num_local_serial /= dim;
      PetscCall(VecGetArrayRead(coord_Vec_serial, &coord_serial));
    }

    PetscCall(PetscSFCreate(comm, &load_to_serial_sf));
    PetscCall(PetscSFSetGraphFromCoordinates(load_to_serial_sf, num_local_load, num_local_serial, dim, 100 * PETSC_MACHINE_EPSILON, coord_load, coord_serial));
    PetscCall(PetscSFViewFromOptions(load_to_serial_sf, NULL, "-verify_sf_view"));

    PetscCall(VecRestoreArrayRead(coord_Vec_load, &coord_load));
    if (dm_serial) PetscCall(VecRestoreArrayRead(coord_Vec_serial, &coord_serial));
  }
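
  /* Each node carries num_comps values, so the broadcast uses a contiguous MPI datatype of
     num_comps reals as its unit type and moves whole per-node blocks at once. */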
  { // Broadcast the loaded vector data into a serial-sized array
    PetscInt           size_local_serial = 0;
    const PetscScalar *array_load;
    MPI_Datatype       unit;

    PetscCall(VecGetArrayRead(V_load, &array_load));
    if (V_serial) {
      PetscCall(VecGetLocalSize(V_serial, &size_local_serial));
      PetscCall(PetscMalloc1(size_local_serial, &array_load_bcast));
    }

    PetscCallMPI(MPI_Type_contiguous(num_comps, MPIU_REAL, &unit));
    PetscCallMPI(MPI_Type_commit(&unit));
    PetscCall(PetscSFBcastBegin(load_to_serial_sf, unit, array_load, array_load_bcast, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(load_to_serial_sf, unit, array_load, array_load_bcast, MPI_REPLACE));
    PetscCallMPI(MPI_Type_free(&unit));
    PetscCall(VecRestoreArrayRead(V_load, &array_load));
  }

  if (V_serial) {
    const PetscScalar *array_serial;
    PetscInt           size_local_serial;

    PetscCall(VecGetArrayRead(V_serial, &array_serial));
    PetscCall(VecGetLocalSize(V_serial, &size_local_serial));
    for (PetscInt i = 0; i < size_local_serial; i++) {
      if (PetscAbs(array_serial[i] - array_load_bcast[i]) > tol) PetscCall(PetscPrintf(comm, "DoF %" PetscInt_FMT " is inconsistent. Found %g, expected %g\n", i, (double)array_load_bcast[i], (double)array_serial[i]));
    }
    PetscCall(VecRestoreArrayRead(V_serial, &array_serial));
  }

  PetscCall(PetscFree(array_load_bcast));
  PetscCall(PetscSFDestroy(&load_to_serial_sf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  AppCtx      user;
  MPI_Comm    comm;
  PetscMPIInt gsize, grank, mycolor;
  PetscBool   flg;
  const char *infilename;
  DM          dm_serial = NULL;
  Vec         V_serial  = NULL;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));
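
  /* Only rank 0 reads the reference mesh and solution, on a sub-communicator containing just
     itself; this serial copy is used to verify every reloaded solution in the cycles below. */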
  { // Read infile in serial
    PetscViewer viewer;
    PetscMPIInt gsize_serial;

    mycolor = grank == 0 ? 0 : 1;
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (grank == 0) {
      PetscCallMPI(MPI_Comm_size(comm, &gsize_serial));

      PetscCall(ReadCGNSDM(comm, user.infile, &dm_serial));
      PetscCall(DMSetOptionsPrefix(dm_serial, "serial_"));

      /* We just test/demonstrate whether the DM is distributed - not needed in application code */
      PetscCall(DMPlexIsDistributed(dm_serial, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      PetscCall(DMViewFromOptions(dm_serial, NULL, "-dm_view"));
      PetscCall(PetscViewerCGNSOpen(comm, user.infile, FILE_MODE_READ, &viewer));
      PetscCall(DMGetGlobalVector(dm_serial, &V_serial));
      PetscCall(VecLoad(V_serial, viewer));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCallMPI(MPI_Comm_free(&comm));
    }
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }
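
  /* Each cycle loads the mesh and solution from the current input file, verifies the solution
     against the rank-0 reference, and writes the result to the output CGNS file; every cycle
     after the first reads the file written by the previous one. */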
  for (PetscInt i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile for the initial load */
      infilename = user.infile;
    } else {
      /* Use outfile for all I/O except the very initial load */
      infilename = user.outfile;
    }
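
    /* In heterogeneous mode the active communicator shrinks from cycle to cycle (excluded ranks
       get color 1 and skip the body below), so the file saved by N ranks in one cycle is reloaded
       by fewer ranks in the next. */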
    if (user.heterogeneous) {
      mycolor = (PetscMPIInt)(grank > user.ntimes - i);
    } else {
      mycolor = (PetscMPIInt)0;
    }
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM          dm;
      Vec         V;
      PetscViewer viewer;
      PetscMPIInt comm_size;
      const char *name;
      PetscReal   time;
      PetscBool   set;

      PetscCallMPI(MPI_Comm_size(comm, &comm_size));
      PetscCall(PetscPrintf(comm, "Begin cycle %" PetscInt_FMT ", comm size %d\n", i, comm_size));

      // Load DM from CGNS file
      PetscCall(ReadCGNSDM(comm, infilename, &dm));
      PetscCall(DMSetOptionsPrefix(dm, "loaded_"));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      // Load solution from CGNS file
      PetscCall(PetscViewerCGNSOpen(comm, infilename, FILE_MODE_READ, &viewer));
      PetscCall(DMGetGlobalVector(dm, &V));
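      /* Select which FlowSolution in the CGNS file to read before VecLoad(); the viewer also
         reports that solution's name and time stamp, which are checked and printed below. */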
      PetscCall(PetscViewerCGNSSetSolutionIndex(viewer, 1));
      { // Test GetSolutionIndex, not needed in application code
        PetscInt solution_index;
        PetscCall(PetscViewerCGNSGetSolutionIndex(viewer, &solution_index));
        PetscCheck(solution_index == 1, comm, PETSC_ERR_ARG_INCOMP, "Returned solution index wrong.");
      }
      PetscCall(PetscViewerCGNSGetSolutionName(viewer, &name));
      PetscCall(PetscViewerCGNSGetSolutionTime(viewer, &time, &set));
      PetscCheck(set, comm, PETSC_ERR_RETURN, "Time data wasn't set!");
      PetscCall(PetscPrintf(comm, "Solution Name: %s, and time %g\n", name, (double)time));
      PetscCall(VecLoad(V, viewer));
      PetscCall(PetscViewerDestroy(&viewer));

      // Verify loaded solution against serial solution
      PetscCall(VerifyLoadedSolution(dm_serial, V_serial, dm, V, 100 * PETSC_MACHINE_EPSILON));

      // Write loaded solution to CGNS file
      PetscCall(PetscViewerCGNSOpen(comm, user.outfile, FILE_MODE_WRITE, &viewer));
      PetscCall(VecView(V, viewer));
      PetscCall(PetscViewerDestroy(&viewer));

      PetscCall(DMRestoreGlobalVector(dm, &V));
      PetscCall(DMDestroy(&dm));
      PetscCall(PetscPrintf(comm, "End cycle %" PetscInt_FMT "\n--------\n", i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  if (V_serial && dm_serial) PetscCall(DMRestoreGlobalVector(dm_serial, &V_serial));
  if (dm_serial) PetscCall(DMDestroy(&dm_serial));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  build:
    requires: cgns
  testset:
    suffix: cgns
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/2x2x2_Q3_wave.cgns -outfile 2x2x2_Q3_wave_output.cgns
    args: -dm_plex_cgns_parallel -ntimes 3 -heterogeneous -serial_dm_view -loaded_dm_view -redistributed_dm_distribute
    test:
      # this partitioner should not shuffle anything, it should keep the partitioning produced by the reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

TEST*/