Actual source code: grvtk.c

#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named.  This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da, PetscBool *fieldsnamed)
{
  PetscInt f, bs;

  PetscFunctionBegin;
  *fieldsnamed = PETSC_FALSE;
  PetscCall(DMDAGetDof(da, &bs));
  for (f = 0; f < bs; ++f) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da, f, &fieldname));
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
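/* A minimal, illustrative sketch (not part of this source) of the caller-side setup that
   makes DMDAGetFieldsNamed() return PETSC_TRUE: naming any component with DMDASetFieldName()
   causes the writers below to emit one scalar DataArray per component instead of a single
   vector DataArray.  The DM handle "da" and the component names are placeholders.

     DM da;
     PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR,
                            32, 32, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
     PetscCall(DMSetUp(da));
     PetscCall(DMDASetFieldName(da, 0, "pressure"));
     PetscCall(DMDASetFieldName(da, 1, "temperature"));
*/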

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                    *fp;
  PetscMPIInt              rank, size, tag;
  DMDALocalInfo            info;
  PetscInt                 dim, mx, my, mz, cdim, bs, maxnnodes, maxbs, i, j, k;
  PetscInt64               boffset;
  PetscInt                 rloc[6], (*grloc)[6] = NULL;
  PetscScalar             *array, *array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetCoordinates(da, &Coords));
  if (Coords) {
    PetscInt csize;
    PetscCall(VecGetSize(Coords, &csize));
    PetscCheck(csize % (mx * my * mz) == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate vector size mismatch");
    cdim = csize / (mx * my * mz);
    PetscCheck(cdim >= dim && cdim <= 3, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
  PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\" header_type=\"UInt64\">\n", byte_order));
  PetscCall(PetscFPrintf(comm, fp, "  <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));

  if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, rank == 0 ? grloc[0] : NULL, 6, MPIU_INT, 0, comm));

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (PetscMPIInt r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;

    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
    PetscCall(PetscFPrintf(comm, fp, "      <Points>\n"));
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
    boffset += 3 * nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "      </Points>\n"));

    PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link = vtk->link; link; link = link->next) {
      Vec         X = (Vec)link->vec;
      PetscInt    bs, f;
      DM          daCurr;
      PetscBool   fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      maxbs = PetscMax(maxbs, bs);

      if (((PetscObject)X)->name || link != vtk->link) PetscCall(PetscObjectGetName((PetscObject)X, &vecname));

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char        buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
          if (!fieldname) {
            PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, fieldname, boffset));
          boffset += nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
        }
      } else {
        PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, bs, boffset));
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
      }
    }
    PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
    PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm, fp, "  </StructuredGrid>\n"));
  PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm, fp, "_"));
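  /* Everything after the "_" marker is raw appended binary data: each DataArray declared
     above is stored as a byte count in the header type declared at the top of the file
     (UInt64, hence the sizeof(PetscInt64) added to every offset) followed by the values
     themselves; PetscViewerVTKFWrite() below writes both the count and the payload. */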

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscCall(PetscMalloc2(maxnnodes * PetscMax(3, maxbs), &array, maxnnodes * PetscMax(3, maxbs), &array2));
  for (PetscMPIInt r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;

    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;

      PetscCall(VecGetArrayRead(Coords, &coords));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;

          PetscCallMPI(MPIU_Recv(array, nnodes * cdim, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * cdim, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
        } else PetscCall(PetscArraycpy(array, coords, nnodes * cdim));
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k = 0; k < zm; k++) {
          for (j = 0; j < ym; j++) {
            for (i = 0; i < xm; i++) {
              PetscInt Iloc        = i + xm * (j + ym * k);
              array2[Iloc * 3 + 0] = array[Iloc * cdim + 0];
              array2[Iloc * 3 + 1] = cdim > 1 ? array[Iloc * cdim + 1] : 0.0;
              array2[Iloc * 3 + 2] = cdim > 2 ? array[Iloc * cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        PetscCallMPI(MPIU_Send((void *)coords, nnodes * cdim, MPIU_SCALAR, 0, tag, comm));
      }
      PetscCall(VecRestoreArrayRead(Coords, &coords));
    } else { /* Fabricate some coordinates using grid index */
      for (k = 0; k < zm; k++) {
        for (j = 0; j < ym; j++) {
          for (i = 0; i < xm; i++) {
            PetscInt Iloc        = i + xm * (j + ym * k);
            array2[Iloc * 3 + 0] = xs + i;
            array2[Iloc * 3 + 1] = ys + j;
            array2[Iloc * 3 + 2] = zs + k;
          }
        }
      }
    }
    PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes * 3, MPIU_SCALAR));

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec                X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt           bs, f;
      DM                 daCurr;
      PetscBool          fieldsnamed;
      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      PetscCall(VecGetArrayRead(X, &x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;

          PetscCallMPI(MPIU_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %d", r);
        } else PetscCall(PetscArraycpy(array, x, nnodes * bs));

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc]  = array[Iloc * bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer, fp, array, bs * nnodes, MPIU_SCALAR));
      } else if (r == rank) PetscCallMPI(MPIU_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
      PetscCall(VecRestoreArrayRead(X, &x));
    }
  }
  PetscCall(PetscFree2(array, array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
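/* A hedged usage sketch (not part of this source): the VTS writer above is typically reached by
   viewing a DMDA-owned Vec through a PETSCVIEWERVTK viewer whose filename ends in ".vts"; the
   viewer queues the Vec and invokes DMDAVTKWriteAll() below when it is flushed or destroyed.
   The Vec "x" and the file name are placeholders.

     PetscViewer viewer;
     PetscCall(PetscViewerVTKOpen(PETSC_COMM_WORLD, "solution.vts", FILE_MODE_WRITE, &viewer));
     PetscCall(VecView(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/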

static PetscErrorCode DMDAVTKWriteAll_VTR(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                    *fp;
  PetscMPIInt              rank, size, tag;
  DMDALocalInfo            info;
  PetscInt                 dim, mx, my, mz, maxnnodes, maxbs, i, j, k;
  PetscInt64               boffset;
  PetscInt                 rloc[6], (*grloc)[6] = NULL;
  PetscScalar             *array, *array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
  PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\" header_type=\"UInt64\">\n", byte_order));
  PetscCall(PetscFPrintf(comm, fp, "  <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));

  if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, rank == 0 ? grloc[0] : NULL, 6, MPIU_INT, 0, comm));

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (PetscMPIInt r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;

    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
    PetscCall(PetscFPrintf(comm, fp, "      <Coordinates>\n"));
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
    boffset += xm * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
    boffset += ym * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
    boffset += zm * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "      </Coordinates>\n"));
    PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link = vtk->link; link; link = link->next) {
      Vec         X = (Vec)link->vec;
      PetscInt    bs, f;
      DM          daCurr;
      PetscBool   fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      maxbs = PetscMax(maxbs, bs);
      if (((PetscObject)X)->name || link != vtk->link) PetscCall(PetscObjectGetName((PetscObject)X, &vecname));

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char        buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
          if (!fieldname) {
            PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, fieldname, boffset));
          boffset += nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
        }
      } else {
        PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, bs, boffset));
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
      }
    }
    PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
    PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm, fp, "  </RectilinearGrid>\n"));
  PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm, fp, "_"));

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscCall(PetscMalloc2(PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array, PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array2));
  for (PetscMPIInt r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;

    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }
    { /* Write the coordinates */
      Vec Coords;

      PetscCall(DMGetCoordinates(da, &Coords));
      if (Coords) {
        const PetscScalar *coords;
        PetscCall(VecGetArrayRead(Coords, &coords));
        if (rank == 0) {
          if (r) {
            PetscMPIInt nn;

            PetscCallMPI(MPIU_Recv(array, xm + ym + zm, MPIU_SCALAR, r, tag, comm, &status));
            PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
            PetscCheck(nn == xm + ym + zm, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
          } else {
            /* x coordinates */
            for (j = 0, k = 0, i = 0; i < xm; i++) {
              PetscInt Iloc = i + xm * (j + ym * k);

              array[i] = coords[Iloc * dim + 0];
            }
            /* y coordinates */
            for (i = 0, k = 0, j = 0; j < ym; j++) {
              PetscInt Iloc = i + xm * (j + ym * k);

              array[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
            }
            /* z coordinates */
            for (i = 0, j = 0, k = 0; k < zm; k++) {
              PetscInt Iloc = i + xm * (j + ym * k);

              array[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j = 0, k = 0, i = 0; i < xm; i++) {
            PetscInt Iloc = i + xm * (j + ym * k);

            array2[i] = coords[Iloc * dim + 0];
          }
          for (i = 0, k = 0, j = 0; j < ym; j++) {
            PetscInt Iloc = i + xm * (j + ym * k);

            array2[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
          }
          for (i = 0, j = 0, k = 0; k < zm; k++) {
            PetscInt Iloc = i + xm * (j + ym * k);

            array2[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
          }
          PetscCallMPI(MPIU_Send((void *)array2, xm + ym + zm, MPIU_SCALAR, 0, tag, comm));
        }
        PetscCall(VecRestoreArrayRead(Coords, &coords));
      } else { /* Fabricate some coordinates using grid index */
        for (i = 0; i < xm; i++) array[i] = xs + i;
        for (j = 0; j < ym; j++) array[j + xm] = ys + j;
        for (k = 0; k < zm; k++) array[k + xm + ym] = zs + k;
      }
      if (rank == 0) {
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &array[0], xm, MPIU_SCALAR));
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &array[xm], ym, MPIU_SCALAR));
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &array[xm + ym], zm, MPIU_SCALAR));
      }
    }

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec                X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt           bs, f;
      DM                 daCurr;
      PetscBool          fieldsnamed;

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));

      PetscCall(VecGetArrayRead(X, &x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;

          PetscCallMPI(MPIU_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %d", r);
        } else PetscCall(PetscArraycpy(array, x, nnodes * bs));
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc]  = array[Iloc * bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer, fp, array, nnodes * bs, MPIU_SCALAR));

      } else if (r == rank) {
        PetscCallMPI(MPIU_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
      }
      PetscCall(VecRestoreArrayRead(X, &x));
    }
  }
  PetscCall(PetscFree2(array, array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

  Collective

  Input Parameters:
+ odm    - `DMDA` specifying the grid layout, passed as a `PetscObject`
- viewer - viewer of type `PETSCVIEWERVTK`

  Level: developer

  Notes:
  This function is a callback used by the `PETSCVIEWERVTK` viewer to actually write the file.
  The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
  Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

  If any fields have been named (see e.g. `DMDASetFieldName()`), then individual scalar
  fields are written. Otherwise, a single multi-dof (vector) field is written.

.seealso: [](sec_struct), `DMDA`, `DM`, `PETSCVIEWERVTK`, `DMDASetFieldName()`
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm, PetscViewer viewer)
{
  DM        dm = (DM)odm;
  PetscBool isvtk;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
  PetscCheck(isvtk, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_INCOMP, "Cannot use viewer type %s", ((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    PetscCall(DMDAVTKWriteAll_VTS(dm, viewer));
    break;
  case PETSC_VIEWER_VTK_VTR:
    PetscCall(DMDAVTKWriteAll_VTR(dm, viewer));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No support for format '%s'", PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
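/* A minimal end-to-end sketch (illustrative only, not part of this source) tying the pieces
   together: a two-component DMDA vector with named fields written through the rectilinear
   (.vtr) path.  Error handling around main() and options processing are elided, and the grid
   sizes, component names, and file name are placeholders.

     DM          da;
     Vec         x;
     PetscViewer viewer;

     PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,
                            DMDA_STENCIL_BOX, 16, 16, 16, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE,
                            2, 1, NULL, NULL, NULL, &da));
     PetscCall(DMSetUp(da));
     PetscCall(DMDASetFieldName(da, 0, "u"));
     PetscCall(DMDASetFieldName(da, 1, "v"));
     PetscCall(DMCreateGlobalVector(da, &x));
     PetscCall(PetscViewerVTKOpen(PETSC_COMM_WORLD, "solution.vtr", FILE_MODE_WRITE, &viewer));
     PetscCall(VecView(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
     PetscCall(VecDestroy(&x));
     PetscCall(DMDestroy(&da));
*/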