Actual source code: daview.c
1: /*
2: Code for manipulating distributed regular arrays in parallel.
3: */
5: #include <petsc/private/dmdaimpl.h>
7: #if defined(PETSC_HAVE_MATLAB)
8: #include <mat.h> /* MATLAB include file */
10: PetscErrorCode DMView_DA_Matlab(DM da, PetscViewer viewer)
11: {
12: PetscMPIInt rank;
13: PetscInt dim, m, n, p, dof, swidth;
14: DMDAStencilType stencil;
15: DMBoundaryType bx, by, bz;
16: mxArray *mx;
17: const char *fnames[] = {"dimension", "m", "n", "p", "dof", "stencil_width", "bx", "by", "bz", "stencil_type"};
19: PetscFunctionBegin;
20: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank));
21: if (rank == 0) {
    PetscCall(DMDAGetInfo(da, &dim, &m, &n, &p, NULL, NULL, NULL, &dof, &swidth, &bx, &by, &bz, &stencil));
    mx = mxCreateStructMatrix(1, 1, 10, (const char **)fnames); /* 10 fields, one per entry of fnames[] */
    PetscCheck(mx, PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to generate MATLAB struct array to hold DMDA information");
    mxSetFieldByNumber(mx, 0, 0, mxCreateDoubleScalar((double)dim));
    mxSetFieldByNumber(mx, 0, 1, mxCreateDoubleScalar((double)m));
    mxSetFieldByNumber(mx, 0, 2, mxCreateDoubleScalar((double)n));
    mxSetFieldByNumber(mx, 0, 3, mxCreateDoubleScalar((double)p));
    mxSetFieldByNumber(mx, 0, 4, mxCreateDoubleScalar((double)dof));
    mxSetFieldByNumber(mx, 0, 5, mxCreateDoubleScalar((double)swidth));
    mxSetFieldByNumber(mx, 0, 6, mxCreateDoubleScalar((double)bx));
    mxSetFieldByNumber(mx, 0, 7, mxCreateDoubleScalar((double)by));
    mxSetFieldByNumber(mx, 0, 8, mxCreateDoubleScalar((double)bz));
    mxSetFieldByNumber(mx, 0, 9, mxCreateDoubleScalar((double)stencil));
    PetscCall(PetscObjectName((PetscObject)da));
    PetscCall(PetscViewerMatlabPutVariable(viewer, ((PetscObject)da)->name, mx));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
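/*
  Example usage (a sketch, not part of daview.c): callers do not invoke
  DMView_DA_Matlab() directly; DMView() dispatches to it when handed a MATLAB
  viewer. This assumes a PETSc build configured with MATLAB support; the file
  name "da.mat" is illustrative.

    #include <petscdmda.h>

    int main(int argc, char **argv)
    {
      DM          da;
      PetscViewer viewer;

      PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
      // an 8x8 2d DMDA with 1 dof per node and stencil width 1
      PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 8, 8, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
      PetscCall(DMSetUp(da));
      PetscCall(PetscViewerMatlabOpen(PETSC_COMM_WORLD, "da.mat", FILE_MODE_WRITE, &viewer));
      PetscCall(DMView(da, viewer)); // rank 0 writes the struct described above
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCall(DMDestroy(&da));
      PetscCall(PetscFinalize());
      return 0;
    }
*/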
PetscErrorCode DMView_DA_Binary(DM da, PetscViewer viewer)
{
  PetscMPIInt     rank;
  PetscInt        dim, m, n, p, dof, swidth, M, N, P;
  DMDAStencilType stencil;
  DMBoundaryType  bx, by, bz;
  MPI_Comm        comm;
  PetscBool       coors = PETSC_FALSE;
  Vec             coordinates;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));

  PetscCall(DMDAGetInfo(da, &dim, &m, &n, &p, &M, &N, &P, &dof, &swidth, &bx, &by, &bz, &stencil));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMGetCoordinates(da, &coordinates));
  if (rank == 0) {
    PetscCall(PetscViewerBinaryWrite(viewer, &dim, 1, PETSC_INT));
    PetscCall(PetscViewerBinaryWrite(viewer, &m, 1, PETSC_INT));
    PetscCall(PetscViewerBinaryWrite(viewer, &n, 1, PETSC_INT));
    PetscCall(PetscViewerBinaryWrite(viewer, &p, 1, PETSC_INT));
    PetscCall(PetscViewerBinaryWrite(viewer, &dof, 1, PETSC_INT));
    PetscCall(PetscViewerBinaryWrite(viewer, &swidth, 1, PETSC_INT));
    PetscCall(PetscViewerBinaryWrite(viewer, &bx, 1, PETSC_ENUM));
    PetscCall(PetscViewerBinaryWrite(viewer, &by, 1, PETSC_ENUM));
    PetscCall(PetscViewerBinaryWrite(viewer, &bz, 1, PETSC_ENUM));
    PetscCall(PetscViewerBinaryWrite(viewer, &stencil, 1, PETSC_ENUM));
    if (coordinates) coors = PETSC_TRUE;
    PetscCall(PetscViewerBinaryWrite(viewer, &coors, 1, PETSC_BOOL));
  }
  /* save the coordinates, if they exist, to disk (in the natural ordering) */
  if (coordinates) PetscCall(VecView(coordinates, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
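/*
  Example usage (a sketch, not part of daview.c): the binary header written
  above is what DMLoad() expects to read back; whether the round trip below
  matches your PETSc version exactly is an assumption. "da.dat" is
  illustrative and `da` is a DMDA created and set up as in the earlier example.

    static PetscErrorCode SaveAndReloadDA(DM da, DM *reloaded)
    {
      PetscViewer viewer;

      PetscFunctionBegin;
      // write the header (and coordinates, if any) shown above
      PetscCall(PetscViewerBinaryOpen(PetscObjectComm((PetscObject)da), "da.dat", FILE_MODE_WRITE, &viewer));
      PetscCall(DMView(da, viewer));
      PetscCall(PetscViewerDestroy(&viewer));

      // read it back into a fresh DMDA
      PetscCall(PetscViewerBinaryOpen(PetscObjectComm((PetscObject)da), "da.dat", FILE_MODE_READ, &viewer));
      PetscCall(DMCreate(PetscObjectComm((PetscObject)da), reloaded));
      PetscCall(DMSetType(*reloaded, DMDA));
      PetscCall(DMLoad(*reloaded, viewer));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
*/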
PetscErrorCode DMView_DA_VTK(DM da, PetscViewer viewer)
{
  Vec      coordinates;
  PetscInt dim, dof, M = 0, N = 0, P = 0;

  PetscFunctionBegin;
  PetscCall(DMGetCoordinates(da, &coordinates));
  PetscCall(DMDAGetInfo(da, &dim, &M, &N, &P, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(coordinates, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "VTK output requires DMDA coordinates.");
  /* Write Header */
  PetscCall(PetscViewerASCIIPrintf(viewer, "# vtk DataFile Version 2.0\n"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Structured Mesh Example\n"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "ASCII\n"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "DATASET STRUCTURED_GRID\n"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "DIMENSIONS %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", M, N, P));
  PetscCall(PetscViewerASCIIPrintf(viewer, "POINTS %" PetscInt_FMT " double\n", M * N * P));
  if (coordinates) {
    DM  dac;
    Vec natural;

    PetscCall(DMGetCoordinateDM(da, &dac));
    PetscCall(DMDACreateNaturalVector(dac, &natural));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, "coor_"));
    PetscCall(DMDAGlobalToNaturalBegin(dac, coordinates, INSERT_VALUES, natural));
    PetscCall(DMDAGlobalToNaturalEnd(dac, coordinates, INSERT_VALUES, natural));
    PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_VTK_COORDS_DEPRECATED));
    PetscCall(VecView(natural, viewer));
    PetscCall(PetscViewerPopFormat(viewer));
    PetscCall(VecDestroy(&natural));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
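/*
  Example usage (a sketch, not part of daview.c): this legacy path is reached
  through DMView() with an ASCII viewer pushed to the deprecated VTK format,
  and it requires coordinates, e.g. from DMDASetUniformCoordinates().
  "mesh.vtk" is illustrative and `da` is a set-up 3d DMDA.

    static PetscErrorCode WriteLegacyVTK(DM da)
    {
      PetscViewer viewer;

      PetscFunctionBegin;
      // attach coordinates; the PetscCheck() above fails without them
      PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
      PetscCall(PetscViewerASCIIOpen(PetscObjectComm((PetscObject)da), "mesh.vtk", &viewer));
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_VTK_DEPRECATED));
      PetscCall(DMView(da, viewer));
      PetscCall(PetscViewerPopFormat(viewer));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
*/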
/*@
  DMDAGetInfo - Gets information about a given distributed array.

  Not Collective

  Input Parameter:
. da - the `DMDA`

  Output Parameters:
+ dim - dimension of the `DMDA` (1, 2, or 3)
. M   - global dimension in first direction of the array
. N   - global dimension in second direction of the array
. P   - global dimension in third direction of the array
. m   - corresponding number of MPI processes in first dimension
. n   - corresponding number of MPI processes in second dimension
. p   - corresponding number of MPI processes in third dimension
. dof - number of degrees of freedom per node
. s   - stencil width
. bx  - type of ghost nodes at boundary in first dimension
. by  - type of ghost nodes at boundary in second dimension
. bz  - type of ghost nodes at boundary in third dimension
- st  - stencil type, either `DMDA_STENCIL_STAR` or `DMDA_STENCIL_BOX`

  Level: beginner

  Note:
  Use `NULL` (`PETSC_NULL_INTEGER` in Fortran) in place of any output parameter that is not of interest.

.seealso: [](sec_struct), `DM`, `DMDA`, `DMView()`, `DMDAGetCorners()`, `DMDAGetLocalInfo()`
@*/
PetscErrorCode DMDAGetInfo(DM da, PetscInt *dim, PetscInt *M, PetscInt *N, PetscInt *P, PetscInt *m, PetscInt *n, PetscInt *p, PetscInt *dof, PetscInt *s, DMBoundaryType *bx, DMBoundaryType *by, DMBoundaryType *bz, DMDAStencilType *st)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (dim) *dim = da->dim;
  /* Mo/No/Po, when nonnegative, override M/N/P with the dimensions of the
     original array this DMDA was derived from */
  if (M) {
    if (dd->Mo < 0) *M = dd->M;
    else *M = dd->Mo;
  }
  if (N) {
    if (dd->No < 0) *N = dd->N;
    else *N = dd->No;
  }
  if (P) {
    if (dd->Po < 0) *P = dd->P;
    else *P = dd->Po;
  }
  if (m) *m = dd->m;
  if (n) *n = dd->n;
  if (p) *p = dd->p;
  if (dof) *dof = dd->w;
  if (s) *s = dd->s;
  if (bx) *bx = dd->bx;
  if (by) *by = dd->by;
  if (bz) *bz = dd->bz;
  if (st) *st = dd->stencil_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
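/*
  Example usage (a sketch, not part of daview.c): query only the fields of
  interest and pass NULL for the rest, as the Note above describes. `da` is
  assumed to be an existing DMDA.

    PetscInt M, N, dof;
    PetscCall(DMDAGetInfo(da, NULL, &M, &N, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
*/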
/*@
  DMDAGetLocalInfo - Gets information about a given `DMDA` and this MPI process's location in it

  Not Collective

  Input Parameter:
. da - the `DMDA`

  Output Parameter:
. info - structure containing the information

  Level: beginner

  Note:
  See `DMDALocalInfo` for the information that is returned

  Fortran Note:
  Pass in an array of type `DMDALocalInfo` of length `DMDA_LOCAL_INFO_SIZE`

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGetInfo()`, `DMDAGetCorners()`, `DMDALocalInfo`
@*/
PetscErrorCode DMDAGetLocalInfo(DM da, DMDALocalInfo *info)
{
  PetscInt w;
  DM_DA   *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscAssertPointer(info, 2);
  info->da  = da;
  info->dim = da->dim;
  if (dd->Mo < 0) info->mx = dd->M;
  else info->mx = dd->Mo;
  if (dd->No < 0) info->my = dd->N;
  else info->my = dd->No;
  if (dd->Po < 0) info->mz = dd->P;
  else info->mz = dd->Po;
  info->dof = dd->w;
  info->sw  = dd->s;
  info->bx  = dd->bx;
  info->by  = dd->by;
  info->bz  = dd->bz;
  info->st  = dd->stencil_type;

  /* since the xs, xe ... have all been multiplied by the number of degrees
     of freedom per cell, w = dd->w, we divide that out before returning */
  w        = dd->w;
  info->xs = dd->xs / w + dd->xo;
  info->xm = (dd->xe - dd->xs) / w;
  /* the y and z have NOT been multiplied by w */
  info->ys = dd->ys + dd->yo;
  info->ym = (dd->ye - dd->ys);
  info->zs = dd->zs + dd->zo;
  info->zm = (dd->ze - dd->zs);

  info->gxs = dd->Xs / w + dd->xo;
  info->gxm = (dd->Xe - dd->Xs) / w;
  /* the y and z have NOT been multiplied by w */
  info->gys = dd->Ys + dd->yo;
  info->gym = (dd->Ye - dd->Ys);
  info->gzs = dd->Zs + dd->zo;
  info->gzm = (dd->Ze - dd->Zs);
  PetscFunctionReturn(PETSC_SUCCESS);
}
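/*
  Example usage (a sketch, not part of daview.c): the xs/xm and ys/ym fields
  filled in above give this rank's owned index range, the common pattern for
  structured-grid loops (ghosted loops would use gxs/gxm and gys/gym instead).
  `da` is assumed to be a 2d DMDA and `x` a global vector obtained from it.

    DMDALocalInfo info;
    PetscScalar **a;

    PetscCall(DMDAGetLocalInfo(da, &info));
    PetscCall(DMDAVecGetArray(da, x, &a));
    for (PetscInt j = info.ys; j < info.ys + info.ym; j++) {
      for (PetscInt i = info.xs; i < info.xs + info.xm; i++) a[j][i] = (PetscScalar)(i + j);
    }
    PetscCall(DMDAVecRestoreArray(da, x, &a));
*/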