/* networkcreate.c */
1: #define PETSCDM_DLL
2: #include <petsc/private/dmnetworkimpl.h>
3: #include <petsc/private/vecimpl.h>
/*
  DMSetFromOptions_Network - Process runtime options for a DMNetwork.

  DMNetwork currently registers no options of its own; the HeadBegin/HeadEnd
  pair is kept so the "DMNetwork Options" heading appears in -help output and
  future options can be added between the two calls.
*/
PetscErrorCode DMSetFromOptions_Network(DM dm, PetscOptionItems *PetscOptionsObject)
{
  PetscOptionsHeadBegin(PetscOptionsObject, "DMNetwork Options");
  PetscOptionsHeadEnd();
  return 0;
}
12: /* External function declarations here */
13: extern PetscErrorCode DMCreateMatrix_Network(DM, Mat *);
14: extern PetscErrorCode DMDestroy_Network(DM);
15: extern PetscErrorCode DMView_Network(DM, PetscViewer);
16: extern PetscErrorCode DMGlobalToLocalBegin_Network(DM, Vec, InsertMode, Vec);
17: extern PetscErrorCode DMGlobalToLocalEnd_Network(DM, Vec, InsertMode, Vec);
18: extern PetscErrorCode DMLocalToGlobalBegin_Network(DM, Vec, InsertMode, Vec);
19: extern PetscErrorCode DMLocalToGlobalEnd_Network(DM, Vec, InsertMode, Vec);
20: extern PetscErrorCode DMSetUp_Network(DM);
21: extern PetscErrorCode DMClone_Network(DM, DM *);
23: static PetscErrorCode VecArrayPrint_private(PetscViewer viewer, PetscInt n, const PetscScalar *xv)
24: {
25: PetscInt i;
27: for (i = 0; i < n; i++) {
28: #if defined(PETSC_USE_COMPLEX)
29: if (PetscImaginaryPart(xv[i]) > 0.0) {
30: PetscViewerASCIIPrintf(viewer, " %g + %g i\n", (double)PetscRealPart(xv[i]), (double)PetscImaginaryPart(xv[i]));
31: } else if (PetscImaginaryPart(xv[i]) < 0.0) {
32: PetscViewerASCIIPrintf(viewer, " %g - %g i\n", (double)PetscRealPart(xv[i]), -(double)PetscImaginaryPart(xv[i]));
33: } else PetscViewerASCIIPrintf(viewer, " %g\n", (double)PetscRealPart(xv[i]));
34: #else
35: PetscViewerASCIIPrintf(viewer, " %g\n", (double)xv[i]);
36: #endif
37: }
38: return 0;
39: }
/*
  VecView_Network_Seq - ASCII view of a sequential DMNetwork vector.

  Walks all edges, then all vertices, and for each point that has at least
  one variable prints its global index followed by its block of values from
  the vector.  Points with nvar == 0 are skipped entirely.
*/
static PetscErrorCode VecView_Network_Seq(DM networkdm, Vec X, PetscViewer viewer)
{
  PetscInt           e, v, Start, End, offset, nvar, id;
  const PetscScalar *xv;

  VecGetArrayRead(X, &xv);

  /* iterate over edges */
  DMNetworkGetEdgeRange(networkdm, &Start, &End);
  for (e = Start; e < End; e++) {
    DMNetworkGetComponent(networkdm, e, ALL_COMPONENTS, NULL, NULL, &nvar);
    if (!nvar) continue;

    DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &offset);
    DMNetworkGetGlobalEdgeIndex(networkdm, e, &id);

    PetscViewerASCIIPrintf(viewer, " Edge %" PetscInt_FMT ":\n", id);
    VecArrayPrint_private(viewer, nvar, xv + offset);
  }

  /* iterate over vertices */
  DMNetworkGetVertexRange(networkdm, &Start, &End);
  for (v = Start; v < End; v++) {
    DMNetworkGetComponent(networkdm, v, ALL_COMPONENTS, NULL, NULL, &nvar);
    if (!nvar) continue;

    DMNetworkGetLocalVecOffset(networkdm, v, ALL_COMPONENTS, &offset);
    DMNetworkGetGlobalVertexIndex(networkdm, v, &id);

    PetscViewerASCIIPrintf(viewer, " Vertex %" PetscInt_FMT ":\n", id);
    VecArrayPrint_private(viewer, nvar, xv + offset);
  }
  PetscViewerFlush(viewer);
  VecRestoreArrayRead(X, &xv);
  return 0;
}
78: static PetscErrorCode VecView_Network_MPI(DM networkdm, Vec X, PetscViewer viewer)
79: {
80: PetscInt i, e, v, eStart, eEnd, vStart, vEnd, offset, nvar, len_loc, len, k;
81: const PetscScalar *xv;
82: MPI_Comm comm;
83: PetscMPIInt size, rank, tag = ((PetscObject)viewer)->tag;
84: Vec localX;
85: PetscBool ghostvtex;
86: PetscScalar *values;
87: PetscInt j, ne, nv, id;
88: MPI_Status status;
90: PetscObjectGetComm((PetscObject)networkdm, &comm);
91: MPI_Comm_size(comm, &size);
92: MPI_Comm_rank(comm, &rank);
94: DMGetLocalVector(networkdm, &localX);
95: DMGlobalToLocalBegin(networkdm, X, INSERT_VALUES, localX);
96: DMGlobalToLocalEnd(networkdm, X, INSERT_VALUES, localX);
97: VecGetArrayRead(localX, &xv);
99: VecGetLocalSize(localX, &len_loc);
101: DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd);
102: DMNetworkGetVertexRange(networkdm, &vStart, &vEnd);
103: len_loc += 2 * (1 + eEnd - eStart + vEnd - vStart);
105: /* values = [nedges, nvertices; id, nvar, xedge; ...; id, nvars, xvertex;...], to be sent to proc[0] */
106: MPI_Allreduce(&len_loc, &len, 1, MPIU_INT, MPI_MAX, comm);
107: PetscCalloc1(len, &values);
109: if (rank == 0) PetscViewerASCIIPrintf(viewer, "Process [%d]\n", rank);
111: /* iterate over edges */
112: k = 2;
113: for (e = eStart; e < eEnd; e++) {
114: DMNetworkGetComponent(networkdm, e, ALL_COMPONENTS, NULL, NULL, &nvar);
115: if (!nvar) continue;
117: DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &offset);
118: DMNetworkGetGlobalEdgeIndex(networkdm, e, &id);
120: if (rank == 0) { /* print its own entries */
121: PetscViewerASCIIPrintf(viewer, " Edge %" PetscInt_FMT ":\n", id);
122: VecArrayPrint_private(viewer, nvar, xv + offset);
123: } else {
124: values[0] += 1; /* number of edges */
125: values[k++] = id;
126: values[k++] = nvar;
127: for (i = offset; i < offset + nvar; i++) values[k++] = xv[i];
128: }
129: }
131: /* iterate over vertices */
132: for (v = vStart; v < vEnd; v++) {
133: DMNetworkIsGhostVertex(networkdm, v, &ghostvtex);
134: if (ghostvtex) continue;
135: DMNetworkGetComponent(networkdm, v, ALL_COMPONENTS, NULL, NULL, &nvar);
136: if (!nvar) continue;
138: DMNetworkGetLocalVecOffset(networkdm, v, ALL_COMPONENTS, &offset);
139: DMNetworkGetGlobalVertexIndex(networkdm, v, &id);
141: if (rank == 0) {
142: PetscViewerASCIIPrintf(viewer, " Vertex %" PetscInt_FMT ":\n", id);
143: VecArrayPrint_private(viewer, nvar, xv + offset);
144: } else {
145: values[1] += 1; /* number of vertices */
146: values[k++] = id;
147: values[k++] = nvar;
148: for (i = offset; i < offset + nvar; i++) values[k++] = xv[i];
149: }
150: }
152: if (rank == 0) {
153: /* proc[0] receives and prints messages */
154: for (j = 1; j < size; j++) {
155: PetscViewerASCIIPrintf(viewer, "Process [%" PetscInt_FMT "]\n", j);
157: MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, comm, &status);
159: ne = (PetscInt)PetscAbsScalar(values[0]);
160: nv = (PetscInt)PetscAbsScalar(values[1]);
162: /* print received edges */
163: k = 2;
164: for (i = 0; i < ne; i++) {
165: id = (PetscInt)PetscAbsScalar(values[k++]);
166: nvar = (PetscInt)PetscAbsScalar(values[k++]);
167: PetscViewerASCIIPrintf(viewer, " Edge %" PetscInt_FMT ":\n", id);
168: VecArrayPrint_private(viewer, nvar, values + k);
169: k += nvar;
170: }
172: /* print received vertices */
173: for (i = 0; i < nv; i++) {
174: id = (PetscInt)PetscAbsScalar(values[k++]);
175: nvar = (PetscInt)PetscAbsScalar(values[k++]);
176: PetscViewerASCIIPrintf(viewer, " Vertex %" PetscInt_FMT ":\n", id);
177: VecArrayPrint_private(viewer, nvar, values + k);
178: k += nvar;
179: }
180: }
181: } else {
182: /* sends values to proc[0] */
183: MPI_Send((void *)values, k, MPIU_SCALAR, 0, tag, comm);
184: }
186: PetscFree(values);
187: VecRestoreArrayRead(localX, &xv);
188: DMRestoreLocalVector(networkdm, &localX);
189: return 0;
190: }
192: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec, PetscViewer);
194: PetscErrorCode VecView_Network(Vec v, PetscViewer viewer)
195: {
196: DM dm;
197: PetscBool isseq;
198: PetscBool iascii;
200: VecGetDM(v, &dm);
202: PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
203: PetscObjectTypeCompare((PetscObject)v, VECSEQ, &isseq);
205: /* Use VecView_Network if the viewer is ASCII; use VecView_Seq/MPI for other viewer formats */
206: if (iascii) {
207: if (isseq) VecView_Network_Seq(dm, v, viewer);
208: else VecView_Network_MPI(dm, v, viewer);
209: } else {
210: if (isseq) VecView_Seq(v, viewer);
211: else VecView_MPI(v, viewer);
212: }
213: return 0;
214: }
/*
  DMCreateGlobalVector_Network - Create a global vector by delegating to the
  embedded DMPlex, then override its view operation with the network-aware
  VecView_Network and point the vector back at the network DM (rather than
  the plex) so callers recover the right DM from VecGetDM().
*/
static PetscErrorCode DMCreateGlobalVector_Network(DM dm, Vec *vec)
{
  DM_Network *network = (DM_Network *)dm->data;

  DMCreateGlobalVector(network->plex, vec);
  VecSetOperation(*vec, VECOP_VIEW, (void (*)(void))VecView_Network);
  VecSetDM(*vec, dm);
  return 0;
}
/*
  DMCreateLocalVector_Network - Create a local (ghosted) vector by delegating
  to the embedded DMPlex, re-tagging it with the network DM.  No view override
  here: local vectors keep the default view behavior.
*/
static PetscErrorCode DMCreateLocalVector_Network(DM dm, Vec *vec)
{
  DM_Network *network = (DM_Network *)dm->data;

  DMCreateLocalVector(network->plex, vec);
  VecSetDM(*vec, dm);
  return 0;
}
/*
  DMNetworkInitializeToDefault_NonShared - Reset every non-shared (per-DM)
  field of DM_Network to its empty/default state.

  Called at creation time and when cloning: a clone shares the topological
  ("cloneshared") data with its parent but must start with fresh non-shared
  state.  Only pointers/flags are reset here; nothing is freed, so this must
  be called on a DM whose non-shared data is unowned or already released.
*/
PetscErrorCode DMNetworkInitializeToDefault_NonShared(DM dm)
{
  DM_Network *network = (DM_Network *)dm->data;

  /* user-supplied Jacobian block structure */
  network->Je = NULL;
  network->Jv = NULL;
  network->Jvptr = NULL;
  network->userEdgeJacobian = PETSC_FALSE;
  network->userVertexJacobian = PETSC_FALSE;

  /* per-vertex layout/communication objects */
  network->vertex.DofSection = NULL;
  network->vertex.GlobalDofSection = NULL;
  network->vertex.mapping = NULL;
  network->vertex.sf = NULL;

  /* per-edge layout/communication objects */
  network->edge.DofSection = NULL;
  network->edge.GlobalDofSection = NULL;
  network->edge.mapping = NULL;
  network->edge.sf = NULL;

  /* global layout sections; componentsetup flips once components are laid out */
  network->DataSection = NULL;
  network->DofSection = NULL;
  network->GlobalDofSection = NULL;
  network->componentsetup = PETSC_FALSE;

  /* the underlying DMPlex is (re)attached later */
  network->plex = NULL;

  /* registered component types and per-point component data */
  network->component = NULL;
  network->ncomponent = 0;

  network->header = NULL;
  network->cvalue = NULL;
  network->componentdataarray = NULL;

  network->max_comps_registered = DMNETWORK_MAX_COMP_REGISTERED_DEFAULT; /* return to default */
  return 0;
}
/* Default values for the parameters in DMNetwork */
/*
  DMNetworkInitializeToDefault - Reset both the non-shared fields (via
  DMNetworkInitializeToDefault_NonShared) and the shared/clonable data of a
  DMNetwork to its pristine state: no vertices/edges, no subnetworks, empty
  point ranges (-1 sentinels), reference count 1.

  NOTE(review): assumes network->cloneshared has already been allocated by
  the caller (as DMCreate_Network does) — confirm before calling elsewhere.
*/
PetscErrorCode DMNetworkInitializeToDefault(DM dm)
{
  DM_Network          *network     = (DM_Network *)dm->data;
  DMNetworkCloneShared cloneshared = network->cloneshared;

  DMNetworkInitializeToDefault_NonShared(dm);
  /* Default values for shared data */
  cloneshared->refct = 1;
  cloneshared->NVertices = 0;
  cloneshared->NEdges = 0;
  cloneshared->nVertices = 0;
  cloneshared->nEdges = 0;
  cloneshared->nsubnet = 0;

  /* point/vertex/edge ranges: -1 marks "not yet laid out" */
  cloneshared->pStart = -1;
  cloneshared->pEnd = -1;
  cloneshared->vStart = -1;
  cloneshared->vEnd = -1;
  cloneshared->eStart = -1;
  cloneshared->eEnd = -1;
  cloneshared->vltog = NULL;
  cloneshared->distributecalled = PETSC_FALSE;

  /* subnetwork and shared-vertex bookkeeping */
  cloneshared->subnet = NULL;
  cloneshared->subnetvtx = NULL;
  cloneshared->subnetedge = NULL;
  cloneshared->svtx = NULL;
  cloneshared->nsvtx = 0;
  cloneshared->Nsvtx = 0;
  cloneshared->svertices = NULL;
  cloneshared->sedgelist = NULL;
  cloneshared->svtable = NULL;
  return 0;
}
/*
  DMInitialize_Network - Install the DMNETWORK implementations into the DM
  ops table.  Networks are one-dimensional (edges/vertices), hence the
  dimension is fixed at 1.  Entries explicitly set to NULL document which
  generic DM operations are intentionally unsupported for this type.
*/
PetscErrorCode DMInitialize_Network(DM dm)
{
  DMSetDimension(dm, 1);
  dm->ops->view = DMView_Network;
  dm->ops->setfromoptions = DMSetFromOptions_Network;
  dm->ops->clone = DMClone_Network;
  dm->ops->setup = DMSetUp_Network;
  dm->ops->createglobalvector = DMCreateGlobalVector_Network;
  dm->ops->createlocalvector = DMCreateLocalVector_Network;
  dm->ops->getlocaltoglobalmapping = NULL;
  dm->ops->createfieldis = NULL;
  dm->ops->createcoordinatedm = NULL;
  dm->ops->getcoloring = NULL;
  dm->ops->creatematrix = DMCreateMatrix_Network;
  dm->ops->createinterpolation = NULL;
  dm->ops->createinjection = NULL;
  dm->ops->refine = NULL;
  dm->ops->coarsen = NULL;
  dm->ops->refinehierarchy = NULL;
  dm->ops->coarsenhierarchy = NULL;
  dm->ops->globaltolocalbegin = DMGlobalToLocalBegin_Network;
  dm->ops->globaltolocalend = DMGlobalToLocalEnd_Network;
  dm->ops->localtoglobalbegin = DMLocalToGlobalBegin_Network;
  dm->ops->localtoglobalend = DMLocalToGlobalEnd_Network;
  dm->ops->destroy = DMDestroy_Network;
  dm->ops->createsubdm = NULL;
  dm->ops->locatepoints = NULL;
  return 0;
}
336: /*
337: copies over the subnetid and index portions of the DMNetworkComponentHeader from original dm to the newdm
338: */
339: static PetscErrorCode DMNetworkCopyHeaderTopological(DM dm, DM newdm)
340: {
341: DM_Network *network = (DM_Network *)dm->data, *newnetwork = (DM_Network *)newdm->data;
342: PetscInt p, i, np, index, subnetid;
344: np = network->cloneshared->pEnd - network->cloneshared->pStart;
345: PetscCalloc2(np, &newnetwork->header, np, &newnetwork->cvalue);
346: for (i = 0; i < np; i++) {
347: p = i + network->cloneshared->pStart;
348: DMNetworkGetSubnetID(dm, p, &subnetid);
349: DMNetworkGetIndex(dm, p, &index);
350: newnetwork->header[i].index = index;
351: newnetwork->header[i].subnetid = subnetid;
352: newnetwork->header[i].size = NULL;
353: newnetwork->header[i].key = NULL;
354: newnetwork->header[i].offset = NULL;
355: newnetwork->header[i].nvar = NULL;
356: newnetwork->header[i].offsetvarrel = NULL;
357: newnetwork->header[i].ndata = 0;
358: newnetwork->header[i].maxcomps = DMNETWORK_MAX_COMP_AT_POINT_DEFAULT;
359: newnetwork->header[i].hsize = sizeof(struct _p_DMNetworkComponentHeader) / sizeof(sizeof(DMNetworkComponentGenericDataType));
360: }
361: return 0;
362: }
/*
  DMClone_Network - Implementation of DMClone() for DMNETWORK.

  The topological data is shared by reference with the original dm (its
  refct is bumped); the non-shared state is freshly initialized, component
  headers are rebuilt topologically, and the embedded plex is itself cloned.

  NOTE(review): assumes *newdm is a shell DM already created by the generic
  DMClone() before this hook runs, whose data pointer may be replaced here —
  confirm against DMClone()'s contract.
*/
PetscErrorCode DMClone_Network(DM dm, DM *newdm)
{
  DM_Network *network = (DM_Network *)dm->data, *newnetwork = NULL;

  network->cloneshared->refct++;
  PetscNew(&newnetwork);
  (*newdm)->data = newnetwork;
  DMNetworkInitializeToDefault_NonShared(*newdm);
  newnetwork->cloneshared = network->cloneshared; /* Share all data that can be cloneshared */

  DMClone(network->plex, &newnetwork->plex);
  DMNetworkCopyHeaderTopological(dm, *newdm);
  DMNetworkInitializeNonTopological(*newdm); /* initialize all non-topological data to the state after DMNetworkLayoutSetUp has been called */

  PetscObjectChangeTypeName((PetscObject)*newdm, DMNETWORK);
  DMInitialize_Network(*newdm);
  return 0;
}
381: /*MC
382: DMNETWORK = "network" - A DM object that encapsulates an unstructured network. The implementation is based on the DM object
383: DMPlex that manages unstructured grids. Distributed networks use a non-overlapping partitioning of
384: the edges. In the local representation, Vecs contain all unknowns in the interior and shared boundary.
385: This is specified by a PetscSection object. Ownership in the global representation is determined by
386: ownership of the underlying DMPlex points. This is specified by another PetscSection object.
388: Level: intermediate
390: .seealso: `DMType`, `DMNetworkCreate()`, `DMCreate()`, `DMSetType()`
391: M*/
/*
  DMCreate_Network - Registered constructor for the DMNETWORK type; invoked
  by DMSetType(dm, DMNETWORK).  Allocates the implementation struct and its
  shared (clonable) data, resets everything to defaults, and installs the
  DMNETWORK ops table.
*/
PETSC_EXTERN PetscErrorCode DMCreate_Network(DM dm)
{
  DM_Network *network;

  PetscNew(&network);
  PetscNew(&network->cloneshared);
  dm->data = network;

  DMNetworkInitializeToDefault(dm);
  DMInitialize_Network(dm);
  return 0;
}
407: /*@
408: DMNetworkCreate - Creates a DMNetwork object, which encapsulates an unstructured network.
410: Collective
412: Input Parameter:
413: . comm - The communicator for the DMNetwork object
415: Output Parameter:
416: . network - The DMNetwork object
418: Level: beginner
420: @*/
PetscErrorCode DMNetworkCreate(MPI_Comm comm, DM *network)
{
  /* create a generic DM, then switch it to the network implementation,
     which triggers DMCreate_Network() above */
  DMCreate(comm, network);
  DMSetType(*network, DMNETWORK);
  return 0;
}