Actual source code: ex4.c
/* Help text printed for -help: this test exercises coupling of multiple
   subnetworks through shared vertices using components registered with
   zero data size (compkeys used purely to attach dof counts). */
1: static char help[] = "This example tests subnetwork coupling with zero size components. \n\n";
3: #include <petscdmnetwork.h>
/*
   Builds Nsubnet subnetworks (2 when run on one rank, otherwise one per rank),
   couples them by sharing subnet[0]'s vertices 0 and 1 with every other subnet,
   attaches dof via zero-size registered components, then assembles and views a
   global vector whose entries at the shared vertices accumulate contributions
   (ADD_VALUES) so the coupling can be checked in the output.
   NOTE(review): the "n:" prefixes below are part of this listing's rendering.
*/
5: int main(int argc, char **argv)
6: {
7: PetscMPIInt size, rank;
8: DM dmnetwork;
9: PetscInt i, j, net, Nsubnet, ne, nv, nvar, v, goffset, row, compkey0, compkey1, compkey;
10: PetscInt *numEdges, **edgelist, asvtx[2], bsvtx[2];
11: const PetscInt *vtx, *edges;
12: PetscBool ghost, distribute = PETSC_TRUE, sharedv;
13: Vec X;
14: PetscScalar val;
16: PetscFunctionBeginUser;
17: PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
18: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
19: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
21: /* Create a network of subnetworks */
22: if (size == 1) Nsubnet = 2;
23: else Nsubnet = (PetscInt)size;
24: PetscCall(PetscCalloc2(Nsubnet, &numEdges, Nsubnet, &edgelist));
26: /* when size>1, process[i] creates subnetwork[i] */
/* Every subnetwork is the same 3-edge chain over 4 vertices: 0-1, 1-2, 2-3.
   Ranks that do not own subnet i leave numEdges[i] = 0 (from PetscCalloc2). */
27: for (i = 0; i < Nsubnet; i++) {
28: if (i == 0 && (size == 1 || (rank == i && size > 1))) {
29: numEdges[i] = 3;
30: PetscCall(PetscMalloc1(2 * numEdges[i], &edgelist[i]));
31: edgelist[i][0] = 0;
32: edgelist[i][1] = 1;
33: edgelist[i][2] = 1;
34: edgelist[i][3] = 2;
35: edgelist[i][4] = 2;
36: edgelist[i][5] = 3;
38: } else if (i == 1 && (size == 1 || (rank == i && size > 1))) {
39: numEdges[i] = 3;
40: PetscCall(PetscMalloc1(2 * numEdges[i], &edgelist[i]));
41: edgelist[i][0] = 0;
42: edgelist[i][1] = 1;
43: edgelist[i][2] = 1;
44: edgelist[i][3] = 2;
45: edgelist[i][4] = 2;
46: edgelist[i][5] = 3;
48: } else if (i > 1 && (size == 1 || (rank == i && size > 1))) {
49: numEdges[i] = 3;
50: PetscCall(PetscMalloc1(2 * numEdges[i], &edgelist[i]));
51: for (j = 0; j < numEdges[i]; j++) {
52: edgelist[i][2 * j] = j;
53: edgelist[i][2 * j + 1] = j + 1;
54: }
55: }
56: }
58: /* Create a dmnetwork */
59: PetscCall(DMNetworkCreate(PETSC_COMM_WORLD, &dmnetwork));
61: /* Register zero size components to get compkeys to be used by DMNetworkAddComponent() */
62: PetscCall(DMNetworkRegisterComponent(dmnetwork, "comp0", 0, &compkey0));
63: PetscCall(DMNetworkRegisterComponent(dmnetwork, "comp1", 0, &compkey1));
65: /* Set number of subnetworks, numbers of vertices and edges over each subnetwork */
66: PetscCall(DMNetworkSetNumSubNetworks(dmnetwork, PETSC_DECIDE, Nsubnet));
68: for (i = 0; i < Nsubnet; i++) {
69: PetscInt netNum = -1;
70: PetscCall(DMNetworkAddSubnetwork(dmnetwork, NULL, numEdges[i], edgelist[i], &netNum));
71: }
73: /* Add shared vertices -- all processes hold this info at current implementation
74: net[0].0 -> net[j].0, j=0,...,Nsubnet-1
75: net[0].1 -> net[j].1, j=0,...,Nsubnet-1 */
76: asvtx[0] = bsvtx[0] = 0;
77: asvtx[1] = bsvtx[1] = 1;
78: for (j = Nsubnet - 1; j >= 1; j--) PetscCall(DMNetworkAddSharedVertices(dmnetwork, 0, j, 2, asvtx, bsvtx));
80: /* Setup the network layout */
81: PetscCall(DMNetworkLayoutSetUp(dmnetwork));
83: /* Get Subnetwork(); Add nvar=2 to subnet[0] and nvar=1 to other subnets */
84: for (net = 0; net < Nsubnet; net++) {
85: PetscCall(DMNetworkGetSubnetwork(dmnetwork, net, &nv, &ne, &vtx, &edges));
86: for (v = 0; v < nv; v++) {
/* Shared vertices are handled separately below; skip them here */
87: PetscCall(DMNetworkIsSharedVertex(dmnetwork, vtx[v], &sharedv));
88: if (sharedv) continue;
90: if (!net) {
91: /* Set nvar = 2 for subnet0 */
92: PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey0, NULL, 2));
93: } else {
94: /* Set nvar = 1 for other subnets */
95: PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey1, NULL, 1));
96: }
97: }
98: }
100: /* Add nvar to shared vertex -- owning and all ghost ranks must call DMNetworkAddComponent() */
/* Each shared vertex gets both components: 2 + 1 = 3 dof total */
101: PetscCall(DMNetworkGetSharedVertices(dmnetwork, &nv, &vtx));
102: for (v = 0; v < nv; v++) {
103: PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey0, NULL, 2));
104: PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey1, NULL, 1));
105: }
107: /* Enable runtime option of graph partition type -- must be called before DMSetUp() */
108: if (size > 1) {
109: DM plexdm;
110: PetscPartitioner part;
111: PetscCall(DMNetworkGetPlex(dmnetwork, &plexdm));
112: PetscCall(DMPlexGetPartitioner(plexdm, &part));
113: PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
114: PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_csr_alg", "mat")); /* for parmetis */
115: }
117: /* Setup dmnetwork */
118: PetscCall(DMSetUp(dmnetwork));
120: /* Redistribute the network layout; use '-distribute false' to skip */
121: PetscCall(PetscOptionsGetBool(NULL, NULL, "-distribute", &distribute, NULL));
122: if (distribute) {
123: PetscCall(DMNetworkDistribute(&dmnetwork, 0));
124: PetscCall(DMView(dmnetwork, PETSC_VIEWER_STDOUT_WORLD));
125: }
127: /* Create a global vector */
128: PetscCall(DMCreateGlobalVector(dmnetwork, &X));
129: PetscCall(VecSet(X, 0.0));
131: /* Set X values at shared vertex */
132: PetscCall(DMNetworkGetSharedVertices(dmnetwork, &nv, &vtx));
133: for (v = 0; v < nv; v++) {
134: PetscCall(DMNetworkIsGhostVertex(dmnetwork, vtx[v], &ghost));
135: if (ghost) continue;
137: /* only one process holds a non-ghost vertex */
/* First pass: add (rank+1) into every dof of the shared vertex (all components) */
138: PetscCall(DMNetworkGetComponent(dmnetwork, vtx[v], ALL_COMPONENTS, NULL, NULL, &nvar));
139: PetscCall(DMNetworkGetGlobalVecOffset(dmnetwork, vtx[v], ALL_COMPONENTS, &goffset));
140: for (i = 0; i < nvar; i++) {
141: row = goffset + i;
142: val = (PetscScalar)rank + 1.0;
143: PetscCall(VecSetValues(X, 1, &row, &val, ADD_VALUES));
144: }
/* Second pass: additionally add 1.0 into the dof of component index 1 only,
   using the per-component offset so the two components are distinguishable */
146: PetscCall(DMNetworkGetComponent(dmnetwork, vtx[v], 1, &compkey, NULL, &nvar));
147: PetscCall(DMNetworkGetGlobalVecOffset(dmnetwork, vtx[v], compkey, &goffset));
148: for (i = 0; i < nvar; i++) {
149: row = goffset + i;
150: val = 1.0;
151: PetscCall(VecSetValues(X, 1, &row, &val, ADD_VALUES));
152: }
153: }
154: PetscCall(VecAssemblyBegin(X));
155: PetscCall(VecAssemblyEnd(X));
156: PetscCall(VecView(X, PETSC_VIEWER_STDOUT_WORLD));
158: /* Free work space */
/* Only the rank that allocated edgelist[i] frees it (mirrors the creation loop) */
159: PetscCall(VecDestroy(&X));
160: for (i = 0; i < Nsubnet; i++) {
161: if (size == 1 || rank == i) PetscCall(PetscFree(edgelist[i]));
162: }
163: PetscCall(PetscFree2(numEdges, edgelist));
164: PetscCall(DMDestroy(&dmnetwork));
165: PetscCall(PetscFinalize());
166: return 0;
167: }
169: /*TEST
171: build:
172: requires: !single double defined(PETSC_HAVE_ATTRIBUTEALIGNED)
174: test:
175: args:
177: test:
178: suffix: 2
179: nsize: 2
180: args: -options_left no -petscpartitioner_type simple
182: test:
183: suffix: 3
184: nsize: 4
185: args: -options_left no -petscpartitioner_type simple
187: TEST*/