Actual source code: partchaco.c
#include <petsc/private/partitionerimpl.h>

PetscBool ChacoPartitionerCite = PETSC_FALSE;
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                                        " author = {Bruce Hendrickson and Robert Leland},\n"
                                        " title = {A multilevel algorithm for partitioning graphs},\n"
                                        " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                                        " isbn = {0-89791-816-9},\n"
                                        " pages = {28},\n"
                                        " doi = {https://doi.acm.org/10.1145/224170.224228},\n"
                                        " publisher = {ACM Press},\n"
                                        " address = {New York},\n"
                                        " year = {1995}\n"
                                        "}\n";

typedef struct {
  PetscInt dummy;
} PetscPartitioner_Chaco;

static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(p));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_Chaco_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_Chaco_ASCII(part, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#if defined(PETSC_HAVE_CHACO)
  #if defined(PETSC_HAVE_UNISTD_H)
    #include <unistd.h>
  #endif
  #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
    #include <chaco.h>
  #else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts, float *ewgts, float *x, float *y, float *z, char *outassignname, char *outfilename, short *assignment, int architecture, int ndims_tot, int mesh_dims[3], double *goal, int global_method, int local_method, int rqi_flag, int vmax, int ndims, double eigtol, long seed);
  #endif
extern int FREE_GRAPH;
#endif
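
/* A hedged note on the calling convention (a summary, assuming standard Chaco usage; not
   part of the original file): interface() is Chaco's single entry point. The graph is
   supplied in CSR form, where start[] has nvtxs+1 entries and the neighbors of vertex v
   (vertices are numbered 1..nvtxs) occupy adjacency[start[v-1]] .. adjacency[start[v]-1].
   Passing NULL for vwgts/ewgts yields an unweighted graph, and x/y/z are only read by the
   inertial method. The global FREE_GRAPH flag controls whether Chaco frees these arrays;
   it is cleared below because PETSc owns them. */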

static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {
    DEFAULT_METHOD  = 1,
    INERTIAL_METHOD = 3
  };
  MPI_Comm comm;
  int      nvtxs = numVertices;            /* number of vertices in full graph */
  int     *vwgts = NULL;                   /* weights for all vertices */
  float   *ewgts = NULL;                   /* weights for all edges */
  float   *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char    *outassignname = NULL;           /* name of assignment output file */
  char    *outfilename   = NULL;           /* output file name */
  int      architecture  = 1;              /* 0 => hypercube, d => d-dimensional mesh */
  int      ndims_tot     = 0;              /* total number of cube dimensions to divide */
  int      mesh_dims[3];                   /* dimensions of mesh of processors */
  double  *goal          = NULL;           /* desired set sizes for each set */
  int      global_method = 1;              /* global partitioning algorithm */
  int      local_method  = 1;              /* local partitioning algorithm */
  int      rqi_flag      = 0;              /* should I use RQI/Symmlq eigensolver? */
  int      vmax          = 200;            /* how many vertices to coarsen down to? */
  int      ndims         = 1;              /* number of eigenvectors (2^d sets) */
  double   eigtol        = 0.001;          /* tolerance on eigenvectors */
  long     seed          = 123636512;      /* for random graph mutations */
  #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int *assignment; /* Output partition */
  #else
  short int *assignment; /* Output partition */
  #endif
  int       fd_stdout, fd_pipe[2];
  PetscInt *points;
  int       i, v, p;
  int       err;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  if (PetscDefined(USE_DEBUG)) {
    int       ival, isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    PetscCallMPI(MPIU_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm));
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    PetscCheck(!distributed, comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
  if (!numVertices) { /* distributed case, return if not holding the graph */
    PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  FREE_GRAPH = 0;                                          /* Do not let Chaco free my memory */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i]; /* Chaco expects 1-based vertex numbers */
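
/* Illustrative sketch (not in the original): the shift above converts PETSc's 0-based
   CSR adjacency to the 1-based numbering Chaco expects. For a 3-vertex triangle graph,
   adjacency = {1, 2, 0, 2, 0, 1} becomes {2, 3, 1, 3, 1, 2}, while start = {0, 2, 4, 6}
   is unchanged since it holds C array offsets. The shift is undone after the call to
   interface() below. */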

  /* code would use manager.createCellCoordinates(nvtxs, &x, &y, &z); */
  PetscCheck(global_method != INERTIAL_METHOD, PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");

  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  PetscCall(PetscMalloc1(nvtxs, &assignment));
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    PetscCheck(!piperet, PETSC_COMM_SELF, PETSC_ERR_SYS, "Could not create pipe");
    fd_stdout = dup(1);  /* save the current stdout descriptor */
    close(1);
    dup2(fd_pipe[1], 1); /* route stdout into the write end of the pipe */
  }
#endif
  if (part->usevwgt) PetscCall(PetscInfo(part, "PETSCPARTITIONERCHACO ignores vertex weights\n"));
  if (part->useewgt) PetscCall(PetscInfo(part, "PETSCPARTITIONERCHACO ignores edge weights\n"));
  /* NOTE: the casts below reinterpret PetscInt[] as int[], which assumes PetscInt fits in int */
  err = interface(nvtxs, (int *)start, (int *)adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename, assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag, vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    PetscCall(PetscFFlush(stdout));
    count = (int)read(fd_pipe[0], msgLog, (10000 - 1) * sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1); /* restore the original stdout */
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    PetscCheck(!err, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  PetscCheck(!err, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
  PetscCall(PetscMalloc1(nvtxs, &points));
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  PetscCheck(i == nvtxs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %d should be %d", i, nvtxs);
  PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));
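
/* Illustrative sketch (not in the original): for nvtxs = 4, nparts = 2, and
   assignment = {0, 1, 0, 1}, the loop above records 2 dofs in partSection for each
   part and produces points = {0, 2, 1, 3}, i.e. the vertices grouped by part in
   ascending part order, which is what the IS created above holds. */
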
  /* code would use manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  PetscCheck(global_method != INERTIAL_METHOD, PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  PetscCall(PetscFree(assignment));
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i]; /* restore the 0-based numbering */
  PetscFunctionReturn(PETSC_SUCCESS);
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}

static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph        = PETSC_FALSE;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

  Level: intermediate

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/
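
/* Example usage (a minimal sketch, not part of this file; error handling and graph
   setup elided). The Chaco partitioner is selected like any other PetscPartitioner
   type, either programmatically as below or with -petscpartitioner_type chaco:

     #include <petscpartitioner.h>

     PetscPartitioner part;

     PetscCall(PetscPartitionerCreate(PETSC_COMM_WORLD, &part));
     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERCHACO));
     PetscCall(PetscPartitionerSetFromOptions(part));
     // ... assemble start[]/adjacency[] on one rank and call PetscPartitionerPartition() ...
     PetscCall(PetscPartitionerDestroy(&part));
*/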

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p;

  PetscFunctionBegin;

  PetscCall(PetscNew(&p));
  part->data = p;

  PetscCall(PetscPartitionerInitialize_Chaco(part));
  PetscCall(PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionerCite));
  PetscFunctionReturn(PETSC_SUCCESS);
}