/* partchaco.c — PetscPartitioner implementation backed by the Chaco graph-partitioning library */

  1: #include <petsc/private/partitionerimpl.h>

  3: PetscBool  ChacoPartitionerCite       = PETSC_FALSE;
  4: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
  5:                                         "  author    = {Bruce Hendrickson and Robert Leland},\n"
  6:                                         "  title     = {A multilevel algorithm for partitioning graphs},\n"
  7:                                         "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
  8:                                         "  isbn      = {0-89791-816-9},\n"
  9:                                         "  pages     = {28},\n"
 10:                                         "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
 11:                                         "  publisher = {ACM Press},\n"
 12:                                         "  address   = {New York},\n"
 13:                                         "  year      = {1995}\n"
 14:                                         "}\n";

/* Per-partitioner context for the Chaco backend. Chaco currently exposes no
   tunable options through this interface, so the struct only holds a
   placeholder member to keep the allocation non-empty. */
typedef struct {
  PetscInt dummy; /* unused placeholder */
} PetscPartitioner_Chaco;

 20: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
 21: {
 22:   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *)part->data;

 24:   PetscFunctionBegin;
 25:   PetscCall(PetscFree(p));
 26:   PetscFunctionReturn(PETSC_SUCCESS);
 27: }

 29: #if defined(PETSC_HAVE_CHACO)
 30:   #if defined(PETSC_HAVE_UNISTD_H)
 31:     #include <unistd.h>
 32:   #endif
 33:   #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
 34:     #include <chaco.h>
 35:   #else
 36: /* Older versions of Chaco do not have an include file */
 37: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts, float *ewgts, float *x, float *y, float *z, char *outassignname, char *outfilename, short *assignment, int architecture, int ndims_tot, int mesh_dims[3], double *goal, int global_method, int local_method, int rqi_flag, int vmax, int ndims, double eigtol, long seed);
 38:   #endif
 39: extern int FREE_GRAPH;
 40: #endif

 42: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 43: {
 44: #if defined(PETSC_HAVE_CHACO)
 45:   enum {
 46:     DEFAULT_METHOD  = 1,
 47:     INERTIAL_METHOD = 3
 48:   };
 49:   MPI_Comm comm;
 50:   int      nvtxs = numVertices;            /* number of vertices in full graph */
 51:   int     *vwgts = NULL;                   /* weights for all vertices */
 52:   float   *ewgts = NULL;                   /* weights for all edges */
 53:   float   *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
 54:   char    *outassignname = NULL;           /*  name of assignment output file */
 55:   char    *outfilename   = NULL;           /* output file name */
 56:   int      architecture  = 1;              /* 0 => hypercube, d => d-dimensional mesh */
 57:   int      ndims_tot     = 0;              /* total number of cube dimensions to divide */
 58:   int      mesh_dims[3];                   /* dimensions of mesh of processors */
 59:   double  *goal          = NULL;           /* desired set sizes for each set */
 60:   int      global_method = 1;              /* global partitioning algorithm */
 61:   int      local_method  = 1;              /* local partitioning algorithm */
 62:   int      rqi_flag      = 0;              /* should I use RQI/Symmlq eigensolver? */
 63:   int      vmax          = 200;            /* how many vertices to coarsen down to? */
 64:   int      ndims         = 1;              /* number of eigenvectors (2^d sets) */
 65:   double   eigtol        = 0.001;          /* tolerance on eigenvectors */
 66:   long     seed          = 123636512;      /* for random graph mutations */
 67:   #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
 68:   int *assignment; /* Output partition */
 69:   #else
 70:   short int *assignment; /* Output partition */
 71:   #endif
 72:   int       fd_stdout, fd_pipe[2];
 73:   PetscInt *points;
 74:   int       i, v, p;
 75:   int       err;

 77:   PetscFunctionBegin;
 78:   PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
 79:   if (PetscDefined(USE_DEBUG)) {
 80:     int       ival, isum;
 81:     PetscBool distributed;

 83:     ival = (numVertices > 0);
 84:     PetscCallMPI(MPIU_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm));
 85:     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
 86:     PetscCheck(!distributed, comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
 87:   }
 88:   if (!numVertices) { /* distributed case, return if not holding the graph */
 89:     PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
 90:     PetscFunctionReturn(PETSC_SUCCESS);
 91:   }
 92:   FREE_GRAPH = 0; /* Do not let Chaco free my memory */
 93:   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

 95:   /* code would use manager.createCellCoordinates(nvtxs, &x, &y, &z); */
 96:   PetscCheck(global_method != INERTIAL_METHOD, PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");

 98:   mesh_dims[0] = nparts;
 99:   mesh_dims[1] = 1;
100:   mesh_dims[2] = 1;
101:   PetscCall(PetscMalloc1(nvtxs, &assignment));
102:   /* Chaco outputs to stdout. We redirect this to a buffer. */
103:   /* TODO: check error codes for UNIX calls */
104:   #if defined(PETSC_HAVE_UNISTD_H)
105:   {
106:     int piperet;
107:     piperet = pipe(fd_pipe);
108:     PetscCheck(!piperet, PETSC_COMM_SELF, PETSC_ERR_SYS, "Could not create pipe");
109:     fd_stdout = dup(1);
110:     close(1);
111:     dup2(fd_pipe[1], 1);
112:   }
113:   #endif
114:   if (part->usevwgt) PetscCall(PetscInfo(part, "PETSCPARTITIONERCHACO ignores vertex weights\n"));
115:   if (part->useewgt) PetscCall(PetscInfo(part, "PETSCPARTITIONERCHACO ignores edge weights\n"));
116:   err = interface(nvtxs, (int *)start, (int *)adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename, assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag, vmax, ndims, eigtol, seed);
117:   #if defined(PETSC_HAVE_UNISTD_H)
118:   {
119:     char msgLog[10000];
120:     int  count;

122:     PetscCall(PetscFFlush(stdout));
123:     count = (int)read(fd_pipe[0], msgLog, (10000 - 1) * sizeof(char));
124:     if (count < 0) count = 0;
125:     msgLog[count] = 0;
126:     close(1);
127:     dup2(fd_stdout, 1);
128:     close(fd_stdout);
129:     close(fd_pipe[0]);
130:     close(fd_pipe[1]);
131:     PetscCheck(!err, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
132:   }
133:   #else
134:   PetscCheck(!err, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
135:   #endif
136:   /* Convert to PetscSection+IS */
137:   for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
138:   PetscCall(PetscMalloc1(nvtxs, &points));
139:   for (p = 0, i = 0; p < nparts; ++p) {
140:     for (v = 0; v < nvtxs; ++v) {
141:       if (assignment[v] == p) points[i++] = v;
142:     }
143:   }
144:   PetscCheck(i == nvtxs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
145:   PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));
146:   /* code would use manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
147:   PetscCheck(global_method != INERTIAL_METHOD, PETSC_COMM_SELF, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
148:   PetscCall(PetscFree(assignment));
149:   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: #else
152:   SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
153: #endif
154: }

156: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
157: {
158:   PetscFunctionBegin;
159:   part->noGraph        = PETSC_FALSE;
160:   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
161:   part->ops->partition = PetscPartitionerPartition_Chaco;
162:   PetscFunctionReturn(PETSC_SUCCESS);
163: }

165: /*MC
166:   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

168:   Level: intermediate

170: .seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
171: M*/

173: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
174: {
175:   PetscPartitioner_Chaco *p;

177:   PetscFunctionBegin;
179:   PetscCall(PetscNew(&p));
180:   part->data = p;

182:   PetscCall(PetscPartitionerInitialize_Chaco(part));
183:   PetscCall(PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionerCite));
184:   PetscFunctionReturn(PETSC_SUCCESS);
185: }