Actual source code: partsimple.c

#include <petscvec.h>
#include <petsc/private/partitionerimpl.h>

typedef struct {
  PetscBool useGrid;        /* Flag to use a grid layout */
  PetscInt  gridDim;        /* The grid dimension */
  PetscInt  nodeGrid[3];    /* Dimension of node grid */
  PetscInt  processGrid[3]; /* Dimension of local process grid on each node */
} PetscPartitioner_Simple;

static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerSetFromOptions_Simple(PetscPartitioner part, PetscOptionItems PetscOptionsObject)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  PetscInt                 num, i;
  PetscBool                flg;

  PetscFunctionBegin;
  for (i = 0; i < 3; ++i) p->processGrid[i] = p->nodeGrid[i] = 1;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner Simple Options");
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_node_grid", "Number of nodes in each dimension", "", p->nodeGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    p->gridDim = num;
  }
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_process_grid", "Number of local processes in each dimension for a given node", "", p->processGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    if (p->gridDim < 0) p->gridDim = num;
    else PetscCheck(p->gridDim == num, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_INCOMP, "Process grid dimension %" PetscInt_FMT " != %" PetscInt_FMT " node grid dimension", num, p->gridDim);
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

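/*
  Grid partition: the ranks are viewed as a grid of nodes, each node containing a
  grid of local processes, and the cells (assumed to be numbered lexicographically)
  are split so that every process receives a contiguous sub-brick of the global
  cell grid. For example, -petscpartitioner_simple_node_grid 2,1,1
  -petscpartitioner_simple_process_grid 2,1,1 describes 2 nodes of 2 processes
  each, i.e. a 4x1x1 grid of 4 ranks in total; nparts must match both the grid
  volume and the communicator size.
*/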
static PetscErrorCode PetscPartitionerPartition_Simple_Grid(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p     = (PetscPartitioner_Simple *)part->data;
  const PetscInt          *nodes = p->nodeGrid;
  const PetscInt          *procs = p->processGrid;
  PetscInt                *cellproc, *offsets, cells[3] = {1, 1, 1}, pcells[3] = {1, 1, 1};
  PetscInt                 Np = 1, Nr, np, nk, nj, ni, pk, pj, pi, ck, cj, ci, i;
  MPI_Comm                 comm;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights when using grid partition\n"));
  if (targetSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores partition weights when using grid partition\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Check grid */
  for (i = 0; i < 3; ++i) Np *= nodes[i] * procs[i];
  PetscCheck(nparts == Np, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %" PetscInt_FMT " grid size", nparts, Np);
  PetscCheck(nparts == size, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %d processes", nparts, size);
  PetscCheck(numVertices % nparts == 0, comm, PETSC_ERR_ARG_INCOMP, "Number of cells %" PetscInt_FMT " is not divisible by number of partitions %" PetscInt_FMT, numVertices, nparts);
  for (i = 0; i < p->gridDim; ++i) cells[i] = nodes[i] * procs[i];
  Nr = numVertices / nparts;
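  /* Refine the coarse process-grid cell counts by uniform factors of 2 until each
     process owns numVertices/nparts cells, which requires
     numVertices = nparts * 2^(gridDim*k) for some integer k >= 0 */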
  while (Nr > 1) {
    for (i = 0; i < p->gridDim; ++i) {
      cells[i] *= 2;
      Nr /= 2;
    }
  }
  PetscCheck(!numVertices || Nr == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Odd number of cells %" PetscInt_FMT ". Must be nprocs*2^k", numVertices);
  for (i = 0; i < p->gridDim; ++i) {
    PetscCheck(cells[i] % (nodes[i] * procs[i]) == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "dir %" PetscInt_FMT ". Number of cells (%" PetscInt_FMT ") mod number of processors %" PetscInt_FMT, i, cells[i], nodes[i] * procs[i]);
    pcells[i] = cells[i] / (nodes[i] * procs[i]);
  }
  /* Compute sizes */
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts));
  PetscCall(PetscSectionSetUp(partSection));
  PetscCall(PetscCalloc1(nparts, &offsets));
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionGetOffset(partSection, np, &offsets[np]));
  if (!numVertices) pcells[0] = pcells[1] = pcells[2] = 0;
  /* Compute partition */
  PetscCall(PetscMalloc1(numVertices, &cellproc));
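  /* Walk the nodes, the processes within each node, and the cells within each
     process: nid and pid are the lexicographic ranks of the node and process,
     and cid is the global lexicographic index of cell (ci, cj, ck) owned by pid */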
  for (nk = 0; nk < nodes[2]; ++nk) {
    for (nj = 0; nj < nodes[1]; ++nj) {
      for (ni = 0; ni < nodes[0]; ++ni) {
        const PetscInt nid = (nk * nodes[1] + nj) * nodes[0] + ni;

        for (pk = 0; pk < procs[2]; ++pk) {
          for (pj = 0; pj < procs[1]; ++pj) {
            for (pi = 0; pi < procs[0]; ++pi) {
              const PetscInt pid = ((nid * procs[2] + pk) * procs[1] + pj) * procs[0] + pi;

              /* Assume that cells are originally numbered lexicographically */
              for (ck = 0; ck < pcells[2]; ++ck) {
                for (cj = 0; cj < pcells[1]; ++cj) {
                  for (ci = 0; ci < pcells[0]; ++ci) {
                    const PetscInt cid = (((nk * procs[2] + pk) * pcells[2] + ck) * cells[1] + ((nj * procs[1] + pj) * pcells[1] + cj)) * cells[0] + (ni * procs[0] + pi) * pcells[0] + ci;

                    cellproc[offsets[pid]++] = cid;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  for (np = 1; np < nparts; ++np) PetscCheck(offsets[np] - offsets[np - 1] == numVertices / nparts, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Offset %" PetscInt_FMT " != %" PetscInt_FMT " partition size", offsets[np], numVertices / nparts);
  PetscCall(PetscFree(offsets));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numVertices, cellproc, PETSC_OWN_POINTER, partition));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  MPI_Comm                 comm;
  PetscInt                 np, *tpwgts = NULL, sumw = 0, numVerticesGlobal = 0;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (p->useGrid) {
    PetscCall(PetscPartitionerPartition_Simple_Grid(part, nparts, numVertices, start, adjacency, vertSection, targetSection, partSection, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
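  /* If target partition weights are given, rescale them so they sum to the global
     number of vertices, assigning any rounding deficit to the largest part; if all
     weights are zero, free the array and fall back to the uniform partition */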
  if (targetSection) {
    PetscCallMPI(MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm));
    PetscCall(PetscCalloc1(nparts, &tpwgts));
    for (np = 0; np < nparts; ++np) {
      PetscCall(PetscSectionGetDof(targetSection, np, &tpwgts[np]));
      sumw += tpwgts[np];
    }
    if (sumw) {
      PetscInt m, mp;
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np] * numVerticesGlobal) / sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) {
          m  = tpwgts[np];
          mp = np;
        }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
    if (!sumw) PetscCall(PetscFree(tpwgts));
  }

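  /* The local numbering is kept as-is: the partition IS is the identity map, and
     partSection only records how many consecutive cells each part receives */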
  PetscCall(ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition));
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, tpwgts[np]));
    } else {
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts + ((numVertices % nparts) > np)));
    }
  } else {
    if (tpwgts) {
      Vec          v;
      PetscScalar *array;
      PetscInt     st, j;
      PetscMPIInt  rank;

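      /* Use a parallel Vec to communicate the part assignments: entry g is set to
         the part that owns global vertex g under the tpwgts layout, then each rank
         reads back its local entries and counts the cells it holds for each part */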
      PetscCall(VecCreate(comm, &v));
      PetscCall(VecSetSizes(v, numVertices, numVerticesGlobal));
      PetscCall(VecSetType(v, VECSTANDARD));
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      for (np = 0, st = 0; np < nparts; ++np) {
        if (rank == np || (rank == size - 1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) PetscCall(VecSetValue(v, st + j, np, INSERT_VALUES));
        }
        st += tpwgts[np];
      }
      PetscCall(VecAssemblyBegin(v));
      PetscCall(VecAssemblyEnd(v));
      PetscCall(VecGetArray(v, &array));
      for (j = 0; j < numVertices; ++j) PetscCall(PetscSectionAddDof(partSection, (PetscInt)PetscRealPart(array[j]), 1));
      PetscCall(VecRestoreArray(v, &array));
      PetscCall(VecDestroy(&v));
    } else {
      PetscMPIInt rank;
      PetscInt    nvGlobal, *offsets, myFirst, myLast;

      PetscCall(PetscMalloc1(size + 1, &offsets));
      offsets[0] = 0;
      PetscCallMPI(MPI_Allgather(&numVertices, 1, MPIU_INT, &offsets[1], 1, MPIU_INT, comm));
      for (np = 2; np <= size; np++) offsets[np] += offsets[np - 1];
      nvGlobal = offsets[size];
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      PetscCall(PetscFree(offsets));
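      /* Each part owns a contiguous range of global vertices, the first
         nvGlobal % nparts parts getting one extra vertex; find the parts that
         overlap this rank's range [myFirst, myLast] and set their local sizes */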
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem    = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal / nparts;
        PetscInt pBig   = nvGlobal / nparts + 1;

        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal / nparts);
          lastPart  = myLast / (nvGlobal / nparts);
        }

        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart = np * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np);
          PetscInt PartEnd   = (np + 1) * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np + 1);

          PartStart = PetscMax(PartStart, myFirst);
          PartEnd   = PetscMin(PartEnd, myLast + 1);
          PetscCall(PetscSectionSetDof(partSection, np, PartEnd - PartStart));
        }
      }
    }
  }
  PetscCall(PetscFree(tpwgts));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE; /* this partitioner never looks at the adjacency graph */
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Simple;
  part->ops->destroy        = PetscPartitionerDestroy_Simple;
  part->ops->partition      = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that assigns contiguous runs of cells to each process, optionally following a grid layout of nodes and local processes

  Options Database Keys:
+ -petscpartitioner_simple_node_grid <n0,n1,n2>    - Number of nodes in each dimension
- -petscpartitioner_simple_process_grid <p0,p1,p2> - Number of local processes in each dimension for a given node

  Level: intermediate

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/
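/* Example (a minimal sketch): select and configure this partitioner from the
   command line with
     -petscpartitioner_type simple -petscpartitioner_simple_node_grid 2,2,1 -petscpartitioner_simple_process_grid 1,1,1
   (a 2x2x1 grid of single-process nodes, so 4 ranks), or programmatically with
   PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE). */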

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;

  PetscFunctionBegin;
  PetscCall(PetscNew(&p));
  p->gridDim = -1;
  part->data = p;

  PetscCall(PetscPartitionerInitialize_Simple(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}