Actual source code: pmetis.c

#include <../src/mat/impls/adj/mpi/mpiadj.h>

/*
   Currently using ParMetis-4.0.2
*/

#include <parmetis.h>

/*
      The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  PetscInt  cuts; /* number of cuts made (output) */
  PetscInt  foldfactor;
  PetscInt  parallel; /* use parallel partitioner for coarse problem */
  PetscInt  indexing; /* 0 indicates C indexing, 1 Fortran */
  PetscInt  printout; /* indicates if one wishes Metis to print info */
  PetscBool repartition;
} MatPartitioning_Parmetis;

#define PetscCallPARMETIS(n, func) \
  do { \
    PetscCheck(n != METIS_ERROR_INPUT, PETSC_COMM_SELF, PETSC_ERR_LIB, "ParMETIS error due to wrong inputs and/or options for %s", func); \
    PetscCheck(n != METIS_ERROR_MEMORY, PETSC_COMM_SELF, PETSC_ERR_LIB, "ParMETIS error due to insufficient memory in %s", func); \
    PetscCheck(n != METIS_ERROR, PETSC_COMM_SELF, PETSC_ERR_LIB, "ParMETIS general error in %s", func); \
  } while (0)

#define PetscCallParmetis_(name, func, args) \
  do { \
    PetscStackPushExternal(name); \
    int status = func args; \
    PetscStackPop; \
    PetscCallPARMETIS(status, name); \
  } while (0)

#define PetscCallParmetis(func, args) PetscCallParmetis_(PetscStringize(func), func, args)
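
/*
  Example: the wrappers above are used as

    PetscCallParmetis(ParMETIS_V3_PartKway, (vtxdist, xadj, adjncy, ...));

  which pushes the external routine on the PETSc stack, makes the call, and converts the
  METIS_ERROR_* return codes into PETSc errors.
*/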

static PetscErrorCode MatPartitioningApply_Parmetis_Private(MatPartitioning part, PetscBool useND, PetscBool isImprove, IS *partitioning)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
  PetscInt                 *locals = NULL;
  Mat                       mat    = part->adj, amat, pmat;
  PetscBool                 flg;
  PetscInt                  bs = 1;

  PetscFunctionBegin;
  PetscAssertPointer(partitioning, 4);
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  if (flg) {
    amat = mat;
    PetscCall(PetscObjectReference((PetscObject)amat));
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    PetscCall(MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &amat));
    if (amat->rmap->n > 0) bs = mat->rmap->n / amat->rmap->n;
  }
  PetscCall(MatMPIAdjCreateNonemptySubcommMat(amat, &pmat));
  PetscCallMPI(MPI_Barrier(PetscObjectComm((PetscObject)part)));

  if (pmat) {
    MPI_Comm    pcomm, comm;
    Mat_MPIAdj *adj     = (Mat_MPIAdj *)pmat->data;
    PetscInt   *vtxdist = pmat->rmap->range;
    PetscInt   *xadj    = adj->i;
    PetscInt   *adjncy  = adj->j;
    PetscInt   *NDorder = NULL;
    PetscInt    itmp = 0, wgtflag = 0, numflag = 0, ncon = part->ncon, nparts = part->n, options[24], i, j;
    real_t     *tpwgts, *ubvec, itr = 0.1;

    PetscCall(PetscObjectGetComm((PetscObject)pmat, &pcomm));
    if (PetscDefined(USE_DEBUG)) {
      /* check that matrix has no diagonal entries */
      PetscInt rstart;
      PetscCall(MatGetOwnershipRange(pmat, &rstart, NULL));
      for (i = 0; i < pmat->rmap->n; i++) {
        for (j = xadj[i]; j < xadj[i + 1]; j++) PetscCheck(adjncy[j] != i + rstart, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Row %" PetscInt_FMT " has diagonal entry; Parmetis forbids diagonal entry", i + rstart);
      }
    }

    PetscCall(PetscMalloc1(pmat->rmap->n, &locals));

    if (isImprove) {
      PetscInt        i;
      const PetscInt *part_indices;
      PetscCall(ISGetIndices(*partitioning, &part_indices));
      for (i = 0; i < pmat->rmap->n; i++) locals[i] = part_indices[i * bs];
      PetscCall(ISRestoreIndices(*partitioning, &part_indices));
      PetscCall(ISDestroy(partitioning));
    }

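    /* ParMETIS weight flag for the calls below: 0 = no weights, 1 = edge weights only, 2 = vertex weights only, 3 = both vertex and edge weights */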
    if (adj->values && part->use_edge_weights && !part->vertex_weights) wgtflag = 1;
    if (part->vertex_weights && !adj->values) wgtflag = 2;
    if (part->vertex_weights && adj->values && part->use_edge_weights) wgtflag = 3;

    if (PetscLogPrintInfo) {
      itmp             = pmetis->printout;
      pmetis->printout = 127;
    }
    PetscCall(PetscMalloc1(ncon * nparts, &tpwgts));
    for (i = 0; i < ncon; i++) {
      for (j = 0; j < nparts; j++) {
        if (part->part_weights) {
          tpwgts[i * nparts + j] = part->part_weights[i * nparts + j];
        } else {
          tpwgts[i * nparts + j] = 1. / nparts;
        }
      }
    }
    PetscCall(PetscMalloc1(ncon, &ubvec));
    for (i = 0; i < ncon; i++) ubvec[i] = 1.05;
    /* Fill the ParMETIS options array with our defaults */
    options[0] = 1; /* 1 means use the values below instead of the ParMETIS defaults */
    for (i = 1; i < 24; i++) options[i] = -1;
    options[1] = 0;                    /* no verbosity */
    options[2] = 0;                    /* random seed */
    options[3] = PARMETIS_PSR_COUPLED; /* sub-domains and processors are coupled (used by ParMETIS_V3_AdaptiveRepart()) */
    /* Duplicate the communicator to be sure that ParMETIS attribute caching does not interfere with PETSc. */
    PetscCallMPI(MPI_Comm_dup(pcomm, &comm));
    if (useND) {
      PetscInt   *sizes, *seps, log2size, subd, *level;
      PetscMPIInt size;
      idx_t       mtype = PARMETIS_MTYPE_GLOBAL, rtype = PARMETIS_SRTYPE_2PHASE, p_nseps = 1, s_nseps = 1;
      real_t      ubfrac = 1.05;

      PetscCallMPI(MPI_Comm_size(comm, &size));
      PetscCall(PetscMalloc1(pmat->rmap->n, &NDorder));
      PetscCall(PetscMalloc3(2 * size, &sizes, 4 * size, &seps, size, &level));
      PetscCallParmetis(ParMETIS_V32_NodeND, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)&numflag, &mtype, &rtype, &p_nseps, &s_nseps, &ubfrac, NULL /* seed */, NULL /* dbglvl */, (idx_t *)NDorder, (idx_t *)(sizes), &comm));
      log2size = PetscLog2Real(size);
      subd     = PetscPowInt(2, log2size);
      PetscCall(MatPartitioningSizesToSep_Private(subd, sizes, seps, level));
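      /* Map each local vertex from the nested dissection ordering to a partition label:
         an ND index that falls inside a subdomain's index range is assigned to that subdomain,
         otherwise the vertex lies on a separator and is assigned through the level[] array
         computed by MatPartitioningSizesToSep_Private() above */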
      for (i = 0; i < pmat->rmap->n; i++) {
        PetscInt loc;

        PetscCall(PetscFindInt(NDorder[i], 2 * subd, seps, &loc));
        if (loc < 0) {
          loc = -(loc + 1);
          if (loc % 2) { /* part of subdomain */
            locals[i] = loc / 2;
          } else {
            PetscCall(PetscFindInt(NDorder[i], 2 * (subd - 1), seps + 2 * subd, &loc));
            loc       = loc < 0 ? -(loc + 1) / 2 : loc / 2;
            locals[i] = level[loc];
          }
        } else locals[i] = loc / 2;
      }
      PetscCall(PetscFree3(sizes, seps, level));
    } else {
      if (pmetis->repartition) {
        PetscCallParmetis(ParMETIS_V3_AdaptiveRepart, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)part->vertex_weights, (idx_t *)adj->values, (idx_t *)&wgtflag, (idx_t *)&numflag, (idx_t *)&ncon, (idx_t *)&nparts, tpwgts, ubvec, &itr, (idx_t *)options,
                                                       (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
      } else if (isImprove) {
        PetscCallParmetis(ParMETIS_V3_RefineKway, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)adj->values, (idx_t *)&wgtflag, (idx_t *)&numflag, (idx_t *)&ncon, (idx_t *)&nparts, tpwgts, ubvec, (idx_t *)options,
                                                   (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
      } else {
        PetscCallParmetis(ParMETIS_V3_PartKway, ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy, (idx_t *)part->vertex_weights, (idx_t *)adj->values, (idx_t *)&wgtflag, (idx_t *)&numflag, (idx_t *)&ncon, (idx_t *)&nparts, tpwgts, ubvec, (idx_t *)options,
                                                 (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
      }
    }
    PetscCallMPI(MPI_Comm_free(&comm));

    PetscCall(PetscFree(tpwgts));
    PetscCall(PetscFree(ubvec));
    if (PetscLogPrintInfo) pmetis->printout = itmp;

    if (bs > 1) {
      PetscInt i, j, *newlocals;
      PetscCall(PetscMalloc1(bs * pmat->rmap->n, &newlocals));
      for (i = 0; i < pmat->rmap->n; i++) {
        for (j = 0; j < bs; j++) newlocals[bs * i + j] = locals[i];
      }
      PetscCall(PetscFree(locals));
      PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), bs * pmat->rmap->n, newlocals, PETSC_OWN_POINTER, partitioning));
    } else {
      PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), pmat->rmap->n, locals, PETSC_OWN_POINTER, partitioning));
    }
    if (useND) {
      IS ndis;

      if (bs > 1) {
        PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)part), bs, pmat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
      } else {
        PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), pmat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
      }
      PetscCall(ISSetPermutation(ndis));
      PetscCall(PetscObjectCompose((PetscObject)(*partitioning), "_petsc_matpartitioning_ndorder", (PetscObject)ndis));
      PetscCall(ISDestroy(&ndis));
    }
  } else {
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), 0, NULL, PETSC_COPY_VALUES, partitioning));
    if (useND) {
      IS ndis;

      if (bs > 1) {
        PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)part), bs, 0, NULL, PETSC_COPY_VALUES, &ndis));
      } else {
        PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), 0, NULL, PETSC_COPY_VALUES, &ndis));
      }
      PetscCall(ISSetPermutation(ndis));
      PetscCall(PetscObjectCompose((PetscObject)(*partitioning), "_petsc_matpartitioning_ndorder", (PetscObject)ndis));
      PetscCall(ISDestroy(&ndis));
    }
  }
  PetscCall(MatDestroy(&pmat));
  PetscCall(MatDestroy(&amat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Uses the ParMETIS parallel matrix partitioner to compute a nested dissection ordering of the matrix in parallel
*/
static PetscErrorCode MatPartitioningApplyND_Parmetis(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_Parmetis_Private(part, PETSC_TRUE, PETSC_FALSE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
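
/*
  Example (illustrative sketch): calling the nested dissection path through the public
  MatPartitioning interface and retrieving the ordering that is composed on the result above.
  Here A is assumed to be an existing parallel matrix.

    MatPartitioning part;
    IS              is, ndorder;

    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(MatPartitioningSetType(part, MATPARTITIONINGPARMETIS));
    PetscCall(MatPartitioningApplyND(part, &is));
    PetscCall(PetscObjectQuery((PetscObject)is, "_petsc_matpartitioning_ndorder", (PetscObject *)&ndorder));
    PetscCall(ISDestroy(&is));
    PetscCall(MatPartitioningDestroy(&part));
*/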

/*
   Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
*/
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_Parmetis_Private(part, PETSC_FALSE, PETSC_FALSE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Uses ParMETIS to improve the quality of a partition
*/
static PetscErrorCode MatPartitioningImprove_Parmetis(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_Parmetis_Private(part, PETSC_FALSE, PETSC_TRUE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
  PetscMPIInt               rank;
  PetscBool                 iascii;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    if (pmetis->parallel == 2) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Using parallel coarse grid partitioner\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Using sequential coarse grid partitioner\n"));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Using %" PetscInt_FMT " fold factor\n", pmetis->foldfactor));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d]Number of cuts found %" PetscInt_FMT "\n", rank, pmetis->cuts));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
  do the partitioning of the coarse grid.

  Logically Collective

  Input Parameter:
. part - the partitioning context

  Level: advanced

.seealso: `MATPARTITIONINGPARMETIS`
@*/
PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;

  PetscFunctionBegin;
  pmetis->parallel = 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}
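
/*
  Example (illustrative): select the sequential coarse partitioner on an existing ParMETIS
  partitioning context, either in code

    PetscCall(MatPartitioningParmetisSetCoarseSequential(part));

  or from the command line with -mat_partitioning_parmetis_coarse_sequential.
*/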

/*@
  MatPartitioningParmetisSetRepartition - Repartition the
  current mesh to rebalance the computation.

  Logically Collective

  Input Parameter:
. part - the partitioning context

  Level: advanced

.seealso: `MATPARTITIONINGPARMETIS`
@*/
PetscErrorCode MatPartitioningParmetisSetRepartition(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;

  PetscFunctionBegin;
  pmetis->repartition = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
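
/*
  Example (illustrative): request adaptive repartitioning (ParMETIS_V3_AdaptiveRepart) instead
  of a fresh k-way partition, either in code

    PetscCall(MatPartitioningParmetisSetRepartition(part));

  or from the command line with -mat_partitioning_parmetis_repartition.
*/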

/*@
  MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. cut - the edge cut

  Level: advanced

.seealso: `MATPARTITIONINGPARMETIS`
@*/
PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;

  PetscFunctionBegin;
  *cut = pmetis->cuts;
  PetscFunctionReturn(PETSC_SUCCESS);
}
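
/*
  Example (illustrative): query the edge cut after the partition has been computed.

    PetscInt cut;

    PetscCall(MatPartitioningApply(part, &is));
    PetscCall(MatPartitioningParmetisGetEdgeCut(part, &cut));
*/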

static PetscErrorCode MatPartitioningSetFromOptions_Parmetis(MatPartitioning part, PetscOptionItems *PetscOptionsObject)
{
  PetscBool flag = PETSC_FALSE;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Set ParMETIS partitioning options");
  PetscCall(PetscOptionsBool("-mat_partitioning_parmetis_coarse_sequential", "Use sequential coarse partitioner", "MatPartitioningParmetisSetCoarseSequential", flag, &flag, NULL));
  if (flag) PetscCall(MatPartitioningParmetisSetCoarseSequential(part));
  flag = PETSC_FALSE; /* reset so the first option does not become the default for the second */
  PetscCall(PetscOptionsBool("-mat_partitioning_parmetis_repartition", "Repartition the current mesh to rebalance the computation", "MatPartitioningParmetisSetRepartition", flag, &flag, NULL));
  if (flag) PetscCall(MatPartitioningParmetisSetRepartition(part));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);

static PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(pmetis));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   MATPARTITIONINGPARMETIS - Creates a partitioning context via the external package PARMETIS.

   Collective

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_parmetis_coarse_sequential - use sequential PARMETIS coarse partitioner
-  -mat_partitioning_parmetis_repartition       - use ParMETIS adaptive repartitioning to rebalance the computation

   Level: beginner

   Note:
    See https://www-users.cs.umn.edu/~karypis/metis/

.seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MatPartitioningParmetisSetCoarseSequential()`, `MatPartitioningParmetisSetRepartition()`,
          `MatPartitioningParmetisGetEdgeCut()`
M*/
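
/*
  Example (illustrative sketch): typical use of the MATPARTITIONINGPARMETIS type to partition
  the rows of an existing parallel matrix A.

    MatPartitioning part;
    IS              is;

    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(MatPartitioningSetType(part, MATPARTITIONINGPARMETIS));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApply(part, &is));
    /* ... use the index set, e.g. to redistribute the rows of A ... */
    PetscCall(ISDestroy(&is));
    PetscCall(MatPartitioningDestroy(&part));
*/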

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *pmetis;

  PetscFunctionBegin;
  PetscCall(PetscNew(&pmetis));
  part->data = (void *)pmetis;

  pmetis->cuts        = 0;   /* output variable */
  pmetis->foldfactor  = 150; /* folding factor */
  pmetis->parallel    = 2;   /* use parallel partitioner for coarse grid */
  pmetis->indexing    = 0;   /* index numbering starts from 0 */
  pmetis->printout    = 0;   /* print no output while running */
  pmetis->repartition = PETSC_FALSE;

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->applynd        = MatPartitioningApplyND_Parmetis;
  part->ops->improve        = MatPartitioningImprove_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMeshToCellGraph - Convert a mesh to a cell graph.

  Collective

  Input Parameters:
+ mesh         - the graph that represents the coupling of the vertices of the mesh
- ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles and
                 quadrilaterals, 3 for tetrahedra, and 4 for hexahedra

  Output Parameter:
. dual - the dual graph

  Level: advanced

  Notes:
  Uses the ParMETIS package to convert a `Mat` that represents the coupling of the vertices of a mesh
  to a `Mat` that represents the graph of the coupling between cells (the "dual" graph), which is
  suitable for partitioning with the `MatPartitioning` object. Use this to partition the cells of a
  mesh.

  Currently requires ParMETIS to be installed and uses ParMETIS_V3_Mesh2Dual()

  Each row of the mesh object represents a single cell in the mesh. For triangles it has 3 entries, for quadrilaterals 4 entries,
  for tetrahedra 4 entries, and for hexahedra 8 entries. You can mix triangles and quadrilaterals in the same mesh, but cannot
  mix tetrahedra and hexahedra.
  The columns of each row of the `Mat` mesh are the global vertex numbers of the vertices of that row's cell.
  The number of rows in mesh is the number of cells, the number of columns is the number of vertices.

.seealso: `MatCreateMPIAdj()`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh, PetscInt ncommonnodes, Mat *dual)
{
  PetscInt   *newxadj, *newadjncy;
  PetscInt    numflag = 0;
  Mat_MPIAdj *adj     = (Mat_MPIAdj *)mesh->data, *newadj;
  PetscBool   flg;
  MPI_Comm    comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)mesh, MATMPIADJ, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Must use MPIAdj matrix type");

  PetscCall(PetscObjectGetComm((PetscObject)mesh, &comm));
  PetscCallParmetis(ParMETIS_V3_Mesh2Dual, ((idx_t *)mesh->rmap->range, (idx_t *)adj->i, (idx_t *)adj->j, (idx_t *)&numflag, (idx_t *)&ncommonnodes, (idx_t **)&newxadj, (idx_t **)&newadjncy, &comm));
  PetscCall(MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh), mesh->rmap->n, mesh->rmap->N, newxadj, newadjncy, NULL, dual));
  newadj = (Mat_MPIAdj *)(*dual)->data;

  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  PetscFunctionReturn(PETSC_SUCCESS);
}
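
/*
  Example (illustrative sketch): given a `Mat` mesh of type MATMPIADJ in which each row lists the
  global vertex numbers of one triangular cell, build the dual graph and partition it. The mesh
  matrix itself is assumed to have been created beforehand, e.g. with MatCreateMPIAdj().

    Mat             dual;
    MatPartitioning part;
    IS              is;

    PetscCall(MatMeshToCellGraph(mesh, 2, &dual));    /* 2 shared nodes define neighboring triangles */
    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
    PetscCall(MatPartitioningSetAdjacency(part, dual));
    PetscCall(MatPartitioningSetType(part, MATPARTITIONINGPARMETIS));
    PetscCall(MatPartitioningApply(part, &is));
    PetscCall(ISDestroy(&is));
    PetscCall(MatPartitioningDestroy(&part));
    PetscCall(MatDestroy(&dual));
*/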