Actual source code: partition.c

  1: #include <petsc/private/matimpl.h>

  3: /* Logging support */
  4: PetscClassId MAT_PARTITIONING_CLASSID;

  6: /*
  7:    Simplest partitioning, keeps the current partitioning.
  8: */
  9: static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
 10: {
 11:   PetscInt    m;
 12:   PetscMPIInt rank, size;

 14:   PetscFunctionBegin;
 15:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
 16:   if (part->n != size) {
 17:     const char *prefix;
 18:     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
 19:     SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
 20:   }
 21:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));

 23:   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
 24:   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
 25:   PetscFunctionReturn(PETSC_SUCCESS);
 26: }

 28: /*
 29:    Partition by splitting the global rows into nparts nearly equal, contiguous blocks to rebalance the computation
 30: */
 31: static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
 32: {
 33:   PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;

 35:   PetscFunctionBegin;
 36:   PetscCall(MatGetSize(part->adj, &M, NULL));
 37:   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
 38:   nparts = part->n;
 39:   PetscCall(PetscMalloc1(nparts, &parts));
 40:   d = M / nparts;
 41:   for (i = 0; i < nparts; i++) parts[i] = d;
 42:   r = M % nparts;
 43:   for (i = 0; i < r; i++) parts[i] += 1;
 44:   for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
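       /* after the prefix sum, parts[k] is one past the last global row assigned to part k */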
 45:   PetscCall(PetscMalloc1(m, &indices));
 46:   PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
 47:   for (i = start; i < end; i++) {
 48:     PetscCall(PetscFindInt(i, nparts, parts, &loc));
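         /* PetscFindInt() returns the matching index, or -(insertion point) - 1 when row i is not an exact block boundary; either way the next two lines recover the owning part */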
 49:     if (loc < 0) loc = -(loc + 1);
 50:     else loc = loc + 1;
 51:     indices[i - start] = loc;
 52:   }
 53:   PetscCall(PetscFree(parts));
 54:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
 55:   PetscFunctionReturn(PETSC_SUCCESS);
 56: }

 58: static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
 59: {
 60:   PetscInt    cell, n, N, p, rstart, rend, *color;
 61:   PetscMPIInt size;

 63:   PetscFunctionBegin;
 64:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
 65:   PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
 66:   p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
 67:   PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");

 69:   PetscCall(MatGetSize(part->adj, &N, NULL));
 70:   n = (PetscInt)PetscSqrtReal((PetscReal)N);
 71:   PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
 72:   PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
 73:   PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
 74:   PetscCall(PetscMalloc1(rend - rstart, &color));
 75:   /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
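       /* map cell (row = cell / n, col = cell % n) of the n x n grid to block (row / (n/p), col / (n/p)) of the p x p process grid */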
 76:   for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
 77:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
 78:   PetscFunctionReturn(PETSC_SUCCESS);
 79: }

 81: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
 82: {
 83:   PetscFunctionBegin;
 84:   part->ops->apply   = MatPartitioningApply_Current;
 85:   part->ops->view    = NULL;
 86:   part->ops->destroy = NULL;
 87:   PetscFunctionReturn(PETSC_SUCCESS);
 88: }

 90: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
 91: {
 92:   PetscFunctionBegin;
 93:   part->ops->apply   = MatPartitioningApply_Average;
 94:   part->ops->view    = NULL;
 95:   part->ops->destroy = NULL;
 96:   PetscFunctionReturn(PETSC_SUCCESS);
 97: }

 99: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
100: {
101:   PetscFunctionBegin;
102:   part->ops->apply   = MatPartitioningApply_Square;
103:   part->ops->view    = NULL;
104:   part->ops->destroy = NULL;
105:   PetscFunctionReturn(PETSC_SUCCESS);
106: }

108: /* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
109:        seps[  0 :         2*p) : the start and end node of each subdomain
110:        seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
111:      levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
 112:    The arrays must be large enough.
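
       Worked example (a sketch traced from the code below): for p = 2 with sizes = {3, 3, 1}
       (two 3-node subdomains separated by a single node) the routine produces
         seps  = {0, 2, 3, 5, 6, 6}   subdomain 0 spans nodes 0..2, subdomain 1 spans nodes 3..5, the separator is node 6
         level = {-1}                 the single separator is the root of the separator tree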
113: */
114: PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
115: {
116:   PetscInt l2p, i, pTree, pStartTree;

118:   PetscFunctionBegin;
119:   l2p = PetscLog2Real(p);
120:   PetscCheck(!(l2p - (PetscInt)PetscLog2Real(p)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
121:   if (!p) PetscFunctionReturn(PETSC_SUCCESS);
122:   PetscCall(PetscArrayzero(seps, 2 * p - 2));
123:   PetscCall(PetscArrayzero(level, p - 1));
124:   seps[2 * p - 2] = sizes[2 * p - 2];
125:   pTree           = p;
126:   pStartTree      = 0;
127:   while (pTree != 1) {
128:     for (i = pStartTree; i < pStartTree + pTree; i++) {
129:       seps[i] += sizes[i];
130:       seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
131:     }
132:     pStartTree += pTree;
133:     pTree = pTree / 2;
134:   }
135:   seps[2 * p - 2] -= sizes[2 * p - 2];

137:   pStartTree = 2 * p - 2;
138:   pTree      = 1;
139:   while (pStartTree > 0) {
140:     for (i = pStartTree; i < pStartTree + pTree; i++) {
141:       PetscInt k = 2 * i - (pStartTree + 2 * pTree);
142:       PetscInt n = seps[k + 1];

144:       seps[k + 1]  = seps[i] - sizes[k + 1];
145:       seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
146:       level[i - p] = -pTree - i + pStartTree;
147:     }
148:     pTree *= 2;
149:     pStartTree -= pTree;
150:   }
151:   /* I know there should be a formula */
152:   PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
153:   for (i = 2 * p - 2; i >= 0; i--) {
154:     seps[2 * i]     = seps[i];
155:     seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
156:   }
157:   PetscFunctionReturn(PETSC_SUCCESS);
158: }

160: PetscFunctionList MatPartitioningList              = NULL;
161: PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

163: /*@C
164:   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

166:   Not Collective, No Fortran Support

168:   Input Parameters:
169: + sname    - name of partitioning (for example `MATPARTITIONINGCURRENT` or `MATPARTITIONINGPARMETIS`)
170: - function - function pointer that creates the partitioning type

172:   Level: developer

174:   Example Usage:
175: .vb
176:    MatPartitioningRegister("my_part", MyPartCreate);
177: .ve

179:   Then, your partitioner can be chosen with the procedural interface via `MatPartitioningSetType(part, "my_part")` or at runtime via the option
180:   `-mat_partitioning_type my_part`.
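
  A hypothetical `MyPartCreate` only needs to fill in the apply function pointer, mirroring the built-in creation routines earlier in this file; a minimal sketch (here the trivial apply keeps the existing row distribution, like `MATPARTITIONINGCURRENT`):
.vb
   static PetscErrorCode MyPartApply(MatPartitioning part, IS *partitioning)
   {
     PetscInt    m;
     PetscMPIInt rank;

     PetscFunctionBegin;
     /* trivial example: keep the current distribution */
     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));
     PetscCall(MatGetLocalSize(part->adj, &m, NULL));
     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
     PetscFunctionReturn(PETSC_SUCCESS);
   }

   PetscErrorCode MyPartCreate(MatPartitioning part)
   {
     PetscFunctionBegin;
     part->ops->apply = MyPartApply;
     PetscFunctionReturn(PETSC_SUCCESS);
   }
.ve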

182: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
183: @*/
184: PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
185: {
186:   PetscFunctionBegin;
187:   PetscCall(MatInitializePackage());
188:   PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
189:   PetscFunctionReturn(PETSC_SUCCESS);
190: }

192: /*@
193:   MatPartitioningGetType - Gets the partitioning method type name (as a string)
194:   from the partitioning context.

196:   Not Collective

198:   Input Parameter:
199: . partitioning - the partitioning context

201:   Output Parameter:
202: . type - partitioner type

204:   Level: intermediate

206: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
207: @*/
208: PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
209: {
210:   PetscFunctionBegin;
212:   PetscAssertPointer(type, 2);
213:   *type = ((PetscObject)partitioning)->type_name;
214:   PetscFunctionReturn(PETSC_SUCCESS);
215: }

217: /*@
218:   MatPartitioningSetNParts - Sets how many partitions are to be created;
219:   by default this is one per processor. Certain partitioning schemes may
220:   only support that option.

222:   Collective

224:   Input Parameters:
225: + part - the partitioning context
226: - n    - the number of partitions

228:   Level: intermediate

230: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
231: @*/
232: PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
233: {
234:   PetscFunctionBegin;
235:   part->n = n;
236:   PetscFunctionReturn(PETSC_SUCCESS);
237: }

239: /*@
240:   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

242:   Collective

244:   Input Parameter:
245: . matp - the matrix partitioning object

247:   Output Parameter:
248: . partitioning - the partitioning. For each local node, a positive value indicates the processor
249:                    number the node has been assigned to. A negative value x indicates the separator level -(x+1); for example, -1 places the node on the top-level separator.

251:   Level: intermediate

253:   Note:
254:   The user can define additional partitionings; see `MatPartitioningRegister()`.

256: .seealso: [](ch_matrices), `Mat`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
257:           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
258:           `ISPartitioningCount()`
259: @*/
260: PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
261: {
262:   PetscFunctionBegin;
264:   PetscAssertPointer(partitioning, 2);
265:   PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
266:   PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
267:   PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
268:   PetscUseTypeMethod(matp, applynd, partitioning);
269:   PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

271:   PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
272:   PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
273:   PetscFunctionReturn(PETSC_SUCCESS);
274: }

276: /*@
277:   MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

279:   Collective

281:   Input Parameter:
282: . matp - the matrix partitioning object

284:   Output Parameter:
285: . partitioning - the partitioning. For each local node this gives the MPI rank to which that node is assigned.

287:   Options Database Keys:
288: + -mat_partitioning_type <type> - set the partitioning package or algorithm to use
289: - -mat_partitioning_view        - display information about the partitioning object

291:   Level: beginner

293:   Note: The user can define additional partitionings; see `MatPartitioningRegister()`.
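
  Example Usage (a minimal sketch, assuming `A` is an already assembled adjacency matrix, for example of type `MATMPIADJ`):
.vb
   MatPartitioning part;
   IS              is;

   PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
   PetscCall(MatPartitioningSetAdjacency(part, A));
   PetscCall(MatPartitioningSetFromOptions(part));
   PetscCall(MatPartitioningApply(part, &is));
   /* use the IS, for example with ISPartitioningToNumbering() or ISPartitioningCount() */
   PetscCall(ISDestroy(&is));
   PetscCall(MatPartitioningDestroy(&part));
.ve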

295: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
296:           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
297:           `ISPartitioningCount()`
298: @*/
299: PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
300: {
301:   PetscBool viewbalance, improve;

303:   PetscFunctionBegin;
305:   PetscAssertPointer(partitioning, 2);
306:   PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
307:   PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
308:   PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
309:   PetscUseTypeMethod(matp, apply, partitioning);
310:   PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

312:   PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
313:   PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

315:   PetscObjectOptionsBegin((PetscObject)matp);
316:   viewbalance = PETSC_FALSE;
317:   PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
318:   improve = PETSC_FALSE;
319:   PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
320:   PetscOptionsEnd();

322:   if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

324:   if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
325:   PetscFunctionReturn(PETSC_SUCCESS);
326: }

328: /*@
329:   MatPartitioningImprove - Improves the quality of a given partition.

331:   Collective

333:   Input Parameters:
334: + matp         - the matrix partitioning object
335: - partitioning - the original partitioning. For each local node this gives the processor
336:                    number to which that node is assigned.

338:   Options Database Key:
339: . -mat_partitioning_improve - improve the quality of the given partition

341:   Level: beginner

343: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
344:           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
345:           `ISPartitioningCount()`
346: @*/
347: PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
348: {
349:   PetscFunctionBegin;
351:   PetscAssertPointer(partitioning, 2);
352:   PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
353:   PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
354:   PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
355:   PetscTryTypeMethod(matp, improve, partitioning);
356:   PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
357:   PetscFunctionReturn(PETSC_SUCCESS);
358: }

360: /*@
361:   MatPartitioningViewImbalance - Displays partitioning imbalance information.

363:   Collective

365:   Input Parameters:
366: + matp         - the matrix partitioning object
367: - partitioning - the partitioning. For each local node this gives the MPI rank to which that node is assigned.

369:   Options Database Key:
370: . -mat_partitioning_view_imbalance - view the imbalance information from the last partitioning

372:   Level: beginner

374: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
375: @*/
376: PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
377: {
378:   PetscMPIInt     nparts;
379:   PetscInt       *subdomainsizes, *subdomainsizes_tmp, nlocal, maxsub, minsub, avgsub;
380:   const PetscInt *indices;
381:   PetscViewer     viewer;

383:   PetscFunctionBegin;
386:   PetscCall(PetscMPIIntCast(matp->n, &nparts));
387:   PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
388:   PetscCall(ISGetLocalSize(partitioning, &nlocal));
389:   PetscCall(ISGetIndices(partitioning, &indices));
390:   for (PetscInt i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
391:   PetscCallMPI(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
392:   PetscCall(ISRestoreIndices(partitioning, &indices));
393:   minsub = PETSC_INT_MAX, maxsub = PETSC_INT_MIN, avgsub = 0;
394:   for (PetscMPIInt i = 0; i < nparts; i++) {
395:     minsub = PetscMin(minsub, subdomainsizes[i]);
396:     maxsub = PetscMax(maxsub, subdomainsizes[i]);
397:     avgsub += subdomainsizes[i];
398:   }
399:   avgsub /= nparts;
400:   PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
401:   PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
402:   PetscCall(MatPartitioningView(matp, viewer));
403:   PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
404:   PetscFunctionReturn(PETSC_SUCCESS);
405: }

407: /*@
408:   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
409:   partitioned.

411:   Collective

413:   Input Parameters:
414: + part - the partitioning context
415: - adj  - the adjacency matrix; this can be any `MatType`, but the natural representation is `MATMPIADJ` (see the sketch below)

417:   Level: beginner
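
  A minimal sketch of building the adjacency in `MATMPIADJ` form and attaching it (here `nlocal`, `Nglobal`, `ia`, and `ja` are hypothetical, `PetscMalloc1()`-allocated CSR arrays describing the local rows of the graph):
.vb
   Mat adj;

   PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, nlocal, Nglobal, ia, ja, NULL, &adj));
   PetscCall(MatPartitioningSetAdjacency(part, adj));
.ve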

419: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
420: @*/
421: PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
422: {
423:   PetscFunctionBegin;
426:   part->adj = adj;
427:   PetscFunctionReturn(PETSC_SUCCESS);
428: }

430: /*@
431:   MatPartitioningDestroy - Destroys the partitioning context.

433:   Collective

435:   Input Parameter:
436: . part - the partitioning context

438:   Level: beginner

440: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
441: @*/
442: PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
443: {
444:   PetscFunctionBegin;
445:   if (!*part) PetscFunctionReturn(PETSC_SUCCESS);
447:   if (--((PetscObject)*part)->refct > 0) {
448:     *part = NULL;
449:     PetscFunctionReturn(PETSC_SUCCESS);
450:   }

452:   PetscTryTypeMethod(*part, destroy);
453:   PetscCall(PetscFree((*part)->vertex_weights));
454:   PetscCall(PetscFree((*part)->part_weights));
455:   PetscCall(PetscHeaderDestroy(part));
456:   PetscFunctionReturn(PETSC_SUCCESS);
457: }

459: /*@C
460:   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

462:   Logically Collective

464:   Input Parameters:
465: + part    - the partitioning context
466: - weights - the weights; on each process this array must have size equal to the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()`,
467:             or 1 if that was not provided

469:   Level: beginner

471:   Notes:
472:   The array `weights` is freed by PETSc, so the user should not free it. In C/C++
473:   the array must be obtained with a call to `PetscMalloc()`, not malloc().

475:   The weights may not be used by some partitioners.
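
  For example (a minimal sketch, assuming one weight per vertex and `m` local rows; the caller does not free `weights` afterwards):
.vb
   PetscInt *weights;

   PetscCall(PetscMalloc1(m, &weights));
   for (PetscInt i = 0; i < m; i++) weights[i] = 1; /* fill with application-specific weights */
   PetscCall(MatPartitioningSetVertexWeights(part, weights)); /* ownership passes to the MatPartitioning */
.ve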

477:   Fortran Note:
478:   The array `weights` is copied during this function call.

480: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
481: @*/
482: PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
483: {
484:   PetscFunctionBegin;
486:   PetscCall(PetscFree(part->vertex_weights));
487:   part->vertex_weights = (PetscInt *)weights;
488:   PetscFunctionReturn(PETSC_SUCCESS);
489: }

491: /*@C
492:   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

494:   Logically Collective

496:   Input Parameters:
497: + part    - the partitioning context
498: - weights - An array of size nparts that is used to specify the fraction of
499:             vertex weight that should be distributed to each sub-domain for
500:             the balance constraint. If all of the sub-domains are to be of
501:             the same size, then each of the nparts elements should be set
502:             to a value of 1/nparts. Note that the sum of all of the weights
503:             should be one.

505:   Level: beginner

507:   Note:
508:   The array `weights` is freed by PETSc, so the user should not free it. In C/C++
509:   the array must be obtained with a call to `PetscMalloc()`, not malloc().
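
  For example (a minimal sketch requesting equally sized parts, where `nparts` matches the value set with `MatPartitioningSetNParts()`):
.vb
   PetscReal *pw;

   PetscCall(PetscMalloc1(nparts, &pw));
   for (PetscInt i = 0; i < nparts; i++) pw[i] = 1.0 / nparts; /* fractions must sum to one */
   PetscCall(MatPartitioningSetPartitionWeights(part, pw)); /* ownership passes to the MatPartitioning */
.ve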

511:   Fortran Note:
512:   The array `weights` is copied during this function call.

514: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
515: @*/
516: PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
517: {
518:   PetscFunctionBegin;
520:   PetscCall(PetscFree(part->part_weights));
521:   part->part_weights = (PetscReal *)weights;
522:   PetscFunctionReturn(PETSC_SUCCESS);
523: }

525: /*@
526:   MatPartitioningSetUseEdgeWeights - Sets a flag to indicate whether or not to use edge weights.

528:   Logically Collective

530:   Input Parameters:
531: + part             - the partitioning context
532: - use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
533:                      that is, use_edge_weights is `PETSC_FALSE`. If use_edge_weights is set to `PETSC_TRUE`, the user must ensure that valid
534:                      edge weights are stored in the adjacency (`MATMPIADJ`) matrix.

536:   Options Database Key:
537: . -mat_partitioning_use_edge_weights - (true or false)

539:   Level: beginner

541: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
542: @*/
543: PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
544: {
545:   PetscFunctionBegin;
547:   part->use_edge_weights = use_edge_weights;
548:   PetscFunctionReturn(PETSC_SUCCESS);
549: }

551: /*@
552:   MatPartitioningGetUseEdgeWeights - Gets the flag that indicates whether or not edge weights are used.

554:   Logically Collective

556:   Input Parameter:
557: . part - the partitioning context

559:   Output Parameter:
560: . use_edge_weights - the flag indicating whether or not edge weights are used.

562:   Level: beginner

564: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
565:           `MatPartitioningSetUseEdgeWeights()`
566: @*/
567: PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
568: {
569:   PetscFunctionBegin;
571:   PetscAssertPointer(use_edge_weights, 2);
572:   *use_edge_weights = part->use_edge_weights;
573:   PetscFunctionReturn(PETSC_SUCCESS);
574: }

576: /*@
577:   MatPartitioningCreate - Creates a partitioning context.

579:   Collective

581:   Input Parameter:
582: . comm - MPI communicator

584:   Output Parameter:
585: . newp - location to put the context

587:   Level: beginner

589: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
590:           `MatPartitioningSetAdjacency()`
591: @*/
592: PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
593: {
594:   MatPartitioning part;
595:   PetscMPIInt     size;

597:   PetscFunctionBegin;
598:   PetscAssertPointer(newp, 2);
599:   PetscCall(MatInitializePackage());

601:   PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatGraphOperations", comm, MatPartitioningDestroy, MatPartitioningView));
602:   part->vertex_weights   = NULL;
603:   part->part_weights     = NULL;
604:   part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

606:   PetscCallMPI(MPI_Comm_size(comm, &size));
607:   part->n    = (PetscInt)size;
608:   part->ncon = 1;

610:   *newp = part;
611:   PetscFunctionReturn(PETSC_SUCCESS);
612: }

614: /*@
615:   MatPartitioningViewFromOptions - Views a partitioning context from the options database

617:   Collective

619:   Input Parameters:
620: + A    - the partitioning context
621: . obj  - Optional object that provides the prefix used in the options database check
622: - name - command line option

624:   Options Database Key:
625: . -mat_partitioning_view [viewertype]:... - the viewer and its options

627:   Level: intermediate

629:   Note:
630: .vb
631:     If no value is provided ascii:stdout is used
632:        ascii[:[filename][:[format][:append]]]    defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
633:                                                   for example ascii::ascii_info prints just the information about the object not all details
634:                                                   unless :append is given filename opens in write mode, overwriting what was already there
635:        binary[:[filename][:[format][:append]]]   defaults to the file binaryoutput
636:        draw[:drawtype[:filename]]                for example, draw:tikz, draw:tikz:figure.tex  or draw:x
637:        socket[:port]                             defaults to the standard output port
638:        saws[:communicatorname]                    publishes object to the Scientific Application Webserver (SAWs)
639: .ve

641: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
642: @*/
643: PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
644: {
645:   PetscFunctionBegin;
647:   PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
648:   PetscFunctionReturn(PETSC_SUCCESS);
649: }

651: /*@
652:   MatPartitioningView - Prints the partitioning data structure.

654:   Collective

656:   Input Parameters:
657: + part   - the partitioning context
658: - viewer - optional visualization context

660:   Level: intermediate

662:   Note:
663:   The available visualization contexts include
664: +     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
665: -     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
666:   output where only the first processor opens
667:   the file.  All other processors send their
668:   data to the first processor to print.

670:   The user can open alternative visualization contexts with
671: .     `PetscViewerASCIIOpen()` - output to a specified file

673: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
674: @*/
675: PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
676: {
677:   PetscBool iascii;

679:   PetscFunctionBegin;
681:   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
683:   PetscCheckSameComm(part, 1, viewer, 2);

685:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
686:   if (iascii) {
687:     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
688:     if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n"));
689:   }
690:   PetscCall(PetscViewerASCIIPushTab(viewer));
691:   PetscTryTypeMethod(part, view, viewer);
692:   PetscCall(PetscViewerASCIIPopTab(viewer));
693:   PetscFunctionReturn(PETSC_SUCCESS);
694: }

696: /*@
697:   MatPartitioningSetType - Sets the type of partitioner to use

699:   Collective

701:   Input Parameters:
702: + part - the partitioning context.
703: - type - a known method

705:   Options Database Key:
706: . -mat_partitioning_type <type> - (for instance, parmetis); use -help for a list of available methods or see `MatPartitioningType`

708:   Level: intermediate

710: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
711: @*/
712: PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
713: {
714:   PetscBool match;
715:   PetscErrorCode (*r)(MatPartitioning);

717:   PetscFunctionBegin;
719:   PetscAssertPointer(type, 2);

721:   PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
722:   if (match) PetscFunctionReturn(PETSC_SUCCESS);

724:   PetscTryTypeMethod(part, destroy);
725:   part->ops->destroy = NULL;

727:   part->setupcalled = 0;
728:   part->data        = NULL;
729:   PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));

731:   PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
732:   PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);

734:   PetscCall((*r)(part));

736:   PetscCall(PetscFree(((PetscObject)part)->type_name));
737:   PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
738:   PetscFunctionReturn(PETSC_SUCCESS);
739: }

741: /*@
742:   MatPartitioningSetFromOptions - Sets various partitioning options from the
743:   options database for the partitioning object

745:   Collective

747:   Input Parameter:
748: . part - the partitioning context.

750:   Options Database Keys:
751: + -mat_partitioning_type <type> - (for instance, parmetis); use -help for a list of available methods
752: - -mat_partitioning_nparts       - number of subgraphs

754:   Level: beginner

756:   Note:
757:   If the partitioner has not been set by the user, one of the installed partitioners, such as ParMetis, is used. If there are
758:   no installed partitioners, no repartitioning is done.
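
  For example, a hypothetical application `./myapp` could select the partitioner and the number of parts at runtime with
.vb
   ./myapp -mat_partitioning_type parmetis -mat_partitioning_nparts 8 -mat_partitioning_view
.ve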

760: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`
761: @*/
762: PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
763: {
764:   PetscBool   flag;
765:   char        type[256];
766:   const char *def;

768:   PetscFunctionBegin;
769:   PetscObjectOptionsBegin((PetscObject)part);
770:   if (!((PetscObject)part)->type_name) {
771: #if defined(PETSC_HAVE_PARMETIS)
772:     def = MATPARTITIONINGPARMETIS;
773: #elif defined(PETSC_HAVE_CHACO)
774:     def = MATPARTITIONINGCHACO;
775: #elif defined(PETSC_HAVE_PARTY)
776:     def = MATPARTITIONINGPARTY;
777: #elif defined(PETSC_HAVE_PTSCOTCH)
778:     def = MATPARTITIONINGPTSCOTCH;
779: #else
780:     def = MATPARTITIONINGCURRENT;
781: #endif
782:   } else {
783:     def = ((PetscObject)part)->type_name;
784:   }
785:   PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
786:   if (flag) PetscCall(MatPartitioningSetType(part, type));

788:   PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));

790:   PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));

792:   /*
793:     Set the type if it was never set.
794:   */
795:   if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));

797:   PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
798:   PetscOptionsEnd();
799:   PetscFunctionReturn(PETSC_SUCCESS);
800: }

802: /*@
803:   MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

805:   Not Collective

807:   Input Parameters:
808: + partitioning - the partitioning context
809: - ncon         - the number of weights

811:   Level: intermediate

813: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
814: @*/
815: PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
816: {
817:   PetscFunctionBegin;
819:   partitioning->ncon = ncon;
820:   PetscFunctionReturn(PETSC_SUCCESS);
821: }