Actual source code: partition.c

#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
  Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  if (part->n != size) {
    const char *prefix;
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
    SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
  }
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));

  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Partition the rows into nparts (nearly) equal-sized contiguous blocks to rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
{
  PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;

  PetscFunctionBegin;
  PetscCall(MatGetSize(part->adj, &M, NULL));
  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  nparts = part->n;
  PetscCall(PetscMalloc1(nparts, &parts));
  /* distribute the M rows as evenly as possible: the first r parts get one extra row */
  d = M / nparts;
  for (i = 0; i < nparts; i++) parts[i] = d;
  r = M % nparts;
  for (i = 0; i < r; i++) parts[i] += 1;
  /* turn the part sizes into cumulative upper bounds */
  for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
  PetscCall(PetscMalloc1(m, &indices));
  PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
  /* binary-search each locally owned row in the cumulative bounds to find its target part */
  for (i = start; i < end; i++) {
    PetscCall(PetscFindInt(i, nparts, parts, &loc));
    if (loc < 0) loc = -(loc + 1);
    else loc = loc + 1;
    indices[i - start] = loc;
  }
  PetscCall(PetscFree(parts));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
{
  PetscInt    cell, n, N, p, rstart, rend, *color;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");

  PetscCall(MatGetSize(part->adj, &N, NULL));
  n = (PetscInt)PetscSqrtReal((PetscReal)N);
  PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
  PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
  PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
  PetscCall(PetscMalloc1(rend - rstart, &color));
  /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
  for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
     seps[  0 :         2*p) : the start and end node of each subdomain
     seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
   levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough
*/
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt l2p, i, pTree, pStartTree;

  PetscFunctionBegin;
  l2p = PetscLog2Real(p);
  PetscCheck(!(l2p - (PetscInt)PetscLog2Real(p)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
  if (!p) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscArrayzero(seps, 2 * p - 2));
  PetscCall(PetscArrayzero(level, p - 1));
  seps[2 * p - 2] = sizes[2 * p - 2];
  pTree           = p;
  pStartTree      = 0;
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree / 2;
  }
  seps[2 * p - 2] -= sizes[2 * p - 2];

  pStartTree = 2 * p - 2;
  pTree      = 1;
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2 * i - (pStartTree + 2 * pTree);
      PetscInt n = seps[k + 1];

      seps[k + 1]  = seps[i] - sizes[k + 1];
      seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
      level[i - p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
  for (i = 2 * p - 2; i >= 0; i--) {
    seps[2 * i]     = seps[i];
    seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
  MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

  Not Collective, No Fortran Support

  Input Parameters:
+ sname - name of partitioning (for example `MATPARTITIONINGCURRENT` or `MATPARTITIONINGPARMETIS`)
- function - function pointer that creates the partitioning type

  Level: developer

  Example Usage:
.vb
   MatPartitioningRegister("my_part", MyPartCreate);
.ve

  Then, your partitioner can be chosen with the procedural interface via `MatPartitioningSetType(part, "my_part")` or at runtime via the option
  `-mat_partitioning_type my_part`

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningGetType - Gets the partitioning method type and name (as a string)
  from the partitioning context.

  Not Collective

  Input Parameter:
. partitioning - the partitioning context

  Output Parameter:
. type - partitioner type

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
{
  PetscFunctionBegin;
  PetscAssertPointer(type, 2);
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetNParts - Set how many partitions need to be created;
  by default this is one per processor. Certain partitioning schemes may
  in fact only support that option.

  Collective

  Input Parameters:
+ part - the partitioning context
- n - the number of partitions

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
{
  PetscFunctionBegin;
  part->n = n;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node, a non-negative value indicates the processor
                 number the node has been assigned to; a negative value x indicates that the node lies on the separator at level -(x+1).

  Level: intermediate

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.
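
  Example Usage:
  A minimal sketch of one possible calling sequence (assuming `A` is an assembled `MATMPIADJ` adjacency matrix
  and a nested-dissection capable partitioner, such as ParMetis, is available):
.vb
  MatPartitioning part;
  IS              is;

  MatPartitioningCreate(PETSC_COMM_WORLD, &part);
  MatPartitioningSetAdjacency(part, A);
  MatPartitioningSetType(part, MATPARTITIONINGPARMETIS);
  MatPartitioningApplyND(part, &is); /* negative entries in is mark separator nodes */
  ISDestroy(&is);
  MatPartitioningDestroy(&part);
.ve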

.seealso: [](ch_matrices), `Mat`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscAssertPointer(partitioning, 2);
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, applynd, partitioning);
  PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Keys:
+ -mat_partitioning_type <type> - set the partitioning package or algorithm to use
- -mat_partitioning_view - display information about the partitioning object

  Level: beginner

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.
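
  Example Usage:
  A minimal sketch of the typical calling sequence (assuming `A` is an assembled `MATMPIADJ` adjacency matrix):
.vb
  MatPartitioning part;
  IS              is;

  MatPartitioningCreate(PETSC_COMM_WORLD, &part);
  MatPartitioningSetAdjacency(part, A);
  MatPartitioningSetFromOptions(part);
  MatPartitioningApply(part, &is);
  ISView(is, PETSC_VIEWER_STDOUT_WORLD);
  ISDestroy(&is);
  MatPartitioningDestroy(&part);
.ve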

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
{
  PetscBool viewbalance, improve;

  PetscFunctionBegin;
  PetscAssertPointer(partitioning, 2);
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, apply, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
  improve = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
  PetscOptionsEnd();

  if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

  if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningImprove - Improves the quality of a given partition.

  Collective

  Input Parameters:
+ matp - the matrix partitioning object
- partitioning - the original partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Key:
. -mat_partitioning_improve - improve the quality of the given partition

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscAssertPointer(partitioning, 2);
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscTryTypeMethod(matp, improve, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningViewImbalance - Display partitioning imbalance information.

  Collective

  Input Parameters:
+ matp - the matrix partitioning object
- partitioning - the partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Key:
. -mat_partitioning_view_imbalance - view the imbalance information from the last partitioning

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
@*/
PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
  const PetscInt *indices;
  PetscViewer     viewer;

  PetscFunctionBegin;
  nparts = matp->n;
  PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
  PetscCall(ISGetLocalSize(partitioning, &nlocal));
  PetscCall(ISGetIndices(partitioning, &indices));
  for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  PetscCall(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
  PetscCall(ISRestoreIndices(partitioning, &indices));
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i = 0; i < nparts; i++) {
    minsub = PetscMin(minsub, subdomainsizes[i]);
    maxsub = PetscMax(maxsub, subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
  PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
  PetscCall(MatPartitioningView(matp, viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
  partitioned.

  Collective

  Input Parameters:
+ part - the partitioning context
- adj - the adjacency matrix; this can be any `MatType`, but the natural representation is `MATMPIADJ`

  Level: beginner
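
  Example Usage:
  A minimal sketch (assuming `nlocal`, `N`, `ia`, and `ja` are placeholders describing the locally owned rows
  of the adjacency graph in CSR form; no edge weights are supplied here):
.vb
  Mat adj;

  MatCreateMPIAdj(PETSC_COMM_WORLD, nlocal, N, ia, ja, NULL, &adj);
  MatPartitioningSetAdjacency(part, adj);
.ve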

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
{
  PetscFunctionBegin;
  part->adj = adj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningDestroy - Destroys the partitioning context.

  Collective

  Input Parameter:
. part - the partitioning context

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(PETSC_SUCCESS);

  if (--((PetscObject)*part)->refct > 0) {
    *part = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscTryTypeMethod(*part, destroy);
  PetscCall(PetscFree((*part)->vertex_weights));
  PetscCall(PetscFree((*part)->part_weights));
  PetscCall(PetscHeaderDestroy(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

  Logically Collective

  Input Parameters:
+ part - the partitioning context
- weights - the weights; on each process this array must have a length equal to the number of local rows times
            the value passed with `MatPartitioningSetNumberVertexWeights()` (or times 1 if that has not been set)

  Level: beginner

  Notes:
  The array weights is freed by PETSc so the user should not free the array. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

  The weights may not be used by some partitioners.
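
  Example Usage:
  A minimal sketch that weights every local row equally (here `nlocal` is a placeholder for the number of local
  rows, and a single weight per vertex is assumed):
.vb
  PetscInt *weights, i;

  PetscMalloc1(nlocal, &weights);
  for (i = 0; i < nlocal; i++) weights[i] = 1;
  MatPartitioningSetVertexWeights(part, weights); /* ownership of weights passes to part */
.ve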

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->vertex_weights));
  part->vertex_weights = (PetscInt *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetPartitionWeights - Sets the weights for each partition.

  Logically Collective

  Input Parameters:
+ part - the partitioning context
- weights - An array of size nparts that is used to specify the fraction of
            vertex weight that should be distributed to each sub-domain for
            the balance constraint. If all of the sub-domains are to be of
            the same size, then each of the nparts elements should be set
            to a value of 1/nparts. Note that the sum of all of the weights
            should be one.

  Level: beginner

  Note:
  The array weights is freed by PETSc so the user should not free the array. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().
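
  Example Usage:
  A minimal sketch giving every sub-domain the same fraction of the total vertex weight (here `nparts` is a
  placeholder for the number of sub-domains):
.vb
  PetscReal *pweights;
  PetscInt   i;

  PetscMalloc1(nparts, &pweights);
  for (i = 0; i < nparts; i++) pweights[i] = 1.0 / nparts;
  MatPartitioningSetPartitionWeights(part, pweights); /* ownership of pweights passes to part */
.ve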

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->part_weights));
  part->part_weights = (PetscReal *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

  Logically Collective

  Input Parameters:
+ part - the partitioning context
- use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
                     that is, use_edge_weights is `PETSC_FALSE`. If use_edge_weights is set to `PETSC_TRUE`, the user
                     must ensure that valid edge weights are stored in the adjacency (`MATMPIADJ`) matrix.

  Options Database Key:
. -mat_partitioning_use_edge_weights - (true or false)

  Level: beginner
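
  Example Usage:
  A minimal sketch (assuming `adj` was created with `MatCreateMPIAdj()` with a non-NULL values array holding
  the edge weights):
.vb
  MatPartitioningSetAdjacency(part, adj);
  MatPartitioningSetUseEdgeWeights(part, PETSC_TRUE);
.ve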

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
@*/
PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
{
  PetscFunctionBegin;
  part->use_edge_weights = use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningGetUseEdgeWeights - Get the flag that indicates whether or not edge weights are used.

  Logically Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. use_edge_weights - the flag indicating whether or not edge weights are used

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
          `MatPartitioningSetUseEdgeWeights`
@*/
PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
{
  PetscFunctionBegin;
  PetscAssertPointer(use_edge_weights, 2);
  *use_edge_weights = part->use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningCreate - Creates a partitioning context.

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newp - location to put the context

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
          `MatPartitioningSetAdjacency()`
@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = NULL;

  PetscCall(MatInitializePackage());
  PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatGraphOperations", comm, MatPartitioningDestroy, MatPartitioningView));
  part->vertex_weights   = NULL;
  part->part_weights     = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  PetscCallMPI(MPI_Comm_size(comm, &size));
  part->n    = (PetscInt)size;
  part->ncon = 1;

  *newp = part;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningViewFromOptions - View a partitioning context from the options database

  Collective

  Input Parameters:
+ A - the partitioning context
. obj - Optional object that provides the prefix used in the options database check
- name - command line option

  Options Database Key:
. -mat_partitioning_view [viewertype]:... - the viewer and its options

  Level: intermediate

  Note:
.vb
    If no value is provided ascii:stdout is used
       ascii[:[filename][:[format][:append]]]   defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
                                                for example ascii::ascii_info prints just the information about the object not all details
                                                unless :append is given filename opens in write mode, overwriting what was already there
       binary[:[filename][:[format][:append]]]  defaults to the file binaryoutput
       draw[:drawtype[:filename]]               for example, draw:tikz, draw:tikz:figure.tex or draw:x
       socket[:port]                            defaults to the standard output port
       saws[:communicatorname]                  publishes object to the Scientific Application Webserver (SAWs)
.ve

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningView - Prints the partitioning data structure.

  Collective

  Input Parameters:
+ part - the partitioning context
- viewer - optional visualization context

  Level: intermediate

  Note:
  The available visualization contexts include
+ `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
- `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
  output where only the first processor opens
  the file. All other processors send their
  data to the first processor to print.

  The user can open alternative visualization contexts with
. `PetscViewerASCIIOpen()` - output to a specified file

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
  PetscCheckSameComm(part, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
    if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, " Using vertex weights\n"));
  }
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscTryTypeMethod(part, view, viewer);
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetType - Sets the type of partitioner to use

  Collective

  Input Parameters:
+ part - the partitioning context
- type - a known method

  Options Database Key:
. -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods or see `MatPartitioningType`

  Level: intermediate
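
  Example Usage:
  A minimal sketch selecting the ParMetis partitioner (assuming PETSc was configured with ParMetis):
.vb
  MatPartitioningSetType(part, MATPARTITIONINGPARMETIS);
.ve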

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
{
  PetscBool match;
  PetscErrorCode (*r)(MatPartitioning);

  PetscFunctionBegin;
  PetscAssertPointer(type, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  PetscTryTypeMethod(part, destroy);
  part->ops->destroy = NULL;

  part->setupcalled = 0;
  part->data        = NULL;
  PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));

  PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);

  PetscCall((*r)(part));

  PetscCall(PetscFree(((PetscObject)part)->type_name));
  PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetFromOptions - Sets various partitioning options from the
  options database for the partitioning object

  Collective

  Input Parameter:
. part - the partitioning context

  Options Database Keys:
+ -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods
- -mat_partitioning_nparts - number of subgraphs

  Level: beginner

  Note:
  If the partitioner has not been set by the user it uses one of the installed partitioners, such as ParMetis. If there are
  no installed partitioners it does no repartitioning.

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool   flag;
  char        type[256];
  const char *def;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
  if (flag) PetscCall(MatPartitioningSetType(part, type));

  PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));

  PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));

  /*
    Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));

  PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

  Not Collective

  Input Parameters:
+ partitioning - the partitioning context
- ncon - the number of weights

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
{
  PetscFunctionBegin;
  partitioning->ncon = ncon;
  PetscFunctionReturn(PETSC_SUCCESS);
}