/* party.c: PETSc interface to the Party graph-partitioning library */
1: #include <../src/mat/impls/adj/mpi/mpiadj.h>
3: #if defined(PETSC_HAVE_UNISTD_H)
4: #include <unistd.h>
5: #endif
7: EXTERN_C_BEGIN
8: #include <party_lib.h>
9: EXTERN_C_END
/* Private context for the Party partitioner; stored in part->data */
typedef struct {
  PetscBool redm;          /* use the "lam" matching method for graph reduction */
  PetscBool redo;          /* use the "w3" matching optimization method */
  PetscBool recursive;     /* use recursive bisection instead of direct k-way */
  PetscBool verbose;       /* echo the captured Party library output */
  char      global[15];    /* global method */
  char      local[15];     /* local method */
  PetscInt  nbvtxcoarsed;  /* number of vertices for the coarse graph */
} MatPartitioning_Party;
21: #define SIZE_LOG 10000 /* size of buffer for mesg_log */
/*
  MatPartitioningApply_Party - Computes a partitioning of the adjacency graph
  using the external Party library and returns it as an index set.

  The Party library is sequential, so on more than one process the distributed
  matrix is first gathered to a sequential copy on every rank; each rank then
  runs the identical partitioning and extracts its own local slice of the result.
  Party's stdout chatter is captured through a pipe and only shown when the
  -mat_partitioning_party_verbose option was set.
*/
static PetscErrorCode MatPartitioningApply_Party(MatPartitioning part, IS *partitioning)
{
  int                    perr;                 /* Party library return code */
  PetscInt               i, *parttab, *locals, nb_locals, M, N;
  PetscMPIInt            size, rank;
  Mat                    mat = part->adj, matAdj, matSeq, *A;
  Mat_MPIAdj            *adj;
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
  PetscBool              flg;
  IS                     isrow, iscol;
  int                    n, *edge_p, *edge, *vertex_w, p, *part_party, cutsize, redl, rec;
  const char            *redm, *redo;
  char                  *mesg_log;
#if defined(PETSC_HAVE_UNISTD_H)
  int fd_stdout, fd_pipe[2], count;
#endif

  PetscFunctionBegin;
  PetscCheck(!part->use_edge_weights, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Party does not support edge weights");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  /* Party runs sequentially: gather the whole graph onto every process */
  if (size > 1) {
    if (flg) PetscCall(MatMPIAdjToSeq(mat, &matSeq));
    else {
      PetscCall(PetscInfo(part, "Converting distributed matrix to sequential: this could be a performance loss\n"));
      /* extract the full matrix as a single sequential submatrix */
      PetscCall(MatGetSize(mat, &M, &N));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
      PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A));
      PetscCall(ISDestroy(&isrow));
      PetscCall(ISDestroy(&iscol));
      matSeq = *A;
      PetscCall(PetscFree(A));
    }
  } else {
    /* single process: reuse the input matrix (reference counted) */
    PetscCall(PetscObjectReference((PetscObject)mat));
    matSeq = mat;
  }

  if (!flg) { /* convert regular matrix to MPIADJ */
    PetscCall(MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matAdj));
  } else {
    PetscCall(PetscObjectReference((PetscObject)matSeq));
    matAdj = matSeq;
  }

  adj = (Mat_MPIAdj *)matAdj->data; /* finally adj contains adjacency graph */

  /* arguments for Party library */
  n        = mat->rmap->N;                  /* number of vertices in full graph */
  edge_p   = adj->i;                        /* start of edge list for each vertex */
  edge     = adj->j;                        /* edge list data */
  vertex_w = part->vertex_weights;          /* weights for all vertices */
  p        = part->n;                       /* number of parts to create */
  redl     = party->nbvtxcoarsed;           /* how many vertices to coarsen down to? */
  rec      = party->recursive ? 1 : 0;      /* recursive bisection */
  redm     = party->redm ? "lam" : "";      /* matching method */
  redo     = party->redo ? "w3" : "";       /* matching optimization method */

  PetscCall(PetscMalloc1(mat->rmap->N, &part_party));

  /* redirect output to buffer */
#if defined(PETSC_HAVE_UNISTD_H)
  /* swap fd 1 (stdout) for the write end of a pipe so Party's printf output
     can be captured; the original stdout is saved in fd_stdout.
     NOTE(review): output larger than the OS pipe buffer could block the
     library call since nothing drains the pipe until afterwards — confirm
     SIZE_LOG stays below the platform pipe capacity. */
  fd_stdout = dup(1);
  PetscCheck(!pipe(fd_pipe), PETSC_COMM_SELF, PETSC_ERR_SYS, "Could not open pipe");
  close(1);
  dup2(fd_pipe[1], 1);
  PetscCall(PetscMalloc1(SIZE_LOG, &mesg_log));
#endif

  /* library call */
  party_lib_times_start();
  perr = party_lib(n, vertex_w, NULL, NULL, NULL, edge_p, edge, NULL, p, part_party, &cutsize, redl, (char *)redm, (char *)redo, party->global, party->local, rec, 1);

  party_lib_times_output(1);
  part_info(n, vertex_w, edge_p, edge, NULL, p, part_party, 1);

#if defined(PETSC_HAVE_UNISTD_H)
  /* drain the captured output, then restore the original stdout */
  PetscCall(PetscFFlush(stdout));
  count = read(fd_pipe[0], mesg_log, (SIZE_LOG - 1) * sizeof(char));
  if (count < 0) count = 0;
  mesg_log[count] = 0;
  close(1);
  dup2(fd_stdout, 1);
  close(fd_stdout);
  close(fd_pipe[0]);
  close(fd_pipe[1]);
  if (party->verbose) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "%s", mesg_log));
  PetscCall(PetscFree(mesg_log));
#endif
  PetscCheck(!perr, PETSC_COMM_SELF, PETSC_ERR_LIB, "Party failed");

  /* widen Party's int result to PetscInt */
  PetscCall(PetscMalloc1(mat->rmap->N, &parttab));
  for (i = 0; i < mat->rmap->N; i++) parttab[i] = part_party[i];

  /* creation of the index set: each rank keeps only its own row range */
  nb_locals = mat->rmap->n;
  locals    = parttab + mat->rmap->rstart;

  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), nb_locals, locals, PETSC_COPY_VALUES, partitioning));

  /* clean up */
  PetscCall(PetscFree(parttab));
  PetscCall(PetscFree(part_party));
  PetscCall(MatDestroy(&matSeq));
  PetscCall(MatDestroy(&matAdj));
  PetscFunctionReturn(PETSC_SUCCESS);
}
133: static PetscErrorCode MatPartitioningView_Party(MatPartitioning part, PetscViewer viewer)
134: {
135: MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
136: PetscBool isascii;
138: PetscFunctionBegin;
139: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
140: if (isascii) {
141: PetscCall(PetscViewerASCIIPrintf(viewer, " Global method: %s\n", party->global));
142: PetscCall(PetscViewerASCIIPrintf(viewer, " Local method: %s\n", party->local));
143: PetscCall(PetscViewerASCIIPrintf(viewer, " Number of vertices for the coarse graph: %d\n", party->nbvtxcoarsed));
144: if (party->redm) PetscCall(PetscViewerASCIIPrintf(viewer, " Using matching method for graph reduction\n"));
145: if (party->redo) PetscCall(PetscViewerASCIIPrintf(viewer, " Using matching optimization\n"));
146: if (party->recursive) PetscCall(PetscViewerASCIIPrintf(viewer, " Using recursive bipartitioning\n"));
147: }
148: PetscFunctionReturn(PETSC_SUCCESS);
149: }
151: /*@
152: MatPartitioningPartySetGlobal - Set global method for Party partitioner.
154: Collective
156: Input Parameters:
157: + part - the partitioning context
158: - global - a string representing the method
160: Options Database Key:
161: . -mat_partitioning_party_global method - the global method
163: Level: advanced
165: Note:
166: The method may be one of `MP_PARTY_OPT`, `MP_PARTY_LIN`, `MP_PARTY_SCA`,
167: `MP_PARTY_RAN`, `MP_PARTY_GBF`, `MP_PARTY_GCF`, `MP_PARTY_BUB` or `MP_PARTY_DEF`, or
168: alternatively a string describing the method. Two or more methods can be
169: combined like "gbf,gcf". Check the Party Library Users Manual for details.
171: Developer Note:
172: Should be `MatPartitioningPartySetGlobalType()` and all uses of method should be changed to type
174: .seealso: `MATPARTITIONINGPARTY`, `MatPartitioningPartySetLocal()`
175: @*/
176: PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning part, const char *global)
177: {
178: PetscFunctionBegin;
180: PetscTryMethod(part, "MatPartitioningPartySetGlobal_C", (MatPartitioning, const char *), (part, global));
181: PetscFunctionReturn(PETSC_SUCCESS);
182: }
184: static PetscErrorCode MatPartitioningPartySetGlobal_Party(MatPartitioning part, const char *global)
185: {
186: MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
188: PetscFunctionBegin;
189: PetscCall(PetscStrncpy(party->global, global, 15));
190: PetscFunctionReturn(PETSC_SUCCESS);
191: }
193: /*@
194: MatPartitioningPartySetLocal - Set local method used by the Party partitioner.
196: Collective
198: Input Parameters:
199: + part - the partitioning context
200: - local - a string representing the method
202: Options Database Key:
203: . -mat_partitioning_party_local method - the local method
205: Level: advanced
207: Note:
208: The method may be one of `MP_PARTY_HELPFUL_SETS`, `MP_PARTY_KERNIGHAN_LIN`, or
209: `MP_PARTY_NONE`. Check the Party Library Users Manual for details.
211: Developer Note:
212: Should be `MatPartitioningPartySetLocalType()` and all uses of method should be changed to type
214: .seealso: `MATPARTITIONINGPARTY`, `MatPartitioningPartySetGlobal()`
215: @*/
216: PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning part, const char *local)
217: {
218: PetscFunctionBegin;
220: PetscTryMethod(part, "MatPartitioningPartySetLocal_C", (MatPartitioning, const char *), (part, local));
221: PetscFunctionReturn(PETSC_SUCCESS);
222: }
224: static PetscErrorCode MatPartitioningPartySetLocal_Party(MatPartitioning part, const char *local)
225: {
226: MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
228: PetscFunctionBegin;
229: PetscCall(PetscStrncpy(party->local, local, 15));
230: PetscFunctionReturn(PETSC_SUCCESS);
231: }
233: /*@
234: MatPartitioningPartySetCoarseLevel - Set the coarse level parameter for the
235: Party partitioner.
237: Collective
239: Input Parameters:
240: + part - the partitioning context
241: - level - the coarse level in range [0.0,1.0]
243: Options Database Key:
244: . -mat_partitioning_party_coarse l - Coarse level
246: Level: advanced
248: .seealso: `MATPARTITIONINGPARTY`
249: @*/
250: PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning part, PetscReal level)
251: {
252: PetscFunctionBegin;
255: PetscTryMethod(part, "MatPartitioningPartySetCoarseLevel_C", (MatPartitioning, PetscReal), (part, level));
256: PetscFunctionReturn(PETSC_SUCCESS);
257: }
259: static PetscErrorCode MatPartitioningPartySetCoarseLevel_Party(MatPartitioning part, PetscReal level)
260: {
261: MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
263: PetscFunctionBegin;
264: PetscCheck(level >= 0.0 && level <= 1.0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Party: level of coarsening out of range [0.0-1.0]");
265: party->nbvtxcoarsed = (PetscInt)(part->adj->cmap->N * level);
266: if (party->nbvtxcoarsed < 20) party->nbvtxcoarsed = 20;
267: PetscFunctionReturn(PETSC_SUCCESS);
268: }
270: /*@
271: MatPartitioningPartySetMatchOptimization - Activate matching optimization for
272: graph reduction.
274: Collective
276: Input Parameters:
277: + part - the partitioning context
278: - opt - boolean flag
280: Options Database Key:
281: . -mat_partitioning_party_match_optimization - Matching optimization on/off
283: Level: advanced
285: .seealso: `MATPARTITIONINGPARTY`
286: @*/
287: PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning part, PetscBool opt)
288: {
289: PetscFunctionBegin;
292: PetscTryMethod(part, "MatPartitioningPartySetMatchOptimization_C", (MatPartitioning, PetscBool), (part, opt));
293: PetscFunctionReturn(PETSC_SUCCESS);
294: }
296: static PetscErrorCode MatPartitioningPartySetMatchOptimization_Party(MatPartitioning part, PetscBool opt)
297: {
298: MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
300: PetscFunctionBegin;
301: party->redo = opt;
302: PetscFunctionReturn(PETSC_SUCCESS);
303: }
305: /*@
306: MatPartitioningPartySetBipart - Activate or deactivate recursive bisection in the Party partitioner
308: Collective
310: Input Parameters:
311: + part - the partitioning context
312: - bp - boolean flag
314: Options Database Key:
315: . -mat_partitioning_party_bipart - Bipartitioning option on/off
317: Level: advanced
319: .seealso: `MATPARTITIONINGPARTY`
320: @*/
321: PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning part, PetscBool bp)
322: {
323: PetscFunctionBegin;
326: PetscTryMethod(part, "MatPartitioningPartySetBipart_C", (MatPartitioning, PetscBool), (part, bp));
327: PetscFunctionReturn(PETSC_SUCCESS);
328: }
330: static PetscErrorCode MatPartitioningPartySetBipart_Party(MatPartitioning part, PetscBool bp)
331: {
332: MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
334: PetscFunctionBegin;
335: party->recursive = bp;
336: PetscFunctionReturn(PETSC_SUCCESS);
337: }
/* MatPartitioningSetFromOptions_Party - Processes the -mat_partitioning_party_*
   options database entries and applies any found values to the context.
   The string/real options go through the public Set routines; the boolean
   options write the context fields directly. */
static PetscErrorCode MatPartitioningSetFromOptions_Party(MatPartitioning part, PetscOptionItems PetscOptionsObject)
{
  PetscBool              flag;
  char                   value[256];
  PetscReal              r;
  MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Set Party partitioning options");
  PetscCall(PetscOptionsString("-mat_partitioning_party_global", "Global method", "MatPartitioningPartySetGlobal", party->global, value, sizeof(value), &flag));
  if (flag) PetscCall(MatPartitioningPartySetGlobal(part, value));
  PetscCall(PetscOptionsString("-mat_partitioning_party_local", "Local method", "MatPartitioningPartySetLocal", party->local, value, sizeof(value), &flag));
  if (flag) PetscCall(MatPartitioningPartySetLocal(part, value));
  PetscCall(PetscOptionsReal("-mat_partitioning_party_coarse", "Coarse level", "MatPartitioningPartySetCoarseLevel", 0.0, &r, &flag));
  if (flag) PetscCall(MatPartitioningPartySetCoarseLevel(part, r));
  PetscCall(PetscOptionsBool("-mat_partitioning_party_match_optimization", "Matching optimization on/off", "MatPartitioningPartySetMatchOptimization", party->redo, &party->redo, NULL));
  PetscCall(PetscOptionsBool("-mat_partitioning_party_bipart", "Bipartitioning on/off", "MatPartitioningPartySetBipart", party->recursive, &party->recursive, NULL));
  PetscCall(PetscOptionsBool("-mat_partitioning_party_verbose", "Show library output", "", party->verbose, &party->verbose, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
361: static PetscErrorCode MatPartitioningDestroy_Party(MatPartitioning part)
362: {
363: MatPartitioning_Party *party = (MatPartitioning_Party *)part->data;
365: PetscFunctionBegin;
366: PetscCall(PetscFree(party));
367: /* clear composed functions */
368: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetGlobal_C", NULL));
369: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetLocal_C", NULL));
370: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetCoarseLevel_C", NULL));
371: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetMatchOptimization_C", NULL));
372: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetBipart_C", NULL));
373: PetscFunctionReturn(PETSC_SUCCESS);
374: }
376: /*MC
377: MATPARTITIONINGPARTY - Creates a partitioning context via the external package Party <
378: http://wwwcs.upb.de/fachbereich/AG/monien/RESEARCH/PART/party.htm>.
380: Level: beginner
382: Note:
383: Does not support the `MatPartitioningSetUseEdgeWeights()` option
385: .seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MatPartitioningPartySetGlobal()`, `MatPartitioningPartySetLocal()`,
386: `MatPartitioningPartySetCoarseLevel()`, `MatPartitioningPartySetMatchOptimization()`, `MatPartitioningPartySetBipart()`
387: M*/
389: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Party(MatPartitioning part)
390: {
391: MatPartitioning_Party *party;
393: PetscFunctionBegin;
394: PetscCall(PetscNew(&party));
395: part->data = (void *)party;
397: PetscCall(PetscStrncpy(party->global, "gcf,gbf", sizeof(party->global)));
398: PetscCall(PetscStrncpy(party->local, "kl", sizeof(party->local)));
400: party->redm = PETSC_TRUE;
401: party->redo = PETSC_TRUE;
402: party->recursive = PETSC_TRUE;
403: party->verbose = PETSC_FALSE;
404: party->nbvtxcoarsed = 200;
406: part->ops->apply = MatPartitioningApply_Party;
407: part->ops->view = MatPartitioningView_Party;
408: part->ops->destroy = MatPartitioningDestroy_Party;
409: part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;
411: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetGlobal_C", MatPartitioningPartySetGlobal_Party));
412: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetLocal_C", MatPartitioningPartySetLocal_Party));
413: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetCoarseLevel_C", MatPartitioningPartySetCoarseLevel_Party));
414: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetMatchOptimization_C", MatPartitioningPartySetMatchOptimization_Party));
415: PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPartySetBipart_C", MatPartitioningPartySetBipart_Party));
416: PetscFunctionReturn(PETSC_SUCCESS);
417: }