Actual source code: chaco.c

#include <../src/mat/impls/adj/mpi/mpiadj.h>

#if defined(PETSC_HAVE_UNISTD_H)
  #include <unistd.h>
#endif

#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  #include <chaco.h>
#else
/* Older versions of Chaco do not have an include file */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts, float *ewgts, float *x, float *y, float *z, char *outassignname, char *outfilename, short *assignment, int architecture, int ndims_tot, int mesh_dims[3], double *goal, int global_method, int local_method, int rqi_flag, int vmax, int ndims, double eigtol, long seed);
#endif

extern int FREE_GRAPH;

/*
  int nvtxs;            number of vertices in full graph
  int *start;           start of edge list for each vertex
  int *adjacency;       edge list data
  int *vwgts;           weights for all vertices
  float *ewgts;         weights for all edges
  float *x, *y, *z;     coordinates for inertial method
  char *outassignname;  name of assignment output file
  char *outfilename;    output file name
  short *assignment;    set number of each vtx (length n)
  int architecture;     0 => hypercube, d => d-dimensional mesh
  int ndims_tot;        total number of cube dimensions to divide
  int mesh_dims[3];     dimensions of mesh of processors
  double *goal;         desired set sizes for each set
  int global_method;    global partitioning algorithm
  int local_method;     local partitioning algorithm
  int rqi_flag;         should I use RQI/Symmlq eigensolver?
  int vmax;             how many vertices to coarsen down to?
  int ndims;            number of eigenvectors (2^d sets)
  double eigtol;        tolerance on eigenvectors
  long seed;            for random graph mutations
*/
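
/*
   For orientation, a minimal sketch of a direct interface() call that bisects
   a 4-cycle on a 1-D processor mesh (hypothetical arrays; the method codes 1/1
   are believed to select Chaco's multilevel method with Kernighan-Lin
   refinement; the real call, with PETSc-derived arguments, is in
   MatPartitioningApply_Chaco() below):

     int   nvtxs        = 4;                        // vertices of a 4-cycle
     int   start[]      = {0, 2, 4, 6, 8};          // CSR row pointers
     int   adjacency[]  = {2, 4, 1, 3, 2, 4, 1, 3}; // 1-based neighbor lists
     int   mesh_dims[3] = {2, 1, 1};                // two parts on a 1-D mesh
     short assignment[4];                           // output: part number per vertex (int with newer Chaco)
     int   cerr = interface(nvtxs, start, adjacency, NULL, NULL, NULL, NULL, NULL,
                            NULL, NULL, assignment, 1, 0, mesh_dims, NULL,
                            1, 1, 0, 100, 1, 0.001, 123636512L);
*/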

typedef struct {
  PetscBool         verbose;
  PetscInt          eignum;
  PetscReal         eigtol;
  MPChacoGlobalType global_method; /* global method */
  MPChacoLocalType  local_method;  /* local method */
  MPChacoEigenType  eigen_method;  /* eigensolver */
  PetscInt          nbvtxcoarsed;  /* number of vertices for the coarse graph */
} MatPartitioning_Chaco;

#define SIZE_LOG 10000 /* size of buffer for mesg_log */

static PetscErrorCode MatPartitioningApply_Chaco(MatPartitioning part, IS *partitioning)
{
  int                    cerr;
  PetscInt              *parttab, *locals, i, nb_locals, M, N;
  PetscMPIInt            size, rank;
  Mat                    mat = part->adj, matAdj, matSeq, *A;
  Mat_MPIAdj            *adj;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
  PetscBool              flg;
  IS                     isrow, iscol;
  int                    nvtxs, *start, *adjacency, *vwgts, architecture, ndims_tot;
  int                    mesh_dims[3], global_method, local_method, rqi_flag, vmax, ndims;
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int *assignment;
#else
  short *assignment;
#endif
  double eigtol;
  long   seed;
  char  *mesg_log;
#if defined(PETSC_HAVE_UNISTD_H)
  int fd_stdout, fd_pipe[2], count;
#endif

  PetscFunctionBegin;
  PetscCheck(!part->use_edge_weights, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Chaco does not support edge weights");
  FREE_GRAPH = 0; /* otherwise Chaco will attempt to free memory for adjacency graph */
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  if (size > 1) {
    if (flg) {
      PetscCall(MatMPIAdjToSeq(mat, &matSeq));
    } else {
      PetscCall(PetscInfo(part, "Converting distributed matrix to sequential: this could be a performance loss\n"));
      PetscCall(MatGetSize(mat, &M, &N));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
      PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A));
      PetscCall(ISDestroy(&isrow));
      PetscCall(ISDestroy(&iscol));
      matSeq = *A;
      PetscCall(PetscFree(A));
    }
  } else {
    PetscCall(PetscObjectReference((PetscObject)mat));
    matSeq = mat;
  }

  if (!flg) { /* convert regular matrix to MPIADJ */
    PetscCall(MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matAdj));
  } else {
    PetscCall(PetscObjectReference((PetscObject)matSeq));
    matAdj = matSeq;
  }

  adj = (Mat_MPIAdj *)matAdj->data; /* finally adj contains adjacency graph */

  /* arguments for Chaco library */
  nvtxs         = mat->rmap->N;         /* number of vertices in full graph */
  start         = adj->i;               /* start of edge list for each vertex */
  vwgts         = part->vertex_weights; /* weights for all vertices */
  architecture  = 1;                    /* 0 => hypercube, d => d-dimensional mesh */
  ndims_tot     = 0;                    /* total number of cube dimensions to divide */
  mesh_dims[0]  = part->n;              /* dimensions of mesh of processors */
  global_method = chaco->global_method; /* global partitioning algorithm */
  local_method  = chaco->local_method;  /* local partitioning algorithm */
  rqi_flag      = chaco->eigen_method;  /* should I use RQI/Symmlq eigensolver? */
  vmax          = chaco->nbvtxcoarsed;  /* how many vertices to coarsen down to? */
  ndims         = chaco->eignum;        /* number of eigenvectors (2^d sets) */
  eigtol        = chaco->eigtol;        /* tolerance on eigenvectors */
  seed          = 123636512;            /* for random graph mutations */

  PetscCall(PetscMalloc1(mat->rmap->N, &assignment));
  PetscCall(PetscMalloc1(start[nvtxs], &adjacency));
  for (i = 0; i < start[nvtxs]; i++) adjacency[i] = (adj->j)[i] + 1; /* 1-based indexing */

  /* redirect output to buffer */
#if defined(PETSC_HAVE_UNISTD_H)
  fd_stdout = dup(1);
  PetscCheck(!pipe(fd_pipe), PETSC_COMM_SELF, PETSC_ERR_SYS, "Could not open pipe");
  close(1);
  dup2(fd_pipe[1], 1);
  PetscCall(PetscMalloc1(SIZE_LOG, &mesg_log));
#endif
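
  /*
     Chaco prints its diagnostics directly to stdout, so the dup()/pipe()/dup2()
     sequence above temporarily points file descriptor 1 at the write end of a
     pipe; after the library call the captured text is read back into mesg_log,
     stdout is restored, and the log is shown only when
     -mat_partitioning_chaco_verbose is set.
  */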

  /* library call */
  cerr = interface(nvtxs, start, adjacency, vwgts, NULL, NULL, NULL, NULL, NULL, NULL, assignment, architecture, ndims_tot, mesh_dims, NULL, global_method, local_method, rqi_flag, vmax, ndims, eigtol, seed);

#if defined(PETSC_HAVE_UNISTD_H)
  PetscCall(PetscFFlush(stdout));
  count = (int)read(fd_pipe[0], mesg_log, (int)((SIZE_LOG - 1) * sizeof(char)));
  if (count < 0) count = 0;
  mesg_log[count] = 0;
  close(1);
  dup2(fd_stdout, 1);
  close(fd_stdout);
  close(fd_pipe[0]);
  close(fd_pipe[1]);
  if (chaco->verbose) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "%s", mesg_log));
  PetscCall(PetscFree(mesg_log));
#endif
  PetscCheck(!cerr, PETSC_COMM_SELF, PETSC_ERR_LIB, "Chaco failed");

  PetscCall(PetscMalloc1(mat->rmap->N, &parttab));
  for (i = 0; i < nvtxs; i++) parttab[i] = assignment[i];

  /* creation of the index set */
  nb_locals = mat->rmap->n;
  locals    = parttab + mat->rmap->rstart;
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), nb_locals, locals, PETSC_COPY_VALUES, partitioning));

  /* clean up */
  PetscCall(PetscFree(parttab));
  PetscCall(PetscFree(adjacency));
  PetscCall(PetscFree(assignment));
  PetscCall(MatDestroy(&matSeq));
  PetscCall(MatDestroy(&matAdj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningView_Chaco(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
  PetscBool              isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Global method: %s\n", MPChacoGlobalTypes[chaco->global_method]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Local method: %s\n", MPChacoLocalTypes[chaco->local_method]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of vertices for the coarse graph: %" PetscInt_FMT "\n", chaco->nbvtxcoarsed));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Eigensolver: %s\n", MPChacoEigenTypes[chaco->eigen_method]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Tolerance for eigensolver: %g\n", (double)chaco->eigtol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of eigenvectors: %" PetscInt_FMT "\n", chaco->eignum));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoSetGlobal - Set the global method for the Chaco partitioner.

  Collective

  Input Parameters:
+ part   - the partitioning context
- method - one of `MP_CHACO_MULTILEVEL`, `MP_CHACO_SPECTRAL`, `MP_CHACO_LINEAR`,
           `MP_CHACO_RANDOM` or `MP_CHACO_SCATTERED`

  Options Database Key:
. -mat_partitioning_chaco_global <method> - the global method

  Level: advanced

  Note:
  The default is the multi-level method. See the Chaco documentation for
  additional details.

.seealso: `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetLocal()`, `MatPartitioningChacoGetGlobal()`
@*/
PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning part, MPChacoGlobalType method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, method, 2);
  PetscTryMethod(part, "MatPartitioningChacoSetGlobal_C", (MatPartitioning, MPChacoGlobalType), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}
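
/*
   A typical call sequence (sketch; `part` is assumed to already be a
   MatPartitioning of type MATPARTITIONINGCHACO):

     PetscCall(MatPartitioningChacoSetGlobal(part, MP_CHACO_SPECTRAL));

   or equivalently, from the options database:

     -mat_partitioning_chaco_global spectral

   The Set/Get wrappers below all follow this same delegation pattern.
*/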

static PetscErrorCode MatPartitioningChacoSetGlobal_Chaco(MatPartitioning part, MPChacoGlobalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  switch (method) {
  case MP_CHACO_MULTILEVEL:
  case MP_CHACO_SPECTRAL:
  case MP_CHACO_LINEAR:
  case MP_CHACO_RANDOM:
  case MP_CHACO_SCATTERED:
    chaco->global_method = method;
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Chaco: Unknown or unsupported option");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoGetGlobal - Get the global method used by the Chaco partitioner.

  Not Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. method - the method

  Level: advanced

.seealso: `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetGlobal()`
@*/
PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning part, MPChacoGlobalType *method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(method, 2);
  PetscUseMethod(part, "MatPartitioningChacoGetGlobal_C", (MatPartitioning, MPChacoGlobalType *), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoGetGlobal_Chaco(MatPartitioning part, MPChacoGlobalType *method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  *method = chaco->global_method;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoSetLocal - Set the local method for the Chaco partitioner.

  Collective

  Input Parameters:
+ part   - the partitioning context
- method - one of `MP_CHACO_KERNIGHAN` or `MP_CHACO_NONE`

  Options Database Key:
. -mat_partitioning_chaco_local <method> - the local method

  Level: advanced

  Note:
  The default is to apply the Kernighan-Lin heuristic. See the Chaco
  documentation for additional details.

.seealso: `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetGlobal()`, `MatPartitioningChacoGetLocal()`
@*/
PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning part, MPChacoLocalType method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, method, 2);
  PetscTryMethod(part, "MatPartitioningChacoSetLocal_C", (MatPartitioning, MPChacoLocalType), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoSetLocal_Chaco(MatPartitioning part, MPChacoLocalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  switch (method) {
  case MP_CHACO_KERNIGHAN:
  case MP_CHACO_NONE:
    chaco->local_method = method;
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Chaco: Unknown or unsupported option");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoGetLocal - Get the local method used by the Chaco partitioner.

  Not Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. method - the method

  Level: advanced

.seealso: `MatPartitioningType`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetLocal()`
@*/
PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning part, MPChacoLocalType *method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(method, 2);
  PetscUseMethod(part, "MatPartitioningChacoGetLocal_C", (MatPartitioning, MPChacoLocalType *), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoGetLocal_Chaco(MatPartitioning part, MPChacoLocalType *method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  *method = chaco->local_method;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoSetCoarseLevel - Set the coarse level parameter for the
  Chaco partitioner.

  Collective

  Input Parameters:
+ part  - the partitioning context
- level - the coarse level, in the range [0.0,1.0)

  Options Database Key:
. -mat_partitioning_chaco_coarse <l> - Coarse level

  Level: advanced

.seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`
@*/
PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning part, PetscReal level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, level, 2);
  PetscTryMethod(part, "MatPartitioningChacoSetCoarseLevel_C", (MatPartitioning, PetscReal), (part, level));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoSetCoarseLevel_Chaco(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  PetscCheck(level >= 0.0 && level < 1.0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Chaco: level of coarsening out of range [0.0,1.0)");
  chaco->nbvtxcoarsed = (PetscInt)(part->adj->cmap->N * level);
  if (chaco->nbvtxcoarsed < 20) chaco->nbvtxcoarsed = 20;
  PetscFunctionReturn(PETSC_SUCCESS);
}
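
/*
   Worked example of the mapping above: for an adjacency matrix with N = 1000
   columns, -mat_partitioning_chaco_coarse 0.05 yields
   nbvtxcoarsed = (PetscInt)(1000 * 0.05) = 50, i.e. Chaco coarsens down to 50
   vertices; any level that would give fewer than 20 vertices is clamped to 20.
*/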

/*@
  MatPartitioningChacoSetEigenSolver - Set the eigensolver method for the Chaco partitioner.

  Collective

  Input Parameters:
+ part   - the partitioning context
- method - one of `MP_CHACO_LANCZOS` or `MP_CHACO_RQI`

  Options Database Key:
. -mat_partitioning_chaco_eigen_solver <method> - the eigensolver

  Level: advanced

  Note:
  The default is to use a Lanczos method. See the Chaco documentation for details.

.seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenTol()`, `MatPartitioningChacoSetEigenNumber()`,
          `MatPartitioningChacoGetEigenSolver()`
@*/
PetscErrorCode MatPartitioningChacoSetEigenSolver(MatPartitioning part, MPChacoEigenType method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, method, 2);
  PetscTryMethod(part, "MatPartitioningChacoSetEigenSolver_C", (MatPartitioning, MPChacoEigenType), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoSetEigenSolver_Chaco(MatPartitioning part, MPChacoEigenType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  switch (method) {
  case MP_CHACO_LANCZOS:
  case MP_CHACO_RQI:
    chaco->eigen_method = method;
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Chaco: Unknown or unsupported option");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoGetEigenSolver - Get the eigensolver used by the Chaco partitioner.

  Not Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. method - the method

  Level: advanced

.seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenSolver()`
@*/
PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning part, MPChacoEigenType *method)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(method, 2);
  PetscUseMethod(part, "MatPartitioningChacoGetEigenSolver_C", (MatPartitioning, MPChacoEigenType *), (part, method));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoGetEigenSolver_Chaco(MatPartitioning part, MPChacoEigenType *method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  *method = chaco->eigen_method;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoSetEigenTol - Sets the tolerance for the eigensolver used by Chaco.

  Collective

  Input Parameters:
+ part - the partitioning context
- tol  - the tolerance

  Options Database Key:
. -mat_partitioning_chaco_eigen_tol <tol> - Tolerance for eigensolver

  Note:
  Must be positive. The default value is 0.001.

  Level: advanced

.seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenSolver()`, `MatPartitioningChacoGetEigenTol()`
@*/
PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning part, PetscReal tol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, tol, 2);
  PetscTryMethod(part, "MatPartitioningChacoSetEigenTol_C", (MatPartitioning, PetscReal), (part, tol));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoSetEigenTol_Chaco(MatPartitioning part, PetscReal tol)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  if (tol == PETSC_DEFAULT) chaco->eigtol = 0.001;
  else {
    PetscCheck(tol > 0.0, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Tolerance must be positive");
    chaco->eigtol = tol;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
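
/*
   Passing PETSC_DEFAULT restores the default tolerance, e.g.

     PetscCall(MatPartitioningChacoSetEigenTol(part, PETSC_DEFAULT)); // eigtol = 0.001
*/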

/*@
  MatPartitioningChacoGetEigenTol - Gets the eigensolver tolerance used by Chaco.

  Not Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. tol - the tolerance

  Level: advanced

.seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenTol()`
@*/
PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning part, PetscReal *tol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(tol, 2);
  PetscUseMethod(part, "MatPartitioningChacoGetEigenTol_C", (MatPartitioning, PetscReal *), (part, tol));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoGetEigenTol_Chaco(MatPartitioning part, PetscReal *tol)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  *tol = chaco->eigtol;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningChacoSetEigenNumber - Sets the number of eigenvectors to compute
  by Chaco during partitioning.

  Collective

  Input Parameters:
+ part - the partitioning context
- num  - the number of eigenvectors

  Options Database Key:
. -mat_partitioning_chaco_eigen_number <n> - Number of eigenvectors

  Note:
  Accepted values are 1, 2 or 3, indicating partitioning by bisection,
  quadrisection, or octosection.

  Level: advanced

.seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenSolver()`, `MatPartitioningChacoGetEigenTol()`
@*/
PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning part, PetscInt num)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveInt(part, num, 2);
  PetscTryMethod(part, "MatPartitioningChacoSetEigenNumber_C", (MatPartitioning, PetscInt), (part, num));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoSetEigenNumber_Chaco(MatPartitioning part, PetscInt num)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  if (num == PETSC_DEFAULT) chaco->eignum = 1;
  else {
    PetscCheck(num >= 1 && num <= 3, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Can only specify 1, 2 or 3 eigenvectors");
    chaco->eignum = num;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
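
/*
   The eigenvector count sets the fan-out of each recursive step: with num = k
   the partitioner produces 2^k sets at a time, so
   MatPartitioningChacoSetEigenNumber(part, 2) requests quadrisection (4 sets
   per cut) instead of the default bisection.
*/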

/*@
  MatPartitioningChacoGetEigenNumber - Gets the number of eigenvectors used by Chaco.

  Not Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. num - number of eigenvectors

  Level: advanced

.seealso: `MatPartitioningType`, `MatPartitioning`, `MATPARTITIONINGCHACO`, `MatPartitioningChacoSetEigenNumber()`
@*/
PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning part, PetscInt *num)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(num, 2);
  PetscUseMethod(part, "MatPartitioningChacoGetEigenNumber_C", (MatPartitioning, PetscInt *), (part, num));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningChacoGetEigenNumber_Chaco(MatPartitioning part, PetscInt *num)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  *num = chaco->eignum;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningSetFromOptions_Chaco(MatPartitioning part, PetscOptionItems *PetscOptionsObject)
{
  PetscInt               i;
  PetscReal              r;
  PetscBool              flag;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;
  MPChacoGlobalType      global;
  MPChacoLocalType       local;
  MPChacoEigenType       eigen;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Chaco partitioning options");
  PetscCall(PetscOptionsEnum("-mat_partitioning_chaco_global", "Global method", "MatPartitioningChacoSetGlobal", MPChacoGlobalTypes, (PetscEnum)chaco->global_method, (PetscEnum *)&global, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetGlobal(part, global));
  PetscCall(PetscOptionsEnum("-mat_partitioning_chaco_local", "Local method", "MatPartitioningChacoSetLocal", MPChacoLocalTypes, (PetscEnum)chaco->local_method, (PetscEnum *)&local, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetLocal(part, local));
  PetscCall(PetscOptionsReal("-mat_partitioning_chaco_coarse", "Coarse level", "MatPartitioningChacoSetCoarseLevel", 0.0, &r, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetCoarseLevel(part, r));
  PetscCall(PetscOptionsEnum("-mat_partitioning_chaco_eigen_solver", "Eigensolver method", "MatPartitioningChacoSetEigenSolver", MPChacoEigenTypes, (PetscEnum)chaco->eigen_method, (PetscEnum *)&eigen, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetEigenSolver(part, eigen));
  PetscCall(PetscOptionsReal("-mat_partitioning_chaco_eigen_tol", "Eigensolver tolerance", "MatPartitioningChacoSetEigenTol", chaco->eigtol, &r, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetEigenTol(part, r));
  PetscCall(PetscOptionsInt("-mat_partitioning_chaco_eigen_number", "Number of eigenvectors: 1, 2, or 3 (bi-, quadri-, or octosection)", "MatPartitioningChacoSetEigenNumber", chaco->eignum, &i, &flag));
  if (flag) PetscCall(MatPartitioningChacoSetEigenNumber(part, i));
  PetscCall(PetscOptionsBool("-mat_partitioning_chaco_verbose", "Show library output", "", chaco->verbose, &chaco->verbose, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
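
/*
   The options above can be combined on a command line, for example (an
   illustrative invocation, not output of an actual run):

     -mat_partitioning_chaco_global spectral -mat_partitioning_chaco_eigen_solver rqi
     -mat_partitioning_chaco_eigen_tol 1e-4 -mat_partitioning_chaco_verbose
*/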

static PetscErrorCode MatPartitioningDestroy_Chaco(MatPartitioning part)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(chaco));
  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetGlobal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetGlobal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetCoarseLevel_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenSolver_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenSolver_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenTol_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenNumber_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenNumber_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATPARTITIONINGCHACO - Creates a partitioning context that uses the external package Chaco {cite}`chaco95`

  Level: beginner

  Note:
  Does not support the `MatPartitioningSetUseEdgeWeights()` option

.seealso: `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningType`
M*/
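
/*
   A minimal usage sketch (assuming `adj` is an existing adjacency matrix of
   type MATMPIADJ and PETSc was configured with Chaco support, e.g. via
   --download-chaco):

     MatPartitioning part;
     IS              is;

     PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
     PetscCall(MatPartitioningSetAdjacency(part, adj));
     PetscCall(MatPartitioningSetType(part, MATPARTITIONINGCHACO));
     PetscCall(MatPartitioningSetFromOptions(part));
     PetscCall(MatPartitioningApply(part, &is)); // target part number for each local row
     PetscCall(ISDestroy(&is));
     PetscCall(MatPartitioningDestroy(&part));
*/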

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Chaco(MatPartitioning part)
{
  MatPartitioning_Chaco *chaco;

  PetscFunctionBegin;
  PetscCall(PetscNew(&chaco));
  part->data = (void *)chaco;

  chaco->global_method = MP_CHACO_MULTILEVEL;
  chaco->local_method  = MP_CHACO_KERNIGHAN;
  chaco->eigen_method  = MP_CHACO_LANCZOS;
  chaco->nbvtxcoarsed  = 200;
  chaco->eignum        = 1;
  chaco->eigtol        = 0.001;
  chaco->verbose       = PETSC_FALSE;

  part->ops->apply          = MatPartitioningApply_Chaco;
  part->ops->view           = MatPartitioningView_Chaco;
  part->ops->destroy        = MatPartitioningDestroy_Chaco;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Chaco;

  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetGlobal_C", MatPartitioningChacoSetGlobal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetGlobal_C", MatPartitioningChacoGetGlobal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetLocal_C", MatPartitioningChacoSetLocal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetLocal_C", MatPartitioningChacoGetLocal_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetCoarseLevel_C", MatPartitioningChacoSetCoarseLevel_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenSolver_C", MatPartitioningChacoSetEigenSolver_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenSolver_C", MatPartitioningChacoGetEigenSolver_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenTol_C", MatPartitioningChacoSetEigenTol_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenTol_C", MatPartitioningChacoGetEigenTol_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoSetEigenNumber_C", MatPartitioningChacoSetEigenNumber_Chaco));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningChacoGetEigenNumber_C", MatPartitioningChacoGetEigenNumber_Chaco));
  PetscFunctionReturn(PETSC_SUCCESS);
}