Actual source code: hierarchical.c
#include <../src/mat/impls/adj/mpi/mpiadj.h>
#include <petscsf.h>
#include <petsc/private/matimpl.h>

/*
  It is a hierarchical partitioning. The partitioner has two goals:
  (1) Most current partitioners fail at large scale. The hierarchical partitioning
  strategy produces a large number of subdomains when the number of processor cores is large.
  (2) PCGASM needs one 'big' subdomain across multiple cores. The partitioner provides two
  consistent partitions, coarse parts and fine parts. A coarse part is a 'big' subdomain consisting
  of several small subdomains.
*/
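
/*
  A minimal usage sketch (illustrative, not part of this file): `A` is assumed to be an
  assembled parallel matrix and `is` receives the resulting partitioning; error checking
  is omitted.

    MatPartitioning part;
    IS              is;
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);
    MatPartitioningSetAdjacency(part, A);
    MatPartitioningSetType(part, MATPARTITIONINGHIERARCH);
    MatPartitioningHierarchicalSetNfineparts(part, 8);   e.g. the number of cores per compute node
    MatPartitioningApply(part, &is);
    MatPartitioningDestroy(&part);
*/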

PetscErrorCode MatPartitioningHierarchical_DetermineDestination(MatPartitioning, IS, PetscInt, PetscInt, IS *);
PetscErrorCode MatPartitioningHierarchical_AssembleSubdomain(Mat, IS, IS, IS *, Mat *, ISLocalToGlobalMapping *);
PetscErrorCode MatPartitioningHierarchical_ReassembleFineparts(Mat, IS, ISLocalToGlobalMapping, IS *);

typedef struct {
  char           *fineparttype;   /* partitioner on fine level */
  char           *coarseparttype; /* partitioner on coarse level */
  PetscInt        nfineparts;     /* number of fine parts on each coarse subdomain */
  PetscInt        ncoarseparts;   /* number of coarse parts */
  IS              coarseparts;    /* partitioning on coarse level */
  IS              fineparts;      /* partitioning on fine level */
  MatPartitioning coarseMatPart;  /* MatPartitioning on coarse level (first level) */
  MatPartitioning fineMatPart;    /* MatPartitioning on fine level (second level) */
  MatPartitioning improver;       /* improve the quality of a partition */
} MatPartitioning_Hierarchical;

/*
  Uses a hierarchical partitioning strategy to partition the matrix in parallel.
  Use this interface to make the partitioner consistent with others
*/
static PetscErrorCode MatPartitioningApply_Hierarchical(MatPartitioning part, IS *partitioning)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;
  const PetscInt               *fineparts_indices, *coarseparts_indices;
  PetscInt                     *fineparts_indices_tmp;
  PetscInt                     *parts_indices, i, j, mat_localsize, *offsets;
  Mat                           mat = part->adj, adj, sadj;
  PetscReal                    *part_weights;
  PetscBool                     flg;
  PetscInt                      bs = 1;
  PetscInt                     *coarse_vertex_weights = NULL;
  PetscMPIInt                   size, rank;
  MPI_Comm                      comm, scomm;
  IS                            destination, fineparts_temp, vweights, svweights;
  PetscInt                      nsvweights, *fp_vweights;
  const PetscInt               *svweights_indices;
  ISLocalToGlobalMapping        mapping;
  const char                   *prefix;
  PetscBool                     use_edge_weights;

  PetscObjectGetComm((PetscObject)part, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg);
  if (flg) {
    adj = mat;
    PetscObjectReference((PetscObject)adj);
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
    if (adj->rmap->n > 0) bs = mat->rmap->n / adj->rmap->n;
  }
  /* local size of mat */
  mat_localsize = adj->rmap->n;
  /* check parameters */
  /* how many small subdomains we want from a given 'big' subdomain */

  /* Partitioning the domain into one single subdomain is a trivial case, and we should just return */
  if (part->n == 1) {
    PetscCalloc1(bs * adj->rmap->n, &parts_indices);
    ISCreateGeneral(comm, bs * adj->rmap->n, parts_indices, PETSC_OWN_POINTER, partitioning);
    hpart->ncoarseparts = 1;
    hpart->nfineparts   = 1;
    PetscStrallocpy("NONE", &hpart->coarseparttype);
    PetscStrallocpy("NONE", &hpart->fineparttype);
    MatDestroy(&adj);
    return 0;
  }

  if (part->n) {
    hpart->ncoarseparts = part->n / hpart->nfineparts;

    if (part->n % hpart->nfineparts != 0) hpart->ncoarseparts++;
  } else {
    part->n = hpart->ncoarseparts * hpart->nfineparts;
  }

  PetscMalloc1(hpart->ncoarseparts + 1, &offsets);
  PetscMalloc1(hpart->ncoarseparts, &part_weights);

  offsets[0] = 0;
  if (part->n % hpart->nfineparts != 0) offsets[1] = part->n % hpart->nfineparts;
  else offsets[1] = hpart->nfineparts;

  part_weights[0] = ((PetscReal)offsets[1]) / part->n;

  for (i = 2; i <= hpart->ncoarseparts; i++) {
    offsets[i]          = hpart->nfineparts;
    part_weights[i - 1] = ((PetscReal)offsets[i]) / part->n;
  }

  offsets[0] = 0;
  for (i = 1; i <= hpart->ncoarseparts; i++) offsets[i] += offsets[i - 1];
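
  /*
     A worked example of the bookkeeping above (numbers are illustrative, not from the
     source): with part->n = 10 total parts and hpart->nfineparts = 4 we get
     hpart->ncoarseparts = 3; the remainder 10 % 4 = 2 goes to the first coarse part,
     so the per-coarse-part sizes are {2, 4, 4}, part_weights = {0.2, 0.4, 0.4}, and
     after the prefix sum offsets = {0, 2, 6, 10}, i.e. coarse part c owns the global
     part ids [offsets[c], offsets[c + 1]).
  */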

  /* If there exists a mat partitioner, we should delete it */
  MatPartitioningDestroy(&hpart->coarseMatPart);
  MatPartitioningCreate(comm, &hpart->coarseMatPart);
  PetscObjectGetOptionsPrefix((PetscObject)part, &prefix);
  PetscObjectSetOptionsPrefix((PetscObject)hpart->coarseMatPart, prefix);
  PetscObjectAppendOptionsPrefix((PetscObject)hpart->coarseMatPart, "hierarch_coarse_");
  /* if the partitioning type has not been set yet, use ParMETIS by default */
  if (!hpart->coarseparttype) {
#if defined(PETSC_HAVE_PARMETIS)
    MatPartitioningSetType(hpart->coarseMatPart, MATPARTITIONINGPARMETIS);
    PetscStrallocpy(MATPARTITIONINGPARMETIS, &hpart->coarseparttype);
#elif defined(PETSC_HAVE_PTSCOTCH)
    MatPartitioningSetType(hpart->coarseMatPart, MATPARTITIONINGPTSCOTCH);
    PetscStrallocpy(MATPARTITIONINGPTSCOTCH, &hpart->coarseparttype);
#else
    SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Requires PETSc be installed with ParMetis or run with -mat_partitioning_hierarchical_coarseparttype partitiontype");
#endif
  } else {
    MatPartitioningSetType(hpart->coarseMatPart, hpart->coarseparttype);
  }
  MatPartitioningSetAdjacency(hpart->coarseMatPart, adj);
  MatPartitioningSetNParts(hpart->coarseMatPart, hpart->ncoarseparts);
  /* copy over vertex weights */
  if (part->vertex_weights) {
    PetscMalloc1(mat_localsize, &coarse_vertex_weights);
    PetscArraycpy(coarse_vertex_weights, part->vertex_weights, mat_localsize);
    MatPartitioningSetVertexWeights(hpart->coarseMatPart, coarse_vertex_weights);
  }
  /* copy the use_edge_weights flag from part to the coarse part */
  MatPartitioningGetUseEdgeWeights(part, &use_edge_weights);
  MatPartitioningSetUseEdgeWeights(hpart->coarseMatPart, use_edge_weights);

  MatPartitioningSetPartitionWeights(hpart->coarseMatPart, part_weights);
  MatPartitioningApply(hpart->coarseMatPart, &hpart->coarseparts);

  /* Wrap the original vertex weights into an index set so that we can extract the
     corresponding vertex weights for each big subdomain using ISCreateSubIS(). */
  if (part->vertex_weights) ISCreateGeneral(comm, mat_localsize, part->vertex_weights, PETSC_COPY_VALUES, &vweights);

  PetscCalloc1(mat_localsize, &fineparts_indices_tmp);
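
  /*
     Illustrative: with size = 4 ranks and hpart->ncoarseparts = 10 (numbers not from
     the source), the loop below runs three rounds over the coarse-part windows
     [0, 4), [4, 8), and [8, 10); in each round every rank receives at most one 'big'
     subdomain to partition further.
  */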
  for (i = 0; i < hpart->ncoarseparts; i += size) {
    /* Determine where we want to send big subdomains */
    MatPartitioningHierarchical_DetermineDestination(part, hpart->coarseparts, i, i + size, &destination);
    /* Assemble a submatrix and its vertex weights for partitioning subdomains */
    MatPartitioningHierarchical_AssembleSubdomain(adj, part->vertex_weights ? vweights : NULL, destination, part->vertex_weights ? &svweights : NULL, &sadj, &mapping);
    /* We have to create a new array to hold vertex weights since the fine partitioner needs to own the vertex-weights array */
    if (part->vertex_weights) {
      ISGetLocalSize(svweights, &nsvweights);
      PetscMalloc1(nsvweights, &fp_vweights);
      ISGetIndices(svweights, &svweights_indices);
      PetscArraycpy(fp_vweights, svweights_indices, nsvweights);
      ISRestoreIndices(svweights, &svweights_indices);
      ISDestroy(&svweights);
    }

    ISDestroy(&destination);
    PetscObjectGetComm((PetscObject)sadj, &scomm);

    /*
       If the number of big subdomains is smaller than the number of processor cores,
       the higher ranks do not need to do partitioning
    */
    if ((i + rank) < hpart->ncoarseparts) {
      MatPartitioningDestroy(&hpart->fineMatPart);
      /* create a fine partitioner */
      MatPartitioningCreate(scomm, &hpart->fineMatPart);
      PetscObjectSetOptionsPrefix((PetscObject)hpart->fineMatPart, prefix);
      PetscObjectAppendOptionsPrefix((PetscObject)hpart->fineMatPart, "hierarch_fine_");
      /* if the partitioning type has not been set, use ParMETIS by default */
      if (!hpart->fineparttype) {
#if defined(PETSC_HAVE_PARMETIS)
        MatPartitioningSetType(hpart->fineMatPart, MATPARTITIONINGPARMETIS);
        PetscStrallocpy(MATPARTITIONINGPARMETIS, &hpart->fineparttype);
#elif defined(PETSC_HAVE_PTSCOTCH)
        MatPartitioningSetType(hpart->fineMatPart, MATPARTITIONINGPTSCOTCH);
        PetscStrallocpy(MATPARTITIONINGPTSCOTCH, &hpart->fineparttype);
#elif defined(PETSC_HAVE_CHACO)
        MatPartitioningSetType(hpart->fineMatPart, MATPARTITIONINGCHACO);
        PetscStrallocpy(MATPARTITIONINGCHACO, &hpart->fineparttype);
#elif defined(PETSC_HAVE_PARTY)
        MatPartitioningSetType(hpart->fineMatPart, MATPARTITIONINGPARTY);
        PetscStrallocpy(MATPARTITIONINGPARTY, &hpart->fineparttype);
#else
        SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Requires PETSc be installed with ParMetis or run with -mat_partitioning_hierarchical_fineparttype partitiontype");
#endif
      } else {
        MatPartitioningSetType(hpart->fineMatPart, hpart->fineparttype);
      }
      MatPartitioningSetUseEdgeWeights(hpart->fineMatPart, use_edge_weights);
      MatPartitioningSetAdjacency(hpart->fineMatPart, sadj);
      MatPartitioningSetNParts(hpart->fineMatPart, offsets[rank + 1 + i] - offsets[rank + i]);
      if (part->vertex_weights) MatPartitioningSetVertexWeights(hpart->fineMatPart, fp_vweights);
      MatPartitioningApply(hpart->fineMatPart, &fineparts_temp);
    } else {
      ISCreateGeneral(scomm, 0, NULL, PETSC_OWN_POINTER, &fineparts_temp);
    }

    MatDestroy(&sadj);

    /* Send the partition back to the original owners */
    MatPartitioningHierarchical_ReassembleFineparts(adj, fineparts_temp, mapping, &hpart->fineparts);
    ISGetIndices(hpart->fineparts, &fineparts_indices);
    for (j = 0; j < mat_localsize; j++)
      if (fineparts_indices[j] >= 0) fineparts_indices_tmp[j] = fineparts_indices[j];

    ISRestoreIndices(hpart->fineparts, &fineparts_indices);
    ISDestroy(&hpart->fineparts);
    ISDestroy(&fineparts_temp);
    ISLocalToGlobalMappingDestroy(&mapping);
  }

  if (part->vertex_weights) ISDestroy(&vweights);

  ISCreateGeneral(comm, mat_localsize, fineparts_indices_tmp, PETSC_OWN_POINTER, &hpart->fineparts);
  ISGetIndices(hpart->fineparts, &fineparts_indices);
  ISGetIndices(hpart->coarseparts, &coarseparts_indices);
  PetscMalloc1(bs * adj->rmap->n, &parts_indices);
  /* Modify the local indices into global indices by combining the coarse partition and the fine partitions */
  for (i = 0; i < adj->rmap->n; i++) {
    for (j = 0; j < bs; j++) parts_indices[bs * i + j] = fineparts_indices[i] + offsets[coarseparts_indices[i]];
  }
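
  /*
     Continuing the illustrative numbers from above (offsets = {0, 2, 6, 10}): a vertex
     placed in coarse part 1 with fine part id 3 ends up in global part
     offsets[1] + 3 = 5, so the fine ids, which are local to their coarse part, are
     turned into one flat numbering over all part->n parts.
  */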
  ISRestoreIndices(hpart->fineparts, &fineparts_indices);
  ISRestoreIndices(hpart->coarseparts, &coarseparts_indices);
  PetscFree(offsets);
  ISCreateGeneral(comm, bs * adj->rmap->n, parts_indices, PETSC_OWN_POINTER, partitioning);
  MatDestroy(&adj);
  return 0;
}

PetscErrorCode MatPartitioningHierarchical_ReassembleFineparts(Mat adj, IS fineparts, ISLocalToGlobalMapping mapping, IS *sfineparts)
{
  PetscInt       *local_indices, *global_indices, *sfineparts_indices, localsize, i;
  const PetscInt *ranges, *fineparts_indices;
  PetscMPIInt     rank, *owners;
  MPI_Comm        comm;
  PetscLayout     rmap;
  PetscSFNode    *remote;
  PetscSF         sf;

  PetscObjectGetComm((PetscObject)adj, &comm);
  MPI_Comm_rank(comm, &rank);
  MatGetLayouts(adj, &rmap, NULL);
  ISGetLocalSize(fineparts, &localsize);
  PetscMalloc2(localsize, &global_indices, localsize, &local_indices);
  for (i = 0; i < localsize; i++) local_indices[i] = i;
  /* map local indices back to global so that we can permute data globally */
  ISLocalToGlobalMappingApply(mapping, localsize, local_indices, global_indices);
  PetscCalloc1(localsize, &owners);
  /* find owners for global indices */
  for (i = 0; i < localsize; i++) PetscLayoutFindOwner(rmap, global_indices[i], &owners[i]);
  PetscLayoutGetRanges(rmap, &ranges);
  PetscMalloc1(ranges[rank + 1] - ranges[rank], &sfineparts_indices);

  for (i = 0; i < ranges[rank + 1] - ranges[rank]; i++) sfineparts_indices[i] = -1;

  ISGetIndices(fineparts, &fineparts_indices);
  PetscSFCreate(comm, &sf);
  PetscMalloc1(localsize, &remote);
  for (i = 0; i < localsize; i++) {
    remote[i].rank  = owners[i];
    remote[i].index = global_indices[i] - ranges[owners[i]];
  }
  PetscSFSetType(sf, PETSCSFBASIC);
  /* not sure how to add prefix to sf */
  PetscSFSetFromOptions(sf);
  PetscSFSetGraph(sf, localsize, localsize, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
  PetscSFReduceBegin(sf, MPIU_INT, fineparts_indices, sfineparts_indices, MPI_REPLACE);
  PetscSFReduceEnd(sf, MPIU_INT, fineparts_indices, sfineparts_indices, MPI_REPLACE);
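
  /*
     Descriptive note on the star forest above: there is one leaf per local entry of
     fineparts, and leaf i points at root (owners[i], global_indices[i] - ranges[owners[i]]),
     i.e. the slot of that vertex on its owning rank in the layout of adj. The reduce
     with MPI_REPLACE therefore ships each fine part id back to the vertex's owner;
     slots nobody writes to keep the -1 set above.
  */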
  PetscSFDestroy(&sf);
  ISRestoreIndices(fineparts, &fineparts_indices);
  ISCreateGeneral(comm, ranges[rank + 1] - ranges[rank], sfineparts_indices, PETSC_OWN_POINTER, sfineparts);
  PetscFree2(global_indices, local_indices);
  PetscFree(owners);
  return 0;
}

PetscErrorCode MatPartitioningHierarchical_AssembleSubdomain(Mat adj, IS vweights, IS destination, IS *svweights, Mat *sadj, ISLocalToGlobalMapping *mapping)
{
  IS              irows, icols;
  PetscInt        irows_ln;
  PetscMPIInt     rank;
  const PetscInt *irows_indices;
  MPI_Comm        comm;

  PetscObjectGetComm((PetscObject)adj, &comm);
  MPI_Comm_rank(comm, &rank);
  /* figure out where data comes from */
  ISBuildTwoSided(destination, NULL, &irows);
  ISDuplicate(irows, &icols);
  ISGetLocalSize(irows, &irows_ln);
  ISGetIndices(irows, &irows_indices);
  ISLocalToGlobalMappingCreate(comm, 1, irows_ln, irows_indices, PETSC_COPY_VALUES, mapping);
  ISRestoreIndices(irows, &irows_indices);
  MatCreateSubMatrices(adj, 1, &irows, &icols, MAT_INITIAL_MATRIX, &sadj);
  if (vweights && svweights) ISCreateSubIS(vweights, irows, svweights);
  ISDestroy(&irows);
  ISDestroy(&icols);
  return 0;
}

PetscErrorCode MatPartitioningHierarchical_DetermineDestination(MatPartitioning part, IS partitioning, PetscInt pstart, PetscInt pend, IS *destination)
{
  MPI_Comm        comm;
  PetscMPIInt     rank, size, target;
  PetscInt        plocalsize, *dest_indices, i;
  const PetscInt *part_indices;

  PetscObjectGetComm((PetscObject)part, &comm);
  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);

  ISGetLocalSize(partitioning, &plocalsize);
  PetscMalloc1(plocalsize, &dest_indices);
  ISGetIndices(partitioning, &part_indices);
  for (i = 0; i < plocalsize; i++) {
    /* compute the target rank */
    target = part_indices[i] - pstart;
    /* mark out-of-range entries as -1 */
    if (part_indices[i] < pstart || part_indices[i] >= pend) target = -1;
    dest_indices[i] = target;
  }
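
  /*
     Illustrative example (numbers not from the source): with the window pstart = 4,
     pend = 8, a vertex whose coarse part id is 6 gets dest_indices[i] = 2, i.e. it is
     shipped to rank 2 in this round, while vertices whose coarse parts fall outside
     [4, 8) are marked -1 and are handled in another round of the loop over coarse parts.
  */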
  ISRestoreIndices(partitioning, &part_indices);
  ISCreateGeneral(comm, plocalsize, dest_indices, PETSC_OWN_POINTER, destination);
  return 0;
}

PetscErrorCode MatPartitioningView_Hierarchical(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;
  PetscMPIInt                   rank;
  PetscBool                     iascii;
  PetscViewer                   sviewer;

  MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer, " Number of coarse parts: %" PetscInt_FMT "\n", hpart->ncoarseparts);
    PetscViewerASCIIPrintf(viewer, " Coarse partitioner: %s\n", hpart->coarseparttype);
    if (hpart->coarseMatPart) {
      PetscViewerASCIIPushTab(viewer);
      MatPartitioningView(hpart->coarseMatPart, viewer);
      PetscViewerASCIIPopTab(viewer);
    }
    PetscViewerASCIIPrintf(viewer, " Number of fine parts: %" PetscInt_FMT "\n", hpart->nfineparts);
    PetscViewerASCIIPrintf(viewer, " Fine partitioner: %s\n", hpart->fineparttype);
    PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer);
    if (rank == 0 && hpart->fineMatPart) {
      PetscViewerASCIIPushTab(viewer);
      MatPartitioningView(hpart->fineMatPart, sviewer);
      PetscViewerASCIIPopTab(viewer);
    }
    PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer);
  }
  return 0;
}

PetscErrorCode MatPartitioningHierarchicalGetFineparts(MatPartitioning part, IS *fineparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;

  *fineparts = hpart->fineparts;
  PetscObjectReference((PetscObject)hpart->fineparts);
  return 0;
}

PetscErrorCode MatPartitioningHierarchicalGetCoarseparts(MatPartitioning part, IS *coarseparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;

  *coarseparts = hpart->coarseparts;
  PetscObjectReference((PetscObject)hpart->coarseparts);
  return 0;
}

PetscErrorCode MatPartitioningHierarchicalSetNcoarseparts(MatPartitioning part, PetscInt ncoarseparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;

  hpart->ncoarseparts = ncoarseparts;
  return 0;
}

PetscErrorCode MatPartitioningHierarchicalSetNfineparts(MatPartitioning part, PetscInt nfineparts)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;

  hpart->nfineparts = nfineparts;
  return 0;
}

PetscErrorCode MatPartitioningSetFromOptions_Hierarchical(MatPartitioning part, PetscOptionItems *PetscOptionsObject)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;
  char                          value[1024];
  PetscBool                     flag = PETSC_FALSE;

  PetscOptionsHeadBegin(PetscOptionsObject, "Set hierarchical partitioning options");
  PetscOptionsString("-mat_partitioning_hierarchical_coarseparttype", "coarse part type", NULL, NULL, value, sizeof(value), &flag);
  if (flag) {
    PetscFree(hpart->coarseparttype);
    PetscStrallocpy(value, &hpart->coarseparttype);
  }
  PetscOptionsString("-mat_partitioning_hierarchical_fineparttype", "fine part type", NULL, NULL, value, sizeof(value), &flag);
  if (flag) {
    PetscFree(hpart->fineparttype);
    PetscStrallocpy(value, &hpart->fineparttype);
  }
  PetscOptionsInt("-mat_partitioning_hierarchical_ncoarseparts", "number of coarse parts", NULL, hpart->ncoarseparts, &hpart->ncoarseparts, &flag);
  PetscOptionsInt("-mat_partitioning_hierarchical_nfineparts", "number of fine parts", NULL, hpart->nfineparts, &hpart->nfineparts, &flag);
  PetscOptionsHeadEnd();
  return 0;
}

PetscErrorCode MatPartitioningDestroy_Hierarchical(MatPartitioning part)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;

  if (hpart->coarseparttype) PetscFree(hpart->coarseparttype);
  if (hpart->fineparttype) PetscFree(hpart->fineparttype);
  ISDestroy(&hpart->fineparts);
  ISDestroy(&hpart->coarseparts);
  MatPartitioningDestroy(&hpart->coarseMatPart);
  MatPartitioningDestroy(&hpart->fineMatPart);
  MatPartitioningDestroy(&hpart->improver);
  PetscFree(hpart);
  return 0;
}

/*
  Improves the quality of a partition
*/
static PetscErrorCode MatPartitioningImprove_Hierarchical(MatPartitioning part, IS *partitioning)
{
  MatPartitioning_Hierarchical *hpart = (MatPartitioning_Hierarchical *)part->data;
  Mat                           mat   = part->adj, adj;
  PetscBool                     flg;
  const char                   *prefix;
#if defined(PETSC_HAVE_PARMETIS)
  PetscInt *vertex_weights;
#endif

  PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg);
  if (flg) {
    adj = mat;
    PetscObjectReference((PetscObject)adj);
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
  }

  /* If there exists a mat partitioner, we should delete it */
  MatPartitioningDestroy(&hpart->improver);
  MatPartitioningCreate(PetscObjectComm((PetscObject)part), &hpart->improver);
  PetscObjectGetOptionsPrefix((PetscObject)part, &prefix);
  PetscObjectSetOptionsPrefix((PetscObject)hpart->improver, prefix);
  PetscObjectAppendOptionsPrefix((PetscObject)hpart->improver, "hierarch_improver_");
  /* only ParMETIS supports refining a partition */
#if defined(PETSC_HAVE_PARMETIS)
  MatPartitioningSetType(hpart->improver, MATPARTITIONINGPARMETIS);
  MatPartitioningSetAdjacency(hpart->improver, adj);
  MatPartitioningSetNParts(hpart->improver, part->n);
  /* copy over vertex weights */
  if (part->vertex_weights) {
    PetscMalloc1(adj->rmap->n, &vertex_weights);
    PetscArraycpy(vertex_weights, part->vertex_weights, adj->rmap->n);
    MatPartitioningSetVertexWeights(hpart->improver, vertex_weights);
  }
  MatPartitioningImprove(hpart->improver, partitioning);
  MatDestroy(&adj);
  return 0;
#else
  SETERRQ(PetscObjectComm((PetscObject)adj), PETSC_ERR_SUP, "Requires PETSc be installed with ParMetis");
#endif
}

/*MC
   MATPARTITIONINGHIERARCH - Creates a partitioning context via the hierarchical partitioning strategy.
   The graph is partitioned into a number of subgraphs, and each subgraph is further split into a few smaller
   subgraphs. The idea can be applied in a recursive manner. It is useful when you want to partition the graph
   into a large number of subgraphs (often more than 10K) since partitions obtained with existing partitioners
   such as ParMETIS and PT-Scotch are far from ideal. The hierarchical partitioning also tries to avoid off-node
   communication as much as possible for multi-core processors. Another use case for the hierarchical partitioning
   is to improve `PCGASM` convergence by generating multi-rank connected subdomains.

   Collective

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_hierarchical_coarseparttype - partitioner type at the first level; ParMETIS is used by default
.  -mat_partitioning_hierarchical_fineparttype - partitioner type at the second level; ParMETIS is used by default
.  -mat_partitioning_hierarchical_ncoarseparts - number of subgraphs at the first level, which is often the number of compute nodes
-  -mat_partitioning_hierarchical_nfineparts - number of smaller subgraphs for each subgraph, which is often the number of cores per compute node
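
   Example:
   An illustrative command line (the executable name is hypothetical, and we assume the
   total number of parts defaults to the 64 MPI ranks); it yields 8 coarse parts of 8
   fine parts each:
.vb
   mpiexec -n 64 ./your_app -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 8
.ve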

   Level: beginner

   References:
+  * - Fande Kong, Xiao-Chuan Cai, A highly scalable multilevel Schwarz method with boundary geometry preserving coarse spaces for 3D elasticity
   problems on domains with complex geometry, SIAM Journal on Scientific Computing 38 (2), C73-C95, 2016
-  * - Fande Kong, Roy H. Stogner, Derek Gaston, John W. Peterson, Cody J. Permann, Andrew E. Slaughter, and Richard C. Martineau,
   A general-purpose hierarchical mesh partitioning method with node balancing strategies for large-scale numerical simulations,
   arXiv preprint arXiv:1809.02666, 2018.

.seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MATPARTITIONINGMETIS`, `MATPARTITIONINGPARMETIS`
M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Hierarchical(MatPartitioning part)
{
  MatPartitioning_Hierarchical *hpart;

  PetscNew(&hpart);
  part->data = (void *)hpart;

  hpart->fineparttype   = NULL; /* fine level (second) partitioner */
  hpart->coarseparttype = NULL; /* coarse level (first) partitioner */
  hpart->nfineparts     = 1;    /* we do not further partition the coarse partition any more by default */
  hpart->ncoarseparts   = 0;    /* number of coarse parts (first level) */
  hpart->coarseparts    = NULL;
  hpart->fineparts      = NULL;
  hpart->coarseMatPart  = NULL;
  hpart->fineMatPart    = NULL;

  part->ops->apply          = MatPartitioningApply_Hierarchical;
  part->ops->view           = MatPartitioningView_Hierarchical;
  part->ops->destroy        = MatPartitioningDestroy_Hierarchical;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Hierarchical;
  part->ops->improve        = MatPartitioningImprove_Hierarchical;
  return 0;
}