Actual source code: misk.c
#include <petsc/private/matimpl.h>
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscsf.h>

#define MIS_NOT_DONE -2
#define MIS_DELETED -1
#define MIS_REMOVED -3
#define MIS_IS_SELECTED(s) (s >= 0)

/* edge for priority queue */
typedef struct edge_tag {
  PetscReal weight;
  PetscInt lid0, gid1, cpid1;
} Edge;

static PetscErrorCode PetscCoarsenDataView_private(PetscCoarsenData *agg_lists, PetscViewer viewer)
{
  PetscCDIntNd *pos, *pos2;

  PetscFunctionBegin;
  for (PetscInt kk = 0; kk < agg_lists->size; kk++) {
    PetscCall(PetscCDGetHeadPos(agg_lists, kk, &pos));
    if ((pos2 = pos)) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "selected local %" PetscInt_FMT ": ", kk));
    while (pos) {
      PetscInt gid1;
      PetscCall(PetscCDIntNdGetID(pos, &gid1));
      PetscCall(PetscCDGetNextPos(agg_lists, kk, &pos));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", gid1));
    }
    if (pos2) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  MatCoarsenApply_MISK_private - distance-k maximal independent set (MIS-k) coarsening

  Input Parameters:
+ perm - permutation used to order the vertices in the first MIS sweep
. misk - the distance k, that is, the number of MIS rounds
- Gmat - global matrix of the graph (numerical values need not be defined)

  Output Parameter:
. a_locals_llist - array of lists of local nodes rooted at each selected local node
*/
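/*
  Implementation sketch (as coded below): run misk synchronous MIS sweeps. After each sweep the
  selected vertices define a piecewise-constant prolongator P_i (one unit entry per fine row), and
  the graph for the next sweep is formed with PtAP. The product P_0 * P_1 * ... * P_{misk-1} is
  transposed to obtain R, whose local rows are read off as the distance-k aggregates returned in
  a_locals_llist.
*/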
static PetscErrorCode MatCoarsenApply_MISK_private(IS perm, const PetscInt misk, Mat Gmat, PetscCoarsenData **a_locals_llist)
{
  PetscBool isMPI;
  MPI_Comm comm;
  PetscMPIInt rank, size;
  Mat cMat, Prols[5], Rtot;
  PetscScalar one = 1;

  PetscFunctionBegin;
  PetscAssertPointer(a_locals_llist, 4);
  PetscCheck(misk < 5 && misk > 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "too many/few levels: %" PetscInt_FMT, misk);
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPI));
  PetscCall(PetscObjectGetComm((PetscObject)Gmat, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscInfo(Gmat, "misk %" PetscInt_FMT "\n", misk));
  /* make a copy of the graph, this gets destroyed in iterates */
  if (misk > 1) PetscCall(MatDuplicate(Gmat, MAT_COPY_VALUES, &cMat));
  else cMat = Gmat;
  for (PetscInt iterIdx = 0; iterIdx < misk; iterIdx++) {
    Mat_SeqAIJ *matA, *matB = NULL;
    Mat_MPIAIJ *mpimat = NULL;
    const PetscInt *perm_ix;
    const PetscInt nloc_inner = cMat->rmap->n;
    PetscCoarsenData *agg_lists;
    PetscInt *cpcol_gid = NULL, *cpcol_state, *lid_cprowID, *lid_state, *lid_parent_gid = NULL;
    PetscInt num_fine_ghosts, kk, n, ix, j, *idx, *ai, Iend, my0, nremoved, gid, cpid, lidj, sgid, t1, t2, slid, nDone, nselected = 0, state;
    PetscBool *lid_removed, isOK;
    PetscLayout layout;
    PetscSF sf;

    if (isMPI) {
      mpimat = (Mat_MPIAIJ *)cMat->data;
      matA = (Mat_SeqAIJ *)mpimat->A->data;
      matB = (Mat_SeqAIJ *)mpimat->B->data;
      /* force compressed storage of B */
      PetscCall(MatCheckCompressedRow(mpimat->B, matB->nonzerorowcnt, &matB->compressedrow, matB->i, cMat->rmap->n, -1.0));
    } else {
      PetscBool isAIJ;

      matA = (Mat_SeqAIJ *)cMat->data;
      PetscCall(PetscObjectBaseTypeCompare((PetscObject)cMat, MATSEQAIJ, &isAIJ));
      PetscCheck(isAIJ, PETSC_COMM_SELF, PETSC_ERR_USER, "Require AIJ matrix.");
    }
    PetscCall(MatGetOwnershipRange(cMat, &my0, &Iend));
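    /* In parallel, cpcol_gid[]/cpcol_state[] mirror the global id and MIS state of each ghost column
       of the off-diagonal block B; the PetscSF built below on the column layout and garray is reused
       to refresh the ghost states after every sweep. */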
    if (isMPI) {
      PetscInt *lid_gid;

      PetscCall(PetscMalloc1(nloc_inner, &lid_gid)); /* explicit array needed */
      for (kk = 0, gid = my0; kk < nloc_inner; kk++, gid++) lid_gid[kk] = gid;
      PetscCall(VecGetLocalSize(mpimat->lvec, &num_fine_ghosts));
      PetscCall(PetscMalloc2(num_fine_ghosts, &cpcol_gid, num_fine_ghosts, &cpcol_state));
      PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)cMat), &sf));
      PetscCall(MatGetLayouts(cMat, &layout, NULL));
      PetscCall(PetscSFSetGraphLayout(sf, layout, num_fine_ghosts, NULL, PETSC_COPY_VALUES, mpimat->garray));
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_gid, cpcol_gid, MPI_REPLACE));
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_gid, cpcol_gid, MPI_REPLACE));
      for (kk = 0; kk < num_fine_ghosts; kk++) cpcol_state[kk] = MIS_NOT_DONE;
      PetscCall(PetscFree(lid_gid));
    } else num_fine_ghosts = 0;
    PetscCall(PetscMalloc4(nloc_inner, &lid_cprowID, nloc_inner, &lid_removed, nloc_inner, &lid_parent_gid, nloc_inner, &lid_state));
    PetscCall(PetscCDCreate(nloc_inner, &agg_lists));
    /* need an inverse map - locals */
    for (kk = 0; kk < nloc_inner; kk++) {
      lid_cprowID[kk] = -1;
      lid_removed[kk] = PETSC_FALSE;
      lid_parent_gid[kk] = -1;
      lid_state[kk] = MIS_NOT_DONE;
    }
    /* set index into compressed row 'lid_cprowID' */
    if (matB) {
      for (ix = 0; ix < matB->compressedrow.nrows; ix++) {
        const PetscInt lid = matB->compressedrow.rindex[ix];
        if (lid >= 0) lid_cprowID[lid] = ix;
      }
    }
    /* MIS */
    nremoved = nDone = 0;
    if (!iterIdx) PetscCall(ISGetIndices(perm, &perm_ix)); // use permutation on first MIS
    else perm_ix = NULL;
    while (nDone < nloc_inner || PETSC_TRUE) { /* asynchronous not implemented */
      /* check all vertices */
      for (kk = 0; kk < nloc_inner; kk++) {
        const PetscInt lid = perm_ix ? perm_ix[kk] : kk;
        state = lid_state[lid];
        if (iterIdx == 0 && lid_removed[lid]) continue;
        if (state == MIS_NOT_DONE) {
          /* parallel test, delete if selected ghost */
          isOK = PETSC_TRUE;
          /* parallel test */
          if ((ix = lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
            ai = matB->compressedrow.i;
            n = ai[ix + 1] - ai[ix];
            idx = matB->j + ai[ix];
            for (j = 0; j < n; j++) {
              cpid = idx[j]; /* compressed row ID in B mat */
              gid = cpcol_gid[cpid];
              if (cpcol_state[cpid] == MIS_NOT_DONE && gid >= Iend) { /* or pe>rank */
                isOK = PETSC_FALSE; /* can not delete */
                break;
              }
            }
          }
          if (isOK) { /* select or remove this vertex if it is a true singleton like a BC */
            nDone++;
            /* check for singleton */
            ai = matA->i;
            n = ai[lid + 1] - ai[lid];
            if (n < 2) {
              /* if I have any ghost adj then not a singleton */
              ix = lid_cprowID[lid];
              if (ix == -1 || !(matB->compressedrow.i[ix + 1] - matB->compressedrow.i[ix])) {
                if (iterIdx == 0) {
                  lid_removed[lid] = PETSC_TRUE;
                  nremoved++; // let it get selected
                }
                // PetscCall(PetscCDAppendID(agg_lists, lid, lid + my0));
                // lid_state[lid] = nselected; // >= 0 is selected, cache for ordering coarse grid
                /* should select this because it is technically in the MIS but lets not */
                continue; /* one local adj (me) and no ghost - singleton */
              }
            }
            /* SELECTED state encoded with global index */
            lid_state[lid] = nselected; // >= 0 is selected, cache for ordering coarse grid
            nselected++;
            PetscCall(PetscCDAppendID(agg_lists, lid, lid + my0));
            /* delete local adj */
            idx = matA->j + ai[lid];
            for (j = 0; j < n; j++) {
              lidj = idx[j];
              if (lid_state[lidj] == MIS_NOT_DONE) {
                nDone++;
                PetscCall(PetscCDAppendID(agg_lists, lid, lidj + my0));
                lid_state[lidj] = MIS_DELETED; /* delete this */
              }
            }
          } /* selected */
        } /* not done vertex */
      } /* vertex loop */

      /* update ghost states and count todos */
      if (isMPI) {
        /* scatter states, check for done */
        PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_state, cpcol_state, MPI_REPLACE));
        ai = matB->compressedrow.i;
        for (ix = 0; ix < matB->compressedrow.nrows; ix++) {
          const PetscInt lidj = matB->compressedrow.rindex[ix]; /* local boundary node */
          state = lid_state[lidj];
          if (state == MIS_NOT_DONE) {
            /* look at ghosts */
            n = ai[ix + 1] - ai[ix];
            idx = matB->j + ai[ix];
            for (j = 0; j < n; j++) {
              cpid = idx[j]; /* compressed row ID in B mat */
              if (MIS_IS_SELECTED(cpcol_state[cpid])) { /* lid is now deleted by ghost */
                nDone++;
                lid_state[lidj] = MIS_DELETED; /* delete this */
                sgid = cpcol_gid[cpid];
                lid_parent_gid[lidj] = sgid; /* keep track of proc that I belong to */
                break;
              }
            }
          }
        }
        /* all done? */
        t1 = nloc_inner - nDone;
        PetscCallMPI(MPIU_Allreduce(&t1, &t2, 1, MPIU_INT, MPI_SUM, comm)); /* synchronous version */
        if (!t2) break;
      } else break; /* no mpi - all done */
    } /* outer parallel MIS loop */
    if (!iterIdx) PetscCall(ISRestoreIndices(perm, &perm_ix));
    PetscCall(PetscInfo(Gmat, "\t removed %" PetscInt_FMT " of %" PetscInt_FMT " vertices. %" PetscInt_FMT " selected.\n", nremoved, nloc_inner, nselected));

    /* tell adj who my lid_parent_gid vertices belong to - fill in agg_lists selected ghost lists */
    if (matB) {
      PetscInt *cpcol_sel_gid, *icpcol_gid;

      /* need to copy this to free buffer -- should do this globally */
      PetscCall(PetscMalloc2(num_fine_ghosts, &icpcol_gid, num_fine_ghosts, &cpcol_sel_gid));
      for (cpid = 0; cpid < num_fine_ghosts; cpid++) icpcol_gid[cpid] = cpcol_gid[cpid];
      /* get proc of deleted ghost */
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, lid_parent_gid, cpcol_sel_gid, MPI_REPLACE));
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, lid_parent_gid, cpcol_sel_gid, MPI_REPLACE));
      for (cpid = 0; cpid < num_fine_ghosts; cpid++) {
        sgid = cpcol_sel_gid[cpid];
        gid = icpcol_gid[cpid];
        if (sgid >= my0 && sgid < Iend) { /* I own this deleted */
          slid = sgid - my0;
          PetscCall(PetscCDAppendID(agg_lists, slid, gid));
        }
      }
      // done - cleanup
      PetscCall(PetscFree2(icpcol_gid, cpcol_sel_gid));
      PetscCall(PetscSFDestroy(&sf));
      PetscCall(PetscFree2(cpcol_gid, cpcol_state));
    }
    PetscCall(PetscFree4(lid_cprowID, lid_removed, lid_parent_gid, lid_state));

    /* MIS done - make projection matrix - P */
    MatType jtype;
    PetscCall(MatGetType(Gmat, &jtype));
    PetscCall(MatCreate(comm, &Prols[iterIdx]));
    PetscCall(MatSetType(Prols[iterIdx], jtype));
    PetscCall(MatSetSizes(Prols[iterIdx], nloc_inner, nselected, PETSC_DETERMINE, PETSC_DETERMINE));
    PetscCall(MatSeqAIJSetPreallocation(Prols[iterIdx], 1, NULL));
    PetscCall(MatMPIAIJSetPreallocation(Prols[iterIdx], 1, NULL, 1, NULL));
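    /* Each selected vertex owns one column of P; every fine point in its aggregate list gets a
       single unit entry in that column, so P is piecewise constant (one nonzero per row). */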
    {
      PetscCDIntNd *pos, *pos2;
      PetscInt colIndex, Iend, fgid;

      PetscCall(MatGetOwnershipRangeColumn(Prols[iterIdx], &colIndex, &Iend));
      // TODO - order with permutation in lid_selected (reversed)
      for (PetscInt lid = 0; lid < agg_lists->size; lid++) {
        PetscCall(PetscCDGetHeadPos(agg_lists, lid, &pos));
        pos2 = pos;
        while (pos) {
          PetscCall(PetscCDIntNdGetID(pos, &fgid));
          PetscCall(PetscCDGetNextPos(agg_lists, lid, &pos));
          PetscCall(MatSetValues(Prols[iterIdx], 1, &fgid, 1, &colIndex, &one, INSERT_VALUES));
        }
        if (pos2) colIndex++;
      }
      PetscCheck(Iend == colIndex, PETSC_COMM_SELF, PETSC_ERR_SUP, "Iend!=colIndex: %" PetscInt_FMT " %" PetscInt_FMT, Iend, colIndex);
    }
    PetscCall(MatAssemblyBegin(Prols[iterIdx], MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(Prols[iterIdx], MAT_FINAL_ASSEMBLY));
    /* project to make new graph for next MIS, skip if last */
    if (iterIdx < misk - 1) {
      Mat new_mat;
      PetscCall(MatPtAP(cMat, Prols[iterIdx], MAT_INITIAL_MATRIX, PETSC_DETERMINE, &new_mat));
      PetscCall(MatDestroy(&cMat));
      cMat = new_mat; // next iter
    } else if (cMat != Gmat) PetscCall(MatDestroy(&cMat));
    // cleanup
    PetscCall(PetscCDDestroy(agg_lists));
  } /* MIS-k iteration */
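  /* Composing the per-round prolongators chains the rounds together: a column of P_0 * P_1 * ... *
     P_{misk-1} accumulates every fine point that is assigned, round by round, to one final-round
     root, so after the transpose below each local row of R lists one distance-k aggregate. */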
  /* make total prolongator Rtot = P_0 * P_1 * ... */
  Rtot = Prols[misk - 1]; // compose P then transpose to get R
  for (PetscInt iterIdx = misk - 1; iterIdx > 0; iterIdx--) {
    Mat P;

    PetscCall(MatMatMult(Prols[iterIdx - 1], Rtot, MAT_INITIAL_MATRIX, PETSC_CURRENT, &P));
    PetscCall(MatDestroy(&Prols[iterIdx - 1]));
    PetscCall(MatDestroy(&Rtot));
    Rtot = P;
  }
  PetscCall(MatTranspose(Rtot, MAT_INPLACE_MATRIX, &Rtot)); // R now
  PetscCall(MatViewFromOptions(Rtot, NULL, "-misk_aggregation_view"));
  /* make aggregates with Rtot - could use Rtot directly in theory but have to go through the aggregate list data structure */
  {
    PetscInt Istart, Iend, ncols, NN, MM, jj = 0, max_osz = 0;
    const PetscInt nloc = Gmat->rmap->n;
    PetscCoarsenData *agg_lists;
    Mat mat;

    PetscCall(PetscCDCreate(nloc, &agg_lists));
    *a_locals_llist = agg_lists; // return
    PetscCall(MatGetOwnershipRange(Rtot, &Istart, &Iend));
    for (PetscInt grow = Istart, lid = 0; grow < Iend; grow++, lid++) {
      const PetscInt *idx;

      PetscCall(MatGetRow(Rtot, grow, &ncols, &idx, NULL));
      for (PetscInt jj = 0; jj < ncols; jj++) {
        PetscInt gcol = idx[jj];

        PetscCall(PetscCDAppendID(agg_lists, lid, gcol)); // local row, global column
      }
      PetscCall(MatRestoreRow(Rtot, grow, &ncols, &idx, NULL));
    }
    PetscCall(MatDestroy(&Rtot));

    /* make fake matrix, get largest nnz */
    for (PetscInt lid = 0; lid < nloc; lid++) {
      PetscCall(PetscCDCountAt(agg_lists, lid, &jj));
      if (jj > max_osz) max_osz = jj;
    }
    PetscCall(MatGetSize(Gmat, &MM, &NN));
    if (max_osz > MM - nloc) max_osz = MM - nloc;
    PetscCall(MatGetOwnershipRange(Gmat, &Istart, NULL));
    /* matrix of ghost adj for square graph */
    PetscCall(MatCreateAIJ(comm, nloc, nloc, PETSC_DETERMINE, PETSC_DETERMINE, 0, NULL, max_osz, NULL, &mat));
    for (PetscInt lid = 0, gidi = Istart; lid < nloc; lid++, gidi++) {
      PetscCDIntNd *pos;

      PetscCall(PetscCDGetHeadPos(agg_lists, lid, &pos));
      while (pos) {
        PetscInt gidj;

        PetscCall(PetscCDIntNdGetID(pos, &gidj));
        PetscCall(PetscCDGetNextPos(agg_lists, lid, &pos));
        if (gidj < Istart || gidj >= Istart + nloc) PetscCall(MatSetValues(mat, 1, &gidi, 1, &gidj, &one, ADD_VALUES));
      }
    }
    PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
    PetscCall(PetscCDSetMat(agg_lists, mat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Distance k MIS. k is in 'subctx'
*/
static PetscErrorCode MatCoarsenApply_MISK(MatCoarsen coarse)
{
  Mat mat = coarse->graph;
  PetscInt k;

  PetscFunctionBegin;
  PetscCall(MatCoarsenMISKGetDistance(coarse, &k));
  PetscCheck(k > 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "too few levels: %" PetscInt_FMT, k);
  if (!coarse->perm) {
    IS perm;
    PetscInt n, m;

    PetscCall(MatGetLocalSize(mat, &m, &n));
    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), m, 0, 1, &perm));
    PetscCall(MatCoarsenApply_MISK_private(perm, k, mat, &coarse->agg_lists));
    PetscCall(ISDestroy(&perm));
  } else {
    PetscCall(MatCoarsenApply_MISK_private(coarse->perm, k, mat, &coarse->agg_lists));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCoarsenView_MISK(MatCoarsen coarse, PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)coarse), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  if (iascii && format == PETSC_VIEWER_ASCII_INFO_DETAIL && coarse->agg_lists) {
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " [%d] MISK aggregator\n", rank));
    if (!rank) PetscCall(PetscCoarsenDataView_private(coarse->agg_lists, viewer));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCoarsenSetFromOptions_MISK(MatCoarsen coarse, PetscOptionItems *PetscOptionsObject)
{
  PetscInt k = 1;
  PetscBool flg;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "MatCoarsen-MISk options");
  PetscCall(PetscOptionsInt("-mat_coarsen_misk_distance", "k distance for MIS", "", k, &k, &flg));
  if (flg) coarse->subctx = (void *)(size_t)k;
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATCOARSENMISK - A coarsener that aggregates vertices using a distance-k maximal independent set (MIS-k), a simple greedy coarsener

  Level: beginner

  Options Database Key:
. -mat_coarsen_misk_distance <k> - distance for MIS

  Note:
  When the coarsening is used inside `PCGAMG` then the options database key is `-pc_gamg_mat_coarsen_misk_distance`

.seealso: `MatCoarsen`, `MatCoarsenMISKSetDistance()`, `MatCoarsenApply()`, `MatCoarsenSetType()`, `MatCoarsenType`, `MatCoarsenCreate()`, `MATCOARSENHEM`, `MATCOARSENMIS`
M*/

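/*
  Illustrative usage sketch (not part of this file's implementation; error paths omitted and the
  adjacency Mat A is assumed to be created elsewhere): select the MIS-k coarsener and apply it.

    MatCoarsen crs;
    PetscCall(MatCoarsenCreate(PETSC_COMM_WORLD, &crs));
    PetscCall(MatCoarsenSetType(crs, MATCOARSENMISK));
    PetscCall(MatCoarsenMISKSetDistance(crs, 2));
    PetscCall(MatCoarsenSetAdjacency(crs, A));
    PetscCall(MatCoarsenApply(crs));
    PetscCall(MatCoarsenDestroy(&crs));
*/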
PETSC_EXTERN PetscErrorCode MatCoarsenCreate_MISK(MatCoarsen coarse)
{
  PetscFunctionBegin;
  coarse->ops->apply = MatCoarsenApply_MISK;
  coarse->ops->view = MatCoarsenView_MISK;
  coarse->subctx = (void *)(size_t)1;
  coarse->ops->setfromoptions = MatCoarsenSetFromOptions_MISK;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatCoarsenMISKSetDistance - sets the distance to be used by MISK

  Collective

  Input Parameters:
+ crs - the coarsen
- k - the distance

  Options Database Key:
. -mat_coarsen_misk_distance <k> - distance for MIS

  Level: advanced

  Note:
  When the coarsening is used inside `PCGAMG` then the options database key is `-pc_gamg_mat_coarsen_misk_distance`

.seealso: `MATCOARSENMISK`, `MatCoarsen`, `MatCoarsenSetFromOptions()`, `MatCoarsenSetType()`, `MatCoarsenRegister()`, `MatCoarsenCreate()`,
          `MatCoarsenDestroy()`, `MatCoarsenSetAdjacency()`, `MatCoarsenMISKGetDistance()`,
          `MatCoarsenGetData()`
@*/
PetscErrorCode MatCoarsenMISKSetDistance(MatCoarsen crs, PetscInt k)
{
  PetscFunctionBegin;
  crs->subctx = (void *)(size_t)k;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatCoarsenMISKGetDistance - gets the distance to be used by MISK

  Collective

  Input Parameter:
. crs - the coarsen

  Output Parameter:
. k - the distance

  Level: advanced

.seealso: `MATCOARSENMISK`, `MatCoarsen`, `MatCoarsenSetFromOptions()`, `MatCoarsenSetType()`,
          `MatCoarsenRegister()`, `MatCoarsenCreate()`, `MatCoarsenDestroy()`,
          `MatCoarsenSetAdjacency()`, `MatCoarsenGetData()`
@*/
PetscErrorCode MatCoarsenMISKGetDistance(MatCoarsen crs, PetscInt *k)
{
  PetscFunctionBegin;
  *k = (PetscInt)(size_t)crs->subctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}