/* mpiadj.c */
1: /*
2: Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3: */
4: #include <../src/mat/impls/adj/mpi/mpiadj.h>
5: #include <petscsf.h>
7: /*
8: The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
9: */
/*
  MatCreateSubMatrix_MPIAdj_data - gather, in CSR form, the rows listed in irows restricted to the
  columns listed in icols.

  Collective on the communicator of adj.

  Input Parameters:
+ adj   - the MATMPIADJ matrix
. irows - global indices of the rows this process wants (rows may be owned by other processes)
- icols - global indices of the columns to keep; entries in any other column are dropped

  Output Parameters (each may be NULL, in which case the corresponding array is freed here):
+ sadj_xadj   - row offsets, length (local size of irows) + 1; caller takes ownership
. sadj_adjncy - surviving column indices, still in the ORIGINAL global numbering; caller takes ownership
- sadj_values - edge weights when adj->useedgeweights is set, otherwise set to NULL; caller takes ownership

  Implementation: two PetscSF broadcasts. The first ships each requested row's length and its
  owner's xadj offset; from those a second SF is built with one leaf per incoming nonzero, which
  ships the column indices (and, optionally, the weights).

  The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and
  MatCreateSubMatrices (sequential sub-matrices).
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct sf graph: one leaf per requested row, rooted at the row's owning process */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  /* the SF takes ownership of iremote (PETSC_OWN_POINTER) */
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* first exchange: per-row lengths and the owner's xadj offsets for the requested rows */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* prefix-sum the received row lengths into local offsets */
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  /* second sf graph: one leaf per incoming nonzero, addressed by the owner's xadj offset */
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /*if we need to deal with edge weights ???*/
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* second exchange: the column indices themselves (and the weights, if any) */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    /* NOTE: a->values holds PetscInt edge weights, hence MPIU_INT here */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  /* mark entries whose column is not in icols with -1 and count the survivors
     (PetscFindInt requires icols_indices to be sorted) */
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  /* compress the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  /* hand each array to the caller if requested, otherwise release it here */
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  MatCreateSubMatrices_MPIAdj_Private - shared implementation for extracting n sub-matrices,
  either on the communicators of the index sets (subcomm == PETSC_TRUE) or on PETSC_COMM_SELF.

  For each pair (irow[i], icol[i]) the global sub-matrix data is fetched, the columns are
  renumbered into the compact numbering given by the sorted union of the row and column indices,
  and the result is either wrapped in a new MATMPIADJ (MAT_INITIAL_MATRIX, which takes ownership
  of the CSR arrays) or copied into the existing submat[i] (reuse path, arrays freed here).
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  // if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      /* parallel extraction: row and column IS must share a communicator, and it must not be PETSC_COMM_SELF */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, " can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    /* the sorted, deduplicated union of row and column indices defines the new column numbering */
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "can not find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      /* reuse path: copy into the existing matrix, which must live on the same communicator */
      Mat         sadj = *submat[i];
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* MatCreateSubMatricesMPI_MPIAdj - extract sub-matrices that live on the communicators of the index sets */
static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  /*get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* MatCreateSubMatrices_MPIAdj - extract sequential (PETSC_COMM_SELF) sub-matrices */
static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscFunctionBegin;
  /*get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
225: static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
226: {
227: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
228: PetscInt i, j, m = A->rmap->n;
229: const char *name;
230: PetscViewerFormat format;
232: PetscFunctionBegin;
233: PetscCall(PetscObjectGetName((PetscObject)A, &name));
234: PetscCall(PetscViewerGetFormat(viewer, &format));
235: if (format == PETSC_VIEWER_ASCII_INFO) {
236: PetscFunctionReturn(PETSC_SUCCESS);
237: } else {
238: PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
239: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
240: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241: for (i = 0; i < m; i++) {
242: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
243: for (j = a->i[i]; j < a->i[i + 1]; j++) {
244: if (a->values) {
245: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
246: } else {
247: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
248: }
249: }
250: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
251: }
252: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
253: PetscCall(PetscViewerFlush(viewer));
254: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255: }
256: PetscFunctionReturn(PETSC_SUCCESS);
257: }
259: static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
260: {
261: PetscBool isascii;
263: PetscFunctionBegin;
264: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
265: if (isascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
266: else {
267: Mat B;
268: PetscCall(MatConvert(A, MATMPIAIJ, MAT_INITIAL_MATRIX, &B));
269: PetscCall(MatView(B, viewer));
270: PetscCall(MatDestroy(&B));
271: }
272: PetscFunctionReturn(PETSC_SUCCESS);
273: }
/*
  MatDestroy_MPIAdj - release all storage held by the matrix.

  The CSR arrays are only freed when the matrix owns them (freeaij). When freeaijwithfree is
  set the arrays came from plain malloc() (e.g. from an external package) and must be released
  with free(); note that in that branch a->values is intentionally not freed here.
*/
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays were obtained with malloc(), so release them with free() */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  /* remove the composed type-specific functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
302: static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
303: {
304: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
306: PetscFunctionBegin;
307: switch (op) {
308: case MAT_SYMMETRIC:
309: case MAT_STRUCTURALLY_SYMMETRIC:
310: case MAT_HERMITIAN:
311: case MAT_SPD:
312: a->symmetric = flg;
313: break;
314: default:
315: break;
316: }
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
321: {
322: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
324: PetscFunctionBegin;
325: row -= A->rmap->rstart;
326: PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
327: *nz = a->i[row + 1] - a->i[row];
328: if (v) {
329: PetscInt j;
330: if (a->rowvalues_alloc < *nz) {
331: PetscCall(PetscFree(a->rowvalues));
332: a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
333: PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
334: }
335: for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
336: *v = (*nz) ? a->rowvalues : NULL;
337: }
338: if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
339: PetscFunctionReturn(PETSC_SUCCESS);
340: }
342: static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
343: {
344: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
345: PetscBool flag;
347: PetscFunctionBegin;
348: /* If the matrix dimensions are not equal,or no of nonzeros */
349: if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;
351: /* if the a->i are the same */
352: PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
354: /* if a->j are the same */
355: PetscCall(PetscArraycmp(a->j, b->j, a->nz, &flag));
357: PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
358: PetscFunctionReturn(PETSC_SUCCESS);
359: }
/*
  MatGetRowIJ_MPIAdj - exposes the matrix's CSR arrays directly (no copy is made).

  With a nonzero oshift the arrays are shifted IN PLACE to 1-based indexing; the matching
  MatRestoreRowIJ() call must undo the shift. The order of the two loops below matters:
  ja[] is swept first, while ia[*m] still holds the unshifted total nonzero count.
*/
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja[] before ia[], because ia[*m] is the loop bound */
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
    for (i = 0; i <= (*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
379: static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
380: {
381: PetscInt i;
382: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
383: PetscInt **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
385: PetscFunctionBegin;
386: PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
387: PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
388: if (oshift) {
389: PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inia[] argument");
390: PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inja[] argument");
391: for (i = 0; i <= (*m); i++) (*ia)[i]--;
392: for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
397: static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
398: {
399: Mat B;
400: PetscInt i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
401: const PetscInt *rj;
402: const PetscScalar *ra;
403: MPI_Comm comm;
405: PetscFunctionBegin;
406: PetscCall(MatGetSize(A, NULL, &N));
407: PetscCall(MatGetLocalSize(A, &m, NULL));
408: PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
410: /* count the number of nonzeros per row */
411: for (i = 0; i < m; i++) {
412: PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
413: for (j = 0; j < len; j++) {
414: if (rj[j] == i + rstart) {
415: len--;
416: break;
417: } /* don't count diagonal */
418: }
419: nzeros += len;
420: PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
421: }
423: /* malloc space for nonzeros */
424: PetscCall(PetscMalloc1(nzeros + 1, &a));
425: PetscCall(PetscMalloc1(N + 1, &ia));
426: PetscCall(PetscMalloc1(nzeros + 1, &ja));
428: nzeros = 0;
429: ia[0] = 0;
430: for (i = 0; i < m; i++) {
431: PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
432: cnt = 0;
433: for (j = 0; j < len; j++) {
434: if (rj[j] != i + rstart) { /* if not diagonal */
435: a[nzeros + cnt] = (PetscInt)PetscAbsScalar(ra[j]);
436: ja[nzeros + cnt++] = rj[j];
437: }
438: }
439: PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
440: nzeros += cnt;
441: ia[i + 1] = nzeros;
442: }
444: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
445: PetscCall(MatCreate(comm, &B));
446: PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
447: PetscCall(MatSetType(B, type));
448: PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));
450: if (reuse == MAT_INPLACE_MATRIX) {
451: PetscCall(MatHeaderReplace(A, &B));
452: } else {
453: *newmat = B;
454: }
455: PetscFunctionReturn(PETSC_SUCCESS);
456: }
/*
  MatSetValues_MPIAdj - record the nonzero PATTERN of the (rows x cols) block in a hash set;
  the numerical values are not stored. Off-process rows are stashed and delivered to their
  owners during assembly. Negative row/column indices and diagonal entries are skipped.
  Can only be called before the CSR arrays have been built (i.e. before assembly finishes).
*/
static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  /* lazily create the hash set and the communication stash on the first insertion */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue; /* negative rows are ignored, as in other MatSetValues implementations */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* off-process row: stash it for delivery during MatAssemblyBegin/End */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* skip negative columns and diagonal entries */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  MatAssemblyBegin_MPIAdj - start shipping stashed off-process pattern entries to their owners.
*/
static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  /* the hash set may not exist yet if this process never called MatSetValues() */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
508: static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
509: {
510: PetscScalar *val;
511: PetscInt *row, *col, m, rstart, *rowstarts;
512: PetscInt i, j, ncols, flg, nz;
513: PetscMPIInt n;
514: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
515: PetscHashIter hi;
516: PetscHashIJKey key;
517: PetscHSetIJ ht = adj->ht;
519: PetscFunctionBegin;
520: while (1) {
521: PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
522: if (!flg) break;
524: for (i = 0; i < n;) {
525: /* Identify the consecutive vals belonging to the same row */
526: for (j = i, rstart = row[j]; j < n; j++) {
527: if (row[j] != rstart) break;
528: }
529: if (j < n) ncols = j - i;
530: else ncols = n - i;
531: /* Set all these values with a single function call */
532: PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
533: i = j;
534: }
535: }
536: PetscCall(MatStashScatterEnd_Private(&A->stash));
537: PetscCall(MatStashDestroy_Private(&A->stash));
539: PetscCall(MatGetLocalSize(A, &m, NULL));
540: PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
541: PetscCall(PetscCalloc1(m + 1, &rowstarts));
542: PetscHashIterBegin(ht, hi);
543: for (; !PetscHashIterAtEnd(ht, hi);) {
544: PetscHashIterGetKey(ht, hi, key);
545: rowstarts[key.i - rstart + 1]++;
546: PetscHashIterNext(ht, hi);
547: }
548: for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];
550: PetscCall(PetscHSetIJGetSize(ht, &nz));
551: PetscCall(PetscMalloc1(nz, &col));
552: PetscHashIterBegin(ht, hi);
553: for (; !PetscHashIterAtEnd(ht, hi);) {
554: PetscHashIterGetKey(ht, hi, key);
555: col[rowstarts[key.i - rstart]++] = key.j;
556: PetscHashIterNext(ht, hi);
557: }
558: PetscCall(PetscHSetIJDestroy(&ht));
559: for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
560: rowstarts[0] = 0;
562: for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));
564: adj->i = rowstarts;
565: adj->j = col;
566: adj->nz = rowstarts[m];
567: adj->freeaij = PETSC_TRUE;
568: PetscFunctionReturn(PETSC_SUCCESS);
569: }
/* Function table for MATMPIADJ; the index comments match the slot numbers in struct _MatOps.
   Slots left NULL are operations the type does not support. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       NULL,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL};
/*
  MatMPIAdjSetPreallocation_MPIAdj - install user-provided CSR arrays as the matrix storage.

  The matrix takes ownership of i, j and values (freeaij is set, so they are released with
  PetscFree() when the matrix is destroyed; they must therefore have been obtained with
  PetscMalloc). In debug mode the arrays are validated: i[] must start at zero and be
  non-decreasing, j[] entries must be in [0, N) and sorted within each row.
*/
static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  PetscBool   useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE;
  else useedgeweights = PETSC_FALSE;
  /* make sure every process agrees on whether edge weights are in use */
  PetscCallMPI(MPIU_Allreduce(&useedgeweights, &b->useedgeweights, 1, MPI_C_BOOL, MPI_LOR, PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
    for (ii = 1; ii < B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
    }
    for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
    for (ii = 0; ii < B->rmap->n; ii++) {
      PetscInt jj;
      for (jj = i[ii] + 1; jj < i[ii + 1]; jj++)
        PetscCheck(j[jj] > j[jj - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Row %" PetscInt_FMT " column indices are not sorted: j[%" PetscInt_FMT "]=%" PetscInt_FMT " >= j[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, jj - 1, j[jj - 1], jj, j[jj]);
    }
  }
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  /* the matrix is fully defined by the provided arrays, so the hash-set based assembly
     path is disabled before running the generic assembly bookkeeping */
  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  MatMPIAdjCreateNonemptySubcommMat_MPIAdj - create the same matrix on a sub-communicator that
  contains only the processes owning at least one row.

  If every process already owns rows, A itself is returned (with an extra reference). Otherwise
  a sub-communicator is built from the ranks with nonzero row counts; processes outside it get
  *B = NULL. The new matrix aliases A's CSR arrays (freeaij is cleared), so A must outlive *B.
*/
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
  const PetscInt *ranges;
  MPI_Comm        acomm, bcomm;
  MPI_Group       agroup, bgroup;
  PetscMPIInt     i, size, nranks, *ranks;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
  PetscCallMPI(MPI_Comm_size(acomm, &size));
  PetscCall(MatGetOwnershipRanges(A, &ranges));
  /* count the processes that own at least one row */
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
    PetscCall(PetscObjectReference((PetscObject)A));
    *B = A;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* build the group of ranks with rows and the corresponding sub-communicator */
  PetscCall(PetscMalloc1(nranks, &ranks));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  PetscCallMPI(MPI_Comm_group(acomm, &agroup));
  PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
  PetscCall(PetscFree(ranks));
  PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
  PetscCallMPI(MPI_Group_free(&agroup));
  PetscCallMPI(MPI_Group_free(&bgroup));
  if (bcomm != MPI_COMM_NULL) {
    PetscInt    m, N;
    Mat_MPIAdj *b;
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, NULL, &N));
    PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
    b          = (Mat_MPIAdj *)(*B)->data;
    b->freeaij = PETSC_FALSE; /* *B borrows A's arrays; A keeps ownership */
    PetscCallMPI(MPI_Comm_free(&bcomm));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  MatMPIAdjToSeq_MPIAdj - gather the whole distributed adjacency matrix onto EVERY process as a
  sequential (PETSC_COMM_SELF) MATMPIADJ of global size M x N.

  Uses MPI_Allgatherv on the j (and optional weight) arrays, and on the row-offset array after
  temporarily shifting it by this process's global nonzero offset; the shift is undone before
  returning, so A is unchanged. The gathered arrays are handed to the new matrix, which owns
  them. Note adj->values holds PetscInt weights, hence the MPIU_INT datatype throughout.
*/
static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  /* gather everyone's nonzero counts to build the Allgatherv displacements */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  /* exclusive prefix sum of nz gives this process's offset into the global nonzero array */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  /* each process contributes m offsets; the final II[M] entry is set explicitly below */
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ;
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* the new sequential matrix takes ownership of II, J and Values */
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  MatMPIAdjToSeqRankZero_MPIAdj - like MatMPIAdjToSeq_MPIAdj(), but gather the whole matrix onto
  rank 0 only (MPI_Gather/Gatherv instead of the Allgather variants).

  On rank 0 a sequential MATMPIADJ of global size M x N is created and returned in *B; on all
  other ranks *B is left untouched. Non-root ranks still participate in every collective call,
  passing NULL receive arguments as MPI permits. A's i[] array is temporarily shifted by the
  process's global nonzero offset and restored before returning.
*/
static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  /* gather nonzero counts on the root to build the Gatherv displacements */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    /* non-root ranks: receive arguments are ignored by MPI_Gatherv */
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* exclusive prefix sum of nz gives this process's offset into the global nonzero array */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    /* each process contributes m offsets; the final II[M] entry is set explicitly below */
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ;
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* rank 0's new sequential matrix takes ownership of II, J and Values */
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
908: /*@
909: MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
911: Collective
913: Input Parameter:
914: . A - original `MATMPIADJ` matrix
916: Output Parameter:
917: . B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`
919: Level: developer
921: Note:
922: The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.
924: Developer Note:
925: This function is mostly useful for internal use by mesh partitioning packages, such as ParMETIS that require that every process owns at least one row.
927: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
928: @*/
929: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
930: {
931: PetscFunctionBegin;
933: PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
934: PetscFunctionReturn(PETSC_SUCCESS);
935: }
/*MC
  MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
  intended for use constructing orderings and partitionings.

  Level: beginner

  Note:
  You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
  by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`

.seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
M*/
/* Type constructor for MATMPIADJ: allocates the type-specific data, installs
   the operation table, and composes the type-specific methods queried by the
   public MatMPIAdj* entry points. */
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data = (void *)b;
  B->ops[0] = MatOps_Values; /* install the MATMPIADJ function table */
  B->assembled = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  /* compose implementations looked up by PetscUseMethod()/PetscTryMethod()
     in the corresponding public functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on each process (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on all processes

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
@*/
PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
{
  PetscFunctionBegin;
  /* dispatch to the type-specific implementation composed as "MatMPIAdjToSeq_C" */
  PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on rank zero (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on rank zero, not set on other ranks

  Level: intermediate

  Note:
  This routine has an advantage on systems with multiple ranks per node since only one copy of the matrix
  is stored on the first node, instead of one copy per rank. This can allow partitioning a much larger
  parallel graph sequentially.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
@*/
PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
{
  PetscFunctionBegin;
  /* dispatch to the type-specific implementation composed as "MatMPIAdjToSeqRankZero_C" */
  PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements

  Logically Collective

  Input Parameters:
+ B - the matrix
. i - the indices into `j` for the start of each row
. j - the column indices for each row (sorted for each row).
      The indices in `i` and `j` start with zero (NOT with one).
- values - [use `NULL` if not provided] edge weights

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonal elements.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  PetscFunctionBegin;
  /* "Try" dispatch: silently does nothing if B's type has not composed
     "MatMPIAdjSetPreallocation_C" (i.e. B is not a MATMPIADJ) */
  PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1056: /*@C
1057: MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1058: The matrix need not have numerical values associated with it, it is
1059: intended for ordering (to reduce bandwidth etc) and partitioning.
1061: Collective
1063: Input Parameters:
1064: + comm - MPI communicator
1065: . m - number of local rows
1066: . N - number of global columns
1067: . i - the indices into `j` for the start of each row
1068: . j - the column indices for each row (sorted for each row).
1069: - values - the values, optional, use `NULL` if not provided
1071: Output Parameter:
1072: . A - the matrix
1074: Level: intermediate
1076: Notes:
1077: The indices in `i` and `j` start with zero (NOT with one).
1079: You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1080: when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1082: You should not include the matrix diagonals.
1084: If you already have a matrix, you can create its adjacency matrix by a call
1085: to `MatConvert()`, specifying a type of `MATMPIADJ`.
1087: Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1089: Fortran Note:
1090: From Fortran the arrays `indices` and `values` must be retained by the user until `A` is destroyed
1092: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1093: @*/
1094: PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt i[], PetscInt j[], PetscInt values[], Mat *A) PeNSS
1095: {
1096: PetscFunctionBegin;
1097: PetscCall(MatCreate(comm, A));
1098: PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
1099: PetscCall(MatSetType(*A, MATMPIADJ));
1100: PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
1101: PetscFunctionReturn(PETSC_SUCCESS);
1102: }