Actual source code: mpiadj.c
1: /*
2: Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3: */
4: #include <../src/mat/impls/adj/mpi/mpiadj.h>
5: #include <petscsf.h>
7: /*
8: The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
9: */
10: static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
11: {
12: PetscInt nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
13: PetscInt nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
14: PetscInt *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
15: const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
16: PetscMPIInt owner;
17: Mat_MPIAdj *a = (Mat_MPIAdj *)adj->data;
18: PetscLayout rmap;
19: MPI_Comm comm;
20: PetscSF sf;
21: PetscSFNode *iremote;
22: PetscBool done;
24: PetscFunctionBegin;
25: PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
26: PetscCall(MatGetLayouts(adj, &rmap, NULL));
27: PetscCall(ISGetLocalSize(irows, &nlrows_is));
28: PetscCall(ISGetIndices(irows, &irows_indices));
29: PetscCall(PetscMalloc1(nlrows_is, &iremote));
30: /* construct the SF graph */
31: nleaves = nlrows_is;
32: for (i = 0; i < nlrows_is; i++) {
33: owner = -1;
34: rlocalindex = -1;
35: PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
36: iremote[i].rank = owner;
37: iremote[i].index = rlocalindex;
38: }
39: PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
40: PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
41: nroots = nlrows_mat;
42: for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
43: PetscCall(PetscSFCreate(comm, &sf));
44: PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
45: PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
46: PetscCall(PetscSFSetFromOptions(sf));
47: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
48: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
49: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
50: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
51: PetscCall(PetscSFDestroy(&sf));
52: Ncols_recv = 0;
53: for (i = 0; i < nlrows_is; i++) {
54: Ncols_recv += ncols_recv[i];
55: ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
56: }
57: Ncols_send = 0;
58: for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
59: PetscCall(PetscCalloc1(Ncols_recv, &iremote));
60: PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
61: nleaves = Ncols_recv;
62: Ncols_recv = 0;
63: for (i = 0; i < nlrows_is; i++) {
64: PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
65: for (j = 0; j < ncols_recv[i]; j++) {
66: iremote[Ncols_recv].rank = owner;
67: iremote[Ncols_recv++].index = xadj_recv[i] + j;
68: }
69: }
70: PetscCall(ISRestoreIndices(irows, &irows_indices));
71: /* gather the edge weights as well if they are in use */
72: if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
73: nroots = Ncols_send;
74: PetscCall(PetscSFCreate(comm, &sf));
75: PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
76: PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
77: PetscCall(PetscSFSetFromOptions(sf));
78: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
79: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
80: if (a->useedgeweights) {
81: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
82: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
83: }
84: PetscCall(PetscSFDestroy(&sf));
85: PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
86: PetscCall(ISGetLocalSize(icols, &icols_n));
87: PetscCall(ISGetIndices(icols, &icols_indices));
88: rnclos = 0;
89: for (i = 0; i < nlrows_is; i++) {
90: for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
91: PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
92: if (loc < 0) {
93: adjncy_recv[j] = -1;
94: if (a->useedgeweights) values_recv[j] = -1;
95: ncols_recv[i]--;
96: } else {
97: rnclos++;
98: }
99: }
100: }
101: PetscCall(ISRestoreIndices(icols, &icols_indices));
102: PetscCall(PetscCalloc1(rnclos, &sadjncy));
103: if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
104: PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
105: rnclos = 0;
106: for (i = 0; i < nlrows_is; i++) {
107: for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
108: if (adjncy_recv[j] < 0) continue;
109: sadjncy[rnclos] = adjncy_recv[j];
110: if (a->useedgeweights) svalues[rnclos] = values_recv[j];
111: rnclos++;
112: }
113: }
114: for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
115: if (sadj_xadj) {
116: *sadj_xadj = sxadj;
117: } else PetscCall(PetscFree(sxadj));
118: if (sadj_adjncy) {
119: *sadj_adjncy = sadjncy;
120: } else PetscCall(PetscFree(sadjncy));
121: if (sadj_values) {
122: if (a->useedgeweights) *sadj_values = svalues;
123: else *sadj_values = NULL;
124: } else {
125: if (a->useedgeweights) PetscCall(PetscFree(svalues));
126: }
127: PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
128: PetscCall(PetscFree(adjncy_recv));
129: if (a->useedgeweights) PetscCall(PetscFree(values_recv));
130: PetscFunctionReturn(PETSC_SUCCESS);
131: }
133: static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
134: {
135: PetscInt i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
136: PetscInt *indices, nindx, j, k, loc;
137: PetscMPIInt issame;
138: const PetscInt *irow_indices, *icol_indices;
139: MPI_Comm scomm_row, scomm_col, scomm_mat;
141: PetscFunctionBegin;
142: nindx = 0;
143: /*
144: * Estimate an upper bound on the number of indices so the workspace can be allocated once
145: */
146: for (i = 0; i < n; i++) {
147: PetscCall(ISGetLocalSize(irow[i], &irow_n));
148: PetscCall(ISGetLocalSize(icol[i], &icol_n));
149: nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
150: }
151: PetscCall(PetscMalloc1(nindx, &indices));
152: /* construct a submat */
153: // if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));
155: for (i = 0; i < n; i++) {
156: if (subcomm) {
157: PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
158: PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
159: PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
160: PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
161: PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
162: PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as the communicator when extracting a parallel submatrix");
163: } else {
164: scomm_row = PETSC_COMM_SELF;
165: }
166: /* get the sub-matrix data */
167: sxadj = NULL;
168: sadjncy = NULL;
169: svalues = NULL;
170: PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
171: PetscCall(ISGetLocalSize(irow[i], &irow_n));
172: PetscCall(ISGetLocalSize(icol[i], &icol_n));
173: PetscCall(ISGetIndices(irow[i], &irow_indices));
174: PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
175: PetscCall(ISRestoreIndices(irow[i], &irow_indices));
176: PetscCall(ISGetIndices(icol[i], &icol_indices));
177: PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
178: PetscCall(ISRestoreIndices(icol[i], &icol_indices));
179: nindx = irow_n + icol_n;
180: PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
181: /* renumber columns */
182: for (j = 0; j < irow_n; j++) {
183: for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
184: PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
185: PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find column %" PetscInt_FMT, sadjncy[k]);
186: sadjncy[k] = loc;
187: }
188: }
189: if (scall == MAT_INITIAL_MATRIX) {
190: PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
191: } else {
192: Mat sadj = *submat[i];
193: Mat_MPIAdj *sa = (Mat_MPIAdj *)((sadj)->data);
194: PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
195: PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
196: PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same communicator as the index sets");
197: PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
198: PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
199: if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
200: PetscCall(PetscFree(sxadj));
201: PetscCall(PetscFree(sadjncy));
202: PetscCall(PetscFree(svalues));
203: }
204: }
205: PetscCall(PetscFree(indices));
206: PetscFunctionReturn(PETSC_SUCCESS);
207: }
209: static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
210: {
211: /* get sub-matrices across a sub-communicator */
212: PetscFunctionBegin;
213: PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
214: PetscFunctionReturn(PETSC_SUCCESS);
215: }
217: static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
218: {
219: PetscFunctionBegin;
220: /* get sequential sub-matrices on PETSC_COMM_SELF */
221: PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
222: PetscFunctionReturn(PETSC_SUCCESS);
223: }
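/*
  Usage sketch (illustrative, not part of the original source): the two routines above are
  reached through the public MatCreateSubMatrices()/MatCreateSubMatricesMPI() interface.
  Here `A` stands for an assembled MATMPIADJ matrix owning at least two rows on each rank;
  the index sets and sizes are made up for the example.

    IS       is;
    Mat     *submats;
    PetscInt rstart;

    PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, 2, rstart, 1, &is));                 first two local rows, also used as columns
    PetscCall(MatCreateSubMatrices(A, 1, &is, &is, MAT_INITIAL_MATRIX, &submats));
    PetscCall(MatDestroySubMatrices(1, &submats));
    PetscCall(ISDestroy(&is));
*/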
225: static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
226: {
227: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
228: PetscInt i, j, m = A->rmap->n;
229: const char *name;
230: PetscViewerFormat format;
232: PetscFunctionBegin;
233: PetscCall(PetscObjectGetName((PetscObject)A, &name));
234: PetscCall(PetscViewerGetFormat(viewer, &format));
235: if (format == PETSC_VIEWER_ASCII_INFO) {
236: PetscFunctionReturn(PETSC_SUCCESS);
237: } else {
238: PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
239: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
240: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241: for (i = 0; i < m; i++) {
242: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
243: for (j = a->i[i]; j < a->i[i + 1]; j++) {
244: if (a->values) {
245: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
246: } else {
247: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
248: }
249: }
250: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
251: }
252: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
253: PetscCall(PetscViewerFlush(viewer));
254: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255: }
256: PetscFunctionReturn(PETSC_SUCCESS);
257: }
259: static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
260: {
261: PetscBool iascii;
263: PetscFunctionBegin;
264: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
265: if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
266: PetscFunctionReturn(PETSC_SUCCESS);
267: }
269: static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
270: {
271: Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;
273: PetscFunctionBegin;
274: PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
275: PetscCall(PetscFree(a->diag));
276: if (a->freeaij) {
277: if (a->freeaijwithfree) {
278: if (a->i) free(a->i);
279: if (a->j) free(a->j);
280: } else {
281: PetscCall(PetscFree(a->i));
282: PetscCall(PetscFree(a->j));
283: PetscCall(PetscFree(a->values));
284: }
285: }
286: PetscCall(PetscFree(a->rowvalues));
287: PetscCall(PetscFree(mat->data));
288: PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
289: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
290: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
291: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
292: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
293: PetscFunctionReturn(PETSC_SUCCESS);
294: }
296: static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
297: {
298: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
300: PetscFunctionBegin;
301: switch (op) {
302: case MAT_SYMMETRIC:
303: case MAT_STRUCTURALLY_SYMMETRIC:
304: case MAT_HERMITIAN:
305: case MAT_SPD:
306: a->symmetric = flg;
307: break;
308: default:
309: break;
310: }
311: PetscFunctionReturn(PETSC_SUCCESS);
312: }
314: static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
315: {
316: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
318: PetscFunctionBegin;
319: row -= A->rmap->rstart;
320: PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
321: *nz = a->i[row + 1] - a->i[row];
322: if (v) {
323: PetscInt j;
324: if (a->rowvalues_alloc < *nz) {
325: PetscCall(PetscFree(a->rowvalues));
326: a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
327: PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
328: }
329: for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
330: *v = (*nz) ? a->rowvalues : NULL;
331: }
332: if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
333: PetscFunctionReturn(PETSC_SUCCESS);
334: }
336: static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
337: {
338: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
339: PetscBool flag;
341: PetscFunctionBegin;
342: /* the matrices differ if the local dimensions or numbers of nonzeros differ */
343: if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
344: flag = PETSC_FALSE;
345: } else {
346: /* check that the row offsets a->i match */
347: PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
348: /* check the column indices a->j only if the row offsets matched */
349: if (flag) PetscCall(PetscArraycmp(a->j, b->j, a->nz, &flag));
350: }
351: PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
352: PetscFunctionReturn(PETSC_SUCCESS);
353: }
355: static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
356: {
357: PetscInt i;
358: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
359: PetscInt **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
361: PetscFunctionBegin;
362: *m = A->rmap->n;
363: *ia = a->i;
364: *ja = a->j;
365: *done = PETSC_TRUE;
366: if (oshift) {
367: for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
368: for (i = 0; i <= (*m); i++) (*ia)[i]++;
369: }
370: PetscFunctionReturn(PETSC_SUCCESS);
371: }
373: static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
374: {
375: PetscInt i;
376: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
377: PetscInt **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
379: PetscFunctionBegin;
380: PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
381: PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
382: if (oshift) {
383: PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inia[] argument");
384: PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inja[] argument");
385: for (i = 0; i <= (*m); i++) (*ia)[i]--;
386: for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
387: }
388: PetscFunctionReturn(PETSC_SUCCESS);
389: }
391: static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
392: {
393: Mat B;
394: PetscInt i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
395: const PetscInt *rj;
396: const PetscScalar *ra;
397: MPI_Comm comm;
399: PetscFunctionBegin;
400: PetscCall(MatGetSize(A, NULL, &N));
401: PetscCall(MatGetLocalSize(A, &m, NULL));
402: PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
404: /* count the number of nonzeros per row */
405: for (i = 0; i < m; i++) {
406: PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
407: for (j = 0; j < len; j++) {
408: if (rj[j] == i + rstart) {
409: len--;
410: break;
411: } /* don't count diagonal */
412: }
413: nzeros += len;
414: PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
415: }
417: /* malloc space for nonzeros */
418: PetscCall(PetscMalloc1(nzeros + 1, &a));
419: PetscCall(PetscMalloc1(N + 1, &ia));
420: PetscCall(PetscMalloc1(nzeros + 1, &ja));
422: nzeros = 0;
423: ia[0] = 0;
424: for (i = 0; i < m; i++) {
425: PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
426: cnt = 0;
427: for (j = 0; j < len; j++) {
428: if (rj[j] != i + rstart) { /* if not diagonal */
429: a[nzeros + cnt] = (PetscInt)PetscAbsScalar(ra[j]);
430: ja[nzeros + cnt++] = rj[j];
431: }
432: }
433: PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
434: nzeros += cnt;
435: ia[i + 1] = nzeros;
436: }
438: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
439: PetscCall(MatCreate(comm, &B));
440: PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
441: PetscCall(MatSetType(B, type));
442: PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));
444: if (reuse == MAT_INPLACE_MATRIX) {
445: PetscCall(MatHeaderReplace(A, &B));
446: } else {
447: *newmat = B;
448: }
449: PetscFunctionReturn(PETSC_SUCCESS);
450: }
452: static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
453: {
454: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
455: PetscInt rStart, rEnd, cStart, cEnd;
457: PetscFunctionBegin;
458: PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
459: PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
460: PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
461: if (!adj->ht) {
462: PetscCall(PetscHSetIJCreate(&adj->ht));
463: PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
464: PetscCall(PetscLayoutSetUp(A->rmap));
465: PetscCall(PetscLayoutSetUp(A->cmap));
466: }
467: for (PetscInt r = 0; r < m; ++r) {
468: PetscHashIJKey key;
470: key.i = rows[r];
471: if (key.i < 0) continue;
472: if ((key.i < rStart) || (key.i >= rEnd)) {
473: PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
474: } else {
475: for (PetscInt c = 0; c < n; ++c) {
476: key.j = cols[c];
477: if (key.j < 0 || key.i == key.j) continue;
478: PetscCall(PetscHSetIJAdd(adj->ht, key));
479: }
480: }
481: }
482: PetscFunctionReturn(PETSC_SUCCESS);
483: }
485: static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
486: {
487: PetscInt nstash, reallocs;
488: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
490: PetscFunctionBegin;
491: if (!adj->ht) {
492: PetscCall(PetscHSetIJCreate(&adj->ht));
493: PetscCall(PetscLayoutSetUp(A->rmap));
494: PetscCall(PetscLayoutSetUp(A->cmap));
495: }
496: PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
497: PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
498: PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
499: PetscFunctionReturn(PETSC_SUCCESS);
500: }
502: static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
503: {
504: PetscScalar *val;
505: PetscInt *row, *col, m, rstart, *rowstarts;
506: PetscInt i, j, ncols, flg, nz;
507: PetscMPIInt n;
508: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
509: PetscHashIter hi;
510: PetscHashIJKey key;
511: PetscHSetIJ ht = adj->ht;
513: PetscFunctionBegin;
514: while (1) {
515: PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
516: if (!flg) break;
518: for (i = 0; i < n;) {
519: /* Identify the consecutive vals belonging to the same row */
520: for (j = i, rstart = row[j]; j < n; j++) {
521: if (row[j] != rstart) break;
522: }
523: if (j < n) ncols = j - i;
524: else ncols = n - i;
525: /* Set all these values with a single function call */
526: PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
527: i = j;
528: }
529: }
530: PetscCall(MatStashScatterEnd_Private(&A->stash));
531: PetscCall(MatStashDestroy_Private(&A->stash));
533: PetscCall(MatGetLocalSize(A, &m, NULL));
534: PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
535: PetscCall(PetscCalloc1(m + 1, &rowstarts));
536: PetscHashIterBegin(ht, hi);
537: for (; !PetscHashIterAtEnd(ht, hi);) {
538: PetscHashIterGetKey(ht, hi, key);
539: rowstarts[key.i - rstart + 1]++;
540: PetscHashIterNext(ht, hi);
541: }
542: for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];
544: PetscCall(PetscHSetIJGetSize(ht, &nz));
545: PetscCall(PetscMalloc1(nz, &col));
546: PetscHashIterBegin(ht, hi);
547: for (; !PetscHashIterAtEnd(ht, hi);) {
548: PetscHashIterGetKey(ht, hi, key);
549: col[rowstarts[key.i - rstart]++] = key.j;
550: PetscHashIterNext(ht, hi);
551: }
552: PetscCall(PetscHSetIJDestroy(&ht));
553: for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
554: rowstarts[0] = 0;
556: for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));
558: adj->i = rowstarts;
559: adj->j = col;
560: adj->nz = rowstarts[m];
561: adj->freeaij = PETSC_TRUE;
562: PetscFunctionReturn(PETSC_SUCCESS);
563: }
565: static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
566: MatGetRow_MPIAdj,
567: NULL,
568: NULL,
569: /* 4*/ NULL,
570: NULL,
571: NULL,
572: NULL,
573: NULL,
574: NULL,
575: /*10*/ NULL,
576: NULL,
577: NULL,
578: NULL,
579: NULL,
580: /*15*/ NULL,
581: MatEqual_MPIAdj,
582: NULL,
583: NULL,
584: NULL,
585: /*20*/ MatAssemblyBegin_MPIAdj,
586: MatAssemblyEnd_MPIAdj,
587: MatSetOption_MPIAdj,
588: NULL,
589: /*24*/ NULL,
590: NULL,
591: NULL,
592: NULL,
593: NULL,
594: /*29*/ NULL,
595: NULL,
596: NULL,
597: NULL,
598: NULL,
599: /*34*/ NULL,
600: NULL,
601: NULL,
602: NULL,
603: NULL,
604: /*39*/ NULL,
605: MatCreateSubMatrices_MPIAdj,
606: NULL,
607: NULL,
608: NULL,
609: /*44*/ NULL,
610: NULL,
611: MatShift_Basic,
612: NULL,
613: NULL,
614: /*49*/ NULL,
615: MatGetRowIJ_MPIAdj,
616: MatRestoreRowIJ_MPIAdj,
617: NULL,
618: NULL,
619: /*54*/ NULL,
620: NULL,
621: NULL,
622: NULL,
623: NULL,
624: /*59*/ NULL,
625: MatDestroy_MPIAdj,
626: MatView_MPIAdj,
627: MatConvertFrom_MPIAdj,
628: NULL,
629: /*64*/ NULL,
630: NULL,
631: NULL,
632: NULL,
633: NULL,
634: /*69*/ NULL,
635: NULL,
636: NULL,
637: NULL,
638: NULL,
639: /*74*/ NULL,
640: NULL,
641: NULL,
642: NULL,
643: NULL,
644: /*79*/ NULL,
645: NULL,
646: NULL,
647: NULL,
648: NULL,
649: /*84*/ NULL,
650: NULL,
651: NULL,
652: NULL,
653: NULL,
654: /*89*/ NULL,
655: NULL,
656: NULL,
657: NULL,
658: NULL,
659: /*94*/ NULL,
660: NULL,
661: NULL,
662: NULL,
663: NULL,
664: /*99*/ NULL,
665: NULL,
666: NULL,
667: NULL,
668: NULL,
669: /*104*/ NULL,
670: NULL,
671: NULL,
672: NULL,
673: NULL,
674: /*109*/ NULL,
675: NULL,
676: NULL,
677: NULL,
678: NULL,
679: /*114*/ NULL,
680: NULL,
681: NULL,
682: NULL,
683: NULL,
684: /*119*/ NULL,
685: NULL,
686: NULL,
687: NULL,
688: NULL,
689: /*124*/ NULL,
690: NULL,
691: NULL,
692: NULL,
693: MatCreateSubMatricesMPI_MPIAdj,
694: /*129*/ NULL,
695: NULL,
696: NULL,
697: NULL,
698: NULL,
699: /*134*/ NULL,
700: NULL,
701: NULL,
702: NULL,
703: NULL,
704: /*139*/ NULL,
705: NULL,
706: NULL,
707: NULL,
708: NULL,
709: /*144*/ NULL,
710: NULL,
711: NULL,
712: NULL,
713: NULL,
714: NULL,
715: /*150*/ NULL,
716: NULL,
717: NULL,
718: NULL,
719: NULL,
720: /*155*/ NULL,
721: NULL};
723: static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
724: {
725: Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
726: PetscBool useedgeweights;
728: PetscFunctionBegin;
729: PetscCall(PetscLayoutSetUp(B->rmap));
730: PetscCall(PetscLayoutSetUp(B->cmap));
731: if (values) useedgeweights = PETSC_TRUE;
732: else useedgeweights = PETSC_FALSE;
733: /* make sure every process agrees on whether edge weights are being used */
734: PetscCallMPI(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));
736: if (PetscDefined(USE_DEBUG)) {
737: PetscInt ii;
739: PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
740: for (ii = 1; ii < B->rmap->n; ii++) {
741: PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
742: }
743: for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index j[%" PetscInt_FMT "]=%" PetscInt_FMT " is out of range [0, %" PetscInt_FMT ")", ii, j[ii], B->cmap->N);
744: }
745: b->j = j;
746: b->i = i;
747: b->values = values;
749: b->nz = i[B->rmap->n];
750: b->diag = NULL;
751: b->symmetric = PETSC_FALSE;
752: b->freeaij = PETSC_TRUE;
754: B->ops->assemblybegin = NULL;
755: B->ops->assemblyend = NULL;
756: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
757: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
758: PetscCall(MatStashDestroy_Private(&B->stash));
759: PetscFunctionReturn(PETSC_SUCCESS);
760: }
762: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
763: {
764: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
765: const PetscInt *ranges;
766: MPI_Comm acomm, bcomm;
767: MPI_Group agroup, bgroup;
768: PetscMPIInt i, rank, size, nranks, *ranks;
770: PetscFunctionBegin;
771: *B = NULL;
772: PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
773: PetscCallMPI(MPI_Comm_size(acomm, &size));
774: PetscCallMPI(MPI_Comm_rank(acomm, &rank));
775: PetscCall(MatGetOwnershipRanges(A, &ranges));
776: for (i = 0, nranks = 0; i < size; i++) {
777: if (ranges[i + 1] - ranges[i] > 0) nranks++;
778: }
779: if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
780: PetscCall(PetscObjectReference((PetscObject)A));
781: *B = A;
782: PetscFunctionReturn(PETSC_SUCCESS);
783: }
785: PetscCall(PetscMalloc1(nranks, &ranks));
786: for (i = 0, nranks = 0; i < size; i++) {
787: if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
788: }
789: PetscCallMPI(MPI_Comm_group(acomm, &agroup));
790: PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
791: PetscCall(PetscFree(ranks));
792: PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
793: PetscCallMPI(MPI_Group_free(&agroup));
794: PetscCallMPI(MPI_Group_free(&bgroup));
795: if (bcomm != MPI_COMM_NULL) {
796: PetscInt m, N;
797: Mat_MPIAdj *b;
798: PetscCall(MatGetLocalSize(A, &m, NULL));
799: PetscCall(MatGetSize(A, NULL, &N));
800: PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
801: b = (Mat_MPIAdj *)(*B)->data;
802: b->freeaij = PETSC_FALSE;
803: PetscCallMPI(MPI_Comm_free(&bcomm));
804: }
805: PetscFunctionReturn(PETSC_SUCCESS);
806: }
808: static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
809: {
810: PetscInt M, N, *II, *J, NZ, nz, m, nzstart, i;
811: PetscInt *Values = NULL;
812: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
813: PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;
815: PetscFunctionBegin;
816: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
817: PetscCall(MatGetSize(A, &M, &N));
818: PetscCall(MatGetLocalSize(A, &m, NULL));
819: nz = adj->nz;
820: PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " does not match i[m] %" PetscInt_FMT, nz, adj->i[m]);
821: PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
823: PetscCall(PetscMPIIntCast(nz, &mnz));
824: PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
825: PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
826: dispnz[0] = 0;
827: for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
828: if (adj->values) {
829: PetscCall(PetscMalloc1(NZ, &Values));
830: PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
831: }
832: PetscCall(PetscMalloc1(NZ, &J));
833: PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
834: PetscCall(PetscFree2(allnz, dispnz));
835: PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
836: nzstart -= nz;
837: /* shift the i[] values so they will be correct after being received */
838: for (i = 0; i < m; i++) adj->i[i] += nzstart;
839: PetscCall(PetscMalloc1(M + 1, &II));
840: PetscCall(PetscMPIIntCast(m, &mm));
841: PetscCall(PetscMalloc2(size, &allm, size, &dispm));
842: PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
843: dispm[0] = 0;
844: for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
845: PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
846: PetscCall(PetscFree2(allm, dispm));
847: II[M] = NZ;
848: /* shift the i[] values back */
849: for (i = 0; i < m; i++) adj->i[i] -= nzstart;
850: PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
851: PetscFunctionReturn(PETSC_SUCCESS);
852: }
854: static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
855: {
856: PetscInt M, N, *II, *J, NZ, nz, m, nzstart, i;
857: PetscInt *Values = NULL;
858: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
859: PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;
861: PetscFunctionBegin;
862: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
863: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
864: PetscCall(MatGetSize(A, &M, &N));
865: PetscCall(MatGetLocalSize(A, &m, NULL));
866: nz = adj->nz;
867: PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " does not match i[m] %" PetscInt_FMT, nz, adj->i[m]);
868: PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
870: PetscCall(PetscMPIIntCast(nz, &mnz));
871: if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
872: PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
873: if (!rank) {
874: dispnz[0] = 0;
875: for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
876: if (adj->values) {
877: PetscCall(PetscMalloc1(NZ, &Values));
878: PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
879: }
880: PetscCall(PetscMalloc1(NZ, &J));
881: PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
882: PetscCall(PetscFree2(allnz, dispnz));
883: } else {
884: if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
885: PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
886: }
887: PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
888: nzstart -= nz;
889: /* shift the i[] values so they will be correct after being received */
890: for (i = 0; i < m; i++) adj->i[i] += nzstart;
891: PetscCall(PetscMPIIntCast(m, &mm));
892: if (!rank) {
893: PetscCall(PetscMalloc1(M + 1, &II));
894: PetscCall(PetscMalloc2(size, &allm, size, &dispm));
895: PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
896: dispm[0] = 0;
897: for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
898: PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
899: PetscCall(PetscFree2(allm, dispm));
900: II[M] = NZ;
901: } else {
902: PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
903: PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
904: }
905: /* shift the i[] values back */
906: for (i = 0; i < m; i++) adj->i[i] -= nzstart;
907: if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
908: PetscFunctionReturn(PETSC_SUCCESS);
909: }
911: /*@
912: MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
914: Collective
916: Input Parameter:
917: . A - original `MATMPIADJ` matrix
919: Output Parameter:
920: . B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`
922: Level: developer
924: Note:
925: The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.
927: Developer Note:
928: This function is mostly intended for internal use by mesh partitioning packages, such as ParMETIS, that require every process to own at least one row.
930: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
931: @*/
932: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
933: {
934: PetscFunctionBegin;
936: PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
937: PetscFunctionReturn(PETSC_SUCCESS);
938: }
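/*
  Usage sketch (illustrative, not part of the original source): `A` stands for an assembled
  MATMPIADJ matrix. B comes back NULL on ranks that own zero rows of A, and because the
  i/j/values arrays are shared rather than copied, B must be destroyed before A.

    Mat B;

    PetscCall(MatMPIAdjCreateNonemptySubcommMat(A, &B));
    if (B) {
      PetscCall(MatView(B, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)B))));
      PetscCall(MatDestroy(&B));
    }
*/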
940: /*MC
941: MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
942: intended for constructing orderings and partitionings.
944: Level: beginner
946: Note:
947: You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
948: by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`
950: .seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
951: M*/
952: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
953: {
954: Mat_MPIAdj *b;
956: PetscFunctionBegin;
957: PetscCall(PetscNew(&b));
958: B->data = (void *)b;
959: B->ops[0] = MatOps_Values;
960: B->assembled = PETSC_FALSE;
961: B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */
963: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
964: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
965: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
966: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
967: PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
968: PetscFunctionReturn(PETSC_SUCCESS);
969: }
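/*
  Usage sketch (illustrative, not part of the original source): building a MATMPIADJ through
  MatSetValues() plus assembly, the path mentioned in the note above, instead of passing CSR
  arrays. The 2-rows-per-rank sizes and the single inserted entry are made up for the example;
  diagonal entries are silently dropped by MatSetValues_MPIAdj().

    Mat         A;
    PetscInt    rstart, row, col = 0;
    PetscScalar w = 1.0;

    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetSizes(A, 2, 2, PETSC_DETERMINE, PETSC_DETERMINE));
    PetscCall(MatSetType(A, MATMPIADJ));
    PetscCall(MatSetUp(A));                                           sets up the row/column layouts
    PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
    row = rstart;
    PetscCall(MatSetValues(A, 1, &row, 1, &col, &w, INSERT_VALUES));  entry (rstart, 0)
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatDestroy(&A));
*/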
971: /*@
972: MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on each process (needed by sequential partitioners)
974: Logically Collective
976: Input Parameter:
977: . A - the matrix
979: Output Parameter:
980: . B - the same matrix on all processes
982: Level: intermediate
984: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
985: @*/
986: PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
987: {
988: PetscFunctionBegin;
989: PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
990: PetscFunctionReturn(PETSC_SUCCESS);
991: }
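/*
  Usage sketch (illustrative, not part of the original source): `A` stands for an assembled
  MATMPIADJ matrix; after the call every rank holds its own complete copy of the graph.

    Mat Aseq;

    PetscCall(MatMPIAdjToSeq(A, &Aseq));
    PetscCall(MatDestroy(&Aseq));
*/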
993: /*@
994: MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on rank zero (needed by sequential partitioners)
996: Logically Collective
998: Input Parameter:
999: . A - the matrix
1001: Output Parameter:
1002: . B - the same matrix on rank zero, not set on other ranks
1004: Level: intermediate
1006: Note:
1007: Compared to `MatMPIAdjToSeq()`, this routine has an advantage on systems with multiple MPI ranks per node: only one copy of the matrix
1008: is stored on the first node, instead of one copy per rank. This allows much larger parallel graphs to be
1009: partitioned sequentially.
1011: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1012: @*/
1013: PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
1014: {
1015: PetscFunctionBegin;
1016: PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
1017: PetscFunctionReturn(PETSC_SUCCESS);
1018: }
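/*
  Usage sketch (illustrative, not part of the original source): as above, but only rank 0
  receives the gathered graph, so B is only valid (and only destroyed) there.

    Mat         Aseq = NULL;
    PetscMPIInt rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
    PetscCall(MatMPIAdjToSeqRankZero(A, &Aseq));
    if (rank == 0) PetscCall(MatDestroy(&Aseq));
*/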
1020: /*@
1021: MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries
1023: Logically Collective
1025: Input Parameters:
1026: + B - the matrix
1027: . i - the indices into `j` for the start of each row
1028: . j - the column indices for each row (sorted for each row).
1029: The indices in `i` and `j` start with zero (NOT with one).
1030: - values - [use `NULL` if not provided] edge weights
1032: Level: intermediate
1034: Notes:
1035: The indices in `i` and `j` start with zero (NOT with one).
1037: You must NOT free the `i`, `j`, and `values` arrays yourself. PETSc will free them
1038: when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1040: You should not include the matrix diagonal elements.
1042: If you already have a matrix, you can create its adjacency matrix by a call
1043: to `MatConvert()`, specifying a type of `MATMPIADJ`.
1045: Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1047: Fortran Note:
1048: From Fortran, the indices and values are copied, so the array space need not be allocated with `PetscMalloc()`.
1050: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
1051: @*/
1052: PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
1053: {
1054: PetscFunctionBegin;
1055: PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
1056: PetscFunctionReturn(PETSC_SUCCESS);
1057: }
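/*
  Usage sketch (illustrative, not part of the original source): attaching CSR-style arrays to
  a MATMPIADJ matrix. Each rank owns two vertices with one (0-based, off-diagonal, sorted)
  edge per row; the arrays are obtained from PetscMalloc1() because the matrix takes
  ownership of them and frees them when it is destroyed.

    Mat       B;
    PetscInt *ia, *ja;

    PetscCall(PetscMalloc1(3, &ia));
    PetscCall(PetscMalloc1(2, &ja));
    ia[0] = 0; ia[1] = 1; ia[2] = 2;              two local rows, one entry each
    ja[0] = 1; ja[1] = 0;                         global column indices of the two edges
    PetscCall(MatCreate(PETSC_COMM_WORLD, &B));
    PetscCall(MatSetSizes(B, 2, 2, PETSC_DETERMINE, PETSC_DETERMINE));
    PetscCall(MatSetType(B, MATMPIADJ));
    PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, NULL));
    PetscCall(MatDestroy(&B));
*/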
1059: /*@C
1060: MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1061: The matrix need not have numerical values associated with it; it is
1062: intended for ordering (to reduce bandwidth, etc.) and partitioning.
1064: Collective
1066: Input Parameters:
1067: + comm - MPI communicator
1068: . m - number of local rows
1069: . N - number of global columns
1070: . i - the indices into `j` for the start of each row
1071: . j - the column indices for each row (sorted for each row).
1072: - values - optional edge weights; use `NULL` if not provided
1074: Output Parameter:
1075: . A - the matrix
1077: Level: intermediate
1079: Notes:
1080: The indices in `i` and `j` start with zero (NOT with one).
1082: You must NOT free the `i`, `j`, and `values` arrays yourself. PETSc will free them
1083: when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1085: You should not include the matrix diagonals.
1087: If you already have a matrix, you can create its adjacency matrix by a call
1088: to `MatConvert()`, specifying a type of `MATMPIADJ`.
1090: Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1092: Fortran Note:
1093: From Fortran, the arrays `i`, `j`, and `values` must be retained by the user until `A` is destroyed
1095: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1096: @*/
1097: PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
1098: {
1099: PetscFunctionBegin;
1100: PetscCall(MatCreate(comm, A));
1101: PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
1102: PetscCall(MatSetType(*A, MATMPIADJ));
1103: PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
1104: PetscFunctionReturn(PETSC_SUCCESS);
1105: }
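/*
  Usage sketch (illustrative, not part of the original source): the convenience form of the
  preallocation example above, creating the matrix and handing over the (PetscMalloc1()-allocated,
  0-based, diagonal-free) arrays in a single call. N is the global number of vertices; the
  two-vertices-per-rank layout is made up for the example.

    Mat         A;
    PetscInt   *ia, *ja, N;
    PetscMPIInt size;

    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    N = 2 * size;                                 square graph: global columns = global rows
    PetscCall(PetscMalloc1(3, &ia));
    PetscCall(PetscMalloc1(2, &ja));
    ia[0] = 0; ia[1] = 1; ia[2] = 2;
    ja[0] = 1; ja[1] = 0;
    PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, 2, N, ia, ja, NULL, &A));
    PetscCall(MatDestroy(&A));                    the matrix frees ia and ja here
*/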