Actual source code: mpiadj.c
1: /*
2: Defines the basic matrix operations for the MATMPIADJ adjacency-list matrix data structure.
3: */
4: #include <../src/mat/impls/adj/mpi/mpiadj.h>
5: #include <petscsf.h>
7: /*
8: The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
9: */
10: static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
11: {
12: PetscInt nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
13: PetscInt nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
14: PetscInt *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
15: const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
16: PetscMPIInt owner;
17: Mat_MPIAdj *a = (Mat_MPIAdj *)adj->data;
18: PetscLayout rmap;
19: MPI_Comm comm;
20: PetscSF sf;
21: PetscSFNode *iremote;
22: PetscBool done;
24: PetscFunctionBegin;
25: PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
26: PetscCall(MatGetLayouts(adj, &rmap, NULL));
27: PetscCall(ISGetLocalSize(irows, &nlrows_is));
28: PetscCall(ISGetIndices(irows, &irows_indices));
29: PetscCall(PetscMalloc1(nlrows_is, &iremote));
30: /* construct the SF graph */
31: nleaves = nlrows_is;
32: for (i = 0; i < nlrows_is; i++) {
33: owner = -1;
34: rlocalindex = -1;
35: PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
36: iremote[i].rank = owner;
37: iremote[i].index = rlocalindex;
38: }
39: PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
40: PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
41: nroots = nlrows_mat;
42: for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
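/* first SF: one leaf per requested row, rooted at the owning process; it broadcasts each requested row's length (ncols) and its offset into the owner's adjncy[] (xadj) */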
43: PetscCall(PetscSFCreate(comm, &sf));
44: PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
45: PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
46: PetscCall(PetscSFSetFromOptions(sf));
47: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
48: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
49: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
50: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
51: PetscCall(PetscSFDestroy(&sf));
52: Ncols_recv = 0;
53: for (i = 0; i < nlrows_is; i++) {
54: Ncols_recv += ncols_recv[i];
55: ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
56: }
57: Ncols_send = 0;
58: for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
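/* second SF: one leaf per incoming nonzero, rooted at the owner's adjncy[] entries; it broadcasts the column indices (and the edge weights, if present) of the requested rows */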
59: PetscCall(PetscCalloc1(Ncols_recv, &iremote));
60: PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
61: nleaves = Ncols_recv;
62: Ncols_recv = 0;
63: for (i = 0; i < nlrows_is; i++) {
64: PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
65: for (j = 0; j < ncols_recv[i]; j++) {
66: iremote[Ncols_recv].rank = owner;
67: iremote[Ncols_recv++].index = xadj_recv[i] + j;
68: }
69: }
70: PetscCall(ISRestoreIndices(irows, &irows_indices));
71: /* allocate the receive buffer for edge weights, if edge weights are in use */
72: if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
73: nroots = Ncols_send;
74: PetscCall(PetscSFCreate(comm, &sf));
75: PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
76: PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
77: PetscCall(PetscSFSetFromOptions(sf));
78: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
79: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
80: if (a->useedgeweights) {
81: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
82: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
83: }
84: PetscCall(PetscSFDestroy(&sf));
85: PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
86: PetscCall(ISGetLocalSize(icols, &icols_n));
87: PetscCall(ISGetIndices(icols, &icols_indices));
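/* keep only the received columns that also appear in icols; dropped entries are marked with -1 and rnclos counts the survivors (icols is assumed sorted, as required by PetscFindInt()) */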
88: rnclos = 0;
89: for (i = 0; i < nlrows_is; i++) {
90: for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
91: PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
92: if (loc < 0) {
93: adjncy_recv[j] = -1;
94: if (a->useedgeweights) values_recv[j] = -1;
95: ncols_recv[i]--;
96: } else {
97: rnclos++;
98: }
99: }
100: }
101: PetscCall(ISRestoreIndices(icols, &icols_indices));
102: PetscCall(PetscCalloc1(rnclos, &sadjncy));
103: if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
104: PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
105: rnclos = 0;
106: for (i = 0; i < nlrows_is; i++) {
107: for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
108: if (adjncy_recv[j] < 0) continue;
109: sadjncy[rnclos] = adjncy_recv[j];
110: if (a->useedgeweights) svalues[rnclos] = values_recv[j];
111: rnclos++;
112: }
113: }
114: for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
115: if (sadj_xadj) {
116: *sadj_xadj = sxadj;
117: } else PetscCall(PetscFree(sxadj));
118: if (sadj_adjncy) {
119: *sadj_adjncy = sadjncy;
120: } else PetscCall(PetscFree(sadjncy));
121: if (sadj_values) {
122: if (a->useedgeweights) *sadj_values = svalues;
123: else *sadj_values = NULL;
124: } else {
125: if (a->useedgeweights) PetscCall(PetscFree(svalues));
126: }
127: PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
128: PetscCall(PetscFree(adjncy_recv));
129: if (a->useedgeweights) PetscCall(PetscFree(values_recv));
130: PetscFunctionReturn(PETSC_SUCCESS);
131: }
133: static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
134: {
135: PetscInt i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
136: PetscInt *indices, nindx, j, k, loc;
137: PetscMPIInt issame;
138: const PetscInt *irow_indices, *icol_indices;
139: MPI_Comm scomm_row, scomm_col, scomm_mat;
141: PetscFunctionBegin;
142: nindx = 0;
143: /*
144: * Estimate an upper bound on the workspace needed to hold any row index set plus its column index set
145: */
146: for (i = 0; i < n; i++) {
147: PetscCall(ISGetLocalSize(irow[i], &irow_n));
148: PetscCall(ISGetLocalSize(icol[i], &icol_n));
149: nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
150: }
151: PetscCall(PetscMalloc1(nindx, &indices));
152: /* construct a submat */
153: // if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));
155: for (i = 0; i < n; i++) {
156: if (subcomm) {
157: PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
158: PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
159: PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
160: PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
161: PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
162: PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as the communicator when extracting a parallel submatrix");
163: } else {
164: scomm_row = PETSC_COMM_SELF;
165: }
166: /* get sub-matrix data */
167: sxadj = NULL;
168: sadjncy = NULL;
169: svalues = NULL;
170: PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
171: PetscCall(ISGetLocalSize(irow[i], &irow_n));
172: PetscCall(ISGetLocalSize(icol[i], &icol_n));
173: PetscCall(ISGetIndices(irow[i], &irow_indices));
174: PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
175: PetscCall(ISRestoreIndices(irow[i], &irow_indices));
176: PetscCall(ISGetIndices(icol[i], &icol_indices));
177: PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
178: PetscCall(ISRestoreIndices(icol[i], &icol_indices));
179: nindx = irow_n + icol_n;
180: PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
181: /* renumber the columns into the local numbering given by the sorted, deduplicated union of the row and column indices */
182: for (j = 0; j < irow_n; j++) {
183: for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
184: PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
185: PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find column %" PetscInt_FMT, sadjncy[k]);
186: sadjncy[k] = loc;
187: }
188: }
189: if (scall == MAT_INITIAL_MATRIX) {
190: PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
191: } else {
192: Mat sadj = *submat[i];
193: Mat_MPIAdj *sa = (Mat_MPIAdj *)((sadj)->data);
194: PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
195: PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
196: PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the index sets");
197: PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
198: PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
199: if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
200: PetscCall(PetscFree(sxadj));
201: PetscCall(PetscFree(sadjncy));
202: PetscCall(PetscFree(svalues));
203: }
204: }
205: PetscCall(PetscFree(indices));
206: PetscFunctionReturn(PETSC_SUCCESS);
207: }
209: static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
210: {
211: /* get sub-matrices across a sub-communicator */
212: PetscFunctionBegin;
213: PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
214: PetscFunctionReturn(PETSC_SUCCESS);
215: }
217: static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
218: {
219: PetscFunctionBegin;
220: /* get sub-matrices based on PETSC_COMM_SELF */
221: PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
222: PetscFunctionReturn(PETSC_SUCCESS);
223: }
225: static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
226: {
227: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
228: PetscInt i, j, m = A->rmap->n;
229: const char *name;
230: PetscViewerFormat format;
232: PetscFunctionBegin;
233: PetscCall(PetscObjectGetName((PetscObject)A, &name));
234: PetscCall(PetscViewerGetFormat(viewer, &format));
235: if (format == PETSC_VIEWER_ASCII_INFO) {
236: PetscFunctionReturn(PETSC_SUCCESS);
237: } else {
238: PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
239: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
240: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241: for (i = 0; i < m; i++) {
242: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
243: for (j = a->i[i]; j < a->i[i + 1]; j++) {
244: if (a->values) {
245: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
246: } else {
247: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
248: }
249: }
250: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
251: }
252: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
253: PetscCall(PetscViewerFlush(viewer));
254: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255: }
256: PetscFunctionReturn(PETSC_SUCCESS);
257: }
259: static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
260: {
261: PetscBool iascii;
263: PetscFunctionBegin;
264: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
265: if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
266: PetscFunctionReturn(PETSC_SUCCESS);
267: }
269: static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
270: {
271: Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;
273: PetscFunctionBegin;
274: PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
275: PetscCall(PetscFree(a->diag));
276: if (a->freeaij) {
277: if (a->freeaijwithfree) {
278: if (a->i) free(a->i);
279: if (a->j) free(a->j);
280: } else {
281: PetscCall(PetscFree(a->i));
282: PetscCall(PetscFree(a->j));
283: PetscCall(PetscFree(a->values));
284: }
285: }
286: PetscCall(PetscFree(a->rowvalues));
287: PetscCall(PetscFree(mat->data));
288: PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
289: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
290: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
291: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
292: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
293: PetscFunctionReturn(PETSC_SUCCESS);
294: }
296: static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
297: {
298: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
300: PetscFunctionBegin;
301: switch (op) {
302: case MAT_SYMMETRIC:
303: case MAT_STRUCTURALLY_SYMMETRIC:
304: case MAT_HERMITIAN:
305: case MAT_SPD:
306: a->symmetric = flg;
307: break;
308: case MAT_SYMMETRY_ETERNAL:
309: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
310: case MAT_SPD_ETERNAL:
311: break;
312: default:
313: PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
314: break;
315: }
316: PetscFunctionReturn(PETSC_SUCCESS);
317: }
319: static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
320: {
321: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
323: PetscFunctionBegin;
324: row -= A->rmap->rstart;
325: PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
326: *nz = a->i[row + 1] - a->i[row];
327: if (v) {
328: PetscInt j;
329: if (a->rowvalues_alloc < *nz) {
330: PetscCall(PetscFree(a->rowvalues));
331: a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
332: PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
333: }
334: for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
335: *v = (*nz) ? a->rowvalues : NULL;
336: }
337: if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
338: PetscFunctionReturn(PETSC_SUCCESS);
339: }
341: static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
342: {
343: PetscFunctionBegin;
344: PetscFunctionReturn(PETSC_SUCCESS);
345: }
347: static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
348: {
349: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
350: PetscBool flag;
352: PetscFunctionBegin;
353: /* the matrices differ if the local dimensions or the number of nonzeros differ */
354: if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;
356: /* otherwise compare the row offsets ... */
357: else PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
359: /* ... and, only if those match, the column indices */
360: if (flag) PetscCall(PetscMemcmp(a->j, b->j, (a->nz) * sizeof(PetscInt), &flag));
362: PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
363: PetscFunctionReturn(PETSC_SUCCESS);
364: }
366: static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
367: {
368: PetscInt i;
369: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
370: PetscInt **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
372: PetscFunctionBegin;
373: *m = A->rmap->n;
374: *ia = a->i;
375: *ja = a->j;
376: *done = PETSC_TRUE;
377: if (oshift) {
378: for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
379: for (i = 0; i <= (*m); i++) (*ia)[i]++;
380: }
381: PetscFunctionReturn(PETSC_SUCCESS);
382: }
384: static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
385: {
386: PetscInt i;
387: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
388: PetscInt **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
390: PetscFunctionBegin;
391: PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
392: PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
393: if (oshift) {
394: PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is used you must pass in the inia[] argument");
395: PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is used you must pass in the inja[] argument");
396: for (i = 0; i <= (*m); i++) (*ia)[i]--;
397: for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
398: }
399: PetscFunctionReturn(PETSC_SUCCESS);
400: }
402: static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
403: {
404: Mat B;
405: PetscInt i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
406: const PetscInt *rj;
407: const PetscScalar *ra;
408: MPI_Comm comm;
410: PetscFunctionBegin;
411: PetscCall(MatGetSize(A, NULL, &N));
412: PetscCall(MatGetLocalSize(A, &m, NULL));
413: PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
415: /* count the number of nonzeros per row */
416: for (i = 0; i < m; i++) {
417: PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
418: for (j = 0; j < len; j++) {
419: if (rj[j] == i + rstart) {
420: len--;
421: break;
422: } /* don't count diagonal */
423: }
424: nzeros += len;
425: PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
426: }
428: /* malloc space for nonzeros */
429: PetscCall(PetscMalloc1(nzeros + 1, &a));
430: PetscCall(PetscMalloc1(N + 1, &ia));
431: PetscCall(PetscMalloc1(nzeros + 1, &ja));
433: nzeros = 0;
434: ia[0] = 0;
435: for (i = 0; i < m; i++) {
436: PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
437: cnt = 0;
438: for (j = 0; j < len; j++) {
439: if (rj[j] != i + rstart) { /* if not diagonal */
440: a[nzeros + cnt] = (PetscInt)PetscAbsScalar(ra[j]);
441: ja[nzeros + cnt++] = rj[j];
442: }
443: }
444: PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
445: nzeros += cnt;
446: ia[i + 1] = nzeros;
447: }
449: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
450: PetscCall(MatCreate(comm, &B));
451: PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
452: PetscCall(MatSetType(B, type));
453: PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));
455: if (reuse == MAT_INPLACE_MATRIX) {
456: PetscCall(MatHeaderReplace(A, &B));
457: } else {
458: *newmat = B;
459: }
460: PetscFunctionReturn(PETSC_SUCCESS);
461: }
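/*
  Illustrative sketch (assumed usage, per the notes in the manual pages below): obtain the
  adjacency structure of an existing assembled matrix `A` of another type, with the diagonal
  dropped, by converting it to MATMPIADJ.

    Mat Aadj;

    PetscCall(MatConvert(A, MATMPIADJ, MAT_INITIAL_MATRIX, &Aadj));
    // ... use Aadj with MatGetOrdering() or a MatPartitioning ...
    PetscCall(MatDestroy(&Aadj));
*/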
463: static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
464: {
465: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
466: PetscInt rStart, rEnd, cStart, cEnd;
468: PetscFunctionBegin;
469: PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
470: PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
471: PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
472: if (!adj->ht) {
473: PetscCall(PetscHSetIJCreate(&adj->ht));
474: PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
475: PetscCall(PetscLayoutSetUp(A->rmap));
476: PetscCall(PetscLayoutSetUp(A->cmap));
477: }
478: for (PetscInt r = 0; r < m; ++r) {
479: PetscHashIJKey key;
481: key.i = rows[r];
482: if (key.i < 0) continue;
483: if ((key.i < rStart) || (key.i >= rEnd)) {
484: PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
485: } else {
486: for (PetscInt c = 0; c < n; ++c) {
487: key.j = cols[c];
488: if (key.j < 0 || key.i == key.j) continue;
489: PetscCall(PetscHSetIJAdd(adj->ht, key));
490: }
491: }
492: }
493: PetscFunctionReturn(PETSC_SUCCESS);
494: }
496: static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
497: {
498: PetscInt nstash, reallocs;
499: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
501: PetscFunctionBegin;
502: if (!adj->ht) {
503: PetscCall(PetscHSetIJCreate(&adj->ht));
504: PetscCall(PetscLayoutSetUp(A->rmap));
505: PetscCall(PetscLayoutSetUp(A->cmap));
506: }
507: PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
508: PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
509: PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
510: PetscFunctionReturn(PETSC_SUCCESS);
511: }
513: static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
514: {
515: PetscScalar *val;
516: PetscInt *row, *col, m, rstart, *rowstarts;
517: PetscInt i, j, ncols, flg, nz;
518: PetscMPIInt n;
519: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
520: PetscHashIter hi;
521: PetscHashIJKey key;
522: PetscHSetIJ ht = adj->ht;
524: PetscFunctionBegin;
525: while (1) {
526: PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
527: if (!flg) break;
529: for (i = 0; i < n;) {
530: /* Identify the consecutive vals belonging to the same row */
531: for (j = i, rstart = row[j]; j < n; j++) {
532: if (row[j] != rstart) break;
533: }
534: if (j < n) ncols = j - i;
535: else ncols = n - i;
536: /* Set all these values with a single function call */
537: PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
538: i = j;
539: }
540: }
541: PetscCall(MatStashScatterEnd_Private(&A->stash));
542: PetscCall(MatStashDestroy_Private(&A->stash));
544: PetscCall(MatGetLocalSize(A, &m, NULL));
545: PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
546: PetscCall(PetscCalloc1(m + 1, &rowstarts));
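/* first pass over the hash set: count the number of assembled entries in each local row */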
547: PetscHashIterBegin(ht, hi);
548: for (; !PetscHashIterAtEnd(ht, hi);) {
549: PetscHashIterGetKey(ht, hi, key);
550: rowstarts[key.i - rstart + 1]++;
551: PetscHashIterNext(ht, hi);
552: }
553: for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];
555: PetscCall(PetscHSetIJGetSize(ht, &nz));
556: PetscCall(PetscMalloc1(nz, &col));
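/* second pass: scatter each column index into its row; rowstarts[] is advanced as entries are placed and shifted back below */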
557: PetscHashIterBegin(ht, hi);
558: for (; !PetscHashIterAtEnd(ht, hi);) {
559: PetscHashIterGetKey(ht, hi, key);
560: col[rowstarts[key.i - rstart]++] = key.j;
561: PetscHashIterNext(ht, hi);
562: }
563: PetscCall(PetscHSetIJDestroy(&ht));
564: for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
565: rowstarts[0] = 0;
567: for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));
569: adj->i = rowstarts;
570: adj->j = col;
571: adj->nz = rowstarts[m];
572: adj->freeaij = PETSC_TRUE;
573: PetscFunctionReturn(PETSC_SUCCESS);
574: }
576: static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
577: MatGetRow_MPIAdj,
578: MatRestoreRow_MPIAdj,
579: NULL,
580: /* 4*/ NULL,
581: NULL,
582: NULL,
583: NULL,
584: NULL,
585: NULL,
586: /*10*/ NULL,
587: NULL,
588: NULL,
589: NULL,
590: NULL,
591: /*15*/ NULL,
592: MatEqual_MPIAdj,
593: NULL,
594: NULL,
595: NULL,
596: /*20*/ MatAssemblyBegin_MPIAdj,
597: MatAssemblyEnd_MPIAdj,
598: MatSetOption_MPIAdj,
599: NULL,
600: /*24*/ NULL,
601: NULL,
602: NULL,
603: NULL,
604: NULL,
605: /*29*/ NULL,
606: NULL,
607: NULL,
608: NULL,
609: NULL,
610: /*34*/ NULL,
611: NULL,
612: NULL,
613: NULL,
614: NULL,
615: /*39*/ NULL,
616: MatCreateSubMatrices_MPIAdj,
617: NULL,
618: NULL,
619: NULL,
620: /*44*/ NULL,
621: NULL,
622: MatShift_Basic,
623: NULL,
624: NULL,
625: /*49*/ NULL,
626: MatGetRowIJ_MPIAdj,
627: MatRestoreRowIJ_MPIAdj,
628: NULL,
629: NULL,
630: /*54*/ NULL,
631: NULL,
632: NULL,
633: NULL,
634: NULL,
635: /*59*/ NULL,
636: MatDestroy_MPIAdj,
637: MatView_MPIAdj,
638: MatConvertFrom_MPIAdj,
639: NULL,
640: /*64*/ NULL,
641: NULL,
642: NULL,
643: NULL,
644: NULL,
645: /*69*/ NULL,
646: NULL,
647: NULL,
648: NULL,
649: NULL,
650: /*74*/ NULL,
651: NULL,
652: NULL,
653: NULL,
654: NULL,
655: /*79*/ NULL,
656: NULL,
657: NULL,
658: NULL,
659: NULL,
660: /*84*/ NULL,
661: NULL,
662: NULL,
663: NULL,
664: NULL,
665: /*89*/ NULL,
666: NULL,
667: NULL,
668: NULL,
669: NULL,
670: /*94*/ NULL,
671: NULL,
672: NULL,
673: NULL,
674: NULL,
675: /*99*/ NULL,
676: NULL,
677: NULL,
678: NULL,
679: NULL,
680: /*104*/ NULL,
681: NULL,
682: NULL,
683: NULL,
684: NULL,
685: /*109*/ NULL,
686: NULL,
687: NULL,
688: NULL,
689: NULL,
690: /*114*/ NULL,
691: NULL,
692: NULL,
693: NULL,
694: NULL,
695: /*119*/ NULL,
696: NULL,
697: NULL,
698: NULL,
699: NULL,
700: /*124*/ NULL,
701: NULL,
702: NULL,
703: NULL,
704: MatCreateSubMatricesMPI_MPIAdj,
705: /*129*/ NULL,
706: NULL,
707: NULL,
708: NULL,
709: NULL,
710: /*134*/ NULL,
711: NULL,
712: NULL,
713: NULL,
714: NULL,
715: /*139*/ NULL,
716: NULL,
717: NULL,
718: NULL,
719: NULL,
720: /*144*/ NULL,
721: NULL,
722: NULL,
723: NULL,
724: NULL,
725: NULL,
726: /*150*/ NULL,
727: NULL,
728: NULL,
729: NULL,
730: NULL,
731: NULL};
733: static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
734: {
735: Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
736: PetscBool useedgeweights;
738: PetscFunctionBegin;
739: PetscCall(PetscLayoutSetUp(B->rmap));
740: PetscCall(PetscLayoutSetUp(B->cmap));
741: if (values) useedgeweights = PETSC_TRUE;
742: else useedgeweights = PETSC_FALSE;
743: /* Make sure every process agrees on whether edge weights are being used */
744: PetscCallMPI(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));
746: if (PetscDefined(USE_DEBUG)) {
747: PetscInt ii;
749: PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
750: for (ii = 1; ii < B->rmap->n; ii++) {
751: PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " is negative or smaller than i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
752: }
753: for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index j[%" PetscInt_FMT "]=%" PetscInt_FMT " is out of range [0, %" PetscInt_FMT ")", ii, j[ii], B->cmap->N);
754: }
755: b->j = j;
756: b->i = i;
757: b->values = values;
759: b->nz = i[B->rmap->n];
760: b->diag = NULL;
761: b->symmetric = PETSC_FALSE;
762: b->freeaij = PETSC_TRUE;
764: B->ops->assemblybegin = NULL;
765: B->ops->assemblyend = NULL;
766: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
767: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
768: PetscCall(MatStashDestroy_Private(&B->stash));
769: PetscFunctionReturn(PETSC_SUCCESS);
770: }
772: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
773: {
774: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
775: const PetscInt *ranges;
776: MPI_Comm acomm, bcomm;
777: MPI_Group agroup, bgroup;
778: PetscMPIInt i, rank, size, nranks, *ranks;
780: PetscFunctionBegin;
781: *B = NULL;
782: PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
783: PetscCallMPI(MPI_Comm_size(acomm, &size));
784: PetscCallMPI(MPI_Comm_rank(acomm, &rank));
785: PetscCall(MatGetOwnershipRanges(A, &ranges));
786: for (i = 0, nranks = 0; i < size; i++) {
787: if (ranges[i + 1] - ranges[i] > 0) nranks++;
788: }
789: if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm */
790: PetscCall(PetscObjectReference((PetscObject)A));
791: *B = A;
792: PetscFunctionReturn(PETSC_SUCCESS);
793: }
795: PetscCall(PetscMalloc1(nranks, &ranks));
796: for (i = 0, nranks = 0; i < size; i++) {
797: if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
798: }
799: PetscCallMPI(MPI_Comm_group(acomm, &agroup));
800: PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
801: PetscCall(PetscFree(ranks));
802: PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
803: PetscCallMPI(MPI_Group_free(&agroup));
804: PetscCallMPI(MPI_Group_free(&bgroup));
805: if (bcomm != MPI_COMM_NULL) {
806: PetscInt m, N;
807: Mat_MPIAdj *b;
808: PetscCall(MatGetLocalSize(A, &m, NULL));
809: PetscCall(MatGetSize(A, NULL, &N));
810: PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
811: b = (Mat_MPIAdj *)(*B)->data;
812: b->freeaij = PETSC_FALSE;
813: PetscCallMPI(MPI_Comm_free(&bcomm));
814: }
815: PetscFunctionReturn(PETSC_SUCCESS);
816: }
818: static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
819: {
820: PetscInt M, N, *II, *J, NZ, nz, m, nzstart, i;
821: PetscInt *Values = NULL;
822: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
823: PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;
825: PetscFunctionBegin;
826: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
827: PetscCall(MatGetSize(A, &M, &N));
828: PetscCall(MatGetLocalSize(A, &m, NULL));
829: nz = adj->nz;
830: PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
831: PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
833: PetscCall(PetscMPIIntCast(nz, &mnz));
834: PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
835: PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
836: dispnz[0] = 0;
837: for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
838: if (adj->values) {
839: PetscCall(PetscMalloc1(NZ, &Values));
840: PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
841: }
842: PetscCall(PetscMalloc1(NZ, &J));
843: PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
844: PetscCall(PetscFree2(allnz, dispnz));
845: PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
846: nzstart -= nz;
847: /* shift the i[] values so they will be correct after being received */
848: for (i = 0; i < m; i++) adj->i[i] += nzstart;
849: PetscCall(PetscMalloc1(M + 1, &II));
850: PetscCall(PetscMPIIntCast(m, &mm));
851: PetscCall(PetscMalloc2(size, &allm, size, &dispm));
852: PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
853: dispm[0] = 0;
854: for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
855: PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
856: PetscCall(PetscFree2(allm, dispm));
857: II[M] = NZ;
858: /* shift the i[] values back */
859: for (i = 0; i < m; i++) adj->i[i] -= nzstart;
860: PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
861: PetscFunctionReturn(PETSC_SUCCESS);
862: }
864: static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
865: {
866: PetscInt M, N, *II, *J, NZ, nz, m, nzstart, i;
867: PetscInt *Values = NULL;
868: Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
869: PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;
871: PetscFunctionBegin;
872: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
873: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
874: PetscCall(MatGetSize(A, &M, &N));
875: PetscCall(MatGetLocalSize(A, &m, NULL));
876: nz = adj->nz;
877: PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
878: PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
880: PetscCall(PetscMPIIntCast(nz, &mnz));
881: if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
882: PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
883: if (!rank) {
884: dispnz[0] = 0;
885: for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
886: if (adj->values) {
887: PetscCall(PetscMalloc1(NZ, &Values));
888: PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
889: }
890: PetscCall(PetscMalloc1(NZ, &J));
891: PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
892: PetscCall(PetscFree2(allnz, dispnz));
893: } else {
894: if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
895: PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
896: }
897: PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
898: nzstart -= nz;
899: /* shift the i[] values so they will be correct after being received */
900: for (i = 0; i < m; i++) adj->i[i] += nzstart;
901: PetscCall(PetscMPIIntCast(m, &mm));
902: if (!rank) {
903: PetscCall(PetscMalloc1(M + 1, &II));
904: PetscCall(PetscMalloc2(size, &allm, size, &dispm));
905: PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
906: dispm[0] = 0;
907: for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
908: PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
909: PetscCall(PetscFree2(allm, dispm));
910: II[M] = NZ;
911: } else {
912: PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
913: PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
914: }
915: /* shift the i[] values back */
916: for (i = 0; i < m; i++) adj->i[i] -= nzstart;
917: if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
918: PetscFunctionReturn(PETSC_SUCCESS);
919: }
921: /*@
922: MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
924: Collective
926: Input Parameter:
927: . A - original `MATMPIADJ` matrix
929: Output Parameter:
930: . B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`
932: Level: developer
934: Note:
935: The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.
937: Developer Note:
938: This function is mostly intended for internal use by mesh partitioning packages, such as ParMETIS, that require every process to own at least one row.
940: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
941: @*/
942: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
943: {
944: PetscFunctionBegin;
946: PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
947: PetscFunctionReturn(PETSC_SUCCESS);
948: }
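/*
  Illustrative usage sketch (not part of the manual page above; `A` is assumed to be an
  existing assembled MATMPIADJ): partitioning packages that cannot handle ranks with no rows
  can work on the subcommunicator matrix when it exists. Since the arrays are shared, `Asub`
  must be destroyed before `A`.

    Mat Asub;

    PetscCall(MatMPIAdjCreateNonemptySubcommMat(A, &Asub));
    if (Asub) {
      // ... hand Asub to the partitioner on its smaller communicator ...
      PetscCall(MatDestroy(&Asub));
    }
    PetscCall(MatDestroy(&A));
*/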
950: /*MC
951: MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
952: intended for use in constructing orderings and partitionings.
954: Level: beginner
956: Note:
957: You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
958: by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`
960: .seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
961: M*/
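/*
  Illustrative sketch of the MatSetValues() assembly path mentioned above (assumptions: a
  standard PETSc program with PetscInitialize() already called; the sizes and the ring
  connectivity are arbitrary). The values passed to MatSetValues() only mark the nonzero
  structure here; they are not stored by this assembly path.

    Mat         G;
    PetscInt    N, rstart, rend, row, col[2];
    PetscScalar w[2] = {1.0, 1.0};

    PetscCall(MatCreate(PETSC_COMM_WORLD, &G));
    PetscCall(MatSetSizes(G, 2, 2, PETSC_DETERMINE, PETSC_DETERMINE)); // two vertices per process
    PetscCall(MatSetType(G, MATMPIADJ));
    PetscCall(MatSetUp(G)); // sets up the layouts so the ownership range below is valid
    PetscCall(MatGetSize(G, &N, NULL));
    PetscCall(MatGetOwnershipRange(G, &rstart, &rend));
    for (row = rstart; row < rend; row++) { // ring graph: connect each vertex to its two neighbours
      col[0] = (row + 1) % N;
      col[1] = (row + N - 1) % N;
      PetscCall(MatSetValues(G, 1, &row, 2, col, w, INSERT_VALUES));
    }
    PetscCall(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
    PetscCall(MatDestroy(&G));
*/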
962: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
963: {
964: Mat_MPIAdj *b;
966: PetscFunctionBegin;
967: PetscCall(PetscNew(&b));
968: B->data = (void *)b;
969: B->ops[0] = MatOps_Values;
970: B->assembled = PETSC_FALSE;
971: B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */
973: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
974: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
975: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
976: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
977: PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
978: PetscFunctionReturn(PETSC_SUCCESS);
979: }
981: /*@
982: MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on each process (needed by sequential partitioners)
984: Logically Collective
986: Input Parameter:
987: . A - the matrix
989: Output Parameter:
990: . B - the same matrix on all processes
992: Level: intermediate
994: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
995: @*/
996: PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
997: {
998: PetscFunctionBegin;
999: PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
1000: PetscFunctionReturn(PETSC_SUCCESS);
1001: }
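/*
  Illustrative usage sketch (assumes `A` is an existing parallel MATMPIADJ): gather the whole
  graph onto every process, e.g. to run a sequential partitioner redundantly.

    Mat Aseq;

    PetscCall(MatMPIAdjToSeq(A, &Aseq));
    // ... every rank now holds the complete graph in Aseq ...
    PetscCall(MatDestroy(&Aseq));
*/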
1003: /*@
1004: MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on rank zero (needed by sequential partitioners)
1006: Logically Collective
1008: Input Parameter:
1009: . A - the matrix
1011: Output Parameter:
1012: . B - the same matrix on rank zero, not set on other ranks
1014: Level: intermediate
1016: Note:
1017: This routine is advantageous on systems with multiple MPI ranks per node, since only one copy of the matrix
1018: is stored on the first node instead of one copy per rank. This allows much larger parallel graphs to be
1019: partitioned sequentially.
1021: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1022: @*/
1023: PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
1024: {
1025: PetscFunctionBegin;
1026: PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
1027: PetscFunctionReturn(PETSC_SUCCESS);
1028: }
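/*
  Illustrative usage sketch (assumes `A` is an existing parallel MATMPIADJ): only rank 0
  receives the gathered graph, so its use and destruction are guarded by a rank test.

    Mat         Aseq = NULL;
    PetscMPIInt rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
    PetscCall(MatMPIAdjToSeqRankZero(A, &Aseq));
    if (rank == 0) {
      // ... partition the complete graph sequentially on rank 0 ...
      PetscCall(MatDestroy(&Aseq));
    }
*/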
1030: /*@
1031: MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1033: Logically Collective
1035: Input Parameters:
1036: + B - the matrix
1037: . i - the indices into `j` for the start of each row
1038: . j - the column indices for each row (sorted for each row).
1039: The indices in `i` and `j` start with zero (NOT with one).
1040: - values - [use `NULL` if not provided] edge weights
1042: Level: intermediate
1044: Notes:
1045: The indices in `i` and `j` start with zero (NOT with one).
1047: You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1048: when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1050: You should not include the matrix diagonal elements.
1052: If you already have a matrix, you can create its adjacency matrix by a call
1053: to `MatConvert()`, specifying a type of `MATMPIADJ`.
1055: Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1057: Fortran Note:
1058: From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.
1060: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
1061: @*/
1062: PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
1063: {
1064: PetscFunctionBegin;
1065: PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
1066: PetscFunctionReturn(PETSC_SUCCESS);
1067: }
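/*
  Minimal sketch of the preallocation path (assumed usage, equivalent to what
  MatCreateMPIAdj() below does; run with at least two MPI processes so the single edge is
  off-diagonal). The arrays must come from PetscMalloc1() because the matrix takes ownership
  and frees them when it is destroyed.

    Mat         B;
    PetscInt   *ia, *ja;
    PetscMPIInt rank, size;

    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    PetscCall(PetscMalloc1(2, &ia)); // one local row -> two row offsets
    PetscCall(PetscMalloc1(1, &ja)); // one edge for that row
    ia[0] = 0;
    ia[1] = 1;
    ja[0] = (rank + 1) % size; // connect local vertex "rank" to the next vertex
    PetscCall(MatCreate(PETSC_COMM_WORLD, &B));
    PetscCall(MatSetSizes(B, 1, PETSC_DETERMINE, PETSC_DETERMINE, size));
    PetscCall(MatSetType(B, MATMPIADJ));
    PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, NULL)); // B now owns ia and ja
    PetscCall(MatDestroy(&B));
*/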
1069: /*@C
1070: MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1071: The matrix need not have numerical values associated with it; it is
1072: intended for ordering (to reduce bandwidth, etc.) and partitioning.
1074: Collective
1076: Input Parameters:
1077: + comm - MPI communicator
1078: . m - number of local rows
1079: . N - number of global columns
1080: . i - the indices into `j` for the start of each row
1081: . j - the column indices for each row (sorted for each row).
1082: - values - the values, optional, use `NULL` if not provided
1084: Output Parameter:
1085: . A - the matrix
1087: Level: intermediate
1089: Notes:
1090: The indices in `i` and `j` start with zero (NOT with one).
1092: You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1093: when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1095: You should not include the matrix diagonals.
1097: If you already have a matrix, you can create its adjacency matrix by a call
1098: to `MatConvert()`, specifying a type of `MATMPIADJ`.
1100: Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1102: Fortran Note:
1103: From Fortran the arrays `i`, `j`, and `values` must be retained by the user until `A` is destroyed
1105: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1106: @*/
1107: PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
1108: {
1109: PetscFunctionBegin;
1110: PetscCall(MatCreate(comm, A));
1111: PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
1112: PetscCall(MatSetType(*A, MATMPIADJ));
1113: PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
1114: PetscFunctionReturn(PETSC_SUCCESS);
1115: }
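/*
  Illustrative sketch (assumed usage): each process contributes nv local vertices of a
  directed global ring; ia/ja are the zero-based CSR arrays described in the manual page
  above (there named i and j), allocated with PetscMalloc1() and handed over to the matrix.

    Mat         G;
    PetscInt   *ia, *ja, nv = 2, N, rstart, r;
    PetscMPIInt rank, size;

    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    N      = nv * size;
    rstart = nv * rank;
    PetscCall(PetscMalloc1(nv + 1, &ia));
    PetscCall(PetscMalloc1(nv, &ja));
    for (r = 0; r < nv; r++) { // one outgoing edge per vertex, no diagonal entry
      ia[r] = r;
      ja[r] = (rstart + r + 1) % N;
    }
    ia[nv] = nv;
    PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, nv, N, ia, ja, NULL, &G));
    PetscCall(MatDestroy(&G)); // also frees ia and ja
*/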