Actual source code: mpisbaij.c
1: #include <../src/mat/impls/baij/mpi/mpibaij.h>
2: #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
3: #include <../src/mat/impls/sbaij/seq/sbaij.h>
4: #include <petscblaslapack.h>
5: #include <petscsf.h>
7: static PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
8: {
9: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
11: PetscFunctionBegin;
12: PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ",Cols=%" PetscInt_FMT, mat->rmap->N, mat->cmap->N));
13: PetscCall(MatStashDestroy_Private(&mat->stash));
14: PetscCall(MatStashDestroy_Private(&mat->bstash));
15: PetscCall(MatDestroy(&baij->A));
16: PetscCall(MatDestroy(&baij->B));
17: #if defined(PETSC_USE_CTABLE)
18: PetscCall(PetscHMapIDestroy(&baij->colmap));
19: #else
20: PetscCall(PetscFree(baij->colmap));
21: #endif
22: PetscCall(PetscFree(baij->garray));
23: PetscCall(VecDestroy(&baij->lvec));
24: PetscCall(VecScatterDestroy(&baij->Mvctx));
25: PetscCall(VecDestroy(&baij->slvec0));
26: PetscCall(VecDestroy(&baij->slvec0b));
27: PetscCall(VecDestroy(&baij->slvec1));
28: PetscCall(VecDestroy(&baij->slvec1a));
29: PetscCall(VecDestroy(&baij->slvec1b));
30: PetscCall(VecScatterDestroy(&baij->sMvctx));
31: PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));
32: PetscCall(PetscFree(baij->barray));
33: PetscCall(PetscFree(baij->hd));
34: PetscCall(VecDestroy(&baij->diag));
35: PetscCall(VecDestroy(&baij->bb1));
36: PetscCall(VecDestroy(&baij->xx1));
37: #if defined(PETSC_USE_REAL_MAT_SINGLE)
38: PetscCall(PetscFree(baij->setvaluescopy));
39: #endif
40: PetscCall(PetscFree(baij->in_loc));
41: PetscCall(PetscFree(baij->v_loc));
42: PetscCall(PetscFree(baij->rangebs));
43: PetscCall(PetscFree(mat->data));
45: PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
46: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatStoreValues_C", NULL));
47: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatRetrieveValues_C", NULL));
48: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocation_C", NULL));
49: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocationCSR_C", NULL));
50: #if defined(PETSC_HAVE_ELEMENTAL)
51: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_elemental_C", NULL));
52: #endif
53: #if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
54: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_scalapack_C", NULL));
55: #endif
56: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpiaij_C", NULL));
57: PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpibaij_C", NULL));
58: PetscFunctionReturn(PETSC_SUCCESS);
59: }
61: /* defines MatSetValues_MPI_Hash(), MatAssemblyBegin_MPI_Hash(), MatAssemblyEnd_MPI_Hash(), MatSetUp_MPI_Hash() */
62: #define TYPE SBAIJ
63: #define TYPE_SBAIJ
64: #include "../src/mat/impls/aij/mpi/mpihashmat.h"
65: #undef TYPE
66: #undef TYPE_SBAIJ
68: #if defined(PETSC_HAVE_ELEMENTAL)
69: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat, MatType, MatReuse, Mat *);
70: #endif
71: #if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
72: PETSC_INTERN PetscErrorCode MatConvert_SBAIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
73: #endif
75: /* This could be moved to matimpl.h */
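/*
   MatPreallocateWithMats_Private() below sets the preallocation of B from the nonzero patterns of
   the matrices X[] (optionally symmetrized) by doing a dry-run insertion into a MATPREALLOCATOR
   matrix. A minimal caller-side sketch of that general pattern (illustrative only; comm, the sizes
   m, n, M, N, and the target matrix B stand for whatever the caller already has):

     Mat pre;
     PetscCall(MatCreate(comm, &pre));
     PetscCall(MatSetType(pre, MATPREALLOCATOR));
     PetscCall(MatSetSizes(pre, m, n, M, N));
     PetscCall(MatSetUp(pre));
     // ... MatSetValues(pre, ...) with the same (row, col) pairs that will later go into B ...
     PetscCall(MatAssemblyBegin(pre, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(pre, MAT_FINAL_ASSEMBLY));
     PetscCall(MatPreallocatorPreallocate(pre, PETSC_TRUE, B));
     PetscCall(MatDestroy(&pre));
*/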
76: static PetscErrorCode MatPreallocateWithMats_Private(Mat B, PetscInt nm, Mat X[], PetscBool symm[], PetscBool fill)
77: {
78: Mat preallocator;
79: PetscInt r, rstart, rend;
80: PetscInt bs, i, m, n, M, N;
81: PetscBool cong = PETSC_TRUE;
83: PetscFunctionBegin;
86: for (i = 0; i < nm; i++) {
88: PetscCall(PetscLayoutCompare(B->rmap, X[i]->rmap, &cong));
89: PetscCheck(cong, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for different layouts");
90: }
92: PetscCall(MatGetBlockSize(B, &bs));
93: PetscCall(MatGetSize(B, &M, &N));
94: PetscCall(MatGetLocalSize(B, &m, &n));
95: PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &preallocator));
96: PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
97: PetscCall(MatSetBlockSize(preallocator, bs));
98: PetscCall(MatSetSizes(preallocator, m, n, M, N));
99: PetscCall(MatSetUp(preallocator));
100: PetscCall(MatGetOwnershipRange(preallocator, &rstart, &rend));
101: for (r = rstart; r < rend; ++r) {
102: PetscInt ncols;
103: const PetscInt *row;
104: const PetscScalar *vals;
106: for (i = 0; i < nm; i++) {
107: PetscCall(MatGetRow(X[i], r, &ncols, &row, &vals));
108: PetscCall(MatSetValues(preallocator, 1, &r, ncols, row, vals, INSERT_VALUES));
109: if (symm && symm[i]) PetscCall(MatSetValues(preallocator, ncols, row, 1, &r, vals, INSERT_VALUES));
110: PetscCall(MatRestoreRow(X[i], r, &ncols, &row, &vals));
111: }
112: }
113: PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
114: PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
115: PetscCall(MatPreallocatorPreallocate(preallocator, fill, B));
116: PetscCall(MatDestroy(&preallocator));
117: PetscFunctionReturn(PETSC_SUCCESS);
118: }
120: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Basic(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
121: {
122: Mat B;
124: PetscFunctionBegin;
125: if (reuse != MAT_REUSE_MATRIX) {
126: PetscBool symm = PETSC_TRUE, isdense;
127: PetscInt bs;
129: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
130: PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
131: PetscCall(MatSetType(B, newtype));
132: PetscCall(MatGetBlockSize(A, &bs));
133: PetscCall(MatSetBlockSize(B, bs));
134: PetscCall(PetscLayoutSetUp(B->rmap));
135: PetscCall(PetscLayoutSetUp(B->cmap));
136: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, ""));
137: if (!isdense) {
138: PetscCall(MatGetRowUpperTriangular(A));
139: PetscCall(MatPreallocateWithMats_Private(B, 1, &A, &symm, PETSC_TRUE));
140: PetscCall(MatRestoreRowUpperTriangular(A));
141: } else {
142: PetscCall(MatSetUp(B));
143: }
144: } else {
145: B = *newmat;
146: PetscCall(MatZeroEntries(B));
147: }
149: PetscCall(MatGetRowUpperTriangular(A));
150: for (PetscInt r = A->rmap->rstart; r < A->rmap->rend; r++) {
151: PetscInt ncols;
152: const PetscInt *row;
153: const PetscScalar *vals;
155: PetscCall(MatGetRow(A, r, &ncols, &row, &vals));
156: PetscCall(MatSetValues(B, 1, &r, ncols, row, vals, INSERT_VALUES));
157: #if defined(PETSC_USE_COMPLEX)
158: if (A->hermitian == PETSC_BOOL3_TRUE) {
159: PetscInt i;
160: for (i = 0; i < ncols; i++) PetscCall(MatSetValue(B, row[i], r, PetscConj(vals[i]), INSERT_VALUES));
161: } else {
162: PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
163: }
164: #else
165: PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
166: #endif
167: PetscCall(MatRestoreRow(A, r, &ncols, &row, &vals));
168: }
169: PetscCall(MatRestoreRowUpperTriangular(A));
170: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
171: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
173: if (reuse == MAT_INPLACE_MATRIX) {
174: PetscCall(MatHeaderReplace(A, &B));
175: } else {
176: *newmat = B;
177: }
178: PetscFunctionReturn(PETSC_SUCCESS);
179: }
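/*
   The basic conversion above is typically reached through the generic MatConvert() interface.
   A minimal caller-side sketch (illustrative only; A is assumed to be an assembled MATMPISBAIJ
   matrix created elsewhere):

     Mat Afull;
     PetscCall(MatConvert(A, MATMPIAIJ, MAT_INITIAL_MATRIX, &Afull)); // expands the symmetric storage to both triangles
     PetscCall(MatDestroy(&Afull));
*/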
181: static PetscErrorCode MatStoreValues_MPISBAIJ(Mat mat)
182: {
183: Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;
185: PetscFunctionBegin;
186: PetscCall(MatStoreValues(aij->A));
187: PetscCall(MatStoreValues(aij->B));
188: PetscFunctionReturn(PETSC_SUCCESS);
189: }
191: static PetscErrorCode MatRetrieveValues_MPISBAIJ(Mat mat)
192: {
193: Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;
195: PetscFunctionBegin;
196: PetscCall(MatRetrieveValues(aij->A));
197: PetscCall(MatRetrieveValues(aij->B));
198: PetscFunctionReturn(PETSC_SUCCESS);
199: }
201: #define MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, orow, ocol) \
202: do { \
203: brow = row / bs; \
204: rp = aj + ai[brow]; \
205: ap = aa + bs2 * ai[brow]; \
206: rmax = aimax[brow]; \
207: nrow = ailen[brow]; \
208: bcol = col / bs; \
209: ridx = row % bs; \
210: cidx = col % bs; \
211: low = 0; \
212: high = nrow; \
213: while (high - low > 3) { \
214: t = (low + high) / 2; \
215: if (rp[t] > bcol) high = t; \
216: else low = t; \
217: } \
218: for (_i = low; _i < high; _i++) { \
219: if (rp[_i] > bcol) break; \
220: if (rp[_i] == bcol) { \
221: bap = ap + bs2 * _i + bs * cidx + ridx; \
222: if (addv == ADD_VALUES) *bap += value; \
223: else *bap = value; \
224: goto a_noinsert; \
225: } \
226: } \
227: if (a->nonew == 1) goto a_noinsert; \
228: PetscCheck(a->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
229: MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, brow, bcol, rmax, aa, ai, aj, rp, ap, aimax, a->nonew, MatScalar); \
230: N = nrow++ - 1; \
231: /* shift up all the later entries in this row */ \
232: PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
233: PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
234: PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
235: rp[_i] = bcol; \
236: ap[bs2 * _i + bs * cidx + ridx] = value; \
237: a_noinsert:; \
238: ailen[brow] = nrow; \
239: } while (0)
241: #define MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, orow, ocol) \
242: do { \
243: brow = row / bs; \
244: rp = bj + bi[brow]; \
245: ap = ba + bs2 * bi[brow]; \
246: rmax = bimax[brow]; \
247: nrow = bilen[brow]; \
248: bcol = col / bs; \
249: ridx = row % bs; \
250: cidx = col % bs; \
251: low = 0; \
252: high = nrow; \
253: while (high - low > 3) { \
254: t = (low + high) / 2; \
255: if (rp[t] > bcol) high = t; \
256: else low = t; \
257: } \
258: for (_i = low; _i < high; _i++) { \
259: if (rp[_i] > bcol) break; \
260: if (rp[_i] == bcol) { \
261: bap = ap + bs2 * _i + bs * cidx + ridx; \
262: if (addv == ADD_VALUES) *bap += value; \
263: else *bap = value; \
264: goto b_noinsert; \
265: } \
266: } \
267: if (b->nonew == 1) goto b_noinsert; \
268: PetscCheck(b->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
269: MatSeqXAIJReallocateAIJ(B, b->mbs, bs2, nrow, brow, bcol, rmax, ba, bi, bj, rp, ap, bimax, b->nonew, MatScalar); \
270: N = nrow++ - 1; \
271: /* shift up all the later entries in this row */ \
272: PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
273: PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
274: PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
275: rp[_i] = bcol; \
276: ap[bs2 * _i + bs * cidx + ridx] = value; \
277: b_noinsert:; \
278: bilen[brow] = nrow; \
279: } while (0)
281: /* Only add/insert a(i,j) with i<=j (blocks).
282: Any a(i,j) with i>j input by user is ignored or generates an error
283: */
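/*
   A minimal caller-side sketch of the convention enforced below (illustrative only; mat is a
   preallocated MATMPISBAIJ with block size 1, and i < j are global indices chosen by the caller):

     PetscCall(MatSetOption(mat, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE)); // optional: silently drop lower triangular entries
     PetscCall(MatSetValue(mat, i, j, v, INSERT_VALUES));                   // upper triangular entry: accepted
     PetscCall(MatSetValue(mat, j, i, v, INSERT_VALUES));                   // lower triangular entry: ignored (or an error without the option)
     PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
*/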
284: static PetscErrorCode MatSetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
285: {
286: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
287: MatScalar value;
288: PetscBool roworiented = baij->roworiented;
289: PetscInt i, j, row, col;
290: PetscInt rstart_orig = mat->rmap->rstart;
291: PetscInt rend_orig = mat->rmap->rend, cstart_orig = mat->cmap->rstart;
292: PetscInt cend_orig = mat->cmap->rend, bs = mat->rmap->bs;
294: /* Some variables required in the macros */
295: Mat A = baij->A;
296: Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)A->data;
297: PetscInt *aimax = a->imax, *ai = a->i, *ailen = a->ilen, *aj = a->j;
298: MatScalar *aa = a->a;
300: Mat B = baij->B;
301: Mat_SeqBAIJ *b = (Mat_SeqBAIJ *)B->data;
302: PetscInt *bimax = b->imax, *bi = b->i, *bilen = b->ilen, *bj = b->j;
303: MatScalar *ba = b->a;
305: PetscInt *rp, ii, nrow, _i, rmax, N, brow, bcol;
306: PetscInt low, high, t, ridx, cidx, bs2 = a->bs2;
307: MatScalar *ap, *bap;
309: /* for stash */
310: PetscInt n_loc, *in_loc = NULL;
311: MatScalar *v_loc = NULL;
313: PetscFunctionBegin;
314: if (!baij->donotstash) {
315: if (n > baij->n_loc) {
316: PetscCall(PetscFree(baij->in_loc));
317: PetscCall(PetscFree(baij->v_loc));
318: PetscCall(PetscMalloc1(n, &baij->in_loc));
319: PetscCall(PetscMalloc1(n, &baij->v_loc));
321: baij->n_loc = n;
322: }
323: in_loc = baij->in_loc;
324: v_loc = baij->v_loc;
325: }
327: for (i = 0; i < m; i++) {
328: if (im[i] < 0) continue;
329: PetscCheck(im[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], mat->rmap->N - 1);
330: if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
331: row = im[i] - rstart_orig; /* local row index */
332: for (j = 0; j < n; j++) {
333: if (im[i] / bs > in[j] / bs) {
334: PetscCheck(a->ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
335: continue; /* ignore lower triangular blocks */
336: }
337: if (in[j] >= cstart_orig && in[j] < cend_orig) { /* diag entry (A) */
338: col = in[j] - cstart_orig; /* local col index */
339: brow = row / bs;
340: bcol = col / bs;
341: if (brow > bcol) continue; /* ignore lower triangular blocks of A */
342: if (roworiented) value = v[i * n + j];
343: else value = v[i + j * m];
344: MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, im[i], in[j]);
345: /* PetscCall(MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv)); */
346: } else if (in[j] < 0) {
347: continue;
348: } else {
349: PetscCheck(in[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[j], mat->cmap->N - 1);
350: /* off-diag entry (B) */
351: if (mat->was_assembled) {
352: if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
353: #if defined(PETSC_USE_CTABLE)
354: PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] / bs + 1, 0, &col));
355: col = col - 1;
356: #else
357: col = baij->colmap[in[j] / bs] - 1;
358: #endif
359: if (col < 0 && !((Mat_SeqSBAIJ *)baij->A->data)->nonew) {
360: PetscCall(MatDisAssemble_MPISBAIJ(mat));
361: col = in[j];
362: /* Reinitialize the variables required by MatSetValues_SeqSBAIJ_B_Private() */
363: B = baij->B;
364: b = (Mat_SeqBAIJ *)B->data;
365: bimax = b->imax;
366: bi = b->i;
367: bilen = b->ilen;
368: bj = b->j;
369: ba = b->a;
370: } else col += in[j] % bs;
371: } else col = in[j];
372: if (roworiented) value = v[i * n + j];
373: else value = v[i + j * m];
374: MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, im[i], in[j]);
375: /* PetscCall(MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv)); */
376: }
377: }
378: } else { /* off processor entry */
379: PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
380: if (!baij->donotstash) {
381: mat->assembled = PETSC_FALSE;
382: n_loc = 0;
383: for (j = 0; j < n; j++) {
384: if (im[i] / bs > in[j] / bs) continue; /* ignore lower triangular blocks */
385: in_loc[n_loc] = in[j];
386: if (roworiented) {
387: v_loc[n_loc] = v[i * n + j];
388: } else {
389: v_loc[n_loc] = v[j * m + i];
390: }
391: n_loc++;
392: }
393: PetscCall(MatStashValuesRow_Private(&mat->stash, im[i], n_loc, in_loc, v_loc, PETSC_FALSE));
394: }
395: }
396: }
397: PetscFunctionReturn(PETSC_SUCCESS);
398: }
400: static inline PetscErrorCode MatSetValuesBlocked_SeqSBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
401: {
402: Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)A->data;
403: PetscInt *rp, low, high, t, ii, jj, nrow, i, rmax, N;
404: PetscInt *imax = a->imax, *ai = a->i, *ailen = a->ilen;
405: PetscInt *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
406: PetscBool roworiented = a->roworiented;
407: const PetscScalar *value = v;
408: MatScalar *ap, *aa = a->a, *bap;
410: PetscFunctionBegin;
411: if (col < row) {
412: PetscCheck(a->ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
413: PetscFunctionReturn(PETSC_SUCCESS); /* ignore lower triangular block */
414: }
415: rp = aj + ai[row];
416: ap = aa + bs2 * ai[row];
417: rmax = imax[row];
418: nrow = ailen[row];
419: value = v;
420: low = 0;
421: high = nrow;
423: while (high - low > 7) {
424: t = (low + high) / 2;
425: if (rp[t] > col) high = t;
426: else low = t;
427: }
428: for (i = low; i < high; i++) {
429: if (rp[i] > col) break;
430: if (rp[i] == col) {
431: bap = ap + bs2 * i;
432: if (roworiented) {
433: if (is == ADD_VALUES) {
434: for (ii = 0; ii < bs; ii++) {
435: for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
436: }
437: } else {
438: for (ii = 0; ii < bs; ii++) {
439: for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
440: }
441: }
442: } else {
443: if (is == ADD_VALUES) {
444: for (ii = 0; ii < bs; ii++) {
445: for (jj = 0; jj < bs; jj++) *bap++ += *value++;
446: }
447: } else {
448: for (ii = 0; ii < bs; ii++) {
449: for (jj = 0; jj < bs; jj++) *bap++ = *value++;
450: }
451: }
452: }
453: goto noinsert2;
454: }
455: }
456: if (nonew == 1) goto noinsert2;
457: PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new block index nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
458: MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
459: N = nrow++ - 1;
460: high++;
461: /* shift up all the later entries in this row */
462: PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
463: PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
464: rp[i] = col;
465: bap = ap + bs2 * i;
466: if (roworiented) {
467: for (ii = 0; ii < bs; ii++) {
468: for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
469: }
470: } else {
471: for (ii = 0; ii < bs; ii++) {
472: for (jj = 0; jj < bs; jj++) *bap++ = *value++;
473: }
474: }
475: noinsert2:;
476: ailen[row] = nrow;
477: PetscFunctionReturn(PETSC_SUCCESS);
478: }
480: /*
481: This routine is exactly duplicated in mpibaij.c
482: */
483: static inline PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
484: {
485: Mat_SeqBAIJ *a = (Mat_SeqBAIJ *)A->data;
486: PetscInt *rp, low, high, t, ii, jj, nrow, i, rmax, N;
487: PetscInt *imax = a->imax, *ai = a->i, *ailen = a->ilen;
488: PetscInt *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
489: PetscBool roworiented = a->roworiented;
490: const PetscScalar *value = v;
491: MatScalar *ap, *aa = a->a, *bap;
493: PetscFunctionBegin;
494: rp = aj + ai[row];
495: ap = aa + bs2 * ai[row];
496: rmax = imax[row];
497: nrow = ailen[row];
498: low = 0;
499: high = nrow;
500: value = v;
501: while (high - low > 7) {
502: t = (low + high) / 2;
503: if (rp[t] > col) high = t;
504: else low = t;
505: }
506: for (i = low; i < high; i++) {
507: if (rp[i] > col) break;
508: if (rp[i] == col) {
509: bap = ap + bs2 * i;
510: if (roworiented) {
511: if (is == ADD_VALUES) {
512: for (ii = 0; ii < bs; ii++) {
513: for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
514: }
515: } else {
516: for (ii = 0; ii < bs; ii++) {
517: for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
518: }
519: }
520: } else {
521: if (is == ADD_VALUES) {
522: for (ii = 0; ii < bs; ii++, value += bs) {
523: for (jj = 0; jj < bs; jj++) bap[jj] += value[jj];
524: bap += bs;
525: }
526: } else {
527: for (ii = 0; ii < bs; ii++, value += bs) {
528: for (jj = 0; jj < bs; jj++) bap[jj] = value[jj];
529: bap += bs;
530: }
531: }
532: }
533: goto noinsert2;
534: }
535: }
536: if (nonew == 1) goto noinsert2;
537: PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new global block indexed nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
538: MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
539: N = nrow++ - 1;
540: high++;
541: /* shift up all the later entries in this row */
542: PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
543: PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
544: rp[i] = col;
545: bap = ap + bs2 * i;
546: if (roworiented) {
547: for (ii = 0; ii < bs; ii++) {
548: for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
549: }
550: } else {
551: for (ii = 0; ii < bs; ii++) {
552: for (jj = 0; jj < bs; jj++) *bap++ = *value++;
553: }
554: }
555: noinsert2:;
556: ailen[row] = nrow;
557: PetscFunctionReturn(PETSC_SUCCESS);
558: }
560: /*
561: This routine could be optimized by removing the need for the block copy below and passing stride information
562: to the above inline routines; similarly in MatSetValuesBlocked_MPIBAIJ()
563: */
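/*
   A minimal caller-side sketch of blocked insertion into an SBAIJ matrix (illustrative only; assumes
   block size bs = 2 and locally owned block indices ib <= jb chosen by the caller):

     PetscScalar vblock[4] = {1.0, 2.0, 3.0, 4.0}; // one bs x bs block, row oriented by default
     PetscCall(MatSetValuesBlocked(mat, 1, &ib, 1, &jb, vblock, INSERT_VALUES));
     PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
*/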
564: static PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const MatScalar v[], InsertMode addv)
565: {
566: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
567: const MatScalar *value;
568: MatScalar *barray = baij->barray;
569: PetscBool roworiented = baij->roworiented, ignore_ltriangular = ((Mat_SeqSBAIJ *)baij->A->data)->ignore_ltriangular;
570: PetscInt i, j, ii, jj, row, col, rstart = baij->rstartbs;
571: PetscInt rend = baij->rendbs, cstart = baij->cstartbs, stepval;
572: PetscInt cend = baij->cendbs, bs = mat->rmap->bs, bs2 = baij->bs2;
574: PetscFunctionBegin;
575: if (!barray) {
576: PetscCall(PetscMalloc1(bs2, &barray));
577: baij->barray = barray;
578: }
580: if (roworiented) {
581: stepval = (n - 1) * bs;
582: } else {
583: stepval = (m - 1) * bs;
584: }
585: for (i = 0; i < m; i++) {
586: if (im[i] < 0) continue;
587: PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed row too large %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
588: if (im[i] >= rstart && im[i] < rend) {
589: row = im[i] - rstart;
590: for (j = 0; j < n; j++) {
591: if (im[i] > in[j]) {
592: PetscCheck(ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
593: continue; /* ignore lower triangular blocks */
594: }
595: /* If only one block column (row oriented, n == 1) or one block row (column oriented, m == 1), a copy is not required */
596: if (roworiented && n == 1) {
597: barray = (MatScalar *)v + i * bs2;
598: } else if ((!roworiented) && (m == 1)) {
599: barray = (MatScalar *)v + j * bs2;
600: } else { /* Here a copy is required */
601: if (roworiented) {
602: value = v + i * (stepval + bs) * bs + j * bs;
603: } else {
604: value = v + j * (stepval + bs) * bs + i * bs;
605: }
606: for (ii = 0; ii < bs; ii++, value += stepval) {
607: for (jj = 0; jj < bs; jj++) *barray++ = *value++;
608: }
609: barray -= bs2;
610: }
612: if (in[j] >= cstart && in[j] < cend) {
613: col = in[j] - cstart;
614: PetscCall(MatSetValuesBlocked_SeqSBAIJ_Inlined(baij->A, row, col, barray, addv, im[i], in[j]));
615: } else if (in[j] < 0) {
616: continue;
617: } else {
618: PetscCheck(in[j] < baij->Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed column too large %" PetscInt_FMT " max %" PetscInt_FMT, in[j], baij->Nbs - 1);
619: if (mat->was_assembled) {
620: if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
622: #if defined(PETSC_USE_CTABLE)
623: PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col));
624: col = col < 1 ? -1 : (col - 1) / bs;
625: #else
626: col = baij->colmap[in[j]] < 1 ? -1 : (baij->colmap[in[j]] - 1) / bs;
627: #endif
628: if (col < 0 && !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
629: PetscCall(MatDisAssemble_MPISBAIJ(mat));
630: col = in[j];
631: }
632: } else col = in[j];
633: PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B, row, col, barray, addv, im[i], in[j]));
634: }
635: }
636: } else {
637: PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process block indexed row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
638: if (!baij->donotstash) {
639: if (roworiented) {
640: PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
641: } else {
642: PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
643: }
644: }
645: }
646: }
647: PetscFunctionReturn(PETSC_SUCCESS);
648: }
650: static PetscErrorCode MatGetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
651: {
652: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
653: PetscInt bs = mat->rmap->bs, i, j, bsrstart = mat->rmap->rstart, bsrend = mat->rmap->rend;
654: PetscInt bscstart = mat->cmap->rstart, bscend = mat->cmap->rend, row, col, data;
656: PetscFunctionBegin;
657: for (i = 0; i < m; i++) {
658: if (idxm[i] < 0) continue; /* negative row */
659: PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
660: PetscCheck(idxm[i] >= bsrstart && idxm[i] < bsrend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local values currently supported");
661: row = idxm[i] - bsrstart;
662: for (j = 0; j < n; j++) {
663: if (idxn[j] < 0) continue; /* negative column */
664: PetscCheck(idxn[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, idxn[j], mat->cmap->N - 1);
665: if (idxn[j] >= bscstart && idxn[j] < bscend) {
666: col = idxn[j] - bscstart;
667: PetscCall(MatGetValues_SeqSBAIJ(baij->A, 1, &row, 1, &col, v + i * n + j));
668: } else {
669: if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
670: #if defined(PETSC_USE_CTABLE)
671: PetscCall(PetscHMapIGetWithDefault(baij->colmap, idxn[j] / bs + 1, 0, &data));
672: data--;
673: #else
674: data = baij->colmap[idxn[j] / bs] - 1;
675: #endif
676: if (data < 0 || baij->garray[data / bs] != idxn[j] / bs) *(v + i * n + j) = 0.0;
677: else {
678: col = data + idxn[j] % bs;
679: PetscCall(MatGetValues_SeqBAIJ(baij->B, 1, &row, 1, &col, v + i * n + j));
680: }
681: }
682: }
683: }
684: PetscFunctionReturn(PETSC_SUCCESS);
685: }
687: static PetscErrorCode MatNorm_MPISBAIJ(Mat mat, NormType type, PetscReal *norm)
688: {
689: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
690: PetscReal sum[2], *lnorm2;
692: PetscFunctionBegin;
693: if (baij->size == 1) {
694: PetscCall(MatNorm(baij->A, type, norm));
695: } else {
696: if (type == NORM_FROBENIUS) {
697: PetscCall(PetscMalloc1(2, &lnorm2));
698: PetscCall(MatNorm(baij->A, type, lnorm2));
699: *lnorm2 = (*lnorm2) * (*lnorm2); /* lnorm2[0] = ||A||_F^2 */
700: lnorm2++;
701: PetscCall(MatNorm(baij->B, type, lnorm2));
702: *lnorm2 = (*lnorm2) * (*lnorm2); /* lnorm2[1] = ||B||_F^2 */
703: lnorm2--; /* reset the pointer to lnorm2[0] */
704: PetscCallMPI(MPIU_Allreduce(lnorm2, sum, 2, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
705: *norm = PetscSqrtReal(sum[0] + 2 * sum[1]); /* entries of B are stored once but appear twice in the symmetric matrix */
706: PetscCall(PetscFree(lnorm2));
707: } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
708: Mat_SeqSBAIJ *amat = (Mat_SeqSBAIJ *)baij->A->data;
709: Mat_SeqBAIJ *bmat = (Mat_SeqBAIJ *)baij->B->data;
710: PetscReal *rsum, vabs;
711: PetscInt *jj, *garray = baij->garray, rstart = baij->rstartbs, nz;
712: PetscInt brow, bcol, col, bs = baij->A->rmap->bs, row, grow, gcol, mbs = amat->mbs;
713: MatScalar *v;
715: PetscCall(PetscCalloc1(mat->cmap->N, &rsum));
716: /* Amat */
717: v = amat->a;
718: jj = amat->j;
719: for (brow = 0; brow < mbs; brow++) {
720: grow = bs * (rstart + brow);
721: nz = amat->i[brow + 1] - amat->i[brow];
722: for (bcol = 0; bcol < nz; bcol++) {
723: gcol = bs * (rstart + *jj);
724: jj++;
725: for (col = 0; col < bs; col++) {
726: for (row = 0; row < bs; row++) {
727: vabs = PetscAbsScalar(*v);
728: v++;
729: rsum[gcol + col] += vabs;
730: /* non-diagonal block */
731: if (bcol > 0 && vabs > 0.0) rsum[grow + row] += vabs;
732: }
733: }
734: }
735: PetscCall(PetscLogFlops(nz * bs * bs));
736: }
737: /* Bmat */
738: v = bmat->a;
739: jj = bmat->j;
740: for (brow = 0; brow < mbs; brow++) {
741: grow = bs * (rstart + brow);
742: nz = bmat->i[brow + 1] - bmat->i[brow];
743: for (bcol = 0; bcol < nz; bcol++) {
744: gcol = bs * garray[*jj];
745: jj++;
746: for (col = 0; col < bs; col++) {
747: for (row = 0; row < bs; row++) {
748: vabs = PetscAbsScalar(*v);
749: v++;
750: rsum[gcol + col] += vabs;
751: rsum[grow + row] += vabs;
752: }
753: }
754: }
755: PetscCall(PetscLogFlops(nz * bs * bs));
756: }
757: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, rsum, mat->cmap->N, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
758: *norm = 0.0;
759: for (col = 0; col < mat->cmap->N; col++) {
760: if (rsum[col] > *norm) *norm = rsum[col];
761: }
762: PetscCall(PetscFree(rsum));
763: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for this norm yet");
764: }
765: PetscFunctionReturn(PETSC_SUCCESS);
766: }
768: static PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat, MatAssemblyType mode)
769: {
770: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
771: PetscInt nstash, reallocs;
773: PetscFunctionBegin;
774: if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
776: PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
777: PetscCall(MatStashScatterBegin_Private(mat, &mat->bstash, baij->rangebs));
778: PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
779: PetscCall(PetscInfo(mat, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
780: PetscCall(MatStashGetInfo_Private(&mat->bstash, &nstash, &reallocs));
781: PetscCall(PetscInfo(mat, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
782: PetscFunctionReturn(PETSC_SUCCESS);
783: }
785: static PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat, MatAssemblyType mode)
786: {
787: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
788: Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)baij->A->data;
789: PetscInt i, j, rstart, ncols, flg, bs2 = baij->bs2;
790: PetscInt *row, *col;
791: PetscBool all_assembled;
792: PetscMPIInt n;
793: PetscBool r1, r2, r3;
794: MatScalar *val;
796: /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
797: PetscFunctionBegin;
798: if (!baij->donotstash && !mat->nooffprocentries) {
799: while (1) {
800: PetscCall(MatStashScatterGetMesg_Private(&mat->stash, &n, &row, &col, &val, &flg));
801: if (!flg) break;
803: for (i = 0; i < n;) {
804: /* Now identify the consecutive vals belonging to the same row */
805: for (j = i, rstart = row[j]; j < n; j++) {
806: if (row[j] != rstart) break;
807: }
808: if (j < n) ncols = j - i;
809: else ncols = n - i;
810: /* Now assemble all these values with a single function call */
811: PetscCall(MatSetValues_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i, mat->insertmode));
812: i = j;
813: }
814: }
815: PetscCall(MatStashScatterEnd_Private(&mat->stash));
816: /* Now process the block-stash. Since the values are stashed column-oriented,
817: temporarily switch the matrices to column-oriented insertion and restore
818: the original row-oriented flags after the MatSetValuesBlocked() calls */
819: r1 = baij->roworiented;
820: r2 = a->roworiented;
821: r3 = ((Mat_SeqBAIJ *)baij->B->data)->roworiented;
823: baij->roworiented = PETSC_FALSE;
824: a->roworiented = PETSC_FALSE;
826: ((Mat_SeqBAIJ *)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
827: while (1) {
828: PetscCall(MatStashScatterGetMesg_Private(&mat->bstash, &n, &row, &col, &val, &flg));
829: if (!flg) break;
831: for (i = 0; i < n;) {
832: /* Now identify the consecutive vals belonging to the same row */
833: for (j = i, rstart = row[j]; j < n; j++) {
834: if (row[j] != rstart) break;
835: }
836: if (j < n) ncols = j - i;
837: else ncols = n - i;
838: PetscCall(MatSetValuesBlocked_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i * bs2, mat->insertmode));
839: i = j;
840: }
841: }
842: PetscCall(MatStashScatterEnd_Private(&mat->bstash));
844: baij->roworiented = r1;
845: a->roworiented = r2;
847: ((Mat_SeqBAIJ *)baij->B->data)->roworiented = r3; /* b->roworiented */
848: }
850: PetscCall(MatAssemblyBegin(baij->A, mode));
851: PetscCall(MatAssemblyEnd(baij->A, mode));
853: /* determine if any process has disassembled; if so we must
854: also disassemble ourselves, in order that we may reassemble. */
855: /*
856: if the nonzero structure of submatrix B cannot change then we know that
857: no process disassembled, thus we can skip this check
858: */
859: if (!((Mat_SeqBAIJ *)baij->B->data)->nonew) {
860: PetscCallMPI(MPIU_Allreduce(&mat->was_assembled, &all_assembled, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
861: if (mat->was_assembled && !all_assembled) PetscCall(MatDisAssemble_MPISBAIJ(mat));
862: }
864: if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) PetscCall(MatSetUpMultiply_MPISBAIJ(mat)); /* setup Mvctx and sMvctx */
865: PetscCall(MatAssemblyBegin(baij->B, mode));
866: PetscCall(MatAssemblyEnd(baij->B, mode));
868: PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));
870: baij->rowvalues = NULL;
872: /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
873: if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
874: PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
875: PetscCallMPI(MPIU_Allreduce(&state, &mat->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)mat)));
876: }
877: PetscFunctionReturn(PETSC_SUCCESS);
878: }
880: extern PetscErrorCode MatSetValues_MPIBAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
881: #include <petscdraw.h>
882: static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat, PetscViewer viewer)
883: {
884: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
885: PetscInt bs = mat->rmap->bs;
886: PetscMPIInt rank = baij->rank;
887: PetscBool isascii, isdraw;
888: PetscViewer sviewer;
889: PetscViewerFormat format;
891: PetscFunctionBegin;
892: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
893: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
894: if (isascii) {
895: PetscCall(PetscViewerGetFormat(viewer, &format));
896: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
897: MatInfo info;
898: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
899: PetscCall(MatGetInfo(mat, MAT_LOCAL, &info));
900: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
901: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local rows %" PetscInt_FMT " nz %" PetscInt_FMT " nz alloced %" PetscInt_FMT " bs %" PetscInt_FMT " mem %g\n", rank, mat->rmap->n, (PetscInt)info.nz_used, (PetscInt)info.nz_allocated,
902: mat->rmap->bs, info.memory));
903: PetscCall(MatGetInfo(baij->A, MAT_LOCAL, &info));
904: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] on-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
905: PetscCall(MatGetInfo(baij->B, MAT_LOCAL, &info));
906: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] off-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
907: PetscCall(PetscViewerFlush(viewer));
908: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
909: PetscCall(PetscViewerASCIIPrintf(viewer, "Information on VecScatter used in matrix-vector product: \n"));
910: PetscCall(VecScatterView(baij->Mvctx, viewer));
911: PetscFunctionReturn(PETSC_SUCCESS);
912: } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_FACTOR_INFO) PetscFunctionReturn(PETSC_SUCCESS);
913: }
915: if (isdraw) {
916: PetscDraw draw;
917: PetscBool isnull;
918: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
919: PetscCall(PetscDrawIsNull(draw, &isnull));
920: if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
921: }
923: {
924: /* assemble the entire matrix onto first processor. */
925: Mat A;
926: Mat_SeqSBAIJ *Aloc;
927: Mat_SeqBAIJ *Bloc;
928: PetscInt M = mat->rmap->N, N = mat->cmap->N, *ai, *aj, col, i, j, k, *rvals, mbs = baij->mbs;
929: MatScalar *a;
930: const char *matname;
932: /* Should this be the same type as mat? */
933: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &A));
934: if (rank == 0) {
935: PetscCall(MatSetSizes(A, M, N, M, N));
936: } else {
937: PetscCall(MatSetSizes(A, 0, 0, M, N));
938: }
939: PetscCall(MatSetType(A, MATMPISBAIJ));
940: PetscCall(MatMPISBAIJSetPreallocation(A, mat->rmap->bs, 0, NULL, 0, NULL));
941: PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));
943: /* copy over the A part */
944: Aloc = (Mat_SeqSBAIJ *)baij->A->data;
945: ai = Aloc->i;
946: aj = Aloc->j;
947: a = Aloc->a;
948: PetscCall(PetscMalloc1(bs, &rvals));
950: for (i = 0; i < mbs; i++) {
951: rvals[0] = bs * (baij->rstartbs + i);
952: for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
953: for (j = ai[i]; j < ai[i + 1]; j++) {
954: col = (baij->cstartbs + aj[j]) * bs;
955: for (k = 0; k < bs; k++) {
956: PetscCall(MatSetValues_MPISBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
957: col++;
958: a += bs;
959: }
960: }
961: }
962: /* copy over the B part */
963: Bloc = (Mat_SeqBAIJ *)baij->B->data;
964: ai = Bloc->i;
965: aj = Bloc->j;
966: a = Bloc->a;
967: for (i = 0; i < mbs; i++) {
968: rvals[0] = bs * (baij->rstartbs + i);
969: for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
970: for (j = ai[i]; j < ai[i + 1]; j++) {
971: col = baij->garray[aj[j]] * bs;
972: for (k = 0; k < bs; k++) {
973: PetscCall(MatSetValues_MPIBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
974: col++;
975: a += bs;
976: }
977: }
978: }
979: PetscCall(PetscFree(rvals));
980: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
981: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
982: /*
983: Every process has to participate in drawing the matrix since the graphics waits are
984: synchronized across all processors that share the PetscDraw object
985: */
986: PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
987: if (((PetscObject)mat)->name) PetscCall(PetscObjectGetName((PetscObject)mat, &matname));
988: if (rank == 0) {
989: if (((PetscObject)mat)->name) PetscCall(PetscObjectSetName((PetscObject)((Mat_MPISBAIJ *)A->data)->A, matname));
990: PetscCall(MatView_SeqSBAIJ(((Mat_MPISBAIJ *)A->data)->A, sviewer));
991: }
992: PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
993: PetscCall(MatDestroy(&A));
994: }
995: PetscFunctionReturn(PETSC_SUCCESS);
996: }
998: /* Used for both MPIBAIJ and MPISBAIJ matrices */
999: #define MatView_MPISBAIJ_Binary MatView_MPIBAIJ_Binary
1001: static PetscErrorCode MatView_MPISBAIJ(Mat mat, PetscViewer viewer)
1002: {
1003: PetscBool isascii, isdraw, issocket, isbinary;
1005: PetscFunctionBegin;
1006: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1007: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
1008: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSOCKET, &issocket));
1009: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
1010: if (isascii || isdraw || issocket) PetscCall(MatView_MPISBAIJ_ASCIIorDraworSocket(mat, viewer));
1011: else if (isbinary) PetscCall(MatView_MPISBAIJ_Binary(mat, viewer));
1012: PetscFunctionReturn(PETSC_SUCCESS);
1013: }
1015: #if defined(PETSC_USE_COMPLEX)
1016: static PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy)
1017: {
1018: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1019: PetscInt mbs = a->mbs, bs = A->rmap->bs;
1020: PetscScalar *from;
1021: const PetscScalar *x;
1023: PetscFunctionBegin;
1024: /* diagonal part */
1025: PetscUseTypeMethod(a->A, mult, xx, a->slvec1a);
1026: /* since a->slvec1b shares memory (dangerously) with a->slvec1, changes to a->slvec1 will affect it */
1027: PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1028: PetscCall(VecZeroEntries(a->slvec1b));
1030: /* subdiagonal part */
1031: PetscUseTypeMethod(a->B, multhermitiantranspose, xx, a->slvec0b);
1033: /* copy x into the vec slvec0 */
1034: PetscCall(VecGetArray(a->slvec0, &from));
1035: PetscCall(VecGetArrayRead(xx, &x));
1037: PetscCall(PetscArraycpy(from, x, bs * mbs));
1038: PetscCall(VecRestoreArray(a->slvec0, &from));
1039: PetscCall(VecRestoreArrayRead(xx, &x));
1041: PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1042: PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1043: /* superdiagonal part */
1044: PetscUseTypeMethod(a->B, multadd, a->slvec1b, a->slvec1a, yy);
1045: PetscFunctionReturn(PETSC_SUCCESS);
1046: }
1047: #endif
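/*
   In outline, the symmetric multiply below computes y = A x from only the stored upper (block)
   triangle: slvec1a = A_d x from the diagonal block, slvec0b = B^T x locally, x is copied into
   the head of slvec0, and one adding scatter (sMvctx) simultaneously sends the B^T contributions
   to their owning ranks and gathers the ghost values of x into slvec1b; the final
   y = B slvec1b + slvec1a then adds the strictly upper off-diagonal part.
*/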
1049: static PetscErrorCode MatMult_MPISBAIJ(Mat A, Vec xx, Vec yy)
1050: {
1051: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1052: PetscInt mbs = a->mbs, bs = A->rmap->bs;
1053: PetscScalar *from;
1054: const PetscScalar *x;
1056: PetscFunctionBegin;
1057: /* diagonal part */
1058: PetscUseTypeMethod(a->A, mult, xx, a->slvec1a);
1059: /* since a->slvec1b shares memory (dangerously) with a->slvec1, changes to a->slvec1 will affect it */
1060: PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1061: PetscCall(VecZeroEntries(a->slvec1b));
1063: /* subdiagonal part */
1064: PetscUseTypeMethod(a->B, multtranspose, xx, a->slvec0b);
1066: /* copy x into the vec slvec0 */
1067: PetscCall(VecGetArray(a->slvec0, &from));
1068: PetscCall(VecGetArrayRead(xx, &x));
1070: PetscCall(PetscArraycpy(from, x, bs * mbs));
1071: PetscCall(VecRestoreArray(a->slvec0, &from));
1072: PetscCall(VecRestoreArrayRead(xx, &x));
1074: PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1075: PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1076: /* superdiagonal part */
1077: PetscUseTypeMethod(a->B, multadd, a->slvec1b, a->slvec1a, yy);
1078: PetscFunctionReturn(PETSC_SUCCESS);
1079: }
1081: #if PetscDefined(USE_COMPLEX)
1082: static PetscErrorCode MatMultAdd_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy, Vec zz)
1083: {
1084: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1085: PetscInt mbs = a->mbs, bs = A->rmap->bs;
1086: PetscScalar *from;
1087: const PetscScalar *x;
1089: PetscFunctionBegin;
1090: /* diagonal part */
1091: PetscUseTypeMethod(a->A, multadd, xx, yy, a->slvec1a);
1092: PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1093: PetscCall(VecZeroEntries(a->slvec1b));
1095: /* subdiagonal part */
1096: PetscUseTypeMethod(a->B, multhermitiantranspose, xx, a->slvec0b);
1098: /* copy x into the vec slvec0 */
1099: PetscCall(VecGetArray(a->slvec0, &from));
1100: PetscCall(VecGetArrayRead(xx, &x));
1101: PetscCall(PetscArraycpy(from, x, bs * mbs));
1102: PetscCall(VecRestoreArray(a->slvec0, &from));
1104: PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1105: PetscCall(VecRestoreArrayRead(xx, &x));
1106: PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1108: /* superdiagonal part */
1109: PetscUseTypeMethod(a->B, multadd, a->slvec1b, a->slvec1a, zz);
1110: PetscFunctionReturn(PETSC_SUCCESS);
1111: }
1112: #endif
1114: static PetscErrorCode MatMultAdd_MPISBAIJ(Mat A, Vec xx, Vec yy, Vec zz)
1115: {
1116: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1117: PetscInt mbs = a->mbs, bs = A->rmap->bs;
1118: PetscScalar *from;
1119: const PetscScalar *x;
1121: PetscFunctionBegin;
1122: /* diagonal part */
1123: PetscUseTypeMethod(a->A, multadd, xx, yy, a->slvec1a);
1124: PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1125: PetscCall(VecZeroEntries(a->slvec1b));
1127: /* subdiagonal part */
1128: PetscUseTypeMethod(a->B, multtranspose, xx, a->slvec0b);
1130: /* copy x into the vec slvec0 */
1131: PetscCall(VecGetArray(a->slvec0, &from));
1132: PetscCall(VecGetArrayRead(xx, &x));
1133: PetscCall(PetscArraycpy(from, x, bs * mbs));
1134: PetscCall(VecRestoreArray(a->slvec0, &from));
1136: PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1137: PetscCall(VecRestoreArrayRead(xx, &x));
1138: PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1140: /* superdiagonal part */
1141: PetscUseTypeMethod(a->B, multadd, a->slvec1b, a->slvec1a, zz);
1142: PetscFunctionReturn(PETSC_SUCCESS);
1143: }
1145: /*
1146: This only works correctly for square matrices where the subblock A->A is the
1147: diagonal block
1148: */
1149: static PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A, Vec v)
1150: {
1151: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1153: PetscFunctionBegin;
1154: /* PetscCheck(a->rmap->N == a->cmap->N,PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
1155: PetscCall(MatGetDiagonal(a->A, v));
1156: PetscFunctionReturn(PETSC_SUCCESS);
1157: }
1159: static PetscErrorCode MatScale_MPISBAIJ(Mat A, PetscScalar aa)
1160: {
1161: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1163: PetscFunctionBegin;
1164: PetscCall(MatScale(a->A, aa));
1165: PetscCall(MatScale(a->B, aa));
1166: PetscFunctionReturn(PETSC_SUCCESS);
1167: }
1169: static PetscErrorCode MatGetRow_MPISBAIJ(Mat matin, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
1170: {
1171: Mat_MPISBAIJ *mat = (Mat_MPISBAIJ *)matin->data;
1172: PetscScalar *vworkA, *vworkB, **pvA, **pvB, *v_p;
1173: PetscInt bs = matin->rmap->bs, bs2 = mat->bs2, i, *cworkA, *cworkB, **pcA, **pcB;
1174: PetscInt nztot, nzA, nzB, lrow, brstart = matin->rmap->rstart, brend = matin->rmap->rend;
1175: PetscInt *cmap, *idx_p, cstart = mat->rstartbs;
1177: PetscFunctionBegin;
1178: PetscCheck(!mat->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Already active");
1179: mat->getrowactive = PETSC_TRUE;
1181: if (!mat->rowvalues && (idx || v)) {
1182: /*
1183: allocate enough space to hold information from the longest row.
1184: */
1185: Mat_SeqSBAIJ *Aa = (Mat_SeqSBAIJ *)mat->A->data;
1186: Mat_SeqBAIJ *Ba = (Mat_SeqBAIJ *)mat->B->data;
1187: PetscInt max = 1, mbs = mat->mbs, tmp;
1188: for (i = 0; i < mbs; i++) {
1189: tmp = Aa->i[i + 1] - Aa->i[i] + Ba->i[i + 1] - Ba->i[i]; /* row length */
1190: if (max < tmp) max = tmp;
1191: }
1192: PetscCall(PetscMalloc2(max * bs2, &mat->rowvalues, max * bs2, &mat->rowindices));
1193: }
1195: PetscCheck(row >= brstart && row < brend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local rows");
1196: lrow = row - brstart; /* local row index */
1198: pvA = &vworkA;
1199: pcA = &cworkA;
1200: pvB = &vworkB;
1201: pcB = &cworkB;
1202: if (!v) {
1203: pvA = NULL;
1204: pvB = NULL;
1205: }
1206: if (!idx) {
1207: pcA = NULL;
1208: if (!v) pcB = NULL;
1209: }
1210: PetscUseTypeMethod(mat->A, getrow, lrow, &nzA, pcA, pvA);
1211: PetscUseTypeMethod(mat->B, getrow, lrow, &nzB, pcB, pvB);
1212: nztot = nzA + nzB;
1214: cmap = mat->garray;
1215: if (v || idx) {
1216: if (nztot) {
1217: /* Sort by increasing column numbers, assuming A and B already sorted */
1218: PetscInt imark = -1;
1219: if (v) {
1220: *v = v_p = mat->rowvalues;
1221: for (i = 0; i < nzB; i++) {
1222: if (cmap[cworkB[i] / bs] < cstart) v_p[i] = vworkB[i];
1223: else break;
1224: }
1225: imark = i;
1226: for (i = 0; i < nzA; i++) v_p[imark + i] = vworkA[i];
1227: for (i = imark; i < nzB; i++) v_p[nzA + i] = vworkB[i];
1228: }
1229: if (idx) {
1230: *idx = idx_p = mat->rowindices;
1231: if (imark > -1) {
1232: for (i = 0; i < imark; i++) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1233: } else {
1234: for (i = 0; i < nzB; i++) {
1235: if (cmap[cworkB[i] / bs] < cstart) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1236: else break;
1237: }
1238: imark = i;
1239: }
1240: for (i = 0; i < nzA; i++) idx_p[imark + i] = cstart * bs + cworkA[i];
1241: for (i = imark; i < nzB; i++) idx_p[nzA + i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1242: }
1243: } else {
1244: if (idx) *idx = NULL;
1245: if (v) *v = NULL;
1246: }
1247: }
1248: *nz = nztot;
1249: PetscUseTypeMethod(mat->A, restorerow, lrow, &nzA, pcA, pvA);
1250: PetscUseTypeMethod(mat->B, restorerow, lrow, &nzB, pcB, pvB);
1251: PetscFunctionReturn(PETSC_SUCCESS);
1252: }
1254: static PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
1255: {
1256: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
1258: PetscFunctionBegin;
1259: PetscCheck(baij->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "MatGetRow() must be called first");
1260: baij->getrowactive = PETSC_FALSE;
1261: PetscFunctionReturn(PETSC_SUCCESS);
1262: }
1264: static PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1265: {
1266: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1267: Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1269: PetscFunctionBegin;
1270: aA->getrow_utriangular = PETSC_TRUE;
1271: PetscFunctionReturn(PETSC_SUCCESS);
1272: }
1273: static PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1274: {
1275: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1276: Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1278: PetscFunctionBegin;
1279: aA->getrow_utriangular = PETSC_FALSE;
1280: PetscFunctionReturn(PETSC_SUCCESS);
1281: }
1283: static PetscErrorCode MatConjugate_MPISBAIJ(Mat mat)
1284: {
1285: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)mat->data;
1287: PetscFunctionBegin;
1288: PetscCall(MatConjugate(a->A));
1289: PetscCall(MatConjugate(a->B));
1290: PetscFunctionReturn(PETSC_SUCCESS);
1291: }
1293: static PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1294: {
1295: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1297: PetscFunctionBegin;
1298: PetscCall(MatRealPart(a->A));
1299: PetscCall(MatRealPart(a->B));
1300: PetscFunctionReturn(PETSC_SUCCESS);
1301: }
1303: static PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1304: {
1305: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1307: PetscFunctionBegin;
1308: PetscCall(MatImaginaryPart(a->A));
1309: PetscCall(MatImaginaryPart(a->B));
1310: PetscFunctionReturn(PETSC_SUCCESS);
1311: }
1313: /* Check if isrow is a subset of iscol_local, called by MatCreateSubMatrix_MPISBAIJ()
1314: Input: isrow - distributed (parallel),
1315: iscol_local - locally owned (seq)
1316: */
1317: static PetscErrorCode ISEqual_private(IS isrow, IS iscol_local, PetscBool *flg)
1318: {
1319: PetscInt sz1, sz2, *a1, *a2, i, j, k, nmatch;
1320: const PetscInt *ptr1, *ptr2;
1322: PetscFunctionBegin;
1323: *flg = PETSC_FALSE;
1324: PetscCall(ISGetLocalSize(isrow, &sz1));
1325: PetscCall(ISGetLocalSize(iscol_local, &sz2));
1326: if (sz1 > sz2) PetscFunctionReturn(PETSC_SUCCESS);
1328: PetscCall(ISGetIndices(isrow, &ptr1));
1329: PetscCall(ISGetIndices(iscol_local, &ptr2));
1331: PetscCall(PetscMalloc1(sz1, &a1));
1332: PetscCall(PetscMalloc1(sz2, &a2));
1333: PetscCall(PetscArraycpy(a1, ptr1, sz1));
1334: PetscCall(PetscArraycpy(a2, ptr2, sz2));
1335: PetscCall(PetscSortInt(sz1, a1));
1336: PetscCall(PetscSortInt(sz2, a2));
1338: nmatch = 0;
1339: k = 0;
1340: for (i = 0; i < sz1; i++) {
1341: for (j = k; j < sz2; j++) {
1342: if (a1[i] == a2[j]) {
1343: k = j;
1344: nmatch++;
1345: break;
1346: }
1347: }
1348: }
1349: PetscCall(ISRestoreIndices(isrow, &ptr1));
1350: PetscCall(ISRestoreIndices(iscol_local, &ptr2));
1351: PetscCall(PetscFree(a1));
1352: PetscCall(PetscFree(a2));
1353: if (nmatch < sz1) {
1354: *flg = PETSC_FALSE;
1355: } else {
1356: *flg = PETSC_TRUE;
1357: }
1358: PetscFunctionReturn(PETSC_SUCCESS);
1359: }
1361: static PetscErrorCode MatCreateSubMatrix_MPISBAIJ(Mat mat, IS isrow, IS iscol, MatReuse call, Mat *newmat)
1362: {
1363: Mat C[2];
1364: IS iscol_local, isrow_local;
1365: PetscInt csize, csize_local, rsize;
1366: PetscBool isequal, issorted, isidentity = PETSC_FALSE;
1368: PetscFunctionBegin;
1369: PetscCall(ISGetLocalSize(iscol, &csize));
1370: PetscCall(ISGetLocalSize(isrow, &rsize));
1371: if (call == MAT_REUSE_MATRIX) {
1372: PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather", (PetscObject *)&iscol_local));
1373: PetscCheck(iscol_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1374: } else {
1375: PetscCall(ISAllGather(iscol, &iscol_local));
1376: PetscCall(ISSorted(iscol_local, &issorted));
1377: PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, iscol must be sorted");
1378: }
1379: PetscCall(ISEqual_private(isrow, iscol_local, &isequal));
1380: if (!isequal) {
1381: PetscCall(ISGetLocalSize(iscol_local, &csize_local));
1382: isidentity = (PetscBool)(mat->cmap->N == csize_local);
1383: if (!isidentity) {
1384: if (call == MAT_REUSE_MATRIX) {
1385: PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather_other", (PetscObject *)&isrow_local));
1386: PetscCheck(isrow_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1387: } else {
1388: PetscCall(ISAllGather(isrow, &isrow_local));
1389: PetscCall(ISSorted(isrow_local, &issorted));
1390: PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, isrow must be sorted");
1391: }
1392: }
1393: }
1394: /* now call MatCreateSubMatrix_MPIBAIJ_Private() */
1395: PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, isrow, iscol_local, csize, isequal || isidentity ? call : MAT_INITIAL_MATRIX, isequal || isidentity ? newmat : C, (PetscBool)(isequal || isidentity)));
1396: if (!isequal && !isidentity) {
1397: if (call == MAT_INITIAL_MATRIX) {
1398: IS intersect;
1399: PetscInt ni;
1401: PetscCall(ISIntersect(isrow_local, iscol_local, &intersect));
1402: PetscCall(ISGetLocalSize(intersect, &ni));
1403: PetscCall(ISDestroy(&intersect));
1404: PetscCheck(ni == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot create such a submatrix: for symmetric format, when requesting an off-diagonal submatrix, isrow and iscol should have an empty intersection (number of common indices is %" PetscInt_FMT ")", ni);
1405: }
1406: PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, iscol, isrow_local, rsize, MAT_INITIAL_MATRIX, C + 1, PETSC_FALSE));
1407: PetscCall(MatTranspose(C[1], MAT_INPLACE_MATRIX, C + 1));
1408: PetscCall(MatAXPY(C[0], 1.0, C[1], DIFFERENT_NONZERO_PATTERN));
1409: if (call == MAT_REUSE_MATRIX) PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
1410: else if (mat->rmap->bs == 1) PetscCall(MatConvert(C[0], MATAIJ, MAT_INITIAL_MATRIX, newmat));
1411: else PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
1412: PetscCall(MatDestroy(C));
1413: PetscCall(MatDestroy(C + 1));
1414: }
1415: if (call == MAT_INITIAL_MATRIX) {
1416: if (!isequal && !isidentity) {
1417: PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather_other", (PetscObject)isrow_local));
1418: PetscCall(ISDestroy(&isrow_local));
1419: }
1420: PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather", (PetscObject)iscol_local));
1421: PetscCall(ISDestroy(&iscol_local));
1422: }
1423: PetscFunctionReturn(PETSC_SUCCESS);
1424: }
1426: static PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1427: {
1428: Mat_MPISBAIJ *l = (Mat_MPISBAIJ *)A->data;
1430: PetscFunctionBegin;
1431: PetscCall(MatZeroEntries(l->A));
1432: PetscCall(MatZeroEntries(l->B));
1433: PetscFunctionReturn(PETSC_SUCCESS);
1434: }
1436: static PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin, MatInfoType flag, MatInfo *info)
1437: {
1438: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)matin->data;
1439: Mat A = a->A, B = a->B;
1440: PetscLogDouble isend[5], irecv[5];
1442: PetscFunctionBegin;
1443: info->block_size = (PetscReal)matin->rmap->bs;
1445: PetscCall(MatGetInfo(A, MAT_LOCAL, info));
1447: isend[0] = info->nz_used;
1448: isend[1] = info->nz_allocated;
1449: isend[2] = info->nz_unneeded;
1450: isend[3] = info->memory;
1451: isend[4] = info->mallocs;
1453: PetscCall(MatGetInfo(B, MAT_LOCAL, info));
1455: isend[0] += info->nz_used;
1456: isend[1] += info->nz_allocated;
1457: isend[2] += info->nz_unneeded;
1458: isend[3] += info->memory;
1459: isend[4] += info->mallocs;
1460: if (flag == MAT_LOCAL) {
1461: info->nz_used = isend[0];
1462: info->nz_allocated = isend[1];
1463: info->nz_unneeded = isend[2];
1464: info->memory = isend[3];
1465: info->mallocs = isend[4];
1466: } else if (flag == MAT_GLOBAL_MAX) {
1467: PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)matin)));
1469: info->nz_used = irecv[0];
1470: info->nz_allocated = irecv[1];
1471: info->nz_unneeded = irecv[2];
1472: info->memory = irecv[3];
1473: info->mallocs = irecv[4];
1474: } else if (flag == MAT_GLOBAL_SUM) {
1475: PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)matin)));
1477: info->nz_used = irecv[0];
1478: info->nz_allocated = irecv[1];
1479: info->nz_unneeded = irecv[2];
1480: info->memory = irecv[3];
1481: info->mallocs = irecv[4];
1482: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown MatInfoType argument %d", (int)flag);
1483: info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */
1484: info->fill_ratio_needed = 0;
1485: info->factor_mallocs = 0;
1486: PetscFunctionReturn(PETSC_SUCCESS);
1487: }
1489: static PetscErrorCode MatSetOption_MPISBAIJ(Mat A, MatOption op, PetscBool flg)
1490: {
1491: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1492: Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1494: PetscFunctionBegin;
1495: switch (op) {
1496: case MAT_NEW_NONZERO_LOCATIONS:
1497: case MAT_NEW_NONZERO_ALLOCATION_ERR:
1498: case MAT_UNUSED_NONZERO_LOCATION_ERR:
1499: case MAT_KEEP_NONZERO_PATTERN:
1500: case MAT_NEW_NONZERO_LOCATION_ERR:
1501: MatCheckPreallocated(A, 1);
1502: PetscCall(MatSetOption(a->A, op, flg));
1503: PetscCall(MatSetOption(a->B, op, flg));
1504: break;
1505: case MAT_ROW_ORIENTED:
1506: MatCheckPreallocated(A, 1);
1507: a->roworiented = flg;
1509: PetscCall(MatSetOption(a->A, op, flg));
1510: PetscCall(MatSetOption(a->B, op, flg));
1511: break;
1512: case MAT_IGNORE_OFF_PROC_ENTRIES:
1513: a->donotstash = flg;
1514: break;
1515: case MAT_USE_HASH_TABLE:
1516: a->ht_flag = flg;
1517: break;
1518: case MAT_HERMITIAN:
1519: if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
1520: #if defined(PETSC_USE_COMPLEX)
1521: if (flg) { /* need different mat-vec ops */
1522: A->ops->mult = MatMult_MPISBAIJ_Hermitian;
1523: A->ops->multadd = MatMultAdd_MPISBAIJ_Hermitian;
1524: A->ops->multtranspose = NULL;
1525: A->ops->multtransposeadd = NULL;
1526: }
1527: #endif
1528: break;
1529: case MAT_SPD:
1530: case MAT_SYMMETRIC:
1531: if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
1532: #if defined(PETSC_USE_COMPLEX)
1533: if (flg) { /* restore to use default mat-vec ops */
1534: A->ops->mult = MatMult_MPISBAIJ;
1535: A->ops->multadd = MatMultAdd_MPISBAIJ;
1536: A->ops->multtranspose = MatMult_MPISBAIJ;
1537: A->ops->multtransposeadd = MatMultAdd_MPISBAIJ;
1538: }
1539: #endif
1540: break;
1541: case MAT_STRUCTURALLY_SYMMETRIC:
1542: if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
1543: break;
1544: case MAT_IGNORE_LOWER_TRIANGULAR:
1545: case MAT_ERROR_LOWER_TRIANGULAR:
1546: aA->ignore_ltriangular = flg;
1547: break;
1548: case MAT_GETROW_UPPERTRIANGULAR:
1549: aA->getrow_utriangular = flg;
1550: break;
1551: default:
1552: break;
1553: }
1554: PetscFunctionReturn(PETSC_SUCCESS);
1555: }
1557: static PetscErrorCode MatTranspose_MPISBAIJ(Mat A, MatReuse reuse, Mat *B)
1558: {
1559: PetscFunctionBegin;
1560: if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1561: if (reuse == MAT_INITIAL_MATRIX) {
1562: PetscCall(MatDuplicate(A, MAT_COPY_VALUES, B));
1563: } else if (reuse == MAT_REUSE_MATRIX) {
1564: PetscCall(MatCopy(A, *B, SAME_NONZERO_PATTERN));
1565: }
1566: PetscFunctionReturn(PETSC_SUCCESS);
1567: }
1569: static PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat, Vec ll, Vec rr)
1570: {
1571: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
1572: Mat a = baij->A, b = baij->B;
1573: PetscInt nv, m, n;
1575: PetscFunctionBegin;
1576: if (!ll) PetscFunctionReturn(PETSC_SUCCESS);
1578: PetscCall(MatGetLocalSize(mat, &m, &n));
1579: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "For symmetric format, local size %" PetscInt_FMT " %" PetscInt_FMT " must be the same", m, n);
1581: PetscCall(VecGetLocalSize(rr, &nv));
1582: PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Left and right vector non-conforming local size");
1584: PetscCall(VecScatterBegin(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
1586: /* left diagonalscale the off-diagonal part */
1587: PetscUseTypeMethod(b, diagonalscale, ll, NULL);
1589: /* scale the diagonal part */
1590: PetscUseTypeMethod(a, diagonalscale, ll, rr);
1592: /* right diagonalscale the off-diagonal part */
1593: PetscCall(VecScatterEnd(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
1594: PetscUseTypeMethod(b, diagonalscale, NULL, baij->lvec);
1595: PetscFunctionReturn(PETSC_SUCCESS);
1596: }
1598: static PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1599: {
1600: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1602: PetscFunctionBegin;
1603: PetscCall(MatSetUnfactored(a->A));
1604: PetscFunctionReturn(PETSC_SUCCESS);
1605: }
1607: static PetscErrorCode MatDuplicate_MPISBAIJ(Mat, MatDuplicateOption, Mat *);
1609: static PetscErrorCode MatEqual_MPISBAIJ(Mat A, Mat B, PetscBool *flag)
1610: {
1611: Mat_MPISBAIJ *matB = (Mat_MPISBAIJ *)B->data, *matA = (Mat_MPISBAIJ *)A->data;
1612: Mat a, b, c, d;
1613: PetscBool flg;
1615: PetscFunctionBegin;
1616: a = matA->A;
1617: b = matA->B;
1618: c = matB->A;
1619: d = matB->B;
1621: PetscCall(MatEqual(a, c, &flg));
1622: if (flg) PetscCall(MatEqual(b, d, &flg));
1623: PetscCallMPI(MPIU_Allreduce(&flg, flag, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
1624: PetscFunctionReturn(PETSC_SUCCESS);
1625: }
1627: static PetscErrorCode MatCopy_MPISBAIJ(Mat A, Mat B, MatStructure str)
1628: {
1629: PetscBool isbaij;
1631: PetscFunctionBegin;
1632: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
1633: PetscCheck(isbaij, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)B)->type_name);
1634: /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1635: if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1636: PetscCall(MatGetRowUpperTriangular(A));
1637: PetscCall(MatCopy_Basic(A, B, str));
1638: PetscCall(MatRestoreRowUpperTriangular(A));
1639: } else {
1640: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1641: Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1643: PetscCall(MatCopy(a->A, b->A, str));
1644: PetscCall(MatCopy(a->B, b->B, str));
1645: }
1646: PetscCall(PetscObjectStateIncrease((PetscObject)B));
1647: PetscFunctionReturn(PETSC_SUCCESS);
1648: }
1650: static PetscErrorCode MatAXPY_MPISBAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str)
1651: {
1652: Mat_MPISBAIJ *xx = (Mat_MPISBAIJ *)X->data, *yy = (Mat_MPISBAIJ *)Y->data;
1653: PetscBLASInt bnz, one = 1;
1654: Mat_SeqSBAIJ *xa, *ya;
1655: Mat_SeqBAIJ *xb, *yb;
1657: PetscFunctionBegin;
1658: if (str == SAME_NONZERO_PATTERN) {
1659: PetscScalar alpha = a;
1660: xa = (Mat_SeqSBAIJ *)xx->A->data;
1661: ya = (Mat_SeqSBAIJ *)yy->A->data;
1662: PetscCall(PetscBLASIntCast(xa->nz, &bnz));
1663: PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xa->a, &one, ya->a, &one));
1664: xb = (Mat_SeqBAIJ *)xx->B->data;
1665: yb = (Mat_SeqBAIJ *)yy->B->data;
1666: PetscCall(PetscBLASIntCast(xb->nz, &bnz));
1667: PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xb->a, &one, yb->a, &one));
1668: PetscCall(PetscObjectStateIncrease((PetscObject)Y));
1669: } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X are a subset of Y's */
1670: PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
1671: PetscCall(MatAXPY_Basic(Y, a, X, str));
1672: PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
1673: } else {
1674: Mat B;
1675: PetscInt *nnz_d, *nnz_o, bs = Y->rmap->bs;
1676: PetscCheck(bs == X->rmap->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Matrices must have same block size");
1677: PetscCall(MatGetRowUpperTriangular(X));
1678: PetscCall(MatGetRowUpperTriangular(Y));
1679: PetscCall(PetscMalloc1(yy->A->rmap->N, &nnz_d));
1680: PetscCall(PetscMalloc1(yy->B->rmap->N, &nnz_o));
1681: PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B));
1682: PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name));
1683: PetscCall(MatSetSizes(B, Y->rmap->n, Y->cmap->n, Y->rmap->N, Y->cmap->N));
1684: PetscCall(MatSetBlockSizesFromMats(B, Y, Y));
1685: PetscCall(MatSetType(B, MATMPISBAIJ));
1686: PetscCall(MatAXPYGetPreallocation_SeqSBAIJ(yy->A, xx->A, nnz_d));
1687: PetscCall(MatAXPYGetPreallocation_MPIBAIJ(yy->B, yy->garray, xx->B, xx->garray, nnz_o));
1688: PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, nnz_d, 0, nnz_o));
1689: PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
1690: PetscCall(MatHeaderMerge(Y, &B));
1691: PetscCall(PetscFree(nnz_d));
1692: PetscCall(PetscFree(nnz_o));
1693: PetscCall(MatRestoreRowUpperTriangular(X));
1694: PetscCall(MatRestoreRowUpperTriangular(Y));
1695: }
1696: PetscFunctionReturn(PETSC_SUCCESS);
1697: }
1699: static PetscErrorCode MatCreateSubMatrices_MPISBAIJ(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *B[])
1700: {
1701: PetscInt i;
1702: PetscBool flg;
1704: PetscFunctionBegin;
1705: PetscCall(MatCreateSubMatrices_MPIBAIJ(A, n, irow, icol, scall, B)); /* B[] are sbaij matrices */
1706: for (i = 0; i < n; i++) {
1707: PetscCall(ISEqual(irow[i], icol[i], &flg));
1708: if (!flg) PetscCall(MatSeqSBAIJZeroOps_Private(*B[i]));
1709: }
1710: PetscFunctionReturn(PETSC_SUCCESS);
1711: }
1713: static PetscErrorCode MatShift_MPISBAIJ(Mat Y, PetscScalar a)
1714: {
1715: Mat_MPISBAIJ *maij = (Mat_MPISBAIJ *)Y->data;
1716: Mat_SeqSBAIJ *aij = (Mat_SeqSBAIJ *)maij->A->data;
1718: PetscFunctionBegin;
1719: if (!Y->preallocated) {
1720: PetscCall(MatMPISBAIJSetPreallocation(Y, Y->rmap->bs, 1, NULL, 0, NULL));
1721: } else if (!aij->nz) {
1722: PetscInt nonew = aij->nonew;
1723: PetscCall(MatSeqSBAIJSetPreallocation(maij->A, Y->rmap->bs, 1, NULL));
1724: aij->nonew = nonew;
1725: }
1726: PetscCall(MatShift_Basic(Y, a));
1727: PetscFunctionReturn(PETSC_SUCCESS);
1728: }
1730: static PetscErrorCode MatZeroRowsColumns_MPISBAIJ(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1731: {
1732: Mat_MPISBAIJ *l = (Mat_MPISBAIJ *)A->data;
1733: PetscMPIInt n, p = 0;
1734: PetscInt i, j, k, r, len = 0, row, col, count;
1735: PetscInt *lrows, *owners = A->rmap->range;
1736: PetscSFNode *rrows;
1737: PetscSF sf;
1738: const PetscScalar *xx;
1739: PetscScalar *bb, *mask;
1740: Vec xmask, lmask, lvec_contrib = NULL;
1741: Mat_SeqBAIJ *baij = (Mat_SeqBAIJ *)l->B->data;
1742: PetscInt bs = A->rmap->bs, bs2 = baij->bs2;
1743: PetscScalar *aa;
1745: PetscFunctionBegin;
1746: PetscCall(PetscMPIIntCast(A->rmap->n, &n));
1747: /* create PetscSF where leaves are input rows and roots are owned rows */
1748: PetscCall(PetscMalloc1(n, &lrows));
1749: for (r = 0; r < n; ++r) lrows[r] = -1;
1750: PetscCall(PetscMalloc1(N, &rrows));
1751: for (r = 0; r < N; ++r) {
1752: const PetscInt idx = rows[r];
1753: PetscCheck(idx >= 0 && A->rmap->N > idx, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row %" PetscInt_FMT " out of range [0,%" PetscInt_FMT ")", idx, A->rmap->N);
1754: if (idx < owners[p] || owners[p + 1] <= idx) { /* short-circuit the search if the last p owns this row too */
1755: PetscCall(PetscLayoutFindOwner(A->rmap, idx, &p));
1756: }
1757: rrows[r].rank = p;
1758: rrows[r].index = rows[r] - owners[p];
1759: }
1760: PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf));
1761: PetscCall(PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER));
1762: /* collect flags for rows to be zeroed */
1763: PetscCall(PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *)rows, lrows, MPI_LOR));
1764: PetscCall(PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *)rows, lrows, MPI_LOR));
1765: PetscCall(PetscSFDestroy(&sf));
1766: /* compress and put in row numbers */
1767: for (r = 0; r < n; ++r) {
1768: if (lrows[r] >= 0) lrows[len++] = r;
1769: }
1770: /* zero diagonal part of matrix */
1771: PetscCall(MatZeroRowsColumns(l->A, len, lrows, diag, x, b));
1772: /* handle off-diagonal part of matrix */
1773: PetscCall(MatCreateVecs(A, &xmask, NULL));
1774: PetscCall(VecDuplicate(l->lvec, &lmask));
1775: PetscCall(VecGetArray(xmask, &bb));
1776: for (i = 0; i < len; i++) bb[lrows[i]] = 1;
1777: PetscCall(VecRestoreArray(xmask, &bb));
1778: PetscCall(VecScatterBegin(l->Mvctx, xmask, lmask, ADD_VALUES, SCATTER_FORWARD));
1779: PetscCall(VecScatterEnd(l->Mvctx, xmask, lmask, ADD_VALUES, SCATTER_FORWARD));
1780: PetscCall(VecDestroy(&xmask));
1781: if (x) {
1782: PetscCall(VecScatterBegin(l->Mvctx, x, l->lvec, INSERT_VALUES, SCATTER_FORWARD));
1783: PetscCall(VecScatterEnd(l->Mvctx, x, l->lvec, INSERT_VALUES, SCATTER_FORWARD));
1784: PetscCall(VecGetArrayRead(l->lvec, &xx));
1785: PetscCall(VecGetArray(b, &bb));
1786: }
1787: PetscCall(VecGetArray(lmask, &mask));
1788: /* MPISBAIJ stores only the upper off-diagonal in l->B; for each zeroed local row r and
1789: non-zeroed off-process column c in that row, accumulate -A[r,c] * x[r] into lvec_contrib.
1790: A SCATTER_REVERSE below sends these contributions to b[c] on the owning (higher-rank)
1791: process, supplying the missing symmetric lower-triangular update. We skip entries where c is
1792: also a zeroed row (mask[col] != 0) since b[c] = diag * x[c] is handled separately. */
1793: if (x) {
1794: const PetscScalar *x_vals;
1795: PetscScalar *c_vals;
1797: PetscCall(VecDuplicate(l->lvec, &lvec_contrib));
1798: PetscCall(VecGetArray(lvec_contrib, &c_vals));
1799: PetscCall(VecGetArrayRead(x, &x_vals));
1800: /* Only accumulate b[c] -= A[r,c] * x[r] when off-process col c is not also a zeroed row
1801: (mask[c] non-zero means col c is zeroed, so b[c] = diag * x[c] is already set).
1802: This mirrors the MatSeqSBAIJ pattern: if (zeroed[r] && !zeroed[c]) bb[c] -= A[r,c] * x[r].
1803: c_vals is indexed by the local B column index. */
1804: for (i = 0; i < len; ++i) {
1805: row = lrows[i];
1806: for (j = baij->i[row / bs]; j < baij->i[row / bs + 1]; ++j) {
1807: for (k = 0; k < bs; ++k) {
1808: col = baij->j[j] * bs + k;
1809: if (!PetscAbsScalar(mask[col])) {
1810: aa = baij->a + j * bs2 + (row % bs) + bs * k;
1811: c_vals[col] -= aa[0] * x_vals[row];
1812: }
1813: }
1814: }
1815: }
1816: PetscCall(VecRestoreArrayRead(x, &x_vals));
1817: PetscCall(VecRestoreArray(lvec_contrib, &c_vals));
1818: }
1819: /* remove zeroed rows of off-diagonal matrix */
1820: for (i = 0; i < len; ++i) {
1821: row = lrows[i];
1822: count = (baij->i[row / bs + 1] - baij->i[row / bs]) * bs;
1823: aa = baij->a + baij->i[row / bs] * bs2 + (row % bs);
1824: for (k = 0; k < count; ++k) {
1825: aa[0] = 0.0;
1826: aa += bs;
1827: }
1828: }
1829: /* loop over all elements of off process part of matrix zeroing removed columns */
1830: for (i = 0; i < l->B->rmap->N; ++i) {
1831: row = i / bs;
1832: for (j = baij->i[row]; j < baij->i[row + 1]; ++j) {
1833: for (k = 0; k < bs; ++k) {
1834: col = bs * baij->j[j] + k;
1835: if (PetscAbsScalar(mask[col])) {
1836: aa = baij->a + j * bs2 + (i % bs) + bs * k;
1837: if (x) bb[i] -= aa[0] * xx[col];
1838: aa[0] = 0.0;
1839: }
1840: }
1841: }
1842: }
1843: if (x) {
1844: PetscCall(VecRestoreArray(b, &bb));
1845: PetscCall(VecRestoreArrayRead(l->lvec, &xx));
1846: /* scatter the accumulated contributions to b[c] on higher-rank processes owning column c */
1847: PetscCall(VecScatterBegin(l->Mvctx, lvec_contrib, b, ADD_VALUES, SCATTER_REVERSE));
1848: PetscCall(VecScatterEnd(l->Mvctx, lvec_contrib, b, ADD_VALUES, SCATTER_REVERSE));
1849: PetscCall(VecDestroy(&lvec_contrib));
1850: }
1851: PetscCall(VecRestoreArray(lmask, &mask));
1852: PetscCall(VecDestroy(&lmask));
1853: PetscCall(PetscFree(lrows));
1855: /* only change matrix nonzero state if pattern was allowed to be changed */
1856: if (!((Mat_SeqSBAIJ *)l->A->data)->nonew) {
1857: PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1858: PetscCallMPI(MPIU_Allreduce(&state, &A->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)A)));
1859: }
1860: PetscFunctionReturn(PETSC_SUCCESS);
1861: }
1863: static PetscErrorCode MatGetDiagonalBlock_MPISBAIJ(Mat A, Mat *a)
1864: {
1865: PetscFunctionBegin;
1866: *a = ((Mat_MPISBAIJ *)A->data)->A;
1867: PetscFunctionReturn(PETSC_SUCCESS);
1868: }
1870: static PetscErrorCode MatEliminateZeros_MPISBAIJ(Mat A, PetscBool keep)
1871: {
1872: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1874: PetscFunctionBegin;
1875: PetscCall(MatEliminateZeros_SeqSBAIJ(a->A, keep)); // possibly keep zero diagonal coefficients
1876: PetscCall(MatEliminateZeros_SeqBAIJ(a->B, PETSC_FALSE)); // never keep zero diagonal coefficients
1877: PetscFunctionReturn(PETSC_SUCCESS);
1878: }
1880: static PetscErrorCode MatLoad_MPISBAIJ(Mat, PetscViewer);
1881: static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat, Vec, PetscInt[]);
1882: static PetscErrorCode MatSOR_MPISBAIJ(Mat, Vec, PetscReal, MatSORType, PetscReal, PetscInt, PetscInt, Vec);
1884: static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
1885: MatGetRow_MPISBAIJ,
1886: MatRestoreRow_MPISBAIJ,
1887: MatMult_MPISBAIJ,
1888: /* 4*/ MatMultAdd_MPISBAIJ,
1889: MatMult_MPISBAIJ, /* transpose versions are same as non-transpose */
1890: MatMultAdd_MPISBAIJ,
1891: NULL,
1892: NULL,
1893: NULL,
1894: /* 10*/ NULL,
1895: NULL,
1896: NULL,
1897: MatSOR_MPISBAIJ,
1898: MatTranspose_MPISBAIJ,
1899: /* 15*/ MatGetInfo_MPISBAIJ,
1900: MatEqual_MPISBAIJ,
1901: MatGetDiagonal_MPISBAIJ,
1902: MatDiagonalScale_MPISBAIJ,
1903: MatNorm_MPISBAIJ,
1904: /* 20*/ MatAssemblyBegin_MPISBAIJ,
1905: MatAssemblyEnd_MPISBAIJ,
1906: MatSetOption_MPISBAIJ,
1907: MatZeroEntries_MPISBAIJ,
1908: /* 24*/ NULL,
1909: NULL,
1910: NULL,
1911: NULL,
1912: NULL,
1913: /* 29*/ MatSetUp_MPI_Hash,
1914: NULL,
1915: NULL,
1916: MatGetDiagonalBlock_MPISBAIJ,
1917: NULL,
1918: /* 34*/ MatDuplicate_MPISBAIJ,
1919: NULL,
1920: NULL,
1921: NULL,
1922: NULL,
1923: /* 39*/ MatAXPY_MPISBAIJ,
1924: MatCreateSubMatrices_MPISBAIJ,
1925: MatIncreaseOverlap_MPISBAIJ,
1926: MatGetValues_MPISBAIJ,
1927: MatCopy_MPISBAIJ,
1928: /* 44*/ NULL,
1929: MatScale_MPISBAIJ,
1930: MatShift_MPISBAIJ,
1931: NULL,
1932: MatZeroRowsColumns_MPISBAIJ,
1933: /* 49*/ NULL,
1934: NULL,
1935: NULL,
1936: NULL,
1937: NULL,
1938: /* 54*/ NULL,
1939: NULL,
1940: MatSetUnfactored_MPISBAIJ,
1941: NULL,
1942: MatSetValuesBlocked_MPISBAIJ,
1943: /* 59*/ MatCreateSubMatrix_MPISBAIJ,
1944: NULL,
1945: NULL,
1946: NULL,
1947: NULL,
1948: /* 64*/ NULL,
1949: NULL,
1950: NULL,
1951: NULL,
1952: MatGetRowMaxAbs_MPISBAIJ,
1953: /* 69*/ NULL,
1954: MatConvert_MPISBAIJ_Basic,
1955: NULL,
1956: NULL,
1957: NULL,
1958: NULL,
1959: NULL,
1960: NULL,
1961: NULL,
1962: MatLoad_MPISBAIJ,
1963: /* 79*/ NULL,
1964: NULL,
1965: NULL,
1966: NULL,
1967: NULL,
1968: /* 84*/ NULL,
1969: NULL,
1970: NULL,
1971: NULL,
1972: NULL,
1973: /* 89*/ NULL,
1974: NULL,
1975: NULL,
1976: NULL,
1977: MatConjugate_MPISBAIJ,
1978: /* 94*/ NULL,
1979: NULL,
1980: MatRealPart_MPISBAIJ,
1981: MatImaginaryPart_MPISBAIJ,
1982: MatGetRowUpperTriangular_MPISBAIJ,
1983: /* 99*/ MatRestoreRowUpperTriangular_MPISBAIJ,
1984: NULL,
1985: NULL,
1986: NULL,
1987: NULL,
1988: /*104*/ NULL,
1989: NULL,
1990: NULL,
1991: NULL,
1992: NULL,
1993: /*109*/ NULL,
1994: NULL,
1995: NULL,
1996: NULL,
1997: NULL,
1998: /*114*/ NULL,
1999: NULL,
2000: NULL,
2001: NULL,
2002: NULL,
2003: /*119*/ NULL,
2004: NULL,
2005: NULL,
2006: NULL,
2007: NULL,
2008: /*124*/ NULL,
2009: MatSetBlockSizes_Default,
2010: NULL,
2011: NULL,
2012: NULL,
2013: /*129*/ MatCreateMPIMatConcatenateSeqMat_MPISBAIJ,
2014: NULL,
2015: NULL,
2016: NULL,
2017: NULL,
2018: /*134*/ NULL,
2019: MatEliminateZeros_MPISBAIJ,
2020: NULL,
2021: NULL,
2022: NULL,
2023: /*139*/ NULL,
2024: MatCopyHashToXAIJ_MPI_Hash,
2025: NULL,
2026: NULL,
2027: MatADot_Default,
2028: /*144*/ MatANorm_Default,
2029: NULL,
2030: NULL};
2032: static PetscErrorCode MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz)
2033: {
2034: Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
2035: PetscInt i, mbs, Mbs;
2036: PetscMPIInt size;
2038: PetscFunctionBegin;
2039: if (B->hash_active) {
2040: B->ops[0] = b->cops;
2041: B->hash_active = PETSC_FALSE;
2042: }
2043: if (!B->preallocated) PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), bs, &B->bstash));
2044: PetscCall(MatSetBlockSize(B, bs));
2045: PetscCall(PetscLayoutSetUp(B->rmap));
2046: PetscCall(PetscLayoutSetUp(B->cmap));
2047: PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
2048: PetscCheck(B->rmap->N <= B->cmap->N, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->N, B->cmap->N);
2049: PetscCheck(B->rmap->n <= B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more local rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->n, B->cmap->n);
2051: mbs = B->rmap->n / bs;
2052: Mbs = B->rmap->N / bs;
2053: PetscCheck(mbs * bs == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local rows %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, B->rmap->n, bs);
2055: B->rmap->bs = bs;
2056: b->bs2 = bs * bs;
2057: b->mbs = mbs;
2058: b->Mbs = Mbs;
2059: b->nbs = B->cmap->n / bs;
2060: b->Nbs = B->cmap->N / bs;
2062: for (i = 0; i <= b->size; i++) b->rangebs[i] = B->rmap->range[i] / bs;
2063: b->rstartbs = B->rmap->rstart / bs;
2064: b->rendbs = B->rmap->rend / bs;
2066: b->cstartbs = B->cmap->rstart / bs;
2067: b->cendbs = B->cmap->rend / bs;
2069: #if defined(PETSC_USE_CTABLE)
2070: PetscCall(PetscHMapIDestroy(&b->colmap));
2071: #else
2072: PetscCall(PetscFree(b->colmap));
2073: #endif
2074: PetscCall(PetscFree(b->garray));
2075: PetscCall(VecDestroy(&b->lvec));
2076: PetscCall(VecScatterDestroy(&b->Mvctx));
2077: PetscCall(VecDestroy(&b->slvec0));
2078: PetscCall(VecDestroy(&b->slvec0b));
2079: PetscCall(VecDestroy(&b->slvec1));
2080: PetscCall(VecDestroy(&b->slvec1a));
2081: PetscCall(VecDestroy(&b->slvec1b));
2082: PetscCall(VecScatterDestroy(&b->sMvctx));
2084: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size));
2086: MatSeqXAIJGetOptions_Private(b->B);
2087: PetscCall(MatDestroy(&b->B));
2088: PetscCall(MatCreate(PETSC_COMM_SELF, &b->B));
2089: PetscCall(MatSetSizes(b->B, B->rmap->n, size > 1 ? B->cmap->N : 0, B->rmap->n, size > 1 ? B->cmap->N : 0));
2090: PetscCall(MatSetType(b->B, MATSEQBAIJ));
2091: MatSeqXAIJRestoreOptions_Private(b->B);
2093: MatSeqXAIJGetOptions_Private(b->A);
2094: PetscCall(MatDestroy(&b->A));
2095: PetscCall(MatCreate(PETSC_COMM_SELF, &b->A));
2096: PetscCall(MatSetSizes(b->A, B->rmap->n, B->cmap->n, B->rmap->n, B->cmap->n));
2097: PetscCall(MatSetType(b->A, MATSEQSBAIJ));
2098: MatSeqXAIJRestoreOptions_Private(b->A);
2100: PetscCall(MatSeqSBAIJSetPreallocation(b->A, bs, d_nz, d_nnz));
2101: PetscCall(MatSeqBAIJSetPreallocation(b->B, bs, o_nz, o_nnz));
2103: B->preallocated = PETSC_TRUE;
2104: B->was_assembled = PETSC_FALSE;
2105: B->assembled = PETSC_FALSE;
2106: PetscFunctionReturn(PETSC_SUCCESS);
2107: }
2109: static PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B, PetscInt bs, const PetscInt ii[], const PetscInt jj[], const PetscScalar V[])
2110: {
2111: PetscInt m, rstart, cend;
2112: PetscInt i, j, d, nz, bd, nz_max = 0, *d_nnz = NULL, *o_nnz = NULL;
2113: const PetscInt *JJ = NULL;
2114: PetscScalar *values = NULL;
2115: PetscBool roworiented = ((Mat_MPISBAIJ *)B->data)->roworiented;
2116: PetscBool nooffprocentries;
2118: PetscFunctionBegin;
2119: PetscCheck(bs >= 1, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_OUTOFRANGE, "Invalid block size specified, must be positive but it is %" PetscInt_FMT, bs);
2120: PetscCall(PetscLayoutSetBlockSize(B->rmap, bs));
2121: PetscCall(PetscLayoutSetBlockSize(B->cmap, bs));
2122: PetscCall(PetscLayoutSetUp(B->rmap));
2123: PetscCall(PetscLayoutSetUp(B->cmap));
2124: PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
2125: m = B->rmap->n / bs;
2126: rstart = B->rmap->rstart / bs;
2127: cend = B->cmap->rend / bs;
2129: PetscCheck(!ii[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ii[0] must be 0 but it is %" PetscInt_FMT, ii[0]);
2130: PetscCall(PetscMalloc2(m, &d_nnz, m, &o_nnz));
2131: for (i = 0; i < m; i++) {
2132: nz = ii[i + 1] - ii[i];
2133: PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
2134: /* count the ones on the diagonal and above, split into diagonal and off-diagonal portions. */
2135: JJ = jj + ii[i];
2136: bd = 0;
2137: for (j = 0; j < nz; j++) {
2138: if (*JJ >= i + rstart) break;
2139: JJ++;
2140: bd++;
2141: }
2142: d = 0;
2143: for (; j < nz; j++) {
2144: if (*JJ++ >= cend) break;
2145: d++;
2146: }
2147: d_nnz[i] = d;
2148: o_nnz[i] = nz - d - bd;
2149: nz = nz - bd;
2150: nz_max = PetscMax(nz_max, nz);
2151: }
2152: PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, d_nnz, 0, o_nnz));
2153: PetscCall(MatSetOption(B, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE));
2154: PetscCall(PetscFree2(d_nnz, o_nnz));
2156: values = (PetscScalar *)V;
2157: if (!values) PetscCall(PetscCalloc1(bs * bs * nz_max, &values));
2158: for (i = 0; i < m; i++) {
2159: PetscInt row = i + rstart;
2160: PetscInt ncols = ii[i + 1] - ii[i];
2161: const PetscInt *icols = jj + ii[i];
2162: if (bs == 1 || !roworiented) { /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2163: const PetscScalar *svals = values + (V ? (bs * bs * ii[i]) : 0);
2164: PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, ncols, icols, svals, INSERT_VALUES));
2165: } else { /* block ordering does not match so we can only insert one block at a time. */
2166: for (PetscInt j = 0; j < ncols; j++) {
2167: const PetscScalar *svals = values + (V ? (bs * bs * (ii[i] + j)) : 0);
2168: PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, 1, &icols[j], svals, INSERT_VALUES));
2169: }
2170: }
2171: }
2173: if (!V) PetscCall(PetscFree(values));
2174: nooffprocentries = B->nooffprocentries;
2175: B->nooffprocentries = PETSC_TRUE;
2176: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2177: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2178: B->nooffprocentries = nooffprocentries;
2180: PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
2181: PetscFunctionReturn(PETSC_SUCCESS);
2182: }
2184: /*MC
2185: MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
2186: based on block compressed sparse row format. Only the upper triangular portion of the "diagonal" portion of
2187: the matrix is stored.
2189: For complex numbers by default this matrix is symmetric, NOT Hermitian symmetric. To make it Hermitian symmetric you
2190: can call `MatSetOption`(`Mat`, `MAT_HERMITIAN`);
2192: Options Database Key:
2193: . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to `MatSetFromOptions()`
2195: Level: beginner
2197: Note:
2198: The number of rows in the matrix must be less than or equal to the number of columns. Similarly, on each process the number of rows in the
2199: diagonal portion of the matrix must be less than or equal to the number of columns.
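   Example Usage:
   A minimal sketch of creating a `MATMPISBAIJ` matrix (the global size, block size, and preallocation
   counts below are illustrative, not taken from this file):
.vb
   Mat      A;
   PetscInt bs = 2, M = 100; /* illustrative values; M must be divisible by bs */

   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, M));
   PetscCall(MatSetType(A, MATMPISBAIJ));
   PetscCall(MatMPISBAIJSetPreallocation(A, bs, 3, NULL, 2, NULL));
   /* ... insert entries on or above the block diagonal with MatSetValuesBlocked() ... */
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve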
2201: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MATBAIJ`, `MatCreateBAIJ()`, `MATSEQSBAIJ`, `MatType`
2202: M*/
2204: PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
2205: {
2206: Mat_MPISBAIJ *b;
2207: PetscBool flg = PETSC_FALSE;
2209: PetscFunctionBegin;
2210: PetscCall(PetscNew(&b));
2211: B->data = (void *)b;
2212: B->ops[0] = MatOps_Values;
2214: B->ops->destroy = MatDestroy_MPISBAIJ;
2215: B->ops->view = MatView_MPISBAIJ;
2216: B->assembled = PETSC_FALSE;
2217: B->insertmode = NOT_SET_VALUES;
2219: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)B), &b->rank));
2220: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &b->size));
2222: /* build local table of row and column ownerships */
2223: PetscCall(PetscMalloc1(b->size + 2, &b->rangebs));
2225: /* build cache for off array entries formed */
2226: PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));
2228: b->donotstash = PETSC_FALSE;
2229: b->colmap = NULL;
2230: b->garray = NULL;
2231: b->roworiented = PETSC_TRUE;
2233: /* stuff used in block assembly */
2234: b->barray = NULL;
2236: /* stuff used for matrix vector multiply */
2237: b->lvec = NULL;
2238: b->Mvctx = NULL;
2239: b->slvec0 = NULL;
2240: b->slvec0b = NULL;
2241: b->slvec1 = NULL;
2242: b->slvec1a = NULL;
2243: b->slvec1b = NULL;
2244: b->sMvctx = NULL;
2246: /* stuff for MatGetRow() */
2247: b->rowindices = NULL;
2248: b->rowvalues = NULL;
2249: b->getrowactive = PETSC_FALSE;
2251: /* hash table stuff */
2252: b->ht = NULL;
2253: b->hd = NULL;
2254: b->ht_size = 0;
2255: b->ht_flag = PETSC_FALSE;
2256: b->ht_fact = 0;
2257: b->ht_total_ct = 0;
2258: b->ht_insert_ct = 0;
2260: /* stuff for MatCreateSubMatrices_MPIBAIJ_local() */
2261: b->ijonly = PETSC_FALSE;
2263: b->in_loc = NULL;
2264: b->v_loc = NULL;
2265: b->n_loc = 0;
2267: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_MPISBAIJ));
2268: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_MPISBAIJ));
2269: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocation_C", MatMPISBAIJSetPreallocation_MPISBAIJ));
2270: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocationCSR_C", MatMPISBAIJSetPreallocationCSR_MPISBAIJ));
2271: #if defined(PETSC_HAVE_ELEMENTAL)
2272: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_elemental_C", MatConvert_MPISBAIJ_Elemental));
2273: #endif
2274: #if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
2275: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_scalapack_C", MatConvert_SBAIJ_ScaLAPACK));
2276: #endif
2277: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpiaij_C", MatConvert_MPISBAIJ_Basic));
2278: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpibaij_C", MatConvert_MPISBAIJ_Basic));
2280: B->symmetric = PETSC_BOOL3_TRUE;
2281: B->structurally_symmetric = PETSC_BOOL3_TRUE;
2282: B->symmetry_eternal = PETSC_TRUE;
2283: B->structural_symmetry_eternal = PETSC_TRUE;
2284: #if !defined(PETSC_USE_COMPLEX)
2285: B->hermitian = PETSC_BOOL3_TRUE;
2286: #endif
2288: PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPISBAIJ));
2289: PetscOptionsBegin(PetscObjectComm((PetscObject)B), NULL, "Options for loading MPISBAIJ matrix 1", "Mat");
2290: PetscCall(PetscOptionsBool("-mat_use_hash_table", "Use hash table to save memory in constructing matrix", "MatSetOption", flg, &flg, NULL));
2291: if (flg) {
2292: PetscReal fact = 1.39;
2293: PetscCall(MatSetOption(B, MAT_USE_HASH_TABLE, PETSC_TRUE));
2294: PetscCall(PetscOptionsReal("-mat_use_hash_table", "Use hash table factor", "MatMPIBAIJSetHashTableFactor", fact, &fact, NULL));
2295: if (fact <= 1.0) fact = 1.39;
2296: PetscCall(MatMPIBAIJSetHashTableFactor(B, fact));
2297: PetscCall(PetscInfo(B, "Hash table Factor used %5.2g\n", (double)fact));
2298: }
2299: PetscOptionsEnd();
2300: PetscFunctionReturn(PETSC_SUCCESS);
2301: }
2303: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2304: /*MC
2305: MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.
2307: This matrix type is identical to `MATSEQSBAIJ` when constructed with a single process communicator,
2308: and `MATMPISBAIJ` otherwise.
2310: Options Database Key:
2311: . -mat_type sbaij - sets the matrix type to `MATSBAIJ` during a call to `MatSetFromOptions()`
2313: Level: beginner
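   Example Usage:
   A minimal sketch (illustrative sizes); with `MatSetFromOptions()` the type can also be selected at run time
   with -mat_type sbaij:
.vb
   Mat A;

   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
   PetscCall(MatSetType(A, MATSBAIJ)); /* becomes MATSEQSBAIJ on one rank, MATMPISBAIJ otherwise */
   PetscCall(MatSetFromOptions(A));
.ve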
2315: .seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MATMPISBAIJ`, `MatCreateSBAIJ()`, `MATSEQBAIJ`, `MATMPIBAIJ`
2316: M*/
2318: /*@
2319: MatMPISBAIJSetPreallocation - For good matrix assembly performance
2320: the user should preallocate the matrix storage by setting the parameters
2321: d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately,
2322: performance can be increased by more than a factor of 50.
2324: Collective
2326: Input Parameters:
2327: + B - the matrix
2328: . bs - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
2329: blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
2330: . d_nz - number of block nonzeros per block row in diagonal portion of local
2331: submatrix (same for all local rows)
2332: . d_nnz - array containing the number of block nonzeros in the various block rows
2333: in the upper triangular and diagonal part of the diagonal portion of the local submatrix
2334: (possibly different for each block row) or `NULL`. If you plan to factor the matrix you must leave room
2335: for the diagonal entry and set a value even if it is zero.
2336: . o_nz - number of block nonzeros per block row in the off-diagonal portion of local
2337: submatrix (same for all local rows).
2338: - o_nnz - array containing the number of nonzeros in the various block rows of the
2339: off-diagonal portion of the local submatrix that is right of the diagonal
2340: (possibly different for each block row) or `NULL`.
2342: Options Database Keys:
2343: + -mat_no_unroll - uses code that does not unroll the loops in the
2344: block calculations (much slower)
2345: - -mat_block_size - size of the blocks to use
2347: Level: intermediate
2349: Notes:
2351: If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
2352: then it must be used on all processors that share the object for that argument.
2354: If the *_nnz parameter is given then the *_nz parameter is ignored
2356: Storage Information:
2357: For a square global matrix we define each processor's diagonal portion
2358: to be its local rows and the corresponding columns (a square submatrix);
2359: each processor's off-diagonal portion encompasses the remainder of the
2360: local matrix (a rectangular submatrix).
2362: The user can specify preallocated storage for the diagonal part of
2363: the local submatrix with either `d_nz` or `d_nnz` (not both). Set
2364: `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2365: memory allocation. Likewise, specify preallocated storage for the
2366: off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2368: You can call `MatGetInfo()` to get information on how effective the preallocation was,
2369: for example the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
2370: You can also run with the option `-info` and look for messages with the string
2371: malloc in them to see if additional memory allocation was needed.
2373: Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2374: the figure below we depict these three local rows and all columns (0-11).
2376: .vb
2377: 0 1 2 3 4 5 6 7 8 9 10 11
2378: --------------------------
2379: row 3 |. . . d d d o o o o o o
2380: row 4 |. . . d d d o o o o o o
2381: row 5 |. . . d d d o o o o o o
2382: --------------------------
2383: .ve
2385: Thus, any entries in the d locations are stored in the d (diagonal)
2386: submatrix, and any entries in the o locations are stored in the
2387: o (off-diagonal) submatrix. Note that the d matrix is stored in
2388: `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2390: Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2391: plus the diagonal part of the d matrix,
2392: and `o_nz` should indicate the number of block nonzeros per row in the o matrix
2394: In general, for PDE problems in which most nonzeros are near the diagonal,
2395: one expects `d_nz` >> `o_nz`.
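   Example Usage:
   A sketch of preallocating with per-block-row counts (the sizes and counts are illustrative); for `d_nnz`
   each block row counts only its blocks on or above the block diagonal:
.vb
   Mat      A;
   PetscInt bs = 2, m = 6;        /* three local block rows of size 2 */
   PetscInt d_nnz[3] = {3, 2, 1}; /* upper-triangular + diagonal blocks per block row */
   PetscInt o_nnz[3] = {1, 1, 0}; /* off-diagonal blocks per block row */

   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
   PetscCall(MatSetSizes(A, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
   PetscCall(MatSetType(A, MATMPISBAIJ));
   PetscCall(MatMPISBAIJSetPreallocation(A, bs, 0, d_nnz, 0, o_nnz));
.ve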
2397: .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, `PetscSplitOwnership()`
2398: @*/
2399: PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
2400: {
2401: PetscFunctionBegin;
2405: PetscTryMethod(B, "MatMPISBAIJSetPreallocation_C", (Mat, PetscInt, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, bs, d_nz, d_nnz, o_nz, o_nnz));
2406: PetscFunctionReturn(PETSC_SUCCESS);
2407: }
2409: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2410: /*@
2411: MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format, `MATSBAIJ`,
2412: (block compressed row). For good matrix assembly performance
2413: the user should preallocate the matrix storage by setting the parameters
2414: `d_nz` (or `d_nnz`) and `o_nz` (or `o_nnz`).
2416: Collective
2418: Input Parameters:
2419: + comm - MPI communicator
2420: . bs - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
2421: blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
2422: . m - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given)
2423: This value should be the same as the local size used in creating the
2424: y vector for the matrix-vector product y = Ax.
2425: . n - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given)
2426: This value should be the same as the local size used in creating the
2427: x vector for the matrix-vector product y = Ax.
2428: . M - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2429: . N - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2430: . d_nz - number of block nonzeros per block row in diagonal portion of local
2431: submatrix (same for all local rows)
2432: . d_nnz - array containing the number of block nonzeros in the various block rows
2433: in the upper triangular portion of the diagonal portion of the local submatrix
2434: (possibly different for each block row) or `NULL`.
2435: If you plan to factor the matrix you must leave room for the diagonal entry and
2436: set its value even if it is zero.
2437: . o_nz - number of block nonzeros per block row in the off-diagonal portion of local
2438: submatrix (same for all local rows).
2439: - o_nnz - array containing the number of nonzeros in the various block rows of the
2440: off-diagonal portion of the local submatrix (possibly different for
2441: each block row) or `NULL`.
2443: Output Parameter:
2444: . A - the matrix
2446: Options Database Keys:
2447: + -mat_no_unroll - uses code that does not unroll the loops in the
2448: block calculations (much slower)
2449: . -mat_block_size - size of the blocks to use
2450: - -mat_mpi - use the parallel matrix data structures even on one processor
2451: (defaults to using SeqSBAIJ format on one processor)
2453: Level: intermediate
2455: Notes:
2456: It is recommended that one use `MatCreateFromOptions()` or the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()`,
2457: MatXXXXSetPreallocation() paradigm instead of this routine directly.
2458: [MatXXXXSetPreallocation() is, for example, `MatSeqAIJSetPreallocation()`]
2460: The number of rows and columns must be divisible by blocksize.
2461: This matrix type does not support complex Hermitian operations.
2463: The user MUST specify either the local or global matrix dimensions
2464: (possibly both).
2466: If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
2467: then it must be used on all processors that share the object for that argument.
2469: If `m` and `n` are not `PETSC_DECIDE`, then these values determine the `PetscLayout` of the matrix and the ranges returned by
2470: `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`.
2472: If the *_nnz parameter is given then the *_nz parameter is ignored
2474: Storage Information:
2475: For a square global matrix we define each processor's diagonal portion
2476: to be its local rows and the corresponding columns (a square submatrix);
2477: each processor's off-diagonal portion encompasses the remainder of the
2478: local matrix (a rectangular submatrix).
2480: The user can specify preallocated storage for the diagonal part of
2481: the local submatrix with either `d_nz` or `d_nnz` (not both). Set
2482: `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2483: memory allocation. Likewise, specify preallocated storage for the
2484: off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2486: Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2487: the figure below we depict these three local rows and all columns (0-11).
2489: .vb
2490: 0 1 2 3 4 5 6 7 8 9 10 11
2491: --------------------------
2492: row 3 |. . . d d d o o o o o o
2493: row 4 |. . . d d d o o o o o o
2494: row 5 |. . . d d d o o o o o o
2495: --------------------------
2496: .ve
2498: Thus, any entries in the d locations are stored in the d (diagonal)
2499: submatrix, and any entries in the o locations are stored in the
2500: o (off-diagonal) submatrix. Note that the d matrix is stored in
2501: `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2503: Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2504: plus the diagonal part of the d matrix,
2505: and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
2506: In general, for PDE problems in which most nonzeros are near the diagonal,
2507: one expects `d_nz` >> `o_nz`.
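   Example Usage:
   A sketch of a direct call (the block size, global size, and preallocation counts are illustrative):
.vb
   Mat A;

   PetscCall(MatCreateSBAIJ(PETSC_COMM_WORLD, 2, PETSC_DECIDE, PETSC_DECIDE, 100, 100, 3, NULL, 2, NULL, &A));
.ve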
2509: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`,
2510: `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
2511: @*/
2512: PetscErrorCode MatCreateSBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A)
2513: {
2514: PetscMPIInt size;
2516: PetscFunctionBegin;
2517: PetscCall(MatCreate(comm, A));
2518: PetscCall(MatSetSizes(*A, m, n, M, N));
2519: PetscCallMPI(MPI_Comm_size(comm, &size));
2520: if (size > 1) {
2521: PetscCall(MatSetType(*A, MATMPISBAIJ));
2522: PetscCall(MatMPISBAIJSetPreallocation(*A, bs, d_nz, d_nnz, o_nz, o_nnz));
2523: } else {
2524: PetscCall(MatSetType(*A, MATSEQSBAIJ));
2525: PetscCall(MatSeqSBAIJSetPreallocation(*A, bs, d_nz, d_nnz));
2526: }
2527: PetscFunctionReturn(PETSC_SUCCESS);
2528: }
2530: static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin, MatDuplicateOption cpvalues, Mat *newmat)
2531: {
2532: Mat mat;
2533: Mat_MPISBAIJ *a, *oldmat = (Mat_MPISBAIJ *)matin->data;
2534: PetscInt len = 0, nt, bs = matin->rmap->bs, mbs = oldmat->mbs;
2535: PetscScalar *array;
2537: PetscFunctionBegin;
2538: *newmat = NULL;
2540: PetscCall(MatCreate(PetscObjectComm((PetscObject)matin), &mat));
2541: PetscCall(MatSetSizes(mat, matin->rmap->n, matin->cmap->n, matin->rmap->N, matin->cmap->N));
2542: PetscCall(MatSetType(mat, ((PetscObject)matin)->type_name));
2543: PetscCall(PetscLayoutReference(matin->rmap, &mat->rmap));
2544: PetscCall(PetscLayoutReference(matin->cmap, &mat->cmap));
2546: if (matin->hash_active) PetscCall(MatSetUp(mat));
2547: else {
2548: mat->factortype = matin->factortype;
2549: mat->preallocated = PETSC_TRUE;
2550: mat->assembled = PETSC_TRUE;
2551: mat->insertmode = NOT_SET_VALUES;
2553: a = (Mat_MPISBAIJ *)mat->data;
2554: a->bs2 = oldmat->bs2;
2555: a->mbs = oldmat->mbs;
2556: a->nbs = oldmat->nbs;
2557: a->Mbs = oldmat->Mbs;
2558: a->Nbs = oldmat->Nbs;
2560: a->size = oldmat->size;
2561: a->rank = oldmat->rank;
2562: a->donotstash = oldmat->donotstash;
2563: a->roworiented = oldmat->roworiented;
2564: a->rowindices = NULL;
2565: a->rowvalues = NULL;
2566: a->getrowactive = PETSC_FALSE;
2567: a->barray = NULL;
2568: a->rstartbs = oldmat->rstartbs;
2569: a->rendbs = oldmat->rendbs;
2570: a->cstartbs = oldmat->cstartbs;
2571: a->cendbs = oldmat->cendbs;
2573: /* hash table stuff */
2574: a->ht = NULL;
2575: a->hd = NULL;
2576: a->ht_size = 0;
2577: a->ht_flag = oldmat->ht_flag;
2578: a->ht_fact = oldmat->ht_fact;
2579: a->ht_total_ct = 0;
2580: a->ht_insert_ct = 0;
2582: PetscCall(PetscArraycpy(a->rangebs, oldmat->rangebs, a->size + 2));
2583: if (oldmat->colmap) {
2584: #if defined(PETSC_USE_CTABLE)
2585: PetscCall(PetscHMapIDuplicate(oldmat->colmap, &a->colmap));
2586: #else
2587: PetscCall(PetscMalloc1(a->Nbs, &a->colmap));
2588: PetscCall(PetscArraycpy(a->colmap, oldmat->colmap, a->Nbs));
2589: #endif
2590: } else a->colmap = NULL;
2592: if (oldmat->garray && (len = ((Mat_SeqBAIJ *)oldmat->B->data)->nbs)) {
2593: PetscCall(PetscMalloc1(len, &a->garray));
2594: PetscCall(PetscArraycpy(a->garray, oldmat->garray, len));
2595: } else a->garray = NULL;
2597: PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)matin), matin->rmap->bs, &mat->bstash));
2598: PetscCall(VecDuplicate(oldmat->lvec, &a->lvec));
2599: PetscCall(VecScatterCopy(oldmat->Mvctx, &a->Mvctx));
2601: PetscCall(VecDuplicate(oldmat->slvec0, &a->slvec0));
2602: PetscCall(VecDuplicate(oldmat->slvec1, &a->slvec1));
2604: PetscCall(VecGetLocalSize(a->slvec1, &nt));
2605: PetscCall(VecGetArray(a->slvec1, &array));
2606: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, bs * mbs, array, &a->slvec1a));
2607: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec1b));
2608: PetscCall(VecRestoreArray(a->slvec1, &array));
2609: PetscCall(VecGetArray(a->slvec0, &array));
2610: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec0b));
2611: PetscCall(VecRestoreArray(a->slvec0, &array));
2613: /* ierr = VecScatterCopy(oldmat->sMvctx,&a->sMvctx); - not written yet, replaced by the lazy trick: */
2614: PetscCall(PetscObjectReference((PetscObject)oldmat->sMvctx));
2615: a->sMvctx = oldmat->sMvctx;
2617: PetscCall(MatDuplicate(oldmat->A, cpvalues, &a->A));
2618: PetscCall(MatDuplicate(oldmat->B, cpvalues, &a->B));
2619: }
2620: PetscCall(PetscFunctionListDuplicate(((PetscObject)matin)->qlist, &((PetscObject)mat)->qlist));
2621: *newmat = mat;
2622: PetscFunctionReturn(PETSC_SUCCESS);
2623: }
2625: /* Used for both MPIBAIJ and MPISBAIJ matrices */
2626: #define MatLoad_MPISBAIJ_Binary MatLoad_MPIBAIJ_Binary
2628: static PetscErrorCode MatLoad_MPISBAIJ(Mat mat, PetscViewer viewer)
2629: {
2630: PetscBool isbinary;
2632: PetscFunctionBegin;
2633: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2634: PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Viewer type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, ((PetscObject)mat)->type_name);
2635: PetscCall(MatLoad_MPISBAIJ_Binary(mat, viewer));
2636: PetscFunctionReturn(PETSC_SUCCESS);
2637: }
2639: static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A, Vec v, PetscInt idx[])
2640: {
2641: Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
2642: Mat_SeqBAIJ *b = (Mat_SeqBAIJ *)a->B->data;
2643: PetscReal atmp;
2644: PetscReal *work, *svalues, *rvalues;
2645: PetscInt i, bs, mbs, *bi, *bj, brow, j, ncols, krow, kcol, col, row, Mbs, bcol;
2646: PetscMPIInt rank, size;
2647: PetscInt *rowners_bs, count, source;
2648: PetscScalar *va;
2649: MatScalar *ba;
2650: MPI_Status stat;
2652: PetscFunctionBegin;
2653: PetscCheck(!idx, PETSC_COMM_SELF, PETSC_ERR_SUP, "Send email to petsc-maint@mcs.anl.gov");
2654: PetscCall(MatGetRowMaxAbs(a->A, v, NULL));
2655: PetscCall(VecGetArray(v, &va));
2657: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
2658: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
2660: bs = A->rmap->bs;
2661: mbs = a->mbs;
2662: Mbs = a->Mbs;
2663: ba = b->a;
2664: bi = b->i;
2665: bj = b->j;
2667: /* find ownerships */
2668: rowners_bs = A->rmap->range;
2670: /* each proc creates an array to be distributed */
2671: PetscCall(PetscCalloc1(bs * Mbs, &work));
2673: /* row_max for B */
2674: if (rank != size - 1) {
2675: for (i = 0; i < mbs; i++) {
2676: ncols = bi[1] - bi[0];
2677: bi++;
2678: brow = bs * i;
2679: for (j = 0; j < ncols; j++) {
2680: bcol = bs * (*bj);
2681: for (kcol = 0; kcol < bs; kcol++) {
2682: col = bcol + kcol; /* local col index */
2683: col += rowners_bs[rank + 1]; /* global col index */
2684: for (krow = 0; krow < bs; krow++) {
2685: atmp = PetscAbsScalar(*ba);
2686: ba++;
2687: row = brow + krow; /* local row index */
2688: if (PetscRealPart(va[row]) < atmp) va[row] = atmp;
2689: if (work[col] < atmp) work[col] = atmp;
2690: }
2691: }
2692: bj++;
2693: }
2694: }
2696: /* send values to its owners */
2697: for (PetscMPIInt dest = rank + 1; dest < size; dest++) {
2698: svalues = work + rowners_bs[dest];
2699: count = rowners_bs[dest + 1] - rowners_bs[dest];
2700: PetscCallMPI(MPIU_Send(svalues, count, MPIU_REAL, dest, rank, PetscObjectComm((PetscObject)A)));
2701: }
2702: }
2704: /* receive values */
2705: if (rank) {
2706: rvalues = work;
2707: count = rowners_bs[rank + 1] - rowners_bs[rank];
2708: for (source = 0; source < rank; source++) {
2709: PetscCallMPI(MPIU_Recv(rvalues, count, MPIU_REAL, MPI_ANY_SOURCE, MPI_ANY_TAG, PetscObjectComm((PetscObject)A), &stat));
2710: /* process values */
2711: for (i = 0; i < count; i++) {
2712: if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
2713: }
2714: }
2715: }
2717: PetscCall(VecRestoreArray(v, &va));
2718: PetscCall(PetscFree(work));
2719: PetscFunctionReturn(PETSC_SUCCESS);
2720: }
2722: static PetscErrorCode MatSOR_MPISBAIJ(Mat matin, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx)
2723: {
2724: Mat_MPISBAIJ *mat = (Mat_MPISBAIJ *)matin->data;
2725: PetscInt mbs = mat->mbs, bs = matin->rmap->bs;
2726: PetscScalar *x, *ptr, *from;
2727: Vec bb1;
2728: const PetscScalar *b;
2730: PetscFunctionBegin;
2731: PetscCheck(its > 0 && lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " and local its %" PetscInt_FMT " both positive", its, lits);
2732: PetscCheck(bs <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "SSOR for block size > 1 is not yet implemented");
2734: if (flag == SOR_APPLY_UPPER) {
2735: PetscUseTypeMethod(mat->A, sor, bb, omega, flag, fshift, lits, 1, xx);
2736: PetscFunctionReturn(PETSC_SUCCESS);
2737: }
2739: if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2740: if (flag & SOR_ZERO_INITIAL_GUESS) {
2741: PetscUseTypeMethod(mat->A, sor, bb, omega, flag, fshift, lits, lits, xx);
2742: its--;
2743: }
2745: PetscCall(VecDuplicate(bb, &bb1));
2746: while (its--) {
2747: /* lower triangular part: slvec0b = - B^T*xx */
2748: PetscUseTypeMethod(mat->B, multtranspose, xx, mat->slvec0b);
2750: /* copy xx into slvec0a */
2751: PetscCall(VecGetArray(mat->slvec0, &ptr));
2752: PetscCall(VecGetArray(xx, &x));
2753: PetscCall(PetscArraycpy(ptr, x, bs * mbs));
2754: PetscCall(VecRestoreArray(mat->slvec0, &ptr));
2756: PetscCall(VecScale(mat->slvec0, -1.0));
2758: /* copy bb into slvec1a */
2759: PetscCall(VecGetArray(mat->slvec1, &ptr));
2760: PetscCall(VecGetArrayRead(bb, &b));
2761: PetscCall(PetscArraycpy(ptr, b, bs * mbs));
2762: PetscCall(VecRestoreArray(mat->slvec1, &ptr));
2764: /* set slvec1b = 0 */
2765: PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
2766: PetscCall(VecZeroEntries(mat->slvec1b));
2768: PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2769: PetscCall(VecRestoreArray(xx, &x));
2770: PetscCall(VecRestoreArrayRead(bb, &b));
2771: PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2773: /* upper triangular part: bb1 = bb1 - B*x */
2774: PetscUseTypeMethod(mat->B, multadd, mat->slvec1b, mat->slvec1a, bb1);
2776: /* local diagonal sweep */
2777: PetscUseTypeMethod(mat->A, sor, bb1, omega, SOR_SYMMETRIC_SWEEP, fshift, lits, lits, xx);
2778: }
2779: PetscCall(VecDestroy(&bb1));
2780: } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2781: PetscUseTypeMethod(mat->A, sor, bb, omega, flag, fshift, lits, 1, xx);
2782: } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2783: PetscUseTypeMethod(mat->A, sor, bb, omega, flag, fshift, lits, 1, xx);
2784: } else if (flag & SOR_EISENSTAT) {
2785: Vec xx1;
2786: PetscBool hasop;
2787: const PetscScalar *diag;
2788: PetscScalar *sl, scale = (omega - 2.0) / omega;
2789: PetscInt n;
2791: if (!mat->xx1) {
2792: PetscCall(VecDuplicate(bb, &mat->xx1));
2793: PetscCall(VecDuplicate(bb, &mat->bb1));
2794: }
2795: xx1 = mat->xx1;
2796: bb1 = mat->bb1;
2798: PetscUseTypeMethod(mat->A, sor, bb, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP), fshift, lits, 1, xx);
2800: if (!mat->diag) {
2801: /* this is wrong for same matrix with new nonzero values */
2802: PetscCall(MatCreateVecs(matin, &mat->diag, NULL));
2803: PetscCall(MatGetDiagonal(matin, mat->diag));
2804: }
2805: PetscCall(MatHasOperation(matin, MATOP_MULT_DIAGONAL_BLOCK, &hasop));
2807: if (hasop) {
2808: PetscCall(MatMultDiagonalBlock(matin, xx, bb1));
2809: PetscCall(VecAYPX(mat->slvec1a, scale, bb));
2810: } else {
2811: /*
2812: These two lines are replaced by code that may be a bit faster for a good compiler
2813: PetscCall(VecPointwiseMult(mat->slvec1a,mat->diag,xx));
2814: PetscCall(VecAYPX(mat->slvec1a,scale,bb));
2815: */
2816: PetscCall(VecGetArray(mat->slvec1a, &sl));
2817: PetscCall(VecGetArrayRead(mat->diag, &diag));
2818: PetscCall(VecGetArrayRead(bb, &b));
2819: PetscCall(VecGetArray(xx, &x));
2820: PetscCall(VecGetLocalSize(xx, &n));
2821: if (omega == 1.0) {
2822: for (PetscInt i = 0; i < n; i++) sl[i] = b[i] - diag[i] * x[i];
2823: PetscCall(PetscLogFlops(2.0 * n));
2824: } else {
2825: for (PetscInt i = 0; i < n; i++) sl[i] = b[i] + scale * diag[i] * x[i];
2826: PetscCall(PetscLogFlops(3.0 * n));
2827: }
2828: PetscCall(VecRestoreArray(mat->slvec1a, &sl));
2829: PetscCall(VecRestoreArrayRead(mat->diag, &diag));
2830: PetscCall(VecRestoreArrayRead(bb, &b));
2831: PetscCall(VecRestoreArray(xx, &x));
2832: }
2834: /* multiply off-diagonal portion of matrix */
2835: PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
2836: PetscCall(VecZeroEntries(mat->slvec1b));
2837: PetscUseTypeMethod(mat->B, multtranspose, xx, mat->slvec0b);
2838: PetscCall(VecGetArray(mat->slvec0, &from));
2839: PetscCall(VecGetArray(xx, &x));
2840: PetscCall(PetscArraycpy(from, x, bs * mbs));
2841: PetscCall(VecRestoreArray(mat->slvec0, &from));
2842: PetscCall(VecRestoreArray(xx, &x));
2843: PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2844: PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2845: PetscUseTypeMethod(mat->B, multadd, mat->slvec1b, mat->slvec1a, mat->slvec1a);
2847: /* local sweep */
2848: PetscUseTypeMethod(mat->A, sor, mat->slvec1a, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP), fshift, lits, 1, xx1);
2849: PetscCall(VecAXPY(xx, 1.0, xx1));
2850: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatSORType is not supported for SBAIJ matrix format");
2851: PetscFunctionReturn(PETSC_SUCCESS);
2852: }
2854: /*@
2855: MatCreateMPISBAIJWithArrays - creates a `MATMPISBAIJ` matrix using arrays that contain in standard CSR format for the local rows.
2857: Collective
2859: Input Parameters:
2860: + comm - MPI communicator
2861: . bs - the block size; only a block size of 1 is supported
2862: . m - number of local rows (Cannot be `PETSC_DECIDE`)
2863: . n - This value should be the same as the local size used in creating the
2864: x vector for the matrix-vector product $ y = Ax $. (or `PETSC_DECIDE` to have
2865: calculated if `N` is given) For square matrices `n` is almost always `m`.
2866: . M - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2867: . N - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2868: . i - row indices; that is, i[0] = 0 and i[row] = i[row-1] + the number of block elements in block row row-1 of the matrix
2869: . j - column indices
2870: - a - matrix values
2872: Output Parameter:
2873: . mat - the matrix
2875: Level: intermediate
2877: Notes:
2878: The `i`, `j`, and `a` arrays ARE copied by this routine into the internal format used by PETSc;
2879: thus you CANNOT change the matrix entries by changing the values of `a` after you have
2880: called this routine. Use `MatCreateMPIAIJWithSplitArrays()` to avoid needing to copy the arrays.
2882: The `i` and `j` indices are 0 based, and the entries of `i` are offsets into the local `j` array.
2884: .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`,
2885: `MATMPIAIJ`, `MatCreateAIJ()`, `MatCreateMPIAIJWithSplitArrays()`, `MatMPISBAIJSetPreallocationCSR()`
2886: @*/
2887: PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, const PetscInt i[], const PetscInt j[], const PetscScalar a[], Mat *mat)
2888: {
2889: PetscFunctionBegin;
2890: PetscCheck(!i[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0");
2891: PetscCheck(m >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "local number of rows (m) cannot be PETSC_DECIDE, or negative");
2892: PetscCall(MatCreate(comm, mat));
2893: PetscCall(MatSetSizes(*mat, m, n, M, N));
2894: PetscCall(MatSetType(*mat, MATMPISBAIJ));
2895: PetscCall(MatMPISBAIJSetPreallocationCSR(*mat, bs, i, j, a));
2896: PetscFunctionReturn(PETSC_SUCCESS);
2897: }
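/*
   A minimal usage sketch (not part of this file), assuming each rank owns two rows of a
   global tridiagonal matrix and supplies only the upper triangle of its rows in CSR form.
   The variable names (mloc, rstart, ia, ja, va, A) are illustrative assumptions; the
   routines are standard PETSc API.

     Mat         A;
     PetscMPIInt rank, size;
     PetscInt    mloc = 2, rstart, ia[3], ja[4], nz = 0;
     PetscScalar va[4];

     PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
     PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
     rstart = mloc * rank;                    // first global row owned by this rank
     ia[0]  = 0;
     for (PetscInt r = 0; r < mloc; r++) {    // upper triangle only: diagonal and super-diagonal
       ja[nz] = rstart + r;
       va[nz++] = 2.0;
       if (rstart + r + 1 < mloc * size) {    // skip the super-diagonal on the last global row
         ja[nz] = rstart + r + 1;
         va[nz++] = -1.0;
       }
       ia[r + 1] = nz;
     }
     PetscCall(MatCreateMPISBAIJWithArrays(PETSC_COMM_WORLD, 1, mloc, mloc, PETSC_DETERMINE, PETSC_DETERMINE, ia, ja, va, &A));
     // ... use A ...
     PetscCall(MatDestroy(&A));
*/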
2899: /*@
2900: MatMPISBAIJSetPreallocationCSR - Creates a sparse parallel matrix in `MATMPISBAIJ` format using the given nonzero structure and (optional) numerical values
2902: Collective
2904: Input Parameters:
2905: + B - the matrix
2906: . bs - the block size
2907: . i - the indices into `j` for the start of each local row (indices start with zero)
2908: . j - the column indices for each local row (indices start with zero); these must be sorted for each row
2909: - v - optional values in the matrix, pass `NULL` if not provided
2911: Level: advanced
2913: Notes:
2914: The `i`, `j`, and `v` arrays ARE copied by this routine into the internal format used by PETSc;
2915: thus you CANNOT change the matrix entries by changing the values of `v` after you have
2916: called this routine.
2918: Though this routine has Preallocation() in the name, it also sets the exact nonzero locations of the matrix entries,
2919: and usually the numerical values as well.
2921: Any entries passed in that are below the diagonal are ignored.
2923: .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatCreateAIJ()`, `MATMPIAIJ`,
2924: `MatCreateMPISBAIJWithArrays()`
2925: @*/
2926: PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat B, PetscInt bs, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
2927: {
2928: PetscFunctionBegin;
2929: PetscTryMethod(B, "MatMPISBAIJSetPreallocationCSR_C", (Mat, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, bs, i, j, v));
2930: PetscFunctionReturn(PETSC_SUCCESS);
2931: }
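/*
   A minimal usage sketch (not part of this file): the CSR arrays fix the (upper-triangular)
   nonzero pattern first, and the numerical values are inserted afterwards with MatSetValues().
   The names B, mloc, ia, ja, row, ncols, cols, and vals are illustrative assumptions; the
   calls are standard PETSc API.

     Mat B;
     PetscCall(MatCreate(PETSC_COMM_WORLD, &B));
     PetscCall(MatSetSizes(B, mloc, mloc, PETSC_DETERMINE, PETSC_DETERMINE));
     PetscCall(MatSetType(B, MATMPISBAIJ));
     // pass NULL for the values: only the nonzero pattern is set here
     PetscCall(MatMPISBAIJSetPreallocationCSR(B, 1, ia, ja, NULL));
     // ... later, fill the entries; values below the diagonal are ignored ...
     PetscCall(MatSetValues(B, 1, &row, ncols, cols, vals, INSERT_VALUES));
     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
*/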
2933: PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
2934: {
2935: PetscInt m, N, i, rstart, nnz, Ii, bs, cbs;
2936: PetscInt *indx;
2937: PetscScalar *values;
2939: PetscFunctionBegin;
2940: PetscCall(MatGetSize(inmat, &m, &N));
2941: if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
2942: Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)inmat->data;
2943: PetscInt *dnz, *onz, mbs, Nbs, nbs;
2944: PetscInt *bindx, rmax = a->rmax, j;
2945: PetscMPIInt rank, size;
2947: PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2948: mbs = m / bs;
2949: Nbs = N / cbs;
2950: if (n == PETSC_DECIDE) PetscCall(PetscSplitOwnershipBlock(comm, cbs, &n, &N));
2951: nbs = n / cbs;
2953: PetscCall(PetscMalloc1(rmax, &bindx));
2954: MatPreallocateBegin(comm, mbs, nbs, dnz, onz); /* inline function, output __end and __rstart are used below */
2956: PetscCallMPI(MPI_Comm_rank(comm, &rank));
2957: PetscCallMPI(MPI_Comm_size(comm, &size));
2958: if (rank == size - 1) {
2959: /* Check sum(nbs) = Nbs */
2960: PetscCheck(__end == Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Sum of local block columns %" PetscInt_FMT " != global block columns %" PetscInt_FMT, __end, Nbs);
2961: }
2963: rstart = __rstart; /* block rstart of *outmat; see inline function MatPreallocateBegin */
2964: PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2965: for (i = 0; i < mbs; i++) {
2966: PetscCall(MatGetRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL)); /* non-blocked nnz and indx */
2967: nnz = nnz / bs;
2968: for (j = 0; j < nnz; j++) bindx[j] = indx[j * bs] / bs;
2969: PetscCall(MatPreallocateSet(i + rstart, nnz, bindx, dnz, onz));
2970: PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL));
2971: }
2972: PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2973: PetscCall(PetscFree(bindx));
2975: PetscCall(MatCreate(comm, outmat));
2976: PetscCall(MatSetSizes(*outmat, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
2977: PetscCall(MatSetBlockSizes(*outmat, bs, cbs));
2978: PetscCall(MatSetType(*outmat, MATSBAIJ));
2979: PetscCall(MatSeqSBAIJSetPreallocation(*outmat, bs, 0, dnz));
2980: PetscCall(MatMPISBAIJSetPreallocation(*outmat, bs, 0, dnz, 0, onz));
2981: MatPreallocateEnd(dnz, onz);
2982: }
2984: /* numeric phase */
2985: PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2986: PetscCall(MatGetOwnershipRange(*outmat, &rstart, NULL));
2988: PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2989: for (i = 0; i < m; i++) {
2990: PetscCall(MatGetRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2991: Ii = i + rstart;
2992: PetscCall(MatSetValues(*outmat, 1, &Ii, nnz, indx, values, INSERT_VALUES));
2993: PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2994: }
2995: PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2996: PetscCall(MatAssemblyBegin(*outmat, MAT_FINAL_ASSEMBLY));
2997: PetscCall(MatAssemblyEnd(*outmat, MAT_FINAL_ASSEMBLY));
2998: PetscFunctionReturn(PETSC_SUCCESS);
2999: }