Actual source code: mpisbaij.c

  1: #include <../src/mat/impls/baij/mpi/mpibaij.h>
  2: #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
  3: #include <../src/mat/impls/sbaij/seq/sbaij.h>
  4: #include <petscblaslapack.h>

  6: static PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
  7: {
  8:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;

 10:   PetscFunctionBegin;
 11:   PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ",Cols=%" PetscInt_FMT, mat->rmap->N, mat->cmap->N));
 12:   PetscCall(MatStashDestroy_Private(&mat->stash));
 13:   PetscCall(MatStashDestroy_Private(&mat->bstash));
 14:   PetscCall(MatDestroy(&baij->A));
 15:   PetscCall(MatDestroy(&baij->B));
 16: #if defined(PETSC_USE_CTABLE)
 17:   PetscCall(PetscHMapIDestroy(&baij->colmap));
 18: #else
 19:   PetscCall(PetscFree(baij->colmap));
 20: #endif
 21:   PetscCall(PetscFree(baij->garray));
 22:   PetscCall(VecDestroy(&baij->lvec));
 23:   PetscCall(VecScatterDestroy(&baij->Mvctx));
 24:   PetscCall(VecDestroy(&baij->slvec0));
 25:   PetscCall(VecDestroy(&baij->slvec0b));
 26:   PetscCall(VecDestroy(&baij->slvec1));
 27:   PetscCall(VecDestroy(&baij->slvec1a));
 28:   PetscCall(VecDestroy(&baij->slvec1b));
 29:   PetscCall(VecScatterDestroy(&baij->sMvctx));
 30:   PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));
 31:   PetscCall(PetscFree(baij->barray));
 32:   PetscCall(PetscFree(baij->hd));
 33:   PetscCall(VecDestroy(&baij->diag));
 34:   PetscCall(VecDestroy(&baij->bb1));
 35:   PetscCall(VecDestroy(&baij->xx1));
 36: #if defined(PETSC_USE_REAL_MAT_SINGLE)
 37:   PetscCall(PetscFree(baij->setvaluescopy));
 38: #endif
 39:   PetscCall(PetscFree(baij->in_loc));
 40:   PetscCall(PetscFree(baij->v_loc));
 41:   PetscCall(PetscFree(baij->rangebs));
 42:   PetscCall(PetscFree(mat->data));

 44:   PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
 45:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatStoreValues_C", NULL));
 46:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatRetrieveValues_C", NULL));
 47:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocation_C", NULL));
 48:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocationCSR_C", NULL));
 49: #if defined(PETSC_HAVE_ELEMENTAL)
 50:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_elemental_C", NULL));
 51: #endif
 52: #if defined(PETSC_HAVE_SCALAPACK)
 53:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_scalapack_C", NULL));
 54: #endif
 55:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpiaij_C", NULL));
 56:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpibaij_C", NULL));
 57:   PetscFunctionReturn(PETSC_SUCCESS);
 58: }

 60: /* defines MatSetValues_MPI_Hash(), MatAssemblyBegin_MPI_Hash(), MatAssemblyEnd_MPI_Hash(), MatSetUp_MPI_Hash() */
 61: #define TYPE SBAIJ
 62: #define TYPE_SBAIJ
 63: #include "../src/mat/impls/aij/mpi/mpihashmat.h"
 64: #undef TYPE
 65: #undef TYPE_SBAIJ
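
/*
   Illustrative note (not part of the original source): the include above is a
   "parameterized header" idiom.  The shared implementation in mpihashmat.h can branch
   on the TYPE macros, e.g. roughly

     #if defined(TYPE_SBAIJ)
       if (row > col) continue;   // a symmetric format keeps only the upper triangle
     #endif

   so one body of hash-based assembly code can be instantiated for the different
   MPI matrix formats.
*/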

 67: #if defined(PETSC_HAVE_ELEMENTAL)
 68: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat, MatType, MatReuse, Mat *);
 69: #endif
 70: #if defined(PETSC_HAVE_SCALAPACK)
 71: PETSC_INTERN PetscErrorCode MatConvert_SBAIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
 72: #endif

 74: /* This could be moved to matimpl.h */
 75: static PetscErrorCode MatPreallocateWithMats_Private(Mat B, PetscInt nm, Mat X[], PetscBool symm[], PetscBool fill)
 76: {
 77:   Mat       preallocator;
 78:   PetscInt  r, rstart, rend;
 79:   PetscInt  bs, i, m, n, M, N;
 80:   PetscBool cong = PETSC_TRUE;

 82:   PetscFunctionBegin;
 85:   for (i = 0; i < nm; i++) {
 87:     PetscCall(PetscLayoutCompare(B->rmap, X[i]->rmap, &cong));
 88:     PetscCheck(cong, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for different layouts");
 89:   }
 91:   PetscCall(MatGetBlockSize(B, &bs));
 92:   PetscCall(MatGetSize(B, &M, &N));
 93:   PetscCall(MatGetLocalSize(B, &m, &n));
 94:   PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &preallocator));
 95:   PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
 96:   PetscCall(MatSetBlockSize(preallocator, bs));
 97:   PetscCall(MatSetSizes(preallocator, m, n, M, N));
 98:   PetscCall(MatSetUp(preallocator));
 99:   PetscCall(MatGetOwnershipRange(preallocator, &rstart, &rend));
100:   for (r = rstart; r < rend; ++r) {
101:     PetscInt           ncols;
102:     const PetscInt    *row;
103:     const PetscScalar *vals;

105:     for (i = 0; i < nm; i++) {
106:       PetscCall(MatGetRow(X[i], r, &ncols, &row, &vals));
107:       PetscCall(MatSetValues(preallocator, 1, &r, ncols, row, vals, INSERT_VALUES));
108:       if (symm && symm[i]) PetscCall(MatSetValues(preallocator, ncols, row, 1, &r, vals, INSERT_VALUES));
109:       PetscCall(MatRestoreRow(X[i], r, &ncols, &row, &vals));
110:     }
111:   }
112:   PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
113:   PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
114:   PetscCall(MatPreallocatorPreallocate(preallocator, fill, B));
115:   PetscCall(MatDestroy(&preallocator));
116:   PetscFunctionReturn(PETSC_SUCCESS);
117: }
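
/*
   Hedged usage sketch (not part of the original source): the same MATPREALLOCATOR
   two-pass idiom as above, written from user code; comm, m, n, M, N, B and the helper
   FillPattern() are placeholders.

     Mat prealloc;
     PetscCall(MatCreate(comm, &prealloc));
     PetscCall(MatSetType(prealloc, MATPREALLOCATOR));
     PetscCall(MatSetSizes(prealloc, m, n, M, N));
     PetscCall(MatSetUp(prealloc));
     PetscCall(FillPattern(prealloc));                               // MatSetValues() with the sparsity pattern
     PetscCall(MatAssemblyBegin(prealloc, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(prealloc, MAT_FINAL_ASSEMBLY));
     PetscCall(MatPreallocatorPreallocate(prealloc, PETSC_TRUE, B)); // preallocate B; PETSC_TRUE also inserts zeros
     PetscCall(MatDestroy(&prealloc));
*/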

119: PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Basic(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
120: {
121:   Mat      B;
122:   PetscInt r;

124:   PetscFunctionBegin;
125:   if (reuse != MAT_REUSE_MATRIX) {
126:     PetscBool symm = PETSC_TRUE, isdense;
127:     PetscInt  bs;

129:     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
130:     PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
131:     PetscCall(MatSetType(B, newtype));
132:     PetscCall(MatGetBlockSize(A, &bs));
133:     PetscCall(MatSetBlockSize(B, bs));
134:     PetscCall(PetscLayoutSetUp(B->rmap));
135:     PetscCall(PetscLayoutSetUp(B->cmap));
136:     PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, ""));
137:     if (!isdense) {
138:       PetscCall(MatGetRowUpperTriangular(A));
139:       PetscCall(MatPreallocateWithMats_Private(B, 1, &A, &symm, PETSC_TRUE));
140:       PetscCall(MatRestoreRowUpperTriangular(A));
141:     } else {
142:       PetscCall(MatSetUp(B));
143:     }
144:   } else {
145:     B = *newmat;
146:     PetscCall(MatZeroEntries(B));
147:   }

149:   PetscCall(MatGetRowUpperTriangular(A));
150:   for (r = A->rmap->rstart; r < A->rmap->rend; r++) {
151:     PetscInt           ncols;
152:     const PetscInt    *row;
153:     const PetscScalar *vals;

155:     PetscCall(MatGetRow(A, r, &ncols, &row, &vals));
156:     PetscCall(MatSetValues(B, 1, &r, ncols, row, vals, INSERT_VALUES));
157: #if defined(PETSC_USE_COMPLEX)
158:     if (A->hermitian == PETSC_BOOL3_TRUE) {
159:       PetscInt i;
160:       for (i = 0; i < ncols; i++) PetscCall(MatSetValue(B, row[i], r, PetscConj(vals[i]), INSERT_VALUES));
161:     } else {
162:       PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
163:     }
164: #else
165:     PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
166: #endif
167:     PetscCall(MatRestoreRow(A, r, &ncols, &row, &vals));
168:   }
169:   PetscCall(MatRestoreRowUpperTriangular(A));
170:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
171:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

173:   if (reuse == MAT_INPLACE_MATRIX) {
174:     PetscCall(MatHeaderReplace(A, &B));
175:   } else {
176:     *newmat = B;
177:   }
178:   PetscFunctionReturn(PETSC_SUCCESS);
179: }
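
/*
   Hedged usage sketch (not part of the original source): from user code this fallback
   conversion is typically reached through MatConvert(), e.g.

     Mat Aij;
     PetscCall(MatConvert(A, MATMPIAIJ, MAT_INITIAL_MATRIX, &Aij));  // A is MPISBAIJ; Aij gets both triangles
     PetscCall(MatDestroy(&Aij));

   Because only the (block) upper triangle of A is stored, the loop above regenerates the
   lower triangle row by row (conjugated when the matrix is marked Hermitian).
*/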

181: static PetscErrorCode MatStoreValues_MPISBAIJ(Mat mat)
182: {
183:   Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;

185:   PetscFunctionBegin;
186:   PetscCall(MatStoreValues(aij->A));
187:   PetscCall(MatStoreValues(aij->B));
188:   PetscFunctionReturn(PETSC_SUCCESS);
189: }

191: static PetscErrorCode MatRetrieveValues_MPISBAIJ(Mat mat)
192: {
193:   Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;

195:   PetscFunctionBegin;
196:   PetscCall(MatRetrieveValues(aij->A));
197:   PetscCall(MatRetrieveValues(aij->B));
198:   PetscFunctionReturn(PETSC_SUCCESS);
199: }

201: #define MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, orow, ocol) \
202:   do { \
203:     brow = row / bs; \
204:     rp   = aj + ai[brow]; \
205:     ap   = aa + bs2 * ai[brow]; \
206:     rmax = aimax[brow]; \
207:     nrow = ailen[brow]; \
208:     bcol = col / bs; \
209:     ridx = row % bs; \
210:     cidx = col % bs; \
211:     low  = 0; \
212:     high = nrow; \
213:     while (high - low > 3) { \
214:       t = (low + high) / 2; \
215:       if (rp[t] > bcol) high = t; \
216:       else low = t; \
217:     } \
218:     for (_i = low; _i < high; _i++) { \
219:       if (rp[_i] > bcol) break; \
220:       if (rp[_i] == bcol) { \
221:         bap = ap + bs2 * _i + bs * cidx + ridx; \
222:         if (addv == ADD_VALUES) *bap += value; \
223:         else *bap = value; \
224:         goto a_noinsert; \
225:       } \
226:     } \
227:     if (a->nonew == 1) goto a_noinsert; \
228:     PetscCheck(a->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
229:     MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, brow, bcol, rmax, aa, ai, aj, rp, ap, aimax, a->nonew, MatScalar); \
230:     N = nrow++ - 1; \
231:     /* shift up all the later entries in this row */ \
232:     PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
233:     PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
234:     PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
235:     rp[_i]                          = bcol; \
236:     ap[bs2 * _i + bs * cidx + ridx] = value; \
237:   a_noinsert:; \
238:     ailen[brow] = nrow; \
239:   } while (0)

241: #define MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, orow, ocol) \
242:   do { \
243:     brow = row / bs; \
244:     rp   = bj + bi[brow]; \
245:     ap   = ba + bs2 * bi[brow]; \
246:     rmax = bimax[brow]; \
247:     nrow = bilen[brow]; \
248:     bcol = col / bs; \
249:     ridx = row % bs; \
250:     cidx = col % bs; \
251:     low  = 0; \
252:     high = nrow; \
253:     while (high - low > 3) { \
254:       t = (low + high) / 2; \
255:       if (rp[t] > bcol) high = t; \
256:       else low = t; \
257:     } \
258:     for (_i = low; _i < high; _i++) { \
259:       if (rp[_i] > bcol) break; \
260:       if (rp[_i] == bcol) { \
261:         bap = ap + bs2 * _i + bs * cidx + ridx; \
262:         if (addv == ADD_VALUES) *bap += value; \
263:         else *bap = value; \
264:         goto b_noinsert; \
265:       } \
266:     } \
267:     if (b->nonew == 1) goto b_noinsert; \
268:     PetscCheck(b->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
269:     MatSeqXAIJReallocateAIJ(B, b->mbs, bs2, nrow, brow, bcol, rmax, ba, bi, bj, rp, ap, bimax, b->nonew, MatScalar); \
270:     N = nrow++ - 1; \
271:     /* shift up all the later entries in this row */ \
272:     PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
273:     PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
274:     PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
275:     rp[_i]                          = bcol; \
276:     ap[bs2 * _i + bs * cidx + ridx] = value; \
277:   b_noinsert:; \
278:     bilen[brow] = nrow; \
279:   } while (0)
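
/*
   Descriptive sketch (not used by this file): both macros above perform the same
   "search, then insert with shift" step on one sorted block row.  In simplified scalar
   form, with hypothetical arrays cols[]/vals[] of current length n and enough capacity:

     PetscInt i = 0;
     while (i < n && cols[i] < col) i++;                            // locate the insertion point
     if (i < n && cols[i] == col) vals[i] += v;                     // existing entry: just update
     else {
       PetscCall(PetscArraymove(cols + i + 1, cols + i, n - i));    // shift trailing entries up by one
       PetscCall(PetscArraymove(vals + i + 1, vals + i, n - i));
       cols[i] = col; vals[i] = v; n++;                             // write the new entry
     }

   The real macros operate on bs*bs blocks, start with a short binary search, and grow
   the row with MatSeqXAIJReallocateAIJ() when it has no free slot.
*/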

281: /* Only add/insert a(i,j) with i<=j (blocks).
282:    Any a(i,j) with i>j input by the user is either ignored (when MAT_IGNORE_LOWER_TRIANGULAR is set) or generates an error
283: */
284: static PetscErrorCode MatSetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
285: {
286:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
287:   MatScalar     value;
288:   PetscBool     roworiented = baij->roworiented;
289:   PetscInt      i, j, row, col;
290:   PetscInt      rstart_orig = mat->rmap->rstart;
291:   PetscInt      rend_orig = mat->rmap->rend, cstart_orig = mat->cmap->rstart;
292:   PetscInt      cend_orig = mat->cmap->rend, bs = mat->rmap->bs;

294:   /* Some variables required by the macros below */
295:   Mat           A     = baij->A;
296:   Mat_SeqSBAIJ *a     = (Mat_SeqSBAIJ *)A->data;
297:   PetscInt     *aimax = a->imax, *ai = a->i, *ailen = a->ilen, *aj = a->j;
298:   MatScalar    *aa = a->a;

300:   Mat          B     = baij->B;
301:   Mat_SeqBAIJ *b     = (Mat_SeqBAIJ *)B->data;
302:   PetscInt    *bimax = b->imax, *bi = b->i, *bilen = b->ilen, *bj = b->j;
303:   MatScalar   *ba = b->a;

305:   PetscInt  *rp, ii, nrow, _i, rmax, N, brow, bcol;
306:   PetscInt   low, high, t, ridx, cidx, bs2 = a->bs2;
307:   MatScalar *ap, *bap;

309:   /* for stash */
310:   PetscInt   n_loc, *in_loc = NULL;
311:   MatScalar *v_loc = NULL;

313:   PetscFunctionBegin;
314:   if (!baij->donotstash) {
315:     if (n > baij->n_loc) {
316:       PetscCall(PetscFree(baij->in_loc));
317:       PetscCall(PetscFree(baij->v_loc));
318:       PetscCall(PetscMalloc1(n, &baij->in_loc));
319:       PetscCall(PetscMalloc1(n, &baij->v_loc));

321:       baij->n_loc = n;
322:     }
323:     in_loc = baij->in_loc;
324:     v_loc  = baij->v_loc;
325:   }

327:   for (i = 0; i < m; i++) {
328:     if (im[i] < 0) continue;
329:     PetscCheck(im[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], mat->rmap->N - 1);
330:     if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
331:       row = im[i] - rstart_orig;                     /* local row index */
332:       for (j = 0; j < n; j++) {
333:         if (im[i] / bs > in[j] / bs) {
334:           if (a->ignore_ltriangular) {
335:             continue; /* ignore lower triangular blocks */
336:           } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
337:         }
338:         if (in[j] >= cstart_orig && in[j] < cend_orig) { /* diag entry (A) */
339:           col  = in[j] - cstart_orig;                    /* local col index */
340:           brow = row / bs;
341:           bcol = col / bs;
342:           if (brow > bcol) continue; /* ignore lower triangular blocks of A */
343:           if (roworiented) value = v[i * n + j];
344:           else value = v[i + j * m];
345:           MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, im[i], in[j]);
346:           /* PetscCall(MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv)); */
347:         } else if (in[j] < 0) {
348:           continue;
349:         } else {
350:           PetscCheck(in[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[j], mat->cmap->N - 1);
351:           /* off-diag entry (B) */
352:           if (mat->was_assembled) {
353:             if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
354: #if defined(PETSC_USE_CTABLE)
355:             PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] / bs + 1, 0, &col));
356:             col = col - 1;
357: #else
358:             col = baij->colmap[in[j] / bs] - 1;
359: #endif
360:             if (col < 0 && !((Mat_SeqSBAIJ *)baij->A->data)->nonew) {
361:               PetscCall(MatDisAssemble_MPISBAIJ(mat));
362:               col = in[j];
363:               /* Reinitialize the variables required by MatSetValues_SeqSBAIJ_B_Private() */
364:               B     = baij->B;
365:               b     = (Mat_SeqBAIJ *)B->data;
366:               bimax = b->imax;
367:               bi    = b->i;
368:               bilen = b->ilen;
369:               bj    = b->j;
370:               ba    = b->a;
371:             } else col += in[j] % bs;
372:           } else col = in[j];
373:           if (roworiented) value = v[i * n + j];
374:           else value = v[i + j * m];
375:           MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, im[i], in[j]);
376:           /* PetscCall(MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv)); */
377:         }
378:       }
379:     } else { /* off processor entry */
380:       PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
381:       if (!baij->donotstash) {
382:         mat->assembled = PETSC_FALSE;
383:         n_loc          = 0;
384:         for (j = 0; j < n; j++) {
385:           if (im[i] / bs > in[j] / bs) continue; /* ignore lower triangular blocks */
386:           in_loc[n_loc] = in[j];
387:           if (roworiented) {
388:             v_loc[n_loc] = v[i * n + j];
389:           } else {
390:             v_loc[n_loc] = v[j * m + i];
391:           }
392:           n_loc++;
393:         }
394:         PetscCall(MatStashValuesRow_Private(&mat->stash, im[i], n_loc, in_loc, v_loc, PETSC_FALSE));
395:       }
396:     }
397:   }
398:   PetscFunctionReturn(PETSC_SUCCESS);
399: }
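
/*
   Hedged usage sketch (not part of the original source): callers are expected to set
   only entries with (block) row <= (block) column; i, rstart, rend and N are placeholders.

     PetscCall(MatSetOption(mat, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE));   // silently drop i > j input
     for (i = rstart; i < rend; i++) {
       PetscCall(MatSetValue(mat, i, i, 2.0, INSERT_VALUES));
       if (i + 1 < N) PetscCall(MatSetValue(mat, i, i + 1, -1.0, INSERT_VALUES));
     }

   Without that option, any value in the strict (block) lower triangle triggers the
   PETSC_ERR_USER error above.
*/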

401: static inline PetscErrorCode MatSetValuesBlocked_SeqSBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
402: {
403:   Mat_SeqSBAIJ      *a = (Mat_SeqSBAIJ *)A->data;
404:   PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
405:   PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
406:   PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
407:   PetscBool          roworiented = a->roworiented;
408:   const PetscScalar *value       = v;
409:   MatScalar         *ap, *aa = a->a, *bap;

411:   PetscFunctionBegin;
412:   if (col < row) {
413:     if (a->ignore_ltriangular) PetscFunctionReturn(PETSC_SUCCESS); /* ignore lower triangular block */
414:     else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
415:   }
416:   rp    = aj + ai[row];
417:   ap    = aa + bs2 * ai[row];
418:   rmax  = imax[row];
419:   nrow  = ailen[row];
420:   value = v;
421:   low   = 0;
422:   high  = nrow;

424:   while (high - low > 7) {
425:     t = (low + high) / 2;
426:     if (rp[t] > col) high = t;
427:     else low = t;
428:   }
429:   for (i = low; i < high; i++) {
430:     if (rp[i] > col) break;
431:     if (rp[i] == col) {
432:       bap = ap + bs2 * i;
433:       if (roworiented) {
434:         if (is == ADD_VALUES) {
435:           for (ii = 0; ii < bs; ii++) {
436:             for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
437:           }
438:         } else {
439:           for (ii = 0; ii < bs; ii++) {
440:             for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
441:           }
442:         }
443:       } else {
444:         if (is == ADD_VALUES) {
445:           for (ii = 0; ii < bs; ii++) {
446:             for (jj = 0; jj < bs; jj++) *bap++ += *value++;
447:           }
448:         } else {
449:           for (ii = 0; ii < bs; ii++) {
450:             for (jj = 0; jj < bs; jj++) *bap++ = *value++;
451:           }
452:         }
453:       }
454:       goto noinsert2;
455:     }
456:   }
457:   if (nonew == 1) goto noinsert2;
458:   PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new block index nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
459:   MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
460:   N = nrow++ - 1;
461:   high++;
462:   /* shift up all the later entries in this row */
463:   PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
464:   PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
465:   rp[i] = col;
466:   bap   = ap + bs2 * i;
467:   if (roworiented) {
468:     for (ii = 0; ii < bs; ii++) {
469:       for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
470:     }
471:   } else {
472:     for (ii = 0; ii < bs; ii++) {
473:       for (jj = 0; jj < bs; jj++) *bap++ = *value++;
474:     }
475:   }
476: noinsert2:;
477:   ailen[row] = nrow;
478:   PetscFunctionReturn(PETSC_SUCCESS);
479: }

481: /*
482:    This routine is exactly duplicated in mpibaij.c
483: */
484: static inline PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
485: {
486:   Mat_SeqBAIJ       *a = (Mat_SeqBAIJ *)A->data;
487:   PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
488:   PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
489:   PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
490:   PetscBool          roworiented = a->roworiented;
491:   const PetscScalar *value       = v;
492:   MatScalar         *ap, *aa = a->a, *bap;

494:   PetscFunctionBegin;
495:   rp    = aj + ai[row];
496:   ap    = aa + bs2 * ai[row];
497:   rmax  = imax[row];
498:   nrow  = ailen[row];
499:   low   = 0;
500:   high  = nrow;
501:   value = v;
502:   while (high - low > 7) {
503:     t = (low + high) / 2;
504:     if (rp[t] > col) high = t;
505:     else low = t;
506:   }
507:   for (i = low; i < high; i++) {
508:     if (rp[i] > col) break;
509:     if (rp[i] == col) {
510:       bap = ap + bs2 * i;
511:       if (roworiented) {
512:         if (is == ADD_VALUES) {
513:           for (ii = 0; ii < bs; ii++) {
514:             for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
515:           }
516:         } else {
517:           for (ii = 0; ii < bs; ii++) {
518:             for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
519:           }
520:         }
521:       } else {
522:         if (is == ADD_VALUES) {
523:           for (ii = 0; ii < bs; ii++, value += bs) {
524:             for (jj = 0; jj < bs; jj++) bap[jj] += value[jj];
525:             bap += bs;
526:           }
527:         } else {
528:           for (ii = 0; ii < bs; ii++, value += bs) {
529:             for (jj = 0; jj < bs; jj++) bap[jj] = value[jj];
530:             bap += bs;
531:           }
532:         }
533:       }
534:       goto noinsert2;
535:     }
536:   }
537:   if (nonew == 1) goto noinsert2;
538:   PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new global block indexed nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
539:   MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
540:   N = nrow++ - 1;
541:   high++;
542:   /* shift up all the later entries in this row */
543:   PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
544:   PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
545:   rp[i] = col;
546:   bap   = ap + bs2 * i;
547:   if (roworiented) {
548:     for (ii = 0; ii < bs; ii++) {
549:       for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
550:     }
551:   } else {
552:     for (ii = 0; ii < bs; ii++) {
553:       for (jj = 0; jj < bs; jj++) *bap++ = *value++;
554:     }
555:   }
556: noinsert2:;
557:   ailen[row] = nrow;
558:   PetscFunctionReturn(PETSC_SUCCESS);
559: }

561: /*
562:     This routine could be optimized by removing the need for the block copy below and passing stride information
563:   to the above inline routines; similarly in MatSetValuesBlocked_MPIBAIJ()
564: */
565: static PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const MatScalar v[], InsertMode addv)
566: {
567:   Mat_MPISBAIJ    *baij = (Mat_MPISBAIJ *)mat->data;
568:   const MatScalar *value;
569:   MatScalar       *barray      = baij->barray;
570:   PetscBool        roworiented = baij->roworiented, ignore_ltriangular = ((Mat_SeqSBAIJ *)baij->A->data)->ignore_ltriangular;
571:   PetscInt         i, j, ii, jj, row, col, rstart = baij->rstartbs;
572:   PetscInt         rend = baij->rendbs, cstart = baij->cstartbs, stepval;
573:   PetscInt         cend = baij->cendbs, bs = mat->rmap->bs, bs2 = baij->bs2;

575:   PetscFunctionBegin;
576:   if (!barray) {
577:     PetscCall(PetscMalloc1(bs2, &barray));
578:     baij->barray = barray;
579:   }

581:   if (roworiented) {
582:     stepval = (n - 1) * bs;
583:   } else {
584:     stepval = (m - 1) * bs;
585:   }
586:   for (i = 0; i < m; i++) {
587:     if (im[i] < 0) continue;
588:     PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed row too large %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
589:     if (im[i] >= rstart && im[i] < rend) {
590:       row = im[i] - rstart;
591:       for (j = 0; j < n; j++) {
592:         if (im[i] > in[j]) {
593:           if (ignore_ltriangular) continue; /* ignore lower triangular blocks */
594:           else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
595:         }
596:         /* If only one block column (row-oriented) or one block row (column-oriented) is set, a copy is not required */
597:         if ((roworiented) && (n == 1)) {
598:           barray = (MatScalar *)v + i * bs2;
599:         } else if ((!roworiented) && (m == 1)) {
600:           barray = (MatScalar *)v + j * bs2;
601:         } else { /* Here a copy is required */
602:           if (roworiented) {
603:             value = v + i * (stepval + bs) * bs + j * bs;
604:           } else {
605:             value = v + j * (stepval + bs) * bs + i * bs;
606:           }
607:           for (ii = 0; ii < bs; ii++, value += stepval) {
608:             for (jj = 0; jj < bs; jj++) *barray++ = *value++;
609:           }
610:           barray -= bs2;
611:         }

613:         if (in[j] >= cstart && in[j] < cend) {
614:           col = in[j] - cstart;
615:           PetscCall(MatSetValuesBlocked_SeqSBAIJ_Inlined(baij->A, row, col, barray, addv, im[i], in[j]));
616:         } else if (in[j] < 0) {
617:           continue;
618:         } else {
619:           PetscCheck(in[j] < baij->Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed column too large %" PetscInt_FMT " max %" PetscInt_FMT, in[j], baij->Nbs - 1);
620:           if (mat->was_assembled) {
621:             if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));

623: #if defined(PETSC_USE_CTABLE)
624:             PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col));
625:             col = col < 1 ? -1 : (col - 1) / bs;
626: #else
627:             col = baij->colmap[in[j]] < 1 ? -1 : (baij->colmap[in[j]] - 1) / bs;
628: #endif
629:             if (col < 0 && !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
630:               PetscCall(MatDisAssemble_MPISBAIJ(mat));
631:               col = in[j];
632:             }
633:           } else col = in[j];
634:           PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B, row, col, barray, addv, im[i], in[j]));
635:         }
636:       }
637:     } else {
638:       PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process block indexed row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
639:       if (!baij->donotstash) {
640:         if (roworiented) {
641:           PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
642:         } else {
643:           PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
644:         }
645:       }
646:     }
647:   }
648:   PetscFunctionReturn(PETSC_SUCCESS);
649: }
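
/*
   Hedged usage sketch (not part of the original source): one bs x bs block inserted by
   block indices, here with bs = 2 and column-oriented input; ib and jb are hypothetical
   block indices with ib <= jb.

     PetscInt    ib = 0, jb = 1;                     // hypothetical block row/column indices
     PetscScalar vblock[4] = {4.0, 1.0, 1.0, 4.0};   // the block, stored by columns
     PetscCall(MatSetOption(mat, MAT_ROW_ORIENTED, PETSC_FALSE));
     PetscCall(MatSetValuesBlocked(mat, 1, &ib, 1, &jb, vblock, ADD_VALUES));
*/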

651: static PetscErrorCode MatGetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
652: {
653:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
654:   PetscInt      bs = mat->rmap->bs, i, j, bsrstart = mat->rmap->rstart, bsrend = mat->rmap->rend;
655:   PetscInt      bscstart = mat->cmap->rstart, bscend = mat->cmap->rend, row, col, data;

657:   PetscFunctionBegin;
658:   for (i = 0; i < m; i++) {
659:     if (idxm[i] < 0) continue; /* negative row */
660:     PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
661:     if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
662:       row = idxm[i] - bsrstart;
663:       for (j = 0; j < n; j++) {
664:         if (idxn[j] < 0) continue; /* negative column */
665:         PetscCheck(idxn[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, idxn[j], mat->cmap->N - 1);
666:         if (idxn[j] >= bscstart && idxn[j] < bscend) {
667:           col = idxn[j] - bscstart;
668:           PetscCall(MatGetValues_SeqSBAIJ(baij->A, 1, &row, 1, &col, v + i * n + j));
669:         } else {
670:           if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
671: #if defined(PETSC_USE_CTABLE)
672:           PetscCall(PetscHMapIGetWithDefault(baij->colmap, idxn[j] / bs + 1, 0, &data));
673:           data--;
674: #else
675:           data = baij->colmap[idxn[j] / bs] - 1;
676: #endif
677:           if ((data < 0) || (baij->garray[data / bs] != idxn[j] / bs)) *(v + i * n + j) = 0.0;
678:           else {
679:             col = data + idxn[j] % bs;
680:             PetscCall(MatGetValues_SeqBAIJ(baij->B, 1, &row, 1, &col, v + i * n + j));
681:           }
682:         }
683:       }
684:     } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local values currently supported");
685:   }
686:   PetscFunctionReturn(PETSC_SUCCESS);
687: }
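
/*
   Hedged usage sketch (not part of the original source): MatGetValues() on this type can
   only return entries from locally owned rows (see the error above); rstart is a placeholder.

     PetscInt    row = rstart, col = rstart;   // hypothetical locally owned indices
     PetscScalar val;
     PetscCall(MatGetValues(mat, 1, &row, 1, &col, &val));
*/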

689: static PetscErrorCode MatNorm_MPISBAIJ(Mat mat, NormType type, PetscReal *norm)
690: {
691:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
692:   PetscReal     sum[2], *lnorm2;

694:   PetscFunctionBegin;
695:   if (baij->size == 1) {
696:     PetscCall(MatNorm(baij->A, type, norm));
697:   } else {
698:     if (type == NORM_FROBENIUS) {
699:       PetscCall(PetscMalloc1(2, &lnorm2));
700:       PetscCall(MatNorm(baij->A, type, lnorm2));
701:       *lnorm2 = (*lnorm2) * (*lnorm2);
702:       lnorm2++; /* square of norm(A) */
703:       PetscCall(MatNorm(baij->B, type, lnorm2));
704:       *lnorm2 = (*lnorm2) * (*lnorm2);
705:       lnorm2--; /* square of norm(B) */
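      /* only the upper triangle is stored: each off-diagonal-block entry in B also represents
         its symmetric counterpart, hence the 2*sum[1] term in the global Frobenius norm below */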
706:       PetscCallMPI(MPIU_Allreduce(lnorm2, sum, 2, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
707:       *norm = PetscSqrtReal(sum[0] + 2 * sum[1]);
708:       PetscCall(PetscFree(lnorm2));
709:     } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
710:       Mat_SeqSBAIJ *amat = (Mat_SeqSBAIJ *)baij->A->data;
711:       Mat_SeqBAIJ  *bmat = (Mat_SeqBAIJ *)baij->B->data;
712:       PetscReal    *rsum, *rsum2, vabs;
713:       PetscInt     *jj, *garray = baij->garray, rstart = baij->rstartbs, nz;
714:       PetscInt      brow, bcol, col, bs = baij->A->rmap->bs, row, grow, gcol, mbs = amat->mbs;
715:       MatScalar    *v;
716:       PetscMPIInt   iN;

718:       PetscCall(PetscMalloc2(mat->cmap->N, &rsum, mat->cmap->N, &rsum2));
719:       PetscCall(PetscArrayzero(rsum, mat->cmap->N));
720:       /* Amat */
721:       v  = amat->a;
722:       jj = amat->j;
723:       for (brow = 0; brow < mbs; brow++) {
724:         grow = bs * (rstart + brow);
725:         nz   = amat->i[brow + 1] - amat->i[brow];
726:         for (bcol = 0; bcol < nz; bcol++) {
727:           gcol = bs * (rstart + *jj);
728:           jj++;
729:           for (col = 0; col < bs; col++) {
730:             for (row = 0; row < bs; row++) {
731:               vabs = PetscAbsScalar(*v);
732:               v++;
733:               rsum[gcol + col] += vabs;
734:               /* non-diagonal block */
735:               if (bcol > 0 && vabs > 0.0) rsum[grow + row] += vabs;
736:             }
737:           }
738:         }
739:         PetscCall(PetscLogFlops(nz * bs * bs));
740:       }
741:       /* Bmat */
742:       v  = bmat->a;
743:       jj = bmat->j;
744:       for (brow = 0; brow < mbs; brow++) {
745:         grow = bs * (rstart + brow);
746:         nz   = bmat->i[brow + 1] - bmat->i[brow];
747:         for (bcol = 0; bcol < nz; bcol++) {
748:           gcol = bs * garray[*jj];
749:           jj++;
750:           for (col = 0; col < bs; col++) {
751:             for (row = 0; row < bs; row++) {
752:               vabs = PetscAbsScalar(*v);
753:               v++;
754:               rsum[gcol + col] += vabs;
755:               rsum[grow + row] += vabs;
756:             }
757:           }
758:         }
759:         PetscCall(PetscLogFlops(nz * bs * bs));
760:       }
761:       PetscCall(PetscMPIIntCast(mat->cmap->N, &iN));
762:       PetscCallMPI(MPIU_Allreduce(rsum, rsum2, iN, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
763:       *norm = 0.0;
764:       for (col = 0; col < mat->cmap->N; col++) {
765:         if (rsum2[col] > *norm) *norm = rsum2[col];
766:       }
767:       PetscCall(PetscFree2(rsum, rsum2));
768:     } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for this norm yet");
769:   }
770:   PetscFunctionReturn(PETSC_SUCCESS);
771: }

773: static PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat, MatAssemblyType mode)
774: {
775:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
776:   PetscInt      nstash, reallocs;

778:   PetscFunctionBegin;
779:   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);

781:   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
782:   PetscCall(MatStashScatterBegin_Private(mat, &mat->bstash, baij->rangebs));
783:   PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
784:   PetscCall(PetscInfo(mat, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
785:   PetscCall(MatStashGetInfo_Private(&mat->bstash, &nstash, &reallocs));
786:   PetscCall(PetscInfo(mat, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
787:   PetscFunctionReturn(PETSC_SUCCESS);
788: }

790: static PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat, MatAssemblyType mode)
791: {
792:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
793:   Mat_SeqSBAIJ *a    = (Mat_SeqSBAIJ *)baij->A->data;
794:   PetscInt      i, j, rstart, ncols, flg, bs2 = baij->bs2;
795:   PetscInt     *row, *col;
796:   PetscBool     other_disassembled;
797:   PetscMPIInt   n;
798:   PetscBool     r1, r2, r3;
799:   MatScalar    *val;

801:   /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
802:   PetscFunctionBegin;
803:   if (!baij->donotstash && !mat->nooffprocentries) {
804:     while (1) {
805:       PetscCall(MatStashScatterGetMesg_Private(&mat->stash, &n, &row, &col, &val, &flg));
806:       if (!flg) break;

808:       for (i = 0; i < n;) {
809:         /* Now identify the consecutive vals belonging to the same row */
810:         for (j = i, rstart = row[j]; j < n; j++) {
811:           if (row[j] != rstart) break;
812:         }
813:         if (j < n) ncols = j - i;
814:         else ncols = n - i;
815:         /* Now assemble all these values with a single function call */
816:         PetscCall(MatSetValues_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i, mat->insertmode));
817:         i = j;
818:       }
819:     }
820:     PetscCall(MatStashScatterEnd_Private(&mat->stash));
821:     /* Now process the block-stash. Since the values are stashed column-oriented,
822:        temporarily clear the row-oriented flags, and restore the original flags
823:        after MatSetValues() */
824:     r1 = baij->roworiented;
825:     r2 = a->roworiented;
826:     r3 = ((Mat_SeqBAIJ *)baij->B->data)->roworiented;

828:     baij->roworiented = PETSC_FALSE;
829:     a->roworiented    = PETSC_FALSE;

831:     ((Mat_SeqBAIJ *)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
832:     while (1) {
833:       PetscCall(MatStashScatterGetMesg_Private(&mat->bstash, &n, &row, &col, &val, &flg));
834:       if (!flg) break;

836:       for (i = 0; i < n;) {
837:         /* Now identify the consecutive vals belonging to the same row */
838:         for (j = i, rstart = row[j]; j < n; j++) {
839:           if (row[j] != rstart) break;
840:         }
841:         if (j < n) ncols = j - i;
842:         else ncols = n - i;
843:         PetscCall(MatSetValuesBlocked_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i * bs2, mat->insertmode));
844:         i = j;
845:       }
846:     }
847:     PetscCall(MatStashScatterEnd_Private(&mat->bstash));

849:     baij->roworiented = r1;
850:     a->roworiented    = r2;

852:     ((Mat_SeqBAIJ *)baij->B->data)->roworiented = r3; /* b->roworiented */
853:   }

855:   PetscCall(MatAssemblyBegin(baij->A, mode));
856:   PetscCall(MatAssemblyEnd(baij->A, mode));

858:   /* determine if any processor has disassembled; if so, we must
859:      also disassemble ourselves, in order that we may reassemble. */
860:   /*
861:      if the nonzero structure of submatrix B cannot change then we know that
862:      no processor disassembled, and thus we can skip this step
863:   */
864:   if (!((Mat_SeqBAIJ *)baij->B->data)->nonew) {
865:     PetscCallMPI(MPIU_Allreduce(&mat->was_assembled, &other_disassembled, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
866:     if (mat->was_assembled && !other_disassembled) PetscCall(MatDisAssemble_MPISBAIJ(mat));
867:   }

869:   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { PetscCall(MatSetUpMultiply_MPISBAIJ(mat)); /* setup Mvctx and sMvctx */ }
870:   PetscCall(MatAssemblyBegin(baij->B, mode));
871:   PetscCall(MatAssemblyEnd(baij->B, mode));

873:   PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));

875:   baij->rowvalues = NULL;

877:   /* if no new nonzero locations are allowed in the matrix then only set the matrix state the first time through */
878:   if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
879:     PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
880:     PetscCallMPI(MPIU_Allreduce(&state, &mat->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)mat)));
881:   }
882:   PetscFunctionReturn(PETSC_SUCCESS);
883: }

885: extern PetscErrorCode MatSetValues_MPIBAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
886: #include <petscdraw.h>
887: static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat, PetscViewer viewer)
888: {
889:   Mat_MPISBAIJ     *baij = (Mat_MPISBAIJ *)mat->data;
890:   PetscInt          bs   = mat->rmap->bs;
891:   PetscMPIInt       rank = baij->rank;
892:   PetscBool         iascii, isdraw;
893:   PetscViewer       sviewer;
894:   PetscViewerFormat format;

896:   PetscFunctionBegin;
897:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
898:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
899:   if (iascii) {
900:     PetscCall(PetscViewerGetFormat(viewer, &format));
901:     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
902:       MatInfo info;
903:       PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
904:       PetscCall(MatGetInfo(mat, MAT_LOCAL, &info));
905:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
906:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local rows %" PetscInt_FMT " nz %" PetscInt_FMT " nz alloced %" PetscInt_FMT " bs %" PetscInt_FMT " mem %g\n", rank, mat->rmap->n, (PetscInt)info.nz_used, (PetscInt)info.nz_allocated,
907:                                                    mat->rmap->bs, info.memory));
908:       PetscCall(MatGetInfo(baij->A, MAT_LOCAL, &info));
909:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] on-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
910:       PetscCall(MatGetInfo(baij->B, MAT_LOCAL, &info));
911:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] off-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
912:       PetscCall(PetscViewerFlush(viewer));
913:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
914:       PetscCall(PetscViewerASCIIPrintf(viewer, "Information on VecScatter used in matrix-vector product: \n"));
915:       PetscCall(VecScatterView(baij->Mvctx, viewer));
916:       PetscFunctionReturn(PETSC_SUCCESS);
917:     } else if (format == PETSC_VIEWER_ASCII_INFO) {
918:       PetscCall(PetscViewerASCIIPrintf(viewer, "  block size is %" PetscInt_FMT "\n", bs));
919:       PetscFunctionReturn(PETSC_SUCCESS);
920:     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
921:       PetscFunctionReturn(PETSC_SUCCESS);
922:     }
923:   }

925:   if (isdraw) {
926:     PetscDraw draw;
927:     PetscBool isnull;
928:     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
929:     PetscCall(PetscDrawIsNull(draw, &isnull));
930:     if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
931:   }

933:   {
934:     /* assemble the entire matrix onto the first processor */
935:     Mat           A;
936:     Mat_SeqSBAIJ *Aloc;
937:     Mat_SeqBAIJ  *Bloc;
938:     PetscInt      M = mat->rmap->N, N = mat->cmap->N, *ai, *aj, col, i, j, k, *rvals, mbs = baij->mbs;
939:     MatScalar    *a;
940:     const char   *matname;

942:     /* Should this be the same type as mat? */
943:     PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &A));
944:     if (rank == 0) {
945:       PetscCall(MatSetSizes(A, M, N, M, N));
946:     } else {
947:       PetscCall(MatSetSizes(A, 0, 0, M, N));
948:     }
949:     PetscCall(MatSetType(A, MATMPISBAIJ));
950:     PetscCall(MatMPISBAIJSetPreallocation(A, mat->rmap->bs, 0, NULL, 0, NULL));
951:     PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));

953:     /* copy over the A part */
954:     Aloc = (Mat_SeqSBAIJ *)baij->A->data;
955:     ai   = Aloc->i;
956:     aj   = Aloc->j;
957:     a    = Aloc->a;
958:     PetscCall(PetscMalloc1(bs, &rvals));

960:     for (i = 0; i < mbs; i++) {
961:       rvals[0] = bs * (baij->rstartbs + i);
962:       for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
963:       for (j = ai[i]; j < ai[i + 1]; j++) {
964:         col = (baij->cstartbs + aj[j]) * bs;
965:         for (k = 0; k < bs; k++) {
966:           PetscCall(MatSetValues_MPISBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
967:           col++;
968:           a += bs;
969:         }
970:       }
971:     }
972:     /* copy over the B part */
973:     Bloc = (Mat_SeqBAIJ *)baij->B->data;
974:     ai   = Bloc->i;
975:     aj   = Bloc->j;
976:     a    = Bloc->a;
977:     for (i = 0; i < mbs; i++) {
978:       rvals[0] = bs * (baij->rstartbs + i);
979:       for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
980:       for (j = ai[i]; j < ai[i + 1]; j++) {
981:         col = baij->garray[aj[j]] * bs;
982:         for (k = 0; k < bs; k++) {
983:           PetscCall(MatSetValues_MPIBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
984:           col++;
985:           a += bs;
986:         }
987:       }
988:     }
989:     PetscCall(PetscFree(rvals));
990:     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
991:     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
992:     /*
993:        Every process has to participate in the call that draws the matrix since the graphics waits are
994:        synchronized across all processors that share the PetscDraw object
995:     */
996:     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
997:     if (((PetscObject)mat)->name) PetscCall(PetscObjectGetName((PetscObject)mat, &matname));
998:     if (rank == 0) {
999:       if (((PetscObject)mat)->name) PetscCall(PetscObjectSetName((PetscObject)((Mat_MPISBAIJ *)A->data)->A, matname));
1000:       PetscCall(MatView_SeqSBAIJ(((Mat_MPISBAIJ *)A->data)->A, sviewer));
1001:     }
1002:     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
1003:     PetscCall(MatDestroy(&A));
1004:   }
1005:   PetscFunctionReturn(PETSC_SUCCESS);
1006: }

1008: /* Used for both MPIBAIJ and MPISBAIJ matrices */
1009: #define MatView_MPISBAIJ_Binary MatView_MPIBAIJ_Binary

1011: static PetscErrorCode MatView_MPISBAIJ(Mat mat, PetscViewer viewer)
1012: {
1013:   PetscBool iascii, isdraw, issocket, isbinary;

1015:   PetscFunctionBegin;
1016:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
1017:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
1018:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSOCKET, &issocket));
1019:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
1020:   if (iascii || isdraw || issocket) {
1021:     PetscCall(MatView_MPISBAIJ_ASCIIorDraworSocket(mat, viewer));
1022:   } else if (isbinary) PetscCall(MatView_MPISBAIJ_Binary(mat, viewer));
1023:   PetscFunctionReturn(PETSC_SUCCESS);
1024: }
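
/*
   Hedged usage note (not part of the original source): these viewers are normally reached
   through MatView(), e.g.

     PetscCall(MatView(mat, PETSC_VIEWER_STDOUT_WORLD));   // ASCII path above

   or from the command line with options such as -mat_view, -mat_view ::ascii_info or
   -mat_view draw.
*/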

1026: #if defined(PETSC_USE_COMPLEX)
1027: static PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy)
1028: {
1029:   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1030:   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1031:   PetscScalar       *from;
1032:   const PetscScalar *x;

1034:   PetscFunctionBegin;
1035:   /* diagonal part */
1036:   PetscCall((*a->A->ops->mult)(a->A, xx, a->slvec1a));
1037:   /* since a->slvec1b shares memory (dangerously) with a->slvec1, changes to a->slvec1 will affect it */
1038:   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1039:   PetscCall(VecZeroEntries(a->slvec1b));

1041:   /* subdiagonal part */
1042:   PetscCheck(a->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)a->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)a->B)->type_name);
1043:   PetscCall((*a->B->ops->multhermitiantranspose)(a->B, xx, a->slvec0b));

1045:   /* copy x into the vec slvec0 */
1046:   PetscCall(VecGetArray(a->slvec0, &from));
1047:   PetscCall(VecGetArrayRead(xx, &x));

1049:   PetscCall(PetscArraycpy(from, x, bs * mbs));
1050:   PetscCall(VecRestoreArray(a->slvec0, &from));
1051:   PetscCall(VecRestoreArrayRead(xx, &x));

1053:   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1054:   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1055:   /* superdiagonal part */
1056:   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, yy));
1057:   PetscFunctionReturn(PETSC_SUCCESS);
1058: }
1059: #endif
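
/*
   Sketch of the symmetric multiply used below (descriptive comment): with only the upper
   triangle stored, y = M*x is formed as

     slvec1a = A*x                          local symmetric diagonal block
     slvec0  = [ x ; B^T*x ]                local x followed by the contributions of B^T to remote rows
     scatter-add slvec0 into slvec1 = [ slvec1a ; slvec1b ]
     y       = B*slvec1b + slvec1a          B applied to the gathered ghost values, plus the accumulated terms
*/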

1061: static PetscErrorCode MatMult_MPISBAIJ(Mat A, Vec xx, Vec yy)
1062: {
1063:   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1064:   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1065:   PetscScalar       *from;
1066:   const PetscScalar *x;

1068:   PetscFunctionBegin;
1069:   /* diagonal part */
1070:   PetscCall((*a->A->ops->mult)(a->A, xx, a->slvec1a));
1071:   /* since a->slvec1b shares memory (dangerously) with a->slvec1, changes to a->slvec1 will affect it */
1072:   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1073:   PetscCall(VecZeroEntries(a->slvec1b));

1075:   /* subdiagonal part */
1076:   PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));

1078:   /* copy x into the vec slvec0 */
1079:   PetscCall(VecGetArray(a->slvec0, &from));
1080:   PetscCall(VecGetArrayRead(xx, &x));

1082:   PetscCall(PetscArraycpy(from, x, bs * mbs));
1083:   PetscCall(VecRestoreArray(a->slvec0, &from));
1084:   PetscCall(VecRestoreArrayRead(xx, &x));

1086:   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1087:   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1088:   /* superdiagonal part */
1089:   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, yy));
1090:   PetscFunctionReturn(PETSC_SUCCESS);
1091: }

1093: #if PetscDefined(USE_COMPLEX)
1094: static PetscErrorCode MatMultAdd_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy, Vec zz)
1095: {
1096:   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1097:   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1098:   PetscScalar       *from;
1099:   const PetscScalar *x;

1101:   PetscFunctionBegin;
1102:   /* diagonal part */
1103:   PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
1104:   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1105:   PetscCall(VecZeroEntries(a->slvec1b));

1107:   /* subdiagonal part */
1108:   PetscCheck(a->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)a->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)a->B)->type_name);
1109:   PetscCall((*a->B->ops->multhermitiantranspose)(a->B, xx, a->slvec0b));

1111:   /* copy x into the vec slvec0 */
1112:   PetscCall(VecGetArray(a->slvec0, &from));
1113:   PetscCall(VecGetArrayRead(xx, &x));
1114:   PetscCall(PetscArraycpy(from, x, bs * mbs));
1115:   PetscCall(VecRestoreArray(a->slvec0, &from));

1117:   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1118:   PetscCall(VecRestoreArrayRead(xx, &x));
1119:   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));

1121:   /* superdiagonal part */
1122:   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
1123:   PetscFunctionReturn(PETSC_SUCCESS);
1124: }
1125: #endif

1127: static PetscErrorCode MatMultAdd_MPISBAIJ(Mat A, Vec xx, Vec yy, Vec zz)
1128: {
1129:   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1130:   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1131:   PetscScalar       *from;
1132:   const PetscScalar *x;

1134:   PetscFunctionBegin;
1135:   /* diagonal part */
1136:   PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
1137:   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1138:   PetscCall(VecZeroEntries(a->slvec1b));

1140:   /* subdiagonal part */
1141:   PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));

1143:   /* copy x into the vec slvec0 */
1144:   PetscCall(VecGetArray(a->slvec0, &from));
1145:   PetscCall(VecGetArrayRead(xx, &x));
1146:   PetscCall(PetscArraycpy(from, x, bs * mbs));
1147:   PetscCall(VecRestoreArray(a->slvec0, &from));

1149:   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1150:   PetscCall(VecRestoreArrayRead(xx, &x));
1151:   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));

1153:   /* superdiagonal part */
1154:   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
1155:   PetscFunctionReturn(PETSC_SUCCESS);
1156: }

1158: /*
1159:   This only works correctly for square matrices where the subblock A->A is the
1160:    diagonal block
1161: */
1162: static PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A, Vec v)
1163: {
1164:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

1166:   PetscFunctionBegin;
1167:   /* PetscCheck(a->rmap->N == a->cmap->N,PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
1168:   PetscCall(MatGetDiagonal(a->A, v));
1169:   PetscFunctionReturn(PETSC_SUCCESS);
1170: }

1172: static PetscErrorCode MatScale_MPISBAIJ(Mat A, PetscScalar aa)
1173: {
1174:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

1176:   PetscFunctionBegin;
1177:   PetscCall(MatScale(a->A, aa));
1178:   PetscCall(MatScale(a->B, aa));
1179:   PetscFunctionReturn(PETSC_SUCCESS);
1180: }

1182: static PetscErrorCode MatGetRow_MPISBAIJ(Mat matin, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
1183: {
1184:   Mat_MPISBAIJ *mat = (Mat_MPISBAIJ *)matin->data;
1185:   PetscScalar  *vworkA, *vworkB, **pvA, **pvB, *v_p;
1186:   PetscInt      bs = matin->rmap->bs, bs2 = mat->bs2, i, *cworkA, *cworkB, **pcA, **pcB;
1187:   PetscInt      nztot, nzA, nzB, lrow, brstart = matin->rmap->rstart, brend = matin->rmap->rend;
1188:   PetscInt     *cmap, *idx_p, cstart = mat->rstartbs;

1190:   PetscFunctionBegin;
1191:   PetscCheck(!mat->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Already active");
1192:   mat->getrowactive = PETSC_TRUE;

1194:   if (!mat->rowvalues && (idx || v)) {
1195:     /*
1196:         allocate enough space to hold information from the longest row.
1197:     */
1198:     Mat_SeqSBAIJ *Aa  = (Mat_SeqSBAIJ *)mat->A->data;
1199:     Mat_SeqBAIJ  *Ba  = (Mat_SeqBAIJ *)mat->B->data;
1200:     PetscInt      max = 1, mbs = mat->mbs, tmp;
1201:     for (i = 0; i < mbs; i++) {
1202:       tmp = Aa->i[i + 1] - Aa->i[i] + Ba->i[i + 1] - Ba->i[i]; /* row length */
1203:       if (max < tmp) max = tmp;
1204:     }
1205:     PetscCall(PetscMalloc2(max * bs2, &mat->rowvalues, max * bs2, &mat->rowindices));
1206:   }

1208:   PetscCheck(row >= brstart && row < brend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local rows");
1209:   lrow = row - brstart; /* local row index */

1211:   pvA = &vworkA;
1212:   pcA = &cworkA;
1213:   pvB = &vworkB;
1214:   pcB = &cworkB;
1215:   if (!v) {
1216:     pvA = NULL;
1217:     pvB = NULL;
1218:   }
1219:   if (!idx) {
1220:     pcA = NULL;
1221:     if (!v) pcB = NULL;
1222:   }
1223:   PetscCall((*mat->A->ops->getrow)(mat->A, lrow, &nzA, pcA, pvA));
1224:   PetscCall((*mat->B->ops->getrow)(mat->B, lrow, &nzB, pcB, pvB));
1225:   nztot = nzA + nzB;

1227:   cmap = mat->garray;
1228:   if (v || idx) {
1229:     if (nztot) {
1230:       /* Sort by increasing column numbers, assuming A and B already sorted */
1231:       PetscInt imark = -1;
1232:       if (v) {
1233:         *v = v_p = mat->rowvalues;
1234:         for (i = 0; i < nzB; i++) {
1235:           if (cmap[cworkB[i] / bs] < cstart) v_p[i] = vworkB[i];
1236:           else break;
1237:         }
1238:         imark = i;
1239:         for (i = 0; i < nzA; i++) v_p[imark + i] = vworkA[i];
1240:         for (i = imark; i < nzB; i++) v_p[nzA + i] = vworkB[i];
1241:       }
1242:       if (idx) {
1243:         *idx = idx_p = mat->rowindices;
1244:         if (imark > -1) {
1245:           for (i = 0; i < imark; i++) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1246:         } else {
1247:           for (i = 0; i < nzB; i++) {
1248:             if (cmap[cworkB[i] / bs] < cstart) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1249:             else break;
1250:           }
1251:           imark = i;
1252:         }
1253:         for (i = 0; i < nzA; i++) idx_p[imark + i] = cstart * bs + cworkA[i];
1254:         for (i = imark; i < nzB; i++) idx_p[nzA + i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1255:       }
1256:     } else {
1257:       if (idx) *idx = NULL;
1258:       if (v) *v = NULL;
1259:     }
1260:   }
1261:   *nz = nztot;
1262:   PetscCall((*mat->A->ops->restorerow)(mat->A, lrow, &nzA, pcA, pvA));
1263:   PetscCall((*mat->B->ops->restorerow)(mat->B, lrow, &nzB, pcB, pvB));
1264:   PetscFunctionReturn(PETSC_SUCCESS);
1265: }

1267: static PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
1268: {
1269:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;

1271:   PetscFunctionBegin;
1272:   PetscCheck(baij->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "MatGetRow() must be called first");
1273:   baij->getrowactive = PETSC_FALSE;
1274:   PetscFunctionReturn(PETSC_SUCCESS);
1275: }

1277: static PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1278: {
1279:   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1280:   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;

1282:   PetscFunctionBegin;
1283:   aA->getrow_utriangular = PETSC_TRUE;
1284:   PetscFunctionReturn(PETSC_SUCCESS);
1285: }
1286: static PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1287: {
1288:   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1289:   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;

1291:   PetscFunctionBegin;
1292:   aA->getrow_utriangular = PETSC_FALSE;
1293:   PetscFunctionReturn(PETSC_SUCCESS);
1294: }
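
/*
   Hedged usage sketch (not part of the original source): row access on an SBAIJ matrix is
   bracketed by the upper-triangular access calls above, e.g.

     PetscCall(MatGetRowUpperTriangular(mat));
     PetscCall(MatGetRow(mat, row, &nz, &cols, &vals));
     // ... use nz, cols, vals ...
     PetscCall(MatRestoreRow(mat, row, &nz, &cols, &vals));
     PetscCall(MatRestoreRowUpperTriangular(mat));
*/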

1296: static PetscErrorCode MatConjugate_MPISBAIJ(Mat mat)
1297: {
1298:   PetscFunctionBegin;
1299:   if (PetscDefined(USE_COMPLEX)) {
1300:     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)mat->data;

1302:     PetscCall(MatConjugate(a->A));
1303:     PetscCall(MatConjugate(a->B));
1304:   }
1305:   PetscFunctionReturn(PETSC_SUCCESS);
1306: }

1308: static PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1309: {
1310:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

1312:   PetscFunctionBegin;
1313:   PetscCall(MatRealPart(a->A));
1314:   PetscCall(MatRealPart(a->B));
1315:   PetscFunctionReturn(PETSC_SUCCESS);
1316: }

1318: static PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1319: {
1320:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

1322:   PetscFunctionBegin;
1323:   PetscCall(MatImaginaryPart(a->A));
1324:   PetscCall(MatImaginaryPart(a->B));
1325:   PetscFunctionReturn(PETSC_SUCCESS);
1326: }

1328: /* Check if isrow is a subset of iscol_local, called by MatCreateSubMatrix_MPISBAIJ()
1329:    Input: isrow       - distributed (parallel),
1330:           iscol_local - locally owned (seq)
1331: */
1332: static PetscErrorCode ISEqual_private(IS isrow, IS iscol_local, PetscBool *flg)
1333: {
1334:   PetscInt        sz1, sz2, *a1, *a2, i, j, k, nmatch;
1335:   const PetscInt *ptr1, *ptr2;

1337:   PetscFunctionBegin;
1338:   *flg = PETSC_FALSE;
1339:   PetscCall(ISGetLocalSize(isrow, &sz1));
1340:   PetscCall(ISGetLocalSize(iscol_local, &sz2));
1341:   if (sz1 > sz2) PetscFunctionReturn(PETSC_SUCCESS);

1343:   PetscCall(ISGetIndices(isrow, &ptr1));
1344:   PetscCall(ISGetIndices(iscol_local, &ptr2));

1346:   PetscCall(PetscMalloc1(sz1, &a1));
1347:   PetscCall(PetscMalloc1(sz2, &a2));
1348:   PetscCall(PetscArraycpy(a1, ptr1, sz1));
1349:   PetscCall(PetscArraycpy(a2, ptr2, sz2));
1350:   PetscCall(PetscSortInt(sz1, a1));
1351:   PetscCall(PetscSortInt(sz2, a2));

1353:   nmatch = 0;
1354:   k      = 0;
1355:   for (i = 0; i < sz1; i++) {
1356:     for (j = k; j < sz2; j++) {
1357:       if (a1[i] == a2[j]) {
1358:         k = j;
1359:         nmatch++;
1360:         break;
1361:       }
1362:     }
1363:   }
1364:   PetscCall(ISRestoreIndices(isrow, &ptr1));
1365:   PetscCall(ISRestoreIndices(iscol_local, &ptr2));
1366:   PetscCall(PetscFree(a1));
1367:   PetscCall(PetscFree(a2));
1368:   if (nmatch < sz1) {
1369:     *flg = PETSC_FALSE;
1370:   } else {
1371:     *flg = PETSC_TRUE;
1372:   }
1373:   PetscFunctionReturn(PETSC_SUCCESS);
1374: }

1376: static PetscErrorCode MatCreateSubMatrix_MPISBAIJ(Mat mat, IS isrow, IS iscol, MatReuse call, Mat *newmat)
1377: {
1378:   Mat       C[2];
1379:   IS        iscol_local, isrow_local;
1380:   PetscInt  csize, csize_local, rsize;
1381:   PetscBool isequal, issorted, isidentity = PETSC_FALSE;

1383:   PetscFunctionBegin;
1384:   PetscCall(ISGetLocalSize(iscol, &csize));
1385:   PetscCall(ISGetLocalSize(isrow, &rsize));
1386:   if (call == MAT_REUSE_MATRIX) {
1387:     PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather", (PetscObject *)&iscol_local));
1388:     PetscCheck(iscol_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1389:   } else {
1390:     PetscCall(ISAllGather(iscol, &iscol_local));
1391:     PetscCall(ISSorted(iscol_local, &issorted));
1392:     PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, iscol must be sorted");
1393:   }
1394:   PetscCall(ISEqual_private(isrow, iscol_local, &isequal));
1395:   if (!isequal) {
1396:     PetscCall(ISGetLocalSize(iscol_local, &csize_local));
1397:     isidentity = (PetscBool)(mat->cmap->N == csize_local);
1398:     if (!isidentity) {
1399:       if (call == MAT_REUSE_MATRIX) {
1400:         PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather_other", (PetscObject *)&isrow_local));
1401:         PetscCheck(isrow_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1402:       } else {
1403:         PetscCall(ISAllGather(isrow, &isrow_local));
1404:         PetscCall(ISSorted(isrow_local, &issorted));
1405:         PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, isrow must be sorted");
1406:       }
1407:     }
1408:   }
1409:   /* now call MatCreateSubMatrix_MPIBAIJ_Private() */
1410:   PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, isrow, iscol_local, csize, isequal || isidentity ? call : MAT_INITIAL_MATRIX, isequal || isidentity ? newmat : C, (PetscBool)(isequal || isidentity)));
1411:   if (!isequal && !isidentity) {
1412:     if (call == MAT_INITIAL_MATRIX) {
1413:       IS       intersect;
1414:       PetscInt ni;

1416:       PetscCall(ISIntersect(isrow_local, iscol_local, &intersect));
1417:       PetscCall(ISGetLocalSize(intersect, &ni));
1418:       PetscCall(ISDestroy(&intersect));
1419:       PetscCheck(ni == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot create such a submatrix: for symmetric format, when requesting an off-diagonal submatrix, isrow and iscol should have an empty intersection (number of common indices is %" PetscInt_FMT ")", ni);
1420:     }
1421:     PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, iscol, isrow_local, rsize, MAT_INITIAL_MATRIX, C + 1, PETSC_FALSE));
1422:     PetscCall(MatTranspose(C[1], MAT_INPLACE_MATRIX, C + 1));
1423:     PetscCall(MatAXPY(C[0], 1.0, C[1], DIFFERENT_NONZERO_PATTERN));
1424:     if (call == MAT_REUSE_MATRIX) PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
1425:     else if (mat->rmap->bs == 1) PetscCall(MatConvert(C[0], MATAIJ, MAT_INITIAL_MATRIX, newmat));
1426:     else PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
1427:     PetscCall(MatDestroy(C));
1428:     PetscCall(MatDestroy(C + 1));
1429:   }
1430:   if (call == MAT_INITIAL_MATRIX) {
1431:     if (!isequal && !isidentity) {
1432:       PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather_other", (PetscObject)isrow_local));
1433:       PetscCall(ISDestroy(&isrow_local));
1434:     }
1435:     PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather", (PetscObject)iscol_local));
1436:     PetscCall(ISDestroy(&iscol_local));
1437:   }
1438:   PetscFunctionReturn(PETSC_SUCCESS);
1439: }
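
/* A minimal usage sketch (not part of the original source; A, nlocal and first are assumed user values):
   extracting a principal submatrix of a MATMPISBAIJ matrix. Both index sets must be sorted, and when an
   off-diagonal block is requested the row and column index sets may not intersect (enforced above).

     IS  is;
     Mat S;
     PetscCall(ISCreateStride(PETSC_COMM_WORLD, nlocal, first, 1, &is));  // sorted, locally owned indices
     PetscCall(MatCreateSubMatrix(A, is, is, MAT_INITIAL_MATRIX, &S));    // principal (symmetric) submatrix
     PetscCall(MatCreateSubMatrix(A, is, is, MAT_REUSE_MATRIX, &S));      // reuse after the values of A change
     PetscCall(MatDestroy(&S));
     PetscCall(ISDestroy(&is));
*/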

1441: static PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1442: {
1443:   Mat_MPISBAIJ *l = (Mat_MPISBAIJ *)A->data;

1445:   PetscFunctionBegin;
1446:   PetscCall(MatZeroEntries(l->A));
1447:   PetscCall(MatZeroEntries(l->B));
1448:   PetscFunctionReturn(PETSC_SUCCESS);
1449: }

1451: static PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin, MatInfoType flag, MatInfo *info)
1452: {
1453:   Mat_MPISBAIJ  *a = (Mat_MPISBAIJ *)matin->data;
1454:   Mat            A = a->A, B = a->B;
1455:   PetscLogDouble isend[5], irecv[5];

1457:   PetscFunctionBegin;
1458:   info->block_size = (PetscReal)matin->rmap->bs;

1460:   PetscCall(MatGetInfo(A, MAT_LOCAL, info));

1462:   isend[0] = info->nz_used;
1463:   isend[1] = info->nz_allocated;
1464:   isend[2] = info->nz_unneeded;
1465:   isend[3] = info->memory;
1466:   isend[4] = info->mallocs;

1468:   PetscCall(MatGetInfo(B, MAT_LOCAL, info));

1470:   isend[0] += info->nz_used;
1471:   isend[1] += info->nz_allocated;
1472:   isend[2] += info->nz_unneeded;
1473:   isend[3] += info->memory;
1474:   isend[4] += info->mallocs;
1475:   if (flag == MAT_LOCAL) {
1476:     info->nz_used      = isend[0];
1477:     info->nz_allocated = isend[1];
1478:     info->nz_unneeded  = isend[2];
1479:     info->memory       = isend[3];
1480:     info->mallocs      = isend[4];
1481:   } else if (flag == MAT_GLOBAL_MAX) {
1482:     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)matin)));

1484:     info->nz_used      = irecv[0];
1485:     info->nz_allocated = irecv[1];
1486:     info->nz_unneeded  = irecv[2];
1487:     info->memory       = irecv[3];
1488:     info->mallocs      = irecv[4];
1489:   } else if (flag == MAT_GLOBAL_SUM) {
1490:     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)matin)));

1492:     info->nz_used      = irecv[0];
1493:     info->nz_allocated = irecv[1];
1494:     info->nz_unneeded  = irecv[2];
1495:     info->memory       = irecv[3];
1496:     info->mallocs      = irecv[4];
1497:   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown MatInfoType argument %d", (int)flag);
1498:   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1499:   info->fill_ratio_needed = 0;
1500:   info->factor_mallocs    = 0;
1501:   PetscFunctionReturn(PETSC_SUCCESS);
1502: }

1504: static PetscErrorCode MatSetOption_MPISBAIJ(Mat A, MatOption op, PetscBool flg)
1505: {
1506:   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1507:   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;

1509:   PetscFunctionBegin;
1510:   switch (op) {
1511:   case MAT_NEW_NONZERO_LOCATIONS:
1512:   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1513:   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1514:   case MAT_KEEP_NONZERO_PATTERN:
1515:   case MAT_SUBMAT_SINGLEIS:
1516:   case MAT_NEW_NONZERO_LOCATION_ERR:
1517:     MatCheckPreallocated(A, 1);
1518:     PetscCall(MatSetOption(a->A, op, flg));
1519:     PetscCall(MatSetOption(a->B, op, flg));
1520:     break;
1521:   case MAT_ROW_ORIENTED:
1522:     MatCheckPreallocated(A, 1);
1523:     a->roworiented = flg;

1525:     PetscCall(MatSetOption(a->A, op, flg));
1526:     PetscCall(MatSetOption(a->B, op, flg));
1527:     break;
1528:   case MAT_FORCE_DIAGONAL_ENTRIES:
1529:   case MAT_SORTED_FULL:
1530:     PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
1531:     break;
1532:   case MAT_IGNORE_OFF_PROC_ENTRIES:
1533:     a->donotstash = flg;
1534:     break;
1535:   case MAT_USE_HASH_TABLE:
1536:     a->ht_flag = flg;
1537:     break;
1538:   case MAT_HERMITIAN:
1539:     MatCheckPreallocated(A, 1);
1540:     PetscCall(MatSetOption(a->A, op, flg));
1541: #if defined(PETSC_USE_COMPLEX)
1542:     if (flg) { /* need different mat-vec ops */
1543:       A->ops->mult             = MatMult_MPISBAIJ_Hermitian;
1544:       A->ops->multadd          = MatMultAdd_MPISBAIJ_Hermitian;
1545:       A->ops->multtranspose    = NULL;
1546:       A->ops->multtransposeadd = NULL;
1547:       A->symmetric             = PETSC_BOOL3_FALSE;
1548:     }
1549: #endif
1550:     break;
1551:   case MAT_SPD:
1552:   case MAT_SYMMETRIC:
1553:     MatCheckPreallocated(A, 1);
1554:     PetscCall(MatSetOption(a->A, op, flg));
1555: #if defined(PETSC_USE_COMPLEX)
1556:     if (flg) { /* restore to use default mat-vec ops */
1557:       A->ops->mult             = MatMult_MPISBAIJ;
1558:       A->ops->multadd          = MatMultAdd_MPISBAIJ;
1559:       A->ops->multtranspose    = MatMult_MPISBAIJ;
1560:       A->ops->multtransposeadd = MatMultAdd_MPISBAIJ;
1561:     }
1562: #endif
1563:     break;
1564:   case MAT_STRUCTURALLY_SYMMETRIC:
1565:     MatCheckPreallocated(A, 1);
1566:     PetscCall(MatSetOption(a->A, op, flg));
1567:     break;
1568:   case MAT_SYMMETRY_ETERNAL:
1569:   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
1570:     PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix must be symmetric");
1571:     PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
1572:     break;
1573:   case MAT_SPD_ETERNAL:
1574:     break;
1575:   case MAT_IGNORE_LOWER_TRIANGULAR:
1576:     aA->ignore_ltriangular = flg;
1577:     break;
1578:   case MAT_ERROR_LOWER_TRIANGULAR:
1579:     aA->ignore_ltriangular = flg;
1580:     break;
1581:   case MAT_GETROW_UPPERTRIANGULAR:
1582:     aA->getrow_utriangular = flg;
1583:     break;
1584:   default:
1585:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "unknown option %d", op);
1586:   }
1587:   PetscFunctionReturn(PETSC_SUCCESS);
1588: }
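
/* Typical option settings from user code (a sketch, not part of the original source; A is a MATMPISBAIJ matrix):

     PetscCall(MatSetOption(A, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE)); // silently drop insertions below the diagonal
     PetscCall(MatSetOption(A, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE)); // caller promises no off-process insertions
     PetscCall(MatSetOption(A, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));  // allow MatGetRow() on the stored (upper) triangle
*/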

1590: static PetscErrorCode MatTranspose_MPISBAIJ(Mat A, MatReuse reuse, Mat *B)
1591: {
1592:   PetscFunctionBegin;
1593:   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1594:   if (reuse == MAT_INITIAL_MATRIX) {
1595:     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, B));
1596:   } else if (reuse == MAT_REUSE_MATRIX) {
1597:     PetscCall(MatCopy(A, *B, SAME_NONZERO_PATTERN));
1598:   }
1599:   PetscFunctionReturn(PETSC_SUCCESS);
1600: }

1602: static PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat, Vec ll, Vec rr)
1603: {
1604:   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
1605:   Mat           a = baij->A, b = baij->B;
1606:   PetscInt      nv, m, n;
1607:   PetscBool     flg;

1609:   PetscFunctionBegin;
1610:   if (ll != rr) {
1611:     PetscCall(VecEqual(ll, rr, &flg));
1612:     PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be same");
1613:   }
1614:   if (!ll) PetscFunctionReturn(PETSC_SUCCESS);

1616:   PetscCall(MatGetLocalSize(mat, &m, &n));
1617:   PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "For symmetric format, local size %" PetscInt_FMT " %" PetscInt_FMT " must be same", m, n);

1619:   PetscCall(VecGetLocalSize(rr, &nv));
1620:   PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Left and right vector non-conforming local size");

1622:   PetscCall(VecScatterBegin(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));

1624:   /* left diagonalscale the off-diagonal part */
1625:   PetscUseTypeMethod(b, diagonalscale, ll, NULL);

1627:   /* scale the diagonal part */
1628:   PetscUseTypeMethod(a, diagonalscale, ll, rr);

1630:   /* right diagonalscale the off-diagonal part */
1631:   PetscCall(VecScatterEnd(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
1632:   PetscUseTypeMethod(b, diagonalscale, NULL, baij->lvec);
1633:   PetscFunctionReturn(PETSC_SUCCESS);
1634: }

1636: static PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1637: {
1638:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

1640:   PetscFunctionBegin;
1641:   PetscCall(MatSetUnfactored(a->A));
1642:   PetscFunctionReturn(PETSC_SUCCESS);
1643: }

1645: static PetscErrorCode MatDuplicate_MPISBAIJ(Mat, MatDuplicateOption, Mat *);

1647: static PetscErrorCode MatEqual_MPISBAIJ(Mat A, Mat B, PetscBool *flag)
1648: {
1649:   Mat_MPISBAIJ *matB = (Mat_MPISBAIJ *)B->data, *matA = (Mat_MPISBAIJ *)A->data;
1650:   Mat           a, b, c, d;
1651:   PetscBool     flg;

1653:   PetscFunctionBegin;
1654:   a = matA->A;
1655:   b = matA->B;
1656:   c = matB->A;
1657:   d = matB->B;

1659:   PetscCall(MatEqual(a, c, &flg));
1660:   if (flg) PetscCall(MatEqual(b, d, &flg));
1661:   PetscCallMPI(MPIU_Allreduce(&flg, flag, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
1662:   PetscFunctionReturn(PETSC_SUCCESS);
1663: }

1665: static PetscErrorCode MatCopy_MPISBAIJ(Mat A, Mat B, MatStructure str)
1666: {
1667:   PetscBool isbaij;

1669:   PetscFunctionBegin;
1670:   PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
1671:   PetscCheck(isbaij, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)B)->type_name);
1672:   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1673:   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1674:     PetscCall(MatGetRowUpperTriangular(A));
1675:     PetscCall(MatCopy_Basic(A, B, str));
1676:     PetscCall(MatRestoreRowUpperTriangular(A));
1677:   } else {
1678:     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1679:     Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;

1681:     PetscCall(MatCopy(a->A, b->A, str));
1682:     PetscCall(MatCopy(a->B, b->B, str));
1683:   }
1684:   PetscCall(PetscObjectStateIncrease((PetscObject)B));
1685:   PetscFunctionReturn(PETSC_SUCCESS);
1686: }

1688: static PetscErrorCode MatAXPY_MPISBAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str)
1689: {
1690:   Mat_MPISBAIJ *xx = (Mat_MPISBAIJ *)X->data, *yy = (Mat_MPISBAIJ *)Y->data;
1691:   PetscBLASInt  bnz, one                          = 1;
1692:   Mat_SeqSBAIJ *xa, *ya;
1693:   Mat_SeqBAIJ  *xb, *yb;

1695:   PetscFunctionBegin;
1696:   if (str == SAME_NONZERO_PATTERN) {
1697:     PetscScalar alpha = a;
1698:     xa                = (Mat_SeqSBAIJ *)xx->A->data;
1699:     ya                = (Mat_SeqSBAIJ *)yy->A->data;
1700:     PetscCall(PetscBLASIntCast(xa->nz, &bnz));
1701:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xa->a, &one, ya->a, &one));
1702:     xb = (Mat_SeqBAIJ *)xx->B->data;
1703:     yb = (Mat_SeqBAIJ *)yy->B->data;
1704:     PetscCall(PetscBLASIntCast(xb->nz, &bnz));
1705:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xb->a, &one, yb->a, &one));
1706:     PetscCall(PetscObjectStateIncrease((PetscObject)Y));
1707:   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X are a subset of Y's */
1708:     PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
1709:     PetscCall(MatAXPY_Basic(Y, a, X, str));
1710:     PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
1711:   } else {
1712:     Mat       B;
1713:     PetscInt *nnz_d, *nnz_o, bs = Y->rmap->bs;
1714:     PetscCheck(bs == X->rmap->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Matrices must have same block size");
1715:     PetscCall(MatGetRowUpperTriangular(X));
1716:     PetscCall(MatGetRowUpperTriangular(Y));
1717:     PetscCall(PetscMalloc1(yy->A->rmap->N, &nnz_d));
1718:     PetscCall(PetscMalloc1(yy->B->rmap->N, &nnz_o));
1719:     PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B));
1720:     PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name));
1721:     PetscCall(MatSetSizes(B, Y->rmap->n, Y->cmap->n, Y->rmap->N, Y->cmap->N));
1722:     PetscCall(MatSetBlockSizesFromMats(B, Y, Y));
1723:     PetscCall(MatSetType(B, MATMPISBAIJ));
1724:     PetscCall(MatAXPYGetPreallocation_SeqSBAIJ(yy->A, xx->A, nnz_d));
1725:     PetscCall(MatAXPYGetPreallocation_MPIBAIJ(yy->B, yy->garray, xx->B, xx->garray, nnz_o));
1726:     PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, nnz_d, 0, nnz_o));
1727:     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
1728:     PetscCall(MatHeaderMerge(Y, &B));
1729:     PetscCall(PetscFree(nnz_d));
1730:     PetscCall(PetscFree(nnz_o));
1731:     PetscCall(MatRestoreRowUpperTriangular(X));
1732:     PetscCall(MatRestoreRowUpperTriangular(Y));
1733:   }
1734:   PetscFunctionReturn(PETSC_SUCCESS);
1735: }

1737: static PetscErrorCode MatCreateSubMatrices_MPISBAIJ(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *B[])
1738: {
1739:   PetscInt  i;
1740:   PetscBool flg;

1742:   PetscFunctionBegin;
1743:   PetscCall(MatCreateSubMatrices_MPIBAIJ(A, n, irow, icol, scall, B)); /* B[] are sbaij matrices */
1744:   for (i = 0; i < n; i++) {
1745:     PetscCall(ISEqual(irow[i], icol[i], &flg));
1746:     if (!flg) PetscCall(MatSeqSBAIJZeroOps_Private(*B[i]));
1747:   }
1748:   PetscFunctionReturn(PETSC_SUCCESS);
1749: }

1751: static PetscErrorCode MatShift_MPISBAIJ(Mat Y, PetscScalar a)
1752: {
1753:   Mat_MPISBAIJ *maij = (Mat_MPISBAIJ *)Y->data;
1754:   Mat_SeqSBAIJ *aij  = (Mat_SeqSBAIJ *)maij->A->data;

1756:   PetscFunctionBegin;
1757:   if (!Y->preallocated) {
1758:     PetscCall(MatMPISBAIJSetPreallocation(Y, Y->rmap->bs, 1, NULL, 0, NULL));
1759:   } else if (!aij->nz) {
1760:     PetscInt nonew = aij->nonew;
1761:     PetscCall(MatSeqSBAIJSetPreallocation(maij->A, Y->rmap->bs, 1, NULL));
1762:     aij->nonew = nonew;
1763:   }
1764:   PetscCall(MatShift_Basic(Y, a));
1765:   PetscFunctionReturn(PETSC_SUCCESS);
1766: }

1768: static PetscErrorCode MatMissingDiagonal_MPISBAIJ(Mat A, PetscBool *missing, PetscInt *d)
1769: {
1770:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

1772:   PetscFunctionBegin;
1773:   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only works for square matrices");
1774:   PetscCall(MatMissingDiagonal(a->A, missing, d));
1775:   if (d) {
1776:     PetscInt rstart;
1777:     PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
1778:     *d += rstart / A->rmap->bs;
1779:   }
1780:   PetscFunctionReturn(PETSC_SUCCESS);
1781: }

1783: static PetscErrorCode MatGetDiagonalBlock_MPISBAIJ(Mat A, Mat *a)
1784: {
1785:   PetscFunctionBegin;
1786:   *a = ((Mat_MPISBAIJ *)A->data)->A;
1787:   PetscFunctionReturn(PETSC_SUCCESS);
1788: }

1790: static PetscErrorCode MatEliminateZeros_MPISBAIJ(Mat A, PetscBool keep)
1791: {
1792:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

1794:   PetscFunctionBegin;
1795:   PetscCall(MatEliminateZeros_SeqSBAIJ(a->A, keep));       // possibly keep zero diagonal coefficients
1796:   PetscCall(MatEliminateZeros_SeqBAIJ(a->B, PETSC_FALSE)); // never keep zero diagonal coefficients
1797:   PetscFunctionReturn(PETSC_SUCCESS);
1798: }

1800: static PetscErrorCode MatLoad_MPISBAIJ(Mat, PetscViewer);
1801: static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat, Vec, PetscInt[]);
1802: static PetscErrorCode MatSOR_MPISBAIJ(Mat, Vec, PetscReal, MatSORType, PetscReal, PetscInt, PetscInt, Vec);

1804: static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
1805:                                        MatGetRow_MPISBAIJ,
1806:                                        MatRestoreRow_MPISBAIJ,
1807:                                        MatMult_MPISBAIJ,
1808:                                        /*  4*/ MatMultAdd_MPISBAIJ,
1809:                                        MatMult_MPISBAIJ, /* transpose versions are same as non-transpose */
1810:                                        MatMultAdd_MPISBAIJ,
1811:                                        NULL,
1812:                                        NULL,
1813:                                        NULL,
1814:                                        /* 10*/ NULL,
1815:                                        NULL,
1816:                                        NULL,
1817:                                        MatSOR_MPISBAIJ,
1818:                                        MatTranspose_MPISBAIJ,
1819:                                        /* 15*/ MatGetInfo_MPISBAIJ,
1820:                                        MatEqual_MPISBAIJ,
1821:                                        MatGetDiagonal_MPISBAIJ,
1822:                                        MatDiagonalScale_MPISBAIJ,
1823:                                        MatNorm_MPISBAIJ,
1824:                                        /* 20*/ MatAssemblyBegin_MPISBAIJ,
1825:                                        MatAssemblyEnd_MPISBAIJ,
1826:                                        MatSetOption_MPISBAIJ,
1827:                                        MatZeroEntries_MPISBAIJ,
1828:                                        /* 24*/ NULL,
1829:                                        NULL,
1830:                                        NULL,
1831:                                        NULL,
1832:                                        NULL,
1833:                                        /* 29*/ MatSetUp_MPI_Hash,
1834:                                        NULL,
1835:                                        NULL,
1836:                                        MatGetDiagonalBlock_MPISBAIJ,
1837:                                        NULL,
1838:                                        /* 34*/ MatDuplicate_MPISBAIJ,
1839:                                        NULL,
1840:                                        NULL,
1841:                                        NULL,
1842:                                        NULL,
1843:                                        /* 39*/ MatAXPY_MPISBAIJ,
1844:                                        MatCreateSubMatrices_MPISBAIJ,
1845:                                        MatIncreaseOverlap_MPISBAIJ,
1846:                                        MatGetValues_MPISBAIJ,
1847:                                        MatCopy_MPISBAIJ,
1848:                                        /* 44*/ NULL,
1849:                                        MatScale_MPISBAIJ,
1850:                                        MatShift_MPISBAIJ,
1851:                                        NULL,
1852:                                        NULL,
1853:                                        /* 49*/ NULL,
1854:                                        NULL,
1855:                                        NULL,
1856:                                        NULL,
1857:                                        NULL,
1858:                                        /* 54*/ NULL,
1859:                                        NULL,
1860:                                        MatSetUnfactored_MPISBAIJ,
1861:                                        NULL,
1862:                                        MatSetValuesBlocked_MPISBAIJ,
1863:                                        /* 59*/ MatCreateSubMatrix_MPISBAIJ,
1864:                                        NULL,
1865:                                        NULL,
1866:                                        NULL,
1867:                                        NULL,
1868:                                        /* 64*/ NULL,
1869:                                        NULL,
1870:                                        NULL,
1871:                                        NULL,
1872:                                        NULL,
1873:                                        /* 69*/ MatGetRowMaxAbs_MPISBAIJ,
1874:                                        NULL,
1875:                                        MatConvert_MPISBAIJ_Basic,
1876:                                        NULL,
1877:                                        NULL,
1878:                                        /* 74*/ NULL,
1879:                                        NULL,
1880:                                        NULL,
1881:                                        NULL,
1882:                                        NULL,
1883:                                        /* 79*/ NULL,
1884:                                        NULL,
1885:                                        NULL,
1886:                                        NULL,
1887:                                        MatLoad_MPISBAIJ,
1888:                                        /* 84*/ NULL,
1889:                                        NULL,
1890:                                        NULL,
1891:                                        NULL,
1892:                                        NULL,
1893:                                        /* 89*/ NULL,
1894:                                        NULL,
1895:                                        NULL,
1896:                                        NULL,
1897:                                        NULL,
1898:                                        /* 94*/ NULL,
1899:                                        NULL,
1900:                                        NULL,
1901:                                        NULL,
1902:                                        NULL,
1903:                                        /* 99*/ NULL,
1904:                                        NULL,
1905:                                        NULL,
1906:                                        MatConjugate_MPISBAIJ,
1907:                                        NULL,
1908:                                        /*104*/ NULL,
1909:                                        MatRealPart_MPISBAIJ,
1910:                                        MatImaginaryPart_MPISBAIJ,
1911:                                        MatGetRowUpperTriangular_MPISBAIJ,
1912:                                        MatRestoreRowUpperTriangular_MPISBAIJ,
1913:                                        /*109*/ NULL,
1914:                                        NULL,
1915:                                        NULL,
1916:                                        NULL,
1917:                                        MatMissingDiagonal_MPISBAIJ,
1918:                                        /*114*/ NULL,
1919:                                        NULL,
1920:                                        NULL,
1921:                                        NULL,
1922:                                        NULL,
1923:                                        /*119*/ NULL,
1924:                                        NULL,
1925:                                        NULL,
1926:                                        NULL,
1927:                                        NULL,
1928:                                        /*124*/ NULL,
1929:                                        NULL,
1930:                                        NULL,
1931:                                        NULL,
1932:                                        NULL,
1933:                                        /*129*/ NULL,
1934:                                        NULL,
1935:                                        NULL,
1936:                                        NULL,
1937:                                        NULL,
1938:                                        /*134*/ NULL,
1939:                                        NULL,
1940:                                        NULL,
1941:                                        NULL,
1942:                                        NULL,
1943:                                        /*139*/ MatSetBlockSizes_Default,
1944:                                        NULL,
1945:                                        NULL,
1946:                                        NULL,
1947:                                        NULL,
1948:                                        /*144*/ MatCreateMPIMatConcatenateSeqMat_MPISBAIJ,
1949:                                        NULL,
1950:                                        NULL,
1951:                                        NULL,
1952:                                        NULL,
1953:                                        NULL,
1954:                                        /*150*/ NULL,
1955:                                        MatEliminateZeros_MPISBAIJ,
1956:                                        NULL,
1957:                                        NULL,
1958:                                        NULL,
1959:                                        /*155*/ NULL,
1960:                                        MatCopyHashToXAIJ_MPI_Hash};

1962: static PetscErrorCode MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz)
1963: {
1964:   Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1965:   PetscInt      i, mbs, Mbs;
1966:   PetscMPIInt   size;

1968:   PetscFunctionBegin;
1969:   if (B->hash_active) {
1970:     B->ops[0]      = b->cops;
1971:     B->hash_active = PETSC_FALSE;
1972:   }
1973:   if (!B->preallocated) PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), bs, &B->bstash));
1974:   PetscCall(MatSetBlockSize(B, PetscAbs(bs)));
1975:   PetscCall(PetscLayoutSetUp(B->rmap));
1976:   PetscCall(PetscLayoutSetUp(B->cmap));
1977:   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
1978:   PetscCheck(B->rmap->N <= B->cmap->N, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->N, B->cmap->N);
1979:   PetscCheck(B->rmap->n <= B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more local rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->n, B->cmap->n);

1981:   mbs = B->rmap->n / bs;
1982:   Mbs = B->rmap->N / bs;
1983:   PetscCheck(mbs * bs == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "No of local rows %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, B->rmap->n, bs);

1985:   B->rmap->bs = bs;
1986:   b->bs2      = bs * bs;
1987:   b->mbs      = mbs;
1988:   b->Mbs      = Mbs;
1989:   b->nbs      = B->cmap->n / bs;
1990:   b->Nbs      = B->cmap->N / bs;

1992:   for (i = 0; i <= b->size; i++) b->rangebs[i] = B->rmap->range[i] / bs;
1993:   b->rstartbs = B->rmap->rstart / bs;
1994:   b->rendbs   = B->rmap->rend / bs;

1996:   b->cstartbs = B->cmap->rstart / bs;
1997:   b->cendbs   = B->cmap->rend / bs;

1999: #if defined(PETSC_USE_CTABLE)
2000:   PetscCall(PetscHMapIDestroy(&b->colmap));
2001: #else
2002:   PetscCall(PetscFree(b->colmap));
2003: #endif
2004:   PetscCall(PetscFree(b->garray));
2005:   PetscCall(VecDestroy(&b->lvec));
2006:   PetscCall(VecScatterDestroy(&b->Mvctx));
2007:   PetscCall(VecDestroy(&b->slvec0));
2008:   PetscCall(VecDestroy(&b->slvec0b));
2009:   PetscCall(VecDestroy(&b->slvec1));
2010:   PetscCall(VecDestroy(&b->slvec1a));
2011:   PetscCall(VecDestroy(&b->slvec1b));
2012:   PetscCall(VecScatterDestroy(&b->sMvctx));

2014:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size));

2016:   MatSeqXAIJGetOptions_Private(b->B);
2017:   PetscCall(MatDestroy(&b->B));
2018:   PetscCall(MatCreate(PETSC_COMM_SELF, &b->B));
2019:   PetscCall(MatSetSizes(b->B, B->rmap->n, size > 1 ? B->cmap->N : 0, B->rmap->n, size > 1 ? B->cmap->N : 0));
2020:   PetscCall(MatSetType(b->B, MATSEQBAIJ));
2021:   MatSeqXAIJRestoreOptions_Private(b->B);

2023:   MatSeqXAIJGetOptions_Private(b->A);
2024:   PetscCall(MatDestroy(&b->A));
2025:   PetscCall(MatCreate(PETSC_COMM_SELF, &b->A));
2026:   PetscCall(MatSetSizes(b->A, B->rmap->n, B->cmap->n, B->rmap->n, B->cmap->n));
2027:   PetscCall(MatSetType(b->A, MATSEQSBAIJ));
2028:   MatSeqXAIJRestoreOptions_Private(b->A);

2030:   PetscCall(MatSeqSBAIJSetPreallocation(b->A, bs, d_nz, d_nnz));
2031:   PetscCall(MatSeqBAIJSetPreallocation(b->B, bs, o_nz, o_nnz));

2033:   B->preallocated  = PETSC_TRUE;
2034:   B->was_assembled = PETSC_FALSE;
2035:   B->assembled     = PETSC_FALSE;
2036:   PetscFunctionReturn(PETSC_SUCCESS);
2037: }

2039: static PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B, PetscInt bs, const PetscInt ii[], const PetscInt jj[], const PetscScalar V[])
2040: {
2041:   PetscInt        m, rstart, cend;
2042:   PetscInt        i, j, d, nz, bd, nz_max = 0, *d_nnz = NULL, *o_nnz = NULL;
2043:   const PetscInt *JJ          = NULL;
2044:   PetscScalar    *values      = NULL;
2045:   PetscBool       roworiented = ((Mat_MPISBAIJ *)B->data)->roworiented;
2046:   PetscBool       nooffprocentries;

2048:   PetscFunctionBegin;
2049:   PetscCheck(bs >= 1, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_OUTOFRANGE, "Invalid block size specified, must be positive but it is %" PetscInt_FMT, bs);
2050:   PetscCall(PetscLayoutSetBlockSize(B->rmap, bs));
2051:   PetscCall(PetscLayoutSetBlockSize(B->cmap, bs));
2052:   PetscCall(PetscLayoutSetUp(B->rmap));
2053:   PetscCall(PetscLayoutSetUp(B->cmap));
2054:   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
2055:   m      = B->rmap->n / bs;
2056:   rstart = B->rmap->rstart / bs;
2057:   cend   = B->cmap->rend / bs;

2059:   PetscCheck(!ii[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ii[0] must be 0 but it is %" PetscInt_FMT, ii[0]);
2060:   PetscCall(PetscMalloc2(m, &d_nnz, m, &o_nnz));
2061:   for (i = 0; i < m; i++) {
2062:     nz = ii[i + 1] - ii[i];
2063:     PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
2064:     /* count the ones on the diagonal and above, split into diagonal and off-diagonal portions. */
2065:     JJ = jj + ii[i];
2066:     bd = 0;
2067:     for (j = 0; j < nz; j++) {
2068:       if (*JJ >= i + rstart) break;
2069:       JJ++;
2070:       bd++;
2071:     }
2072:     d = 0;
2073:     for (; j < nz; j++) {
2074:       if (*JJ++ >= cend) break;
2075:       d++;
2076:     }
2077:     d_nnz[i] = d;
2078:     o_nnz[i] = nz - d - bd;
2079:     nz       = nz - bd;
2080:     nz_max   = PetscMax(nz_max, nz);
2081:   }
2082:   PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, d_nnz, 0, o_nnz));
2083:   PetscCall(MatSetOption(B, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE));
2084:   PetscCall(PetscFree2(d_nnz, o_nnz));

2086:   values = (PetscScalar *)V;
2087:   if (!values) PetscCall(PetscCalloc1(bs * bs * nz_max, &values));
2088:   for (i = 0; i < m; i++) {
2089:     PetscInt        row   = i + rstart;
2090:     PetscInt        ncols = ii[i + 1] - ii[i];
2091:     const PetscInt *icols = jj + ii[i];
2092:     if (bs == 1 || !roworiented) { /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2093:       const PetscScalar *svals = values + (V ? (bs * bs * ii[i]) : 0);
2094:       PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, ncols, icols, svals, INSERT_VALUES));
2095:     } else { /* block ordering does not match so we can only insert one block at a time. */
2096:       PetscInt j;
2097:       for (j = 0; j < ncols; j++) {
2098:         const PetscScalar *svals = values + (V ? (bs * bs * (ii[i] + j)) : 0);
2099:         PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, 1, &icols[j], svals, INSERT_VALUES));
2100:       }
2101:     }
2102:   }

2104:   if (!V) PetscCall(PetscFree(values));
2105:   nooffprocentries    = B->nooffprocentries;
2106:   B->nooffprocentries = PETSC_TRUE;
2107:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2108:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2109:   B->nooffprocentries = nooffprocentries;

2111:   PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
2112:   PetscFunctionReturn(PETSC_SUCCESS);
2113: }
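
/* For illustration only (a hedged sketch, not part of the original source): the block-CSR input consumed
   above, for one process owning two block rows with bs = 2. ii[] and jj[] are in block indices, V holds
   bs*bs scalars per block, the counting above assumes column indices are sorted within each block row,
   and blocks below the block diagonal are ignored because MAT_IGNORE_LOWER_TRIANGULAR is set before insertion.

     PetscInt    ii[] = {0, 2, 3};  // block row i uses jj[ii[i] .. ii[i+1]-1]
     PetscInt    jj[] = {0, 1, 1};  // global block-column indices
     PetscScalar V[3 * 4] = {0};    // 3 blocks, each bs*bs = 4 values
     PetscCall(MatMPISBAIJSetPreallocationCSR(B, 2, ii, jj, V));
*/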

2115: /*MC
2116:    MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
2117:    based on block compressed sparse row format.  Only the upper triangular portion of the "diagonal" portion of
2118:    the matrix is stored.

2120:    For complex numbers by default this matrix is symmetric, NOT Hermitian symmetric. To make it Hermitian symmetric you
2121:    can call `MatSetOption`(`Mat`, `MAT_HERMITIAN`, `PETSC_TRUE`);

2123:    Options Database Key:
2124: . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to `MatSetFromOptions()`

2126:    Level: beginner

2128:    Note:
2129:      The number of rows in the matrix must be less than or equal to the number of columns. Similarly the number of rows in the
2130:      diagonal portion of the matrix on each process has to be less than or equal to the number of columns.

2132: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MATBAIJ`, `MatCreateBAIJ()`, `MATSEQSBAIJ`, `MatType`
2133: M*/
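
/* A minimal creation sketch (not part of the original source; N and bs are assumed user values):

     Mat A;
     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, N, N));
     PetscCall(MatSetType(A, MATMPISBAIJ));
     PetscCall(MatSetFromOptions(A));
     PetscCall(MatMPISBAIJSetPreallocation(A, bs, 5, NULL, 2, NULL));
     PetscCall(MatSetOption(A, MAT_HERMITIAN, PETSC_TRUE)); // only meaningful for complex scalars; the default is symmetric, not Hermitian
*/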

2135: PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
2136: {
2137:   Mat_MPISBAIJ *b;
2138:   PetscBool     flg = PETSC_FALSE;

2140:   PetscFunctionBegin;
2141:   PetscCall(PetscNew(&b));
2142:   B->data   = (void *)b;
2143:   B->ops[0] = MatOps_Values;

2145:   B->ops->destroy = MatDestroy_MPISBAIJ;
2146:   B->ops->view    = MatView_MPISBAIJ;
2147:   B->assembled    = PETSC_FALSE;
2148:   B->insertmode   = NOT_SET_VALUES;

2150:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)B), &b->rank));
2151:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &b->size));

2153:   /* build local table of row and column ownerships */
2154:   PetscCall(PetscMalloc1(b->size + 2, &b->rangebs));

2156:   /* build cache for off array entries formed */
2157:   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

2159:   b->donotstash  = PETSC_FALSE;
2160:   b->colmap      = NULL;
2161:   b->garray      = NULL;
2162:   b->roworiented = PETSC_TRUE;

2164:   /* stuff used in block assembly */
2165:   b->barray = NULL;

2167:   /* stuff used for matrix vector multiply */
2168:   b->lvec    = NULL;
2169:   b->Mvctx   = NULL;
2170:   b->slvec0  = NULL;
2171:   b->slvec0b = NULL;
2172:   b->slvec1  = NULL;
2173:   b->slvec1a = NULL;
2174:   b->slvec1b = NULL;
2175:   b->sMvctx  = NULL;

2177:   /* stuff for MatGetRow() */
2178:   b->rowindices   = NULL;
2179:   b->rowvalues    = NULL;
2180:   b->getrowactive = PETSC_FALSE;

2182:   /* hash table stuff */
2183:   b->ht           = NULL;
2184:   b->hd           = NULL;
2185:   b->ht_size      = 0;
2186:   b->ht_flag      = PETSC_FALSE;
2187:   b->ht_fact      = 0;
2188:   b->ht_total_ct  = 0;
2189:   b->ht_insert_ct = 0;

2191:   /* stuff for MatCreateSubMatrices_MPIBAIJ_local() */
2192:   b->ijonly = PETSC_FALSE;

2194:   b->in_loc = NULL;
2195:   b->v_loc  = NULL;
2196:   b->n_loc  = 0;

2198:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_MPISBAIJ));
2199:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_MPISBAIJ));
2200:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocation_C", MatMPISBAIJSetPreallocation_MPISBAIJ));
2201:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocationCSR_C", MatMPISBAIJSetPreallocationCSR_MPISBAIJ));
2202: #if defined(PETSC_HAVE_ELEMENTAL)
2203:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_elemental_C", MatConvert_MPISBAIJ_Elemental));
2204: #endif
2205: #if defined(PETSC_HAVE_SCALAPACK)
2206:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_scalapack_C", MatConvert_SBAIJ_ScaLAPACK));
2207: #endif
2208:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpiaij_C", MatConvert_MPISBAIJ_Basic));
2209:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpibaij_C", MatConvert_MPISBAIJ_Basic));

2211:   B->symmetric                   = PETSC_BOOL3_TRUE;
2212:   B->structurally_symmetric      = PETSC_BOOL3_TRUE;
2213:   B->symmetry_eternal            = PETSC_TRUE;
2214:   B->structural_symmetry_eternal = PETSC_TRUE;
2215: #if defined(PETSC_USE_COMPLEX)
2216:   B->hermitian = PETSC_BOOL3_FALSE;
2217: #else
2218:   B->hermitian = PETSC_BOOL3_TRUE;
2219: #endif

2221:   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPISBAIJ));
2222:   PetscOptionsBegin(PetscObjectComm((PetscObject)B), NULL, "Options for loading MPISBAIJ matrix 1", "Mat");
2223:   PetscCall(PetscOptionsBool("-mat_use_hash_table", "Use hash table to save memory in constructing matrix", "MatSetOption", flg, &flg, NULL));
2224:   if (flg) {
2225:     PetscReal fact = 1.39;
2226:     PetscCall(MatSetOption(B, MAT_USE_HASH_TABLE, PETSC_TRUE));
2227:     PetscCall(PetscOptionsReal("-mat_use_hash_table", "Use hash table factor", "MatMPIBAIJSetHashTableFactor", fact, &fact, NULL));
2228:     if (fact <= 1.0) fact = 1.39;
2229:     PetscCall(MatMPIBAIJSetHashTableFactor(B, fact));
2230:     PetscCall(PetscInfo(B, "Hash table Factor used %5.2g\n", (double)fact));
2231:   }
2232:   PetscOptionsEnd();
2233:   PetscFunctionReturn(PETSC_SUCCESS);
2234: }

2236: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2237: /*MC
2238:    MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.

2240:    This matrix type is identical to `MATSEQSBAIJ` when constructed with a single process communicator,
2241:    and `MATMPISBAIJ` otherwise.

2243:    Options Database Key:
2244: . -mat_type sbaij - sets the matrix type to `MATSBAIJ` during a call to `MatSetFromOptions()`

2246:   Level: beginner

2248: .seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MATMPISBAIJ`, `MatCreateSBAIJ()`, `MATSEQSBAIJ`, `MATMPISBAIJ`
2249: M*/

2251: /*@
2252:   MatMPISBAIJSetPreallocation - For good matrix assembly performance
2253:   the user should preallocate the matrix storage by setting the parameters
2254:   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2255:   performance can be increased by more than a factor of 50.

2257:   Collective

2259:   Input Parameters:
2260: + B     - the matrix
2261: . bs    - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
2262:           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
2263: . d_nz  - number of block nonzeros per block row in diagonal portion of local
2264:           submatrix  (same for all local rows)
2265: . d_nnz - array containing the number of block nonzeros in the various block rows
2266:           in the upper triangular and diagonal part of the diagonal portion of the local submatrix
2267:           (possibly different for each block row) or `NULL`.  If you plan to factor the matrix you must leave room
2268:           for the diagonal entry and set a value even if it is zero.
2269: . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2270:           submatrix (same for all local rows).
2271: - o_nnz - array containing the number of nonzeros in the various block rows of the
2272:           off-diagonal portion of the local submatrix that is right of the diagonal
2273:           (possibly different for each block row) or `NULL`.

2275:   Options Database Keys:
2276: + -mat_no_unroll  - uses code that does not unroll the loops in the
2277:                     block calculations (much slower)
2278: - -mat_block_size - size of the blocks to use

2280:   Level: intermediate

2282:   Notes:

2284:   If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
2285:   then it must be used on all processors that share the object for that argument.

2287:   If the *_nnz parameter is given then the *_nz parameter is ignored

2289:   Storage Information:
2290:   For a square global matrix we define each processor's diagonal portion
2291:   to be its local rows and the corresponding columns (a square submatrix);
2292:   each processor's off-diagonal portion encompasses the remainder of the
2293:   local matrix (a rectangular submatrix).

2295:   The user can specify preallocated storage for the diagonal part of
2296:   the local submatrix with either `d_nz` or `d_nnz` (not both).  Set
2297:   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2298:   memory allocation.  Likewise, specify preallocated storage for the
2299:   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).

2301:   You can call `MatGetInfo()` to get information on how effective the preallocation was,
2302:   for example the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
2303:   You can also run with the option `-info` and look for messages with the string
2304:   malloc in them to see if additional memory allocation was needed.

2306:   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2307:   the figure below we depict these three local rows and all columns (0-11).

2309: .vb
2310:            0 1 2 3 4 5 6 7 8 9 10 11
2311:           --------------------------
2312:    row 3  |. . . d d d o o o o  o  o
2313:    row 4  |. . . d d d o o o o  o  o
2314:    row 5  |. . . d d d o o o o  o  o
2315:           --------------------------
2316: .ve

2318:   Thus, any entries in the d locations are stored in the d (diagonal)
2319:   submatrix, and any entries in the o locations are stored in the
2320:   o (off-diagonal) submatrix.  Note that the d matrix is stored in
2321:   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.

2323:   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2324:   plus the diagonal part of the d matrix,
2325:   and `o_nz` should indicate the number of block nonzeros per row in the o matrix

2327:   In general, for PDE problems in which most nonzeros are near the diagonal,
2328:   one expects `d_nz` >> `o_nz`.

2330: .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, `PetscSplitOwnership()`
2331: @*/
2332: PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
2333: {
2334:   PetscFunctionBegin;
2338:   PetscTryMethod(B, "MatMPISBAIJSetPreallocation_C", (Mat, PetscInt, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, bs, d_nz, d_nnz, o_nz, o_nnz));
2339:   PetscFunctionReturn(PETSC_SUCCESS);
2340: }
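
/* A hedged sketch (not part of the original source) of the preallocation counts for the storage figure in the
   manual page above, with bs = 1 and the process owning rows 3, 4 and 5: only the diagonal and upper-triangular
   entries of the d block are counted.

     PetscInt d_nnz[] = {3, 2, 1}; // row 3: columns 3,4,5; row 4: columns 4,5; row 5: column 5
     PetscInt o_nnz[] = {6, 6, 6}; // columns 6..11 for each of the three rows
     PetscCall(MatMPISBAIJSetPreallocation(B, 1, 0, d_nnz, 0, o_nnz));
*/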

2342: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2343: /*@
2344:   MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format, `MATSBAIJ`,
2345:   (block compressed row).  For good matrix assembly performance
2346:   the user should preallocate the matrix storage by setting the parameters
2347:   `d_nz` (or `d_nnz`) and `o_nz` (or `o_nnz`).

2349:   Collective

2351:   Input Parameters:
2352: + comm  - MPI communicator
2353: . bs    - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
2354:           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
2355: . m     - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given)
2356:           This value should be the same as the local size used in creating the
2357:           y vector for the matrix-vector product y = Ax.
2358: . n     - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given)
2359:           This value should be the same as the local size used in creating the
2360:           x vector for the matrix-vector product y = Ax.
2361: . M     - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2362: . N     - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2363: . d_nz  - number of block nonzeros per block row in diagonal portion of local
2364:           submatrix (same for all local rows)
2365: . d_nnz - array containing the number of block nonzeros in the various block rows
2366:           in the upper triangular portion of the diagonal portion of the local submatrix
2367:           (possibly different for each block row) or `NULL`.
2368:           If you plan to factor the matrix you must leave room for the diagonal entry and
2369:           set its value even if it is zero.
2370: . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2371:           submatrix (same for all local rows).
2372: - o_nnz - array containing the number of nonzeros in the various block rows of the
2373:           off-diagonal portion of the local submatrix (possibly different for
2374:           each block row) or `NULL`.

2376:   Output Parameter:
2377: . A - the matrix

2379:   Options Database Keys:
2380: + -mat_no_unroll  - uses code that does not unroll the loops in the
2381:                     block calculations (much slower)
2382: . -mat_block_size - size of the blocks to use
2383: - -mat_mpi        - use the parallel matrix data structures even on one processor
2384:                     (defaults to using SeqBAIJ format on one processor)

2386:   Level: intermediate

2388:   Notes:
2389:   It is recommended that one use `MatCreateFromOptions()` or the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()`,
2390:   MatXXXXSetPreallocation() paradigm instead of this routine directly.
2391:   [MatXXXXSetPreallocation() is, for example, `MatSeqAIJSetPreallocation()`]

2393:   The number of rows and columns must be divisible by blocksize.
2394:   This matrix type does not support complex Hermitian operations.

2396:   The user MUST specify either the local or global matrix dimensions
2397:   (possibly both).

2399:   If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
2400:   then it must be used on all processors that share the object for that argument.

2402:   If `m` and `n` are not `PETSC_DECIDE`, then these values determine the `PetscLayout` of the matrix and the ranges returned by
2403:   `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`.

2405:   If the *_nnz parameter is given then the *_nz parameter is ignored

2407:   Storage Information:
2408:   For a square global matrix we define each processor's diagonal portion
2409:   to be its local rows and the corresponding columns (a square submatrix);
2410:   each processor's off-diagonal portion encompasses the remainder of the
2411:   local matrix (a rectangular submatrix).

2413:   The user can specify preallocated storage for the diagonal part of
2414:   the local submatrix with either `d_nz` or `d_nnz` (not both). Set
2415:   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2416:   memory allocation. Likewise, specify preallocated storage for the
2417:   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).

2419:   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2420:   the figure below we depict these three local rows and all columns (0-11).

2422: .vb
2423:            0 1 2 3 4 5 6 7 8 9 10 11
2424:           --------------------------
2425:    row 3  |. . . d d d o o o o  o  o
2426:    row 4  |. . . d d d o o o o  o  o
2427:    row 5  |. . . d d d o o o o  o  o
2428:           --------------------------
2429: .ve

2431:   Thus, any entries in the d locations are stored in the d (diagonal)
2432:   submatrix, and any entries in the o locations are stored in the
2433:   o (off-diagonal) submatrix. Note that the d matrix is stored in
2434:   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.

2436:   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2437:   plus the diagonal part of the d matrix,
2438:   and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
2439:   In general, for PDE problems in which most nonzeros are near the diagonal,
2440:   one expects `d_nz` >> `o_nz`.

2442: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`,
2443:           `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
2444: @*/
2445: PetscErrorCode MatCreateSBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A)
2446: {
2447:   PetscMPIInt size;

2449:   PetscFunctionBegin;
2450:   PetscCall(MatCreate(comm, A));
2451:   PetscCall(MatSetSizes(*A, m, n, M, N));
2452:   PetscCallMPI(MPI_Comm_size(comm, &size));
2453:   if (size > 1) {
2454:     PetscCall(MatSetType(*A, MATMPISBAIJ));
2455:     PetscCall(MatMPISBAIJSetPreallocation(*A, bs, d_nz, d_nnz, o_nz, o_nnz));
2456:   } else {
2457:     PetscCall(MatSetType(*A, MATSEQSBAIJ));
2458:     PetscCall(MatSeqSBAIJSetPreallocation(*A, bs, d_nz, d_nnz));
2459:   }
2460:   PetscFunctionReturn(PETSC_SUCCESS);
2461: }
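
/* A minimal usage sketch (not part of the original source; sizes are illustrative):

     Mat      A;
     PetscInt bs = 2, mloc = 10; // local rows/columns, must be divisible by bs
     PetscCall(MatCreateSBAIJ(PETSC_COMM_WORLD, bs, mloc, mloc, PETSC_DETERMINE, PETSC_DETERMINE, 3, NULL, 1, NULL, &A));
     // ... insert blocks on or above the block diagonal with MatSetValuesBlocked(), then assemble
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatDestroy(&A));
*/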

2463: static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin, MatDuplicateOption cpvalues, Mat *newmat)
2464: {
2465:   Mat           mat;
2466:   Mat_MPISBAIJ *a, *oldmat = (Mat_MPISBAIJ *)matin->data;
2467:   PetscInt      len = 0, nt, bs = matin->rmap->bs, mbs = oldmat->mbs;
2468:   PetscScalar  *array;

2470:   PetscFunctionBegin;
2471:   *newmat = NULL;

2473:   PetscCall(MatCreate(PetscObjectComm((PetscObject)matin), &mat));
2474:   PetscCall(MatSetSizes(mat, matin->rmap->n, matin->cmap->n, matin->rmap->N, matin->cmap->N));
2475:   PetscCall(MatSetType(mat, ((PetscObject)matin)->type_name));
2476:   PetscCall(PetscLayoutReference(matin->rmap, &mat->rmap));
2477:   PetscCall(PetscLayoutReference(matin->cmap, &mat->cmap));

2479:   if (matin->hash_active) {
2480:     PetscCall(MatSetUp(mat));
2481:   } else {
2482:     mat->factortype   = matin->factortype;
2483:     mat->preallocated = PETSC_TRUE;
2484:     mat->assembled    = PETSC_TRUE;
2485:     mat->insertmode   = NOT_SET_VALUES;

2487:     a      = (Mat_MPISBAIJ *)mat->data;
2488:     a->bs2 = oldmat->bs2;
2489:     a->mbs = oldmat->mbs;
2490:     a->nbs = oldmat->nbs;
2491:     a->Mbs = oldmat->Mbs;
2492:     a->Nbs = oldmat->Nbs;

2494:     a->size         = oldmat->size;
2495:     a->rank         = oldmat->rank;
2496:     a->donotstash   = oldmat->donotstash;
2497:     a->roworiented  = oldmat->roworiented;
2498:     a->rowindices   = NULL;
2499:     a->rowvalues    = NULL;
2500:     a->getrowactive = PETSC_FALSE;
2501:     a->barray       = NULL;
2502:     a->rstartbs     = oldmat->rstartbs;
2503:     a->rendbs       = oldmat->rendbs;
2504:     a->cstartbs     = oldmat->cstartbs;
2505:     a->cendbs       = oldmat->cendbs;

2507:     /* hash table stuff */
2508:     a->ht           = NULL;
2509:     a->hd           = NULL;
2510:     a->ht_size      = 0;
2511:     a->ht_flag      = oldmat->ht_flag;
2512:     a->ht_fact      = oldmat->ht_fact;
2513:     a->ht_total_ct  = 0;
2514:     a->ht_insert_ct = 0;

2516:     PetscCall(PetscArraycpy(a->rangebs, oldmat->rangebs, a->size + 2));
2517:     if (oldmat->colmap) {
2518: #if defined(PETSC_USE_CTABLE)
2519:       PetscCall(PetscHMapIDuplicate(oldmat->colmap, &a->colmap));
2520: #else
2521:       PetscCall(PetscMalloc1(a->Nbs, &a->colmap));
2522:       PetscCall(PetscArraycpy(a->colmap, oldmat->colmap, a->Nbs));
2523: #endif
2524:     } else a->colmap = NULL;

2526:     if (oldmat->garray && (len = ((Mat_SeqBAIJ *)oldmat->B->data)->nbs)) {
2527:       PetscCall(PetscMalloc1(len, &a->garray));
2528:       PetscCall(PetscArraycpy(a->garray, oldmat->garray, len));
2529:     } else a->garray = NULL;

2531:     PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)matin), matin->rmap->bs, &mat->bstash));
2532:     PetscCall(VecDuplicate(oldmat->lvec, &a->lvec));
2533:     PetscCall(VecScatterCopy(oldmat->Mvctx, &a->Mvctx));

2535:     PetscCall(VecDuplicate(oldmat->slvec0, &a->slvec0));
2536:     PetscCall(VecDuplicate(oldmat->slvec1, &a->slvec1));

2538:     PetscCall(VecGetLocalSize(a->slvec1, &nt));
2539:     PetscCall(VecGetArray(a->slvec1, &array));
2540:     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, bs * mbs, array, &a->slvec1a));
2541:     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec1b));
2542:     PetscCall(VecRestoreArray(a->slvec1, &array));
2543:     PetscCall(VecGetArray(a->slvec0, &array));
2544:     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec0b));
2545:     PetscCall(VecRestoreArray(a->slvec0, &array));

2547:     /* VecScatterCopy(oldmat->sMvctx,&a->sMvctx) is not written yet; instead share the old scatter by taking a reference: */
2548:     PetscCall(PetscObjectReference((PetscObject)oldmat->sMvctx));
2549:     a->sMvctx = oldmat->sMvctx;

2551:     PetscCall(MatDuplicate(oldmat->A, cpvalues, &a->A));
2552:     PetscCall(MatDuplicate(oldmat->B, cpvalues, &a->B));
2553:   }
2554:   PetscCall(PetscFunctionListDuplicate(((PetscObject)matin)->qlist, &((PetscObject)mat)->qlist));
2555:   *newmat = mat;
2556:   PetscFunctionReturn(PETSC_SUCCESS);
2557: }
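
MatDuplicate_MPISBAIJ() is installed as this type's implementation of the generic `MatDuplicate()` interface; user code never calls it directly. A minimal sketch (assuming `A` is an assembled MATMPISBAIJ matrix):

  Mat B;
  PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B)); /* copy layout, nonzero structure, and values */
  /* ... use B independently of A ... */
  PetscCall(MatDestroy(&B));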

2559: /* Used for both MPIBAIJ and MPISBAIJ matrices */
2560: #define MatLoad_MPISBAIJ_Binary MatLoad_MPIBAIJ_Binary

2562: static PetscErrorCode MatLoad_MPISBAIJ(Mat mat, PetscViewer viewer)
2563: {
2564:   PetscBool isbinary;

2566:   PetscFunctionBegin;
2567:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2568:   PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Viewer type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, ((PetscObject)mat)->type_name);
2569:   PetscCall(MatLoad_MPISBAIJ_Binary(mat, viewer));
2570:   PetscFunctionReturn(PETSC_SUCCESS);
2571: }
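
Since only binary viewers are accepted by the check above, a typical loading sequence looks like the following sketch (the file name is a placeholder):

  Mat         A;
  PetscViewer viewer;

  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &viewer));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPISBAIJ)); /* request the symmetric block format before loading */
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(MatDestroy(&A));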

2573: static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A, Vec v, PetscInt idx[])
2574: {
2575:   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
2576:   Mat_SeqBAIJ  *b = (Mat_SeqBAIJ *)a->B->data;
2577:   PetscReal     atmp;
2578:   PetscReal    *work, *svalues, *rvalues;
2579:   PetscInt      i, bs, mbs, *bi, *bj, brow, j, ncols, krow, kcol, col, row, Mbs, bcol;
2580:   PetscMPIInt   rank, size;
2581:   PetscInt     *rowners_bs, count, source;
2582:   PetscScalar  *va;
2583:   MatScalar    *ba;
2584:   MPI_Status    stat;

2586:   PetscFunctionBegin;
2587:   PetscCheck(!idx, PETSC_COMM_SELF, PETSC_ERR_SUP, "Returning row indices (idx) is not supported for this matrix type; send email to petsc-maint@mcs.anl.gov");
2588:   PetscCall(MatGetRowMaxAbs(a->A, v, NULL));
2589:   PetscCall(VecGetArray(v, &va));

2591:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
2592:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));

2594:   bs  = A->rmap->bs;
2595:   mbs = a->mbs;
2596:   Mbs = a->Mbs;
2597:   ba  = b->a;
2598:   bi  = b->i;
2599:   bj  = b->j;

2601:   /* find ownerships */
2602:   rowners_bs = A->rmap->range;

2604:   /* each proc creates an array to be distributed */
2605:   PetscCall(PetscCalloc1(bs * Mbs, &work));

2607:   /* row_max for B */
2608:   if (rank != size - 1) {
2609:     for (i = 0; i < mbs; i++) {
2610:       ncols = bi[1] - bi[0];
2611:       bi++;
2612:       brow = bs * i;
2613:       for (j = 0; j < ncols; j++) {
2614:         bcol = bs * (*bj);
2615:         for (kcol = 0; kcol < bs; kcol++) {
2616:           col = bcol + kcol;           /* local col index */
2617:           col += rowners_bs[rank + 1]; /* global col index */
2618:           for (krow = 0; krow < bs; krow++) {
2619:             atmp = PetscAbsScalar(*ba);
2620:             ba++;
2621:             row = brow + krow; /* local row index */
2622:             if (PetscRealPart(va[row]) < atmp) va[row] = atmp;
2623:             if (work[col] < atmp) work[col] = atmp;
2624:           }
2625:         }
2626:         bj++;
2627:       }
2628:     }

2630:     /* send values to their owners */
2631:     for (PetscMPIInt dest = rank + 1; dest < size; dest++) {
2632:       svalues = work + rowners_bs[dest];
2633:       count   = rowners_bs[dest + 1] - rowners_bs[dest];
2634:       PetscCallMPI(MPIU_Send(svalues, count, MPIU_REAL, dest, rank, PetscObjectComm((PetscObject)A)));
2635:     }
2636:   }

2638:   /* receive values */
2639:   if (rank) {
2640:     rvalues = work;
2641:     count   = rowners_bs[rank + 1] - rowners_bs[rank];
2642:     for (source = 0; source < rank; source++) {
2643:       PetscCallMPI(MPIU_Recv(rvalues, count, MPIU_REAL, MPI_ANY_SOURCE, MPI_ANY_TAG, PetscObjectComm((PetscObject)A), &stat));
2644:       /* process values */
2645:       for (i = 0; i < count; i++) {
2646:         if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
2647:       }
2648:     }
2649:   }

2651:   PetscCall(VecRestoreArray(v, &va));
2652:   PetscCall(PetscFree(work));
2653:   PetscFunctionReturn(PETSC_SUCCESS);
2654: }
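
Note that the implementation above rejects a non-NULL `idx` argument. A minimal calling sketch through the public interface (assuming `A` is an assembled MATMPISBAIJ matrix):

  Vec rowmax;

  PetscCall(MatCreateVecs(A, NULL, &rowmax));  /* vector with the matrix's row layout */
  PetscCall(MatGetRowMaxAbs(A, rowmax, NULL)); /* idx must be NULL for this format */
  PetscCall(VecDestroy(&rowmax));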

2656: static PetscErrorCode MatSOR_MPISBAIJ(Mat matin, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx)
2657: {
2658:   Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ *)matin->data;
2659:   PetscInt           mbs = mat->mbs, bs = matin->rmap->bs;
2660:   PetscScalar       *x, *ptr, *from;
2661:   Vec                bb1;
2662:   const PetscScalar *b;

2664:   PetscFunctionBegin;
2665:   PetscCheck(its > 0 && lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " and local its %" PetscInt_FMT " both positive", its, lits);
2666:   PetscCheck(bs <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "SSOR for block size > 1 is not yet implemented");

2668:   if (flag == SOR_APPLY_UPPER) {
2669:     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2670:     PetscFunctionReturn(PETSC_SUCCESS);
2671:   }

2673:   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2674:     if (flag & SOR_ZERO_INITIAL_GUESS) {
2675:       PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, lits, xx));
2676:       its--;
2677:     }

2679:     PetscCall(VecDuplicate(bb, &bb1));
2680:     while (its--) {
2681:       /* lower triangular part: slvec0b = - B^T*xx */
2682:       PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));

2684:       /* copy xx into slvec0a */
2685:       PetscCall(VecGetArray(mat->slvec0, &ptr));
2686:       PetscCall(VecGetArray(xx, &x));
2687:       PetscCall(PetscArraycpy(ptr, x, bs * mbs));
2688:       PetscCall(VecRestoreArray(mat->slvec0, &ptr));

2690:       PetscCall(VecScale(mat->slvec0, -1.0));

2692:       /* copy bb into slvec1a */
2693:       PetscCall(VecGetArray(mat->slvec1, &ptr));
2694:       PetscCall(VecGetArrayRead(bb, &b));
2695:       PetscCall(PetscArraycpy(ptr, b, bs * mbs));
2696:       PetscCall(VecRestoreArray(mat->slvec1, &ptr));

2698:       /* set slvec1b = 0 */
2699:       PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
2700:       PetscCall(VecZeroEntries(mat->slvec1b));

2702:       PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2703:       PetscCall(VecRestoreArray(xx, &x));
2704:       PetscCall(VecRestoreArrayRead(bb, &b));
2705:       PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));

2707:       /* upper triangular part: bb1 = bb1 - B*x */
2708:       PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, bb1));

2710:       /* local diagonal sweep */
2711:       PetscCall((*mat->A->ops->sor)(mat->A, bb1, omega, SOR_SYMMETRIC_SWEEP, fshift, lits, lits, xx));
2712:     }
2713:     PetscCall(VecDestroy(&bb1));
2714:   } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2715:     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2716:   } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2717:     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2718:   } else if (flag & SOR_EISENSTAT) {
2719:     Vec                xx1;
2720:     PetscBool          hasop;
2721:     const PetscScalar *diag;
2722:     PetscScalar       *sl, scale = (omega - 2.0) / omega;
2723:     PetscInt           i, n;

2725:     if (!mat->xx1) {
2726:       PetscCall(VecDuplicate(bb, &mat->xx1));
2727:       PetscCall(VecDuplicate(bb, &mat->bb1));
2728:     }
2729:     xx1 = mat->xx1;
2730:     bb1 = mat->bb1;

2732:     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP), fshift, lits, 1, xx));

2734:     if (!mat->diag) {
2735:       /* note: this cached diagonal becomes stale if the same matrix is later refilled with new nonzero values */
2736:       PetscCall(MatCreateVecs(matin, &mat->diag, NULL));
2737:       PetscCall(MatGetDiagonal(matin, mat->diag));
2738:     }
2739:     PetscCall(MatHasOperation(matin, MATOP_MULT_DIAGONAL_BLOCK, &hasop));

2741:     if (hasop) {
2742:       PetscCall(MatMultDiagonalBlock(matin, xx, bb1));
2743:       PetscCall(VecAYPX(mat->slvec1a, scale, bb));
2744:     } else {
2745:       /*
2746:           The following hand-rolled loops replace these two calls and may be a bit faster with a good compiler:
2747:       PetscCall(VecPointwiseMult(mat->slvec1a,mat->diag,xx));
2748:       PetscCall(VecAYPX(mat->slvec1a,scale,bb));
2749:       */
2750:       PetscCall(VecGetArray(mat->slvec1a, &sl));
2751:       PetscCall(VecGetArrayRead(mat->diag, &diag));
2752:       PetscCall(VecGetArrayRead(bb, &b));
2753:       PetscCall(VecGetArray(xx, &x));
2754:       PetscCall(VecGetLocalSize(xx, &n));
2755:       if (omega == 1.0) {
2756:         for (i = 0; i < n; i++) sl[i] = b[i] - diag[i] * x[i];
2757:         PetscCall(PetscLogFlops(2.0 * n));
2758:       } else {
2759:         for (i = 0; i < n; i++) sl[i] = b[i] + scale * diag[i] * x[i];
2760:         PetscCall(PetscLogFlops(3.0 * n));
2761:       }
2762:       PetscCall(VecRestoreArray(mat->slvec1a, &sl));
2763:       PetscCall(VecRestoreArrayRead(mat->diag, &diag));
2764:       PetscCall(VecRestoreArrayRead(bb, &b));
2765:       PetscCall(VecRestoreArray(xx, &x));
2766:     }

2768:     /* multiply off-diagonal portion of matrix */
2769:     PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
2770:     PetscCall(VecZeroEntries(mat->slvec1b));
2771:     PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));
2772:     PetscCall(VecGetArray(mat->slvec0, &from));
2773:     PetscCall(VecGetArray(xx, &x));
2774:     PetscCall(PetscArraycpy(from, x, bs * mbs));
2775:     PetscCall(VecRestoreArray(mat->slvec0, &from));
2776:     PetscCall(VecRestoreArray(xx, &x));
2777:     PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2778:     PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2779:     PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, mat->slvec1a));

2781:     /* local sweep */
2782:     PetscCall((*mat->A->ops->sor)(mat->A, mat->slvec1a, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP), fshift, lits, 1, xx1));
2783:     PetscCall(VecAXPY(xx, 1.0, xx1));
2784:   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatSORType is not supported for SBAIJ matrix format");
2785:   PetscFunctionReturn(PETSC_SUCCESS);
2786: }
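
MatSOR() is normally reached through the `PCSOR` preconditioner rather than called directly. A sketch of selecting it from code (assumes `A`, `b`, and `x` already exist; the omega value is illustrative, and the routine above requires block size 1):

  KSP ksp;
  PC  pc;

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCSOR));
  PetscCall(PCSORSetOmega(pc, 1.0)); /* omega = 1.0 gives Gauss-Seidel style relaxation */
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPDestroy(&ksp));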

2788: /*@
2789:   MatCreateMPISBAIJWithArrays - creates a `MATMPISBAIJ` matrix using arrays that contain the local rows in standard CSR format.

2791:   Collective

2793:   Input Parameters:
2794: + comm - MPI communicator
2795: . bs   - the block size, only a block size of 1 is supported
2796: . m    - number of local rows (Cannot be `PETSC_DECIDE`)
2797: . n    - This value should be the same as the local size used in creating the
2798:          x vector for the matrix-vector product $ y = Ax $ (or `PETSC_DECIDE` to have it
2799:          calculated if `N` is given). For square matrices `n` is almost always `m`.
2800: . M    - number of global rows (or `PETSC_DETERMINE` to have it calculated if `m` is given)
2801: . N    - number of global columns (or `PETSC_DETERMINE` to have it calculated if `n` is given)
2802: . i    - row indices; that is i[0] = 0, i[row] = i[row-1] + number of block elements in that block row of the matrix
2803: . j    - column indices
2804: - a    - matrix values

2806:   Output Parameter:
2807: . mat - the matrix

2809:   Level: intermediate

2811:   Notes:
2812:   The `i`, `j`, and `a` arrays ARE copied by this routine into the internal format used by PETSc;
2813:   thus you CANNOT change the matrix entries by changing the values of `a` after you have
2814:   called this routine. Use `MatCreateMPIAIJWithSplitArrays()` to avoid needing to copy the arrays.

2816:   The `i` and `j` indices are 0 based; the entries of `i` are offsets into the local `j` array.

2818: .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`,
2819:           `MATMPIAIJ`, `MatCreateAIJ()`, `MatCreateMPIAIJWithSplitArrays()`, `MatMPISBAIJSetPreallocationCSR()`
2820: @*/
2821: PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, const PetscInt i[], const PetscInt j[], const PetscScalar a[], Mat *mat)
2822: {
2823:   PetscFunctionBegin;
2824:   PetscCheck(!i[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0");
2825:   PetscCheck(m >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "local number of rows (m) cannot be PETSC_DECIDE, or negative");
2826:   PetscCall(MatCreate(comm, mat));
2827:   PetscCall(MatSetSizes(*mat, m, n, M, N));
2828:   PetscCall(MatSetType(*mat, MATMPISBAIJ));
2829:   PetscCall(MatMPISBAIJSetPreallocationCSR(*mat, bs, i, j, a));
2830:   PetscFunctionReturn(PETSC_SUCCESS);
2831: }
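
A small sketch of the expected CSR input (sizes and values are illustrative assumptions): each process contributes the upper-triangular part of two rows of a global symmetric tridiagonal matrix, with bs = 1 as required above and global column indices in `ja`:

  Mat         A;
  PetscInt    m = 2, rstart, ia[3], ja[4], nz = 0;
  PetscScalar va[4];
  PetscMPIInt rank, size;

  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  rstart = 2 * rank; /* first global row owned by this process */
  ia[0]  = 0;
  for (PetscInt i = 0; i < m; i++) {
    PetscInt row = rstart + i;
    ja[nz] = row;                                                  /* diagonal entry */
    va[nz++] = 2.0;
    if (row + 1 < 2 * size) { ja[nz] = row + 1; va[nz++] = -1.0; } /* super-diagonal, if it exists */
    ia[i + 1] = nz;
  }
  PetscCall(MatCreateMPISBAIJWithArrays(PETSC_COMM_WORLD, 1, m, m, PETSC_DETERMINE, PETSC_DETERMINE, ia, ja, va, &A));
  PetscCall(MatDestroy(&A));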

2833: /*@
2834:   MatMPISBAIJSetPreallocationCSR - Creates a sparse parallel matrix in `MATMPISBAIJ` format using the given nonzero structure and (optional) numerical values

2836:   Collective

2838:   Input Parameters:
2839: + B  - the matrix
2840: . bs - the block size
2841: . i  - the indices into `j` for the start of each local row (indices start with zero)
2842: . j  - the column indices for each local row (indices start with zero) these must be sorted for each row
2843: - v  - optional values in the matrix, pass `NULL` if not provided

2845:   Level: advanced

2847:   Notes:
2848:   The `i`, `j`, and `v` arrays ARE copied by this routine into the internal format used by PETSc;
2849:   thus you CANNOT change the matrix entries by changing the values of `v` after you have
2850:   called this routine.

2852:   Though this routine has Preallocation() in the name, it also sets the exact nonzero locations of the matrix entries
2853:   and usually the numerical values as well.

2855:   Any entries passed in that are below the diagonal are ignored

2857: .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatCreateAIJ()`, `MATMPIAIJ`,
2858:           `MatCreateMPISBAIJWithArrays()`
2859: @*/
2860: PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat B, PetscInt bs, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
2861: {
2862:   PetscFunctionBegin;
2863:   PetscTryMethod(B, "MatMPISBAIJSetPreallocationCSR_C", (Mat, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, bs, i, j, v));
2864:   PetscFunctionReturn(PETSC_SUCCESS);
2865: }
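
When the matrix object already exists (for example, created with `MatCreate()`/`MatSetType()`), the same CSR triplet can be supplied through this routine instead; a sketch reusing the `m`, `ia`, `ja`, and `va` assumptions from the example above:

  Mat A;
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(A, MATMPISBAIJ));
  PetscCall(MatMPISBAIJSetPreallocationCSR(A, 1, ia, ja, va)); /* arrays are copied, so they may be freed afterwards */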

2867: PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
2868: {
2869:   PetscInt     m, N, i, rstart, nnz, Ii, bs, cbs;
2870:   PetscInt    *indx;
2871:   PetscScalar *values;

2873:   PetscFunctionBegin;
2874:   PetscCall(MatGetSize(inmat, &m, &N));
2875:   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
2876:     Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)inmat->data;
2877:     PetscInt     *dnz, *onz, mbs, Nbs, nbs;
2878:     PetscInt     *bindx, rmax = a->rmax, j;
2879:     PetscMPIInt   rank, size;

2881:     PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2882:     mbs = m / bs;
2883:     Nbs = N / cbs;
2884:     if (n == PETSC_DECIDE) PetscCall(PetscSplitOwnershipBlock(comm, cbs, &n, &N));
2885:     nbs = n / cbs;

2887:     PetscCall(PetscMalloc1(rmax, &bindx));
2888:     MatPreallocateBegin(comm, mbs, nbs, dnz, onz); /* inline function, output __end and __rstart are used below */

2890:     PetscCallMPI(MPI_Comm_rank(comm, &rank));
2891:     PetscCallMPI(MPI_Comm_size(comm, &size));
2892:     if (rank == size - 1) {
2893:       /* Check sum(nbs) = Nbs */
2894:       PetscCheck(__end == Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Sum of local block columns %" PetscInt_FMT " != global block columns %" PetscInt_FMT, __end, Nbs);
2895:     }

2897:     rstart = __rstart; /* block rstart of *outmat; see inline function MatPreallocateBegin */
2898:     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2899:     for (i = 0; i < mbs; i++) {
2900:       PetscCall(MatGetRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL)); /* non-blocked nnz and indx */
2901:       nnz = nnz / bs;
2902:       for (j = 0; j < nnz; j++) bindx[j] = indx[j * bs] / bs;
2903:       PetscCall(MatPreallocateSet(i + rstart, nnz, bindx, dnz, onz));
2904:       PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL));
2905:     }
2906:     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2907:     PetscCall(PetscFree(bindx));

2909:     PetscCall(MatCreate(comm, outmat));
2910:     PetscCall(MatSetSizes(*outmat, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
2911:     PetscCall(MatSetBlockSizes(*outmat, bs, cbs));
2912:     PetscCall(MatSetType(*outmat, MATSBAIJ));
2913:     PetscCall(MatSeqSBAIJSetPreallocation(*outmat, bs, 0, dnz));
2914:     PetscCall(MatMPISBAIJSetPreallocation(*outmat, bs, 0, dnz, 0, onz));
2915:     MatPreallocateEnd(dnz, onz);
2916:   }

2918:   /* numeric phase */
2919:   PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2920:   PetscCall(MatGetOwnershipRange(*outmat, &rstart, NULL));

2922:   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2923:   for (i = 0; i < m; i++) {
2924:     PetscCall(MatGetRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2925:     Ii = i + rstart;
2926:     PetscCall(MatSetValues(*outmat, 1, &Ii, nnz, indx, values, INSERT_VALUES));
2927:     PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2928:   }
2929:   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2930:   PetscCall(MatAssemblyBegin(*outmat, MAT_FINAL_ASSEMBLY));
2931:   PetscCall(MatAssemblyEnd(*outmat, MAT_FINAL_ASSEMBLY));
2932:   PetscFunctionReturn(PETSC_SUCCESS);
2933: }
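
This routine is the MATMPISBAIJ back end of the generic `MatCreateMPIMatConcatenateSeqMat()` interface; a minimal sketch of reaching it (assuming `seqA` is an assembled MATSEQSBAIJ matrix on every rank of the communicator):

  Mat C;
  PetscCall(MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD, seqA, PETSC_DECIDE, MAT_INITIAL_MATRIX, &C));
  PetscCall(MatDestroy(&C));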