Actual source code: matis.c

  1: /*
  2:     Creates a matrix class for use with Neumann-Neumann type preconditioners.
  3:     This stores the matrices in globally unassembled form. Each processor
  4:     assembles only its local Neumann problem and the parallel matrix-vector
  5:     product is handled "implicitly".

  7:     Currently this allows for only one subdomain per processor.
  8: */
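/*
    A minimal usage sketch of how an application typically builds a MATIS matrix
    (illustrative only; the names comm, N, n_local, local_to_global, ne,
    element_idx and element_mat are placeholders for user data, and error
    checking is omitted). Each process creates the local-to-global map for its
    subdomain, creates the matrix with MatCreateIS(), and assembles only its
    local (Neumann) contribution using local indices:

      ISLocalToGlobalMapping map;
      Mat                    A;

      ISLocalToGlobalMappingCreate(comm, 1, n_local, local_to_global, PETSC_COPY_VALUES, &map);
      MatCreateIS(comm, 1, PETSC_DECIDE, PETSC_DECIDE, N, N, map, map, &A);
      ISLocalToGlobalMappingDestroy(&map);
      MatISSetPreallocation(A, 9, NULL, 0, NULL);
      MatSetValuesLocal(A, ne, element_idx, ne, element_idx, element_mat, ADD_VALUES);
      MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
*/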

 10: #include <petsc/private/matisimpl.h>
 11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
 12: #include <petsc/private/sfimpl.h>
 13: #include <petsc/private/vecimpl.h>
 14: #include <petsc/private/hashseti.h>

 16: #define MATIS_MAX_ENTRIES_INSERTION 2048
 17: static PetscErrorCode MatSetValuesLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
 18: static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
 19: static PetscErrorCode MatISSetUpScatters_Private(Mat);

 21: static PetscErrorCode MatISContainerDestroyPtAP_Private(void *ptr)
 22: {
 23:   MatISPtAP ptap = (MatISPtAP)ptr;

 25:   PetscFunctionBegin;
 26:   PetscCall(MatDestroySubMatrices(ptap->ris1 ? 2 : 1, &ptap->lP));
 27:   PetscCall(ISDestroy(&ptap->cis0));
 28:   PetscCall(ISDestroy(&ptap->cis1));
 29:   PetscCall(ISDestroy(&ptap->ris0));
 30:   PetscCall(ISDestroy(&ptap->ris1));
 31:   PetscCall(PetscFree(ptap));
 32:   PetscFunctionReturn(PETSC_SUCCESS);
 33: }

 35: static PetscErrorCode MatPtAPNumeric_IS_XAIJ(Mat A, Mat P, Mat C)
 36: {
 37:   MatISPtAP      ptap;
 38:   Mat_IS        *matis = (Mat_IS *)A->data;
 39:   Mat            lA, lC;
 40:   MatReuse       reuse;
 41:   IS             ris[2], cis[2];
 42:   PetscContainer c;
 43:   PetscInt       n;

 45:   PetscFunctionBegin;
 46:   PetscCall(PetscObjectQuery((PetscObject)C, "_MatIS_PtAP", (PetscObject *)&c));
 47:   PetscCheck(c, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Missing PtAP information");
 48:   PetscCall(PetscContainerGetPointer(c, (void **)&ptap));
 49:   ris[0] = ptap->ris0;
 50:   ris[1] = ptap->ris1;
 51:   cis[0] = ptap->cis0;
 52:   cis[1] = ptap->cis1;
 53:   n      = ptap->ris1 ? 2 : 1;
 54:   reuse  = ptap->lP ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
 55:   PetscCall(MatCreateSubMatrices(P, n, ris, cis, reuse, &ptap->lP));

 57:   PetscCall(MatISGetLocalMat(A, &lA));
 58:   PetscCall(MatISGetLocalMat(C, &lC));
 59:   if (ptap->ris1) { /* unsymmetric A mapping */
 60:     Mat lPt;

 62:     PetscCall(MatTranspose(ptap->lP[1], MAT_INITIAL_MATRIX, &lPt));
 63:     PetscCall(MatMatMatMult(lPt, lA, ptap->lP[0], reuse, ptap->fill, &lC));
 64:     if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)A, "_MatIS_PtAP_l2l", (PetscObject)lPt));
 65:     PetscCall(MatDestroy(&lPt));
 66:   } else {
 67:     PetscCall(MatPtAP(lA, ptap->lP[0], reuse, ptap->fill, &lC));
 68:     if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP_l2l", (PetscObject)ptap->lP[0]));
 69:   }
 70:   if (reuse == MAT_INITIAL_MATRIX) {
 71:     PetscCall(MatISSetLocalMat(C, lC));
 72:     PetscCall(MatDestroy(&lC));
 73:   }
 74:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
 75:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
 76:   PetscFunctionReturn(PETSC_SUCCESS);
 77: }

 79: static PetscErrorCode MatGetNonzeroColumnsLocal_Private(Mat PT, IS *cis)
 80: {
 81:   Mat             Po, Pd;
 82:   IS              zd, zo;
 83:   const PetscInt *garray;
 84:   PetscInt       *aux, i, bs;
 85:   PetscInt        dc, stc, oc, ctd, cto;
 86:   PetscBool       ismpiaij, ismpibaij, isseqaij, isseqbaij;
 87:   MPI_Comm        comm;

 89:   PetscFunctionBegin;
 91:   PetscAssertPointer(cis, 2);
 92:   PetscCall(PetscObjectGetComm((PetscObject)PT, &comm));
 93:   bs = 1;
 94:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIAIJ, &ismpiaij));
 95:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIBAIJ, &ismpibaij));
 96:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATSEQAIJ, &isseqaij));
 97:   PetscCall(PetscObjectTypeCompare((PetscObject)PT, MATSEQBAIJ, &isseqbaij));
 98:   if (isseqaij || isseqbaij) {
 99:     Pd     = PT;
100:     Po     = NULL;
101:     garray = NULL;
102:   } else if (ismpiaij) {
103:     PetscCall(MatMPIAIJGetSeqAIJ(PT, &Pd, &Po, &garray));
104:   } else if (ismpibaij) {
105:     PetscCall(MatMPIBAIJGetSeqBAIJ(PT, &Pd, &Po, &garray));
106:     PetscCall(MatGetBlockSize(PT, &bs));
107:   } else SETERRQ(comm, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)PT)->type_name);

109:   /* identify any null columns in Pd or Po */
 110:   /* We use a tolerance comparison since it may happen that, with geometric multigrid,
 111:      some of the columns are not exactly zero, but only very close to it */
112:   zo = zd = NULL;
113:   if (Po) PetscCall(MatFindNonzeroRowsOrCols_Basic(Po, PETSC_TRUE, PETSC_SMALL, &zo));
114:   PetscCall(MatFindNonzeroRowsOrCols_Basic(Pd, PETSC_TRUE, PETSC_SMALL, &zd));

116:   PetscCall(MatGetLocalSize(PT, NULL, &dc));
117:   PetscCall(MatGetOwnershipRangeColumn(PT, &stc, NULL));
118:   if (Po) PetscCall(MatGetLocalSize(Po, NULL, &oc));
119:   else oc = 0;
120:   PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
121:   if (zd) {
122:     const PetscInt *idxs;
123:     PetscInt        nz;

125:     /* this will throw an error if bs is not valid */
126:     PetscCall(ISSetBlockSize(zd, bs));
127:     PetscCall(ISGetLocalSize(zd, &nz));
128:     PetscCall(ISGetIndices(zd, &idxs));
129:     ctd = nz / bs;
130:     for (i = 0; i < ctd; i++) aux[i] = (idxs[bs * i] + stc) / bs;
131:     PetscCall(ISRestoreIndices(zd, &idxs));
132:   } else {
133:     ctd = dc / bs;
134:     for (i = 0; i < ctd; i++) aux[i] = i + stc / bs;
135:   }
136:   if (zo) {
137:     const PetscInt *idxs;
138:     PetscInt        nz;

140:     /* this will throw an error if bs is not valid */
141:     PetscCall(ISSetBlockSize(zo, bs));
142:     PetscCall(ISGetLocalSize(zo, &nz));
143:     PetscCall(ISGetIndices(zo, &idxs));
144:     cto = nz / bs;
145:     for (i = 0; i < cto; i++) aux[i + ctd] = garray[idxs[bs * i] / bs];
146:     PetscCall(ISRestoreIndices(zo, &idxs));
147:   } else {
148:     cto = oc / bs;
149:     for (i = 0; i < cto; i++) aux[i + ctd] = garray[i];
150:   }
151:   PetscCall(ISCreateBlock(comm, bs, ctd + cto, aux, PETSC_OWN_POINTER, cis));
152:   PetscCall(ISDestroy(&zd));
153:   PetscCall(ISDestroy(&zo));
154:   PetscFunctionReturn(PETSC_SUCCESS);
155: }

157: static PetscErrorCode MatPtAPSymbolic_IS_XAIJ(Mat A, Mat P, PetscReal fill, Mat C)
158: {
159:   Mat                    PT, lA;
160:   MatISPtAP              ptap;
161:   ISLocalToGlobalMapping Crl2g, Ccl2g, rl2g, cl2g;
162:   PetscContainer         c;
163:   MatType                lmtype;
164:   const PetscInt        *garray;
165:   PetscInt               ibs, N, dc;
166:   MPI_Comm               comm;

168:   PetscFunctionBegin;
169:   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
170:   PetscCall(MatSetType(C, MATIS));
171:   PetscCall(MatISGetLocalMat(A, &lA));
172:   PetscCall(MatGetType(lA, &lmtype));
173:   PetscCall(MatISSetLocalMatType(C, lmtype));
174:   PetscCall(MatGetSize(P, NULL, &N));
175:   PetscCall(MatGetLocalSize(P, NULL, &dc));
176:   PetscCall(MatSetSizes(C, dc, dc, N, N));
177:   /* Not sure about this
178:   PetscCall(MatGetBlockSizes(P,NULL,&ibs));
179:   PetscCall(MatSetBlockSize(*C,ibs));
180: */

182:   PetscCall(PetscNew(&ptap));
183:   PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &c));
184:   PetscCall(PetscContainerSetPointer(c, ptap));
185:   PetscCall(PetscContainerSetUserDestroy(c, MatISContainerDestroyPtAP_Private));
186:   PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP", (PetscObject)c));
187:   PetscCall(PetscContainerDestroy(&c));
188:   ptap->fill = fill;

190:   PetscCall(MatISGetLocalToGlobalMapping(A, &rl2g, &cl2g));

192:   PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &ibs));
193:   PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &N));
194:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &garray));
195:   PetscCall(ISCreateBlock(comm, ibs, N / ibs, garray, PETSC_COPY_VALUES, &ptap->ris0));
196:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &garray));

198:   PetscCall(MatCreateSubMatrix(P, ptap->ris0, NULL, MAT_INITIAL_MATRIX, &PT));
199:   PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis0));
200:   PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis0, &Ccl2g));
201:   PetscCall(MatDestroy(&PT));

203:   Crl2g = NULL;
204:   if (rl2g != cl2g) { /* unsymmetric A mapping */
205:     PetscBool same, lsame = PETSC_FALSE;
206:     PetscInt  N1, ibs1;

208:     PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &N1));
209:     PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &ibs1));
210:     PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &garray));
211:     PetscCall(ISCreateBlock(comm, ibs, N1 / ibs, garray, PETSC_COPY_VALUES, &ptap->ris1));
212:     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &garray));
213:     if (ibs1 == ibs && N1 == N) { /* check if the l2gmaps are the same */
214:       const PetscInt *i1, *i2;

216:       PetscCall(ISBlockGetIndices(ptap->ris0, &i1));
217:       PetscCall(ISBlockGetIndices(ptap->ris1, &i2));
218:       PetscCall(PetscArraycmp(i1, i2, N, &lsame));
219:     }
220:     PetscCallMPI(MPIU_Allreduce(&lsame, &same, 1, MPIU_BOOL, MPI_LAND, comm));
221:     if (same) {
222:       PetscCall(ISDestroy(&ptap->ris1));
223:     } else {
224:       PetscCall(MatCreateSubMatrix(P, ptap->ris1, NULL, MAT_INITIAL_MATRIX, &PT));
225:       PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis1));
226:       PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis1, &Crl2g));
227:       PetscCall(MatDestroy(&PT));
228:     }
229:   }
230:   /* Not sure about this
231:   if (!Crl2g) {
232:     PetscCall(MatGetBlockSize(C,&ibs));
233:     PetscCall(ISLocalToGlobalMappingSetBlockSize(Ccl2g,ibs));
234:   }
235: */
236:   PetscCall(MatSetLocalToGlobalMapping(C, Crl2g ? Crl2g : Ccl2g, Ccl2g));
237:   PetscCall(ISLocalToGlobalMappingDestroy(&Crl2g));
238:   PetscCall(ISLocalToGlobalMappingDestroy(&Ccl2g));

240:   C->ops->ptapnumeric = MatPtAPNumeric_IS_XAIJ;
241:   PetscFunctionReturn(PETSC_SUCCESS);
242: }

244: static PetscErrorCode MatProductSymbolic_PtAP_IS_XAIJ(Mat C)
245: {
246:   Mat_Product *product = C->product;
247:   Mat          A = product->A, P = product->B;
248:   PetscReal    fill = product->fill;

250:   PetscFunctionBegin;
251:   PetscCall(MatPtAPSymbolic_IS_XAIJ(A, P, fill, C));
252:   C->ops->productnumeric = MatProductNumeric_PtAP;
253:   PetscFunctionReturn(PETSC_SUCCESS);
254: }

256: static PetscErrorCode MatProductSetFromOptions_IS_XAIJ_PtAP(Mat C)
257: {
258:   PetscFunctionBegin;
259:   C->ops->productsymbolic = MatProductSymbolic_PtAP_IS_XAIJ;
260:   PetscFunctionReturn(PETSC_SUCCESS);
261: }

263: PETSC_INTERN PetscErrorCode MatProductSetFromOptions_IS_XAIJ(Mat C)
264: {
265:   Mat_Product *product = C->product;

267:   PetscFunctionBegin;
268:   if (product->type == MATPRODUCT_PtAP) PetscCall(MatProductSetFromOptions_IS_XAIJ_PtAP(C));
269:   PetscFunctionReturn(PETSC_SUCCESS);
270: }

272: static PetscErrorCode MatISContainerDestroyFields_Private(void *ptr)
273: {
274:   MatISLocalFields lf = (MatISLocalFields)ptr;
275:   PetscInt         i;

277:   PetscFunctionBegin;
278:   for (i = 0; i < lf->nr; i++) PetscCall(ISDestroy(&lf->rf[i]));
279:   for (i = 0; i < lf->nc; i++) PetscCall(ISDestroy(&lf->cf[i]));
280:   PetscCall(PetscFree2(lf->rf, lf->cf));
281:   PetscCall(PetscFree(lf));
282:   PetscFunctionReturn(PETSC_SUCCESS);
283: }

285: static PetscErrorCode MatConvert_SeqXAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
286: {
287:   Mat B, lB;

289:   PetscFunctionBegin;
290:   if (reuse != MAT_REUSE_MATRIX) {
291:     ISLocalToGlobalMapping rl2g, cl2g;
292:     PetscInt               bs;
293:     IS                     is;

295:     PetscCall(MatGetBlockSize(A, &bs));
296:     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->n / bs, 0, 1, &is));
297:     if (bs > 1) {
298:       IS       is2;
299:       PetscInt i, *aux;

301:       PetscCall(ISGetLocalSize(is, &i));
302:       PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
303:       PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
304:       PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
305:       PetscCall(ISDestroy(&is));
306:       is = is2;
307:     }
308:     PetscCall(ISSetIdentity(is));
309:     PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
310:     PetscCall(ISDestroy(&is));
311:     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->n / bs, 0, 1, &is));
312:     if (bs > 1) {
313:       IS       is2;
314:       PetscInt i, *aux;

316:       PetscCall(ISGetLocalSize(is, &i));
317:       PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
318:       PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
319:       PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
320:       PetscCall(ISDestroy(&is));
321:       is = is2;
322:     }
323:     PetscCall(ISSetIdentity(is));
324:     PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
325:     PetscCall(ISDestroy(&is));
326:     PetscCall(MatCreateIS(PetscObjectComm((PetscObject)A), bs, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N, rl2g, cl2g, &B));
327:     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
328:     PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
329:     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &lB));
330:     if (reuse == MAT_INITIAL_MATRIX) *newmat = B;
331:   } else {
332:     B = *newmat;
333:     PetscCall(PetscObjectReference((PetscObject)A));
334:     lB = A;
335:   }
336:   PetscCall(MatISSetLocalMat(B, lB));
337:   PetscCall(MatDestroy(&lB));
338:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
339:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
340:   if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &B));
341:   PetscFunctionReturn(PETSC_SUCCESS);
342: }

344: static PetscErrorCode MatISScaleDisassembling_Private(Mat A)
345: {
346:   Mat_IS         *matis = (Mat_IS *)A->data;
347:   PetscScalar    *aa;
348:   const PetscInt *ii, *jj;
349:   PetscInt        i, n, m;
350:   PetscInt       *ecount, **eneighs;
351:   PetscBool       flg;

353:   PetscFunctionBegin;
354:   PetscCall(MatGetRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
355:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
356:   PetscCall(ISLocalToGlobalMappingGetNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
357:   PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %" PetscInt_FMT " != %" PetscInt_FMT, m, n);
358:   PetscCall(MatSeqAIJGetArray(matis->A, &aa));
359:   for (i = 0; i < n; i++) {
360:     if (ecount[i] > 1) {
361:       PetscInt j;

363:       for (j = ii[i]; j < ii[i + 1]; j++) {
364:         PetscInt  i2   = jj[j], p, p2;
365:         PetscReal scal = 0.0;

367:         for (p = 0; p < ecount[i]; p++) {
368:           for (p2 = 0; p2 < ecount[i2]; p2++) {
369:             if (eneighs[i][p] == eneighs[i2][p2]) {
370:               scal += 1.0;
371:               break;
372:             }
373:           }
374:         }
375:         if (scal) aa[j] /= scal;
376:       }
377:     }
378:   }
379:   PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
380:   PetscCall(MatSeqAIJRestoreArray(matis->A, &aa));
381:   PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
382:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
383:   PetscFunctionReturn(PETSC_SUCCESS);
384: }

386: typedef enum {
387:   MAT_IS_DISASSEMBLE_L2G_NATURAL,
388:   MAT_IS_DISASSEMBLE_L2G_MAT,
389:   MAT_IS_DISASSEMBLE_L2G_ND
390: } MatISDisassemblel2gType;

392: static PetscErrorCode MatMPIXAIJComputeLocalToGlobalMapping_Private(Mat A, ISLocalToGlobalMapping *l2g)
393: {
394:   Mat                     Ad, Ao;
395:   IS                      is, ndmap, ndsub;
396:   MPI_Comm                comm;
397:   const PetscInt         *garray, *ndmapi;
398:   PetscInt                bs, i, cnt, nl, *ncount, *ndmapc;
399:   PetscBool               ismpiaij, ismpibaij;
400:   const char *const       MatISDisassemblel2gTypes[] = {"NATURAL", "MAT", "ND", "MatISDisassemblel2gType", "MAT_IS_DISASSEMBLE_L2G_", NULL};
401:   MatISDisassemblel2gType mode                       = MAT_IS_DISASSEMBLE_L2G_NATURAL;
402:   MatPartitioning         part;
403:   PetscSF                 sf;
404:   PetscObject             dm;

406:   PetscFunctionBegin;
407:   PetscOptionsBegin(PetscObjectComm((PetscObject)A), ((PetscObject)A)->prefix, "MatIS l2g disassembling options", "Mat");
408:   PetscCall(PetscOptionsEnum("-mat_is_disassemble_l2g_type", "Type of local-to-global mapping to be used for disassembling", "MatISDisassemblel2gType", MatISDisassemblel2gTypes, (PetscEnum)mode, (PetscEnum *)&mode, NULL));
409:   PetscOptionsEnd();
410:   if (mode == MAT_IS_DISASSEMBLE_L2G_MAT) {
411:     PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
412:     PetscFunctionReturn(PETSC_SUCCESS);
413:   }
414:   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
415:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
416:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
417:   PetscCall(MatGetBlockSize(A, &bs));
418:   switch (mode) {
419:   case MAT_IS_DISASSEMBLE_L2G_ND:
420:     PetscCall(MatPartitioningCreate(comm, &part));
421:     PetscCall(MatPartitioningSetAdjacency(part, A));
422:     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part, ((PetscObject)A)->prefix));
423:     PetscCall(MatPartitioningSetFromOptions(part));
424:     PetscCall(MatPartitioningApplyND(part, &ndmap));
425:     PetscCall(MatPartitioningDestroy(&part));
426:     PetscCall(ISBuildTwoSided(ndmap, NULL, &ndsub));
427:     PetscCall(MatMPIAIJSetUseScalableIncreaseOverlap(A, PETSC_TRUE));
428:     PetscCall(MatIncreaseOverlap(A, 1, &ndsub, 1));
429:     PetscCall(ISLocalToGlobalMappingCreateIS(ndsub, l2g));

431:     /* it may happen that a separator node is not properly shared */
432:     PetscCall(ISLocalToGlobalMappingGetNodeInfo(*l2g, &nl, &ncount, NULL));
433:     PetscCall(PetscSFCreate(comm, &sf));
434:     PetscCall(ISLocalToGlobalMappingGetIndices(*l2g, &garray));
435:     PetscCall(PetscSFSetGraphLayout(sf, A->rmap, nl, NULL, PETSC_OWN_POINTER, garray));
436:     PetscCall(ISLocalToGlobalMappingRestoreIndices(*l2g, &garray));
437:     PetscCall(PetscCalloc1(A->rmap->n, &ndmapc));
438:     PetscCall(PetscSFReduceBegin(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
439:     PetscCall(PetscSFReduceEnd(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
440:     PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(*l2g, NULL, &ncount, NULL));
441:     PetscCall(ISGetIndices(ndmap, &ndmapi));
442:     for (i = 0, cnt = 0; i < A->rmap->n; i++)
443:       if (ndmapi[i] < 0 && ndmapc[i] < 2) cnt++;

445:     PetscCallMPI(MPIU_Allreduce(&cnt, &i, 1, MPIU_INT, MPI_MAX, comm));
446:     if (i) { /* we detected isolated separator nodes */
447:       Mat                    A2, A3;
448:       IS                    *workis, is2;
449:       PetscScalar           *vals;
450:       PetscInt               gcnt = i, *dnz, *onz, j, *lndmapi;
451:       ISLocalToGlobalMapping ll2g;
452:       PetscBool              flg;
453:       const PetscInt        *ii, *jj;

455:       /* communicate global id of separators */
456:       MatPreallocateBegin(comm, A->rmap->n, A->cmap->n, dnz, onz);
457:       for (i = 0, cnt = 0; i < A->rmap->n; i++) dnz[i] = ndmapi[i] < 0 ? i + A->rmap->rstart : -1;

459:       PetscCall(PetscMalloc1(nl, &lndmapi));
460:       PetscCall(PetscSFBcastBegin(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));

 462:       /* compute adjacency of isolated separator nodes */
463:       PetscCall(PetscMalloc1(gcnt, &workis));
464:       for (i = 0, cnt = 0; i < A->rmap->n; i++) {
465:         if (ndmapi[i] < 0 && ndmapc[i] < 2) PetscCall(ISCreateStride(comm, 1, i + A->rmap->rstart, 1, &workis[cnt++]));
466:       }
467:       for (i = cnt; i < gcnt; i++) PetscCall(ISCreateStride(comm, 0, 0, 1, &workis[i]));
468:       for (i = 0; i < gcnt; i++) {
469:         PetscCall(PetscObjectSetName((PetscObject)workis[i], "ISOLATED"));
470:         PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
471:       }

473:       /* no communications since all the ISes correspond to locally owned rows */
474:       PetscCall(MatIncreaseOverlap(A, gcnt, workis, 1));

476:       /* end communicate global id of separators */
477:       PetscCall(PetscSFBcastEnd(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));

479:       /* communicate new layers : create a matrix and transpose it */
480:       PetscCall(PetscArrayzero(dnz, A->rmap->n));
481:       PetscCall(PetscArrayzero(onz, A->rmap->n));
482:       for (i = 0, j = 0; i < A->rmap->n; i++) {
483:         if (ndmapi[i] < 0 && ndmapc[i] < 2) {
484:           const PetscInt *idxs;
485:           PetscInt        s;

487:           PetscCall(ISGetLocalSize(workis[j], &s));
488:           PetscCall(ISGetIndices(workis[j], &idxs));
489:           PetscCall(MatPreallocateSet(i + A->rmap->rstart, s, idxs, dnz, onz));
490:           j++;
491:         }
492:       }
493:       PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);

495:       for (i = 0; i < gcnt; i++) {
496:         PetscCall(PetscObjectSetName((PetscObject)workis[i], "EXTENDED"));
497:         PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
498:       }

500:       for (i = 0, j = 0; i < A->rmap->n; i++) j = PetscMax(j, dnz[i] + onz[i]);
501:       PetscCall(PetscMalloc1(j, &vals));
502:       for (i = 0; i < j; i++) vals[i] = 1.0;

504:       PetscCall(MatCreate(comm, &A2));
505:       PetscCall(MatSetType(A2, MATMPIAIJ));
506:       PetscCall(MatSetSizes(A2, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
507:       PetscCall(MatMPIAIJSetPreallocation(A2, 0, dnz, 0, onz));
508:       PetscCall(MatSetOption(A2, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
509:       for (i = 0, j = 0; i < A2->rmap->n; i++) {
510:         PetscInt        row = i + A2->rmap->rstart, s = dnz[i] + onz[i];
511:         const PetscInt *idxs;

513:         if (s) {
514:           PetscCall(ISGetIndices(workis[j], &idxs));
515:           PetscCall(MatSetValues(A2, 1, &row, s, idxs, vals, INSERT_VALUES));
516:           PetscCall(ISRestoreIndices(workis[j], &idxs));
517:           j++;
518:         }
519:       }
520:       PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);
521:       PetscCall(PetscFree(vals));
522:       PetscCall(MatAssemblyBegin(A2, MAT_FINAL_ASSEMBLY));
523:       PetscCall(MatAssemblyEnd(A2, MAT_FINAL_ASSEMBLY));
524:       PetscCall(MatTranspose(A2, MAT_INPLACE_MATRIX, &A2));

526:       /* extract submatrix corresponding to the coupling "owned separators" x "isolated separators" */
527:       for (i = 0, j = 0; i < nl; i++)
528:         if (lndmapi[i] >= 0) lndmapi[j++] = lndmapi[i];
529:       PetscCall(ISCreateGeneral(comm, j, lndmapi, PETSC_USE_POINTER, &is));
530:       PetscCall(MatMPIAIJGetLocalMatCondensed(A2, MAT_INITIAL_MATRIX, &is, NULL, &A3));
531:       PetscCall(ISDestroy(&is));
532:       PetscCall(MatDestroy(&A2));

534:       /* extend local to global map to include connected isolated separators */
535:       PetscCall(PetscObjectQuery((PetscObject)A3, "_petsc_GetLocalMatCondensed_iscol", (PetscObject *)&is));
536:       PetscCheck(is, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing column map");
537:       PetscCall(ISLocalToGlobalMappingCreateIS(is, &ll2g));
538:       PetscCall(MatGetRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
539:       PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
540:       PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ii[i], jj, PETSC_COPY_VALUES, &is));
541:       PetscCall(MatRestoreRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
542:       PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
543:       PetscCall(ISLocalToGlobalMappingApplyIS(ll2g, is, &is2));
544:       PetscCall(ISDestroy(&is));
545:       PetscCall(ISLocalToGlobalMappingDestroy(&ll2g));

547:       /* add new nodes to the local-to-global map */
548:       PetscCall(ISLocalToGlobalMappingDestroy(l2g));
549:       PetscCall(ISExpand(ndsub, is2, &is));
550:       PetscCall(ISDestroy(&is2));
551:       PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
552:       PetscCall(ISDestroy(&is));

554:       PetscCall(MatDestroy(&A3));
555:       PetscCall(PetscFree(lndmapi));
556:       MatPreallocateEnd(dnz, onz);
557:       for (i = 0; i < gcnt; i++) PetscCall(ISDestroy(&workis[i]));
558:       PetscCall(PetscFree(workis));
559:     }
560:     PetscCall(ISRestoreIndices(ndmap, &ndmapi));
561:     PetscCall(PetscSFDestroy(&sf));
562:     PetscCall(PetscFree(ndmapc));
563:     PetscCall(ISDestroy(&ndmap));
564:     PetscCall(ISDestroy(&ndsub));
565:     PetscCall(ISLocalToGlobalMappingSetBlockSize(*l2g, bs));
566:     PetscCall(ISLocalToGlobalMappingViewFromOptions(*l2g, NULL, "-mat_is_nd_l2g_view"));
567:     break;
568:   case MAT_IS_DISASSEMBLE_L2G_NATURAL:
569:     PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_dm", (PetscObject *)&dm));
 570:     if (dm) { /* if the matrix comes from a DM, we can most likely reuse its l2g map, if any */
571:       PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
572:       PetscCall(PetscObjectReference((PetscObject)*l2g));
573:       if (*l2g) PetscFunctionReturn(PETSC_SUCCESS);
574:     }
575:     if (ismpiaij) {
576:       PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
577:     } else if (ismpibaij) {
578:       PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
579:     } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
580:     if (A->rmap->n) {
581:       PetscInt dc, oc, stc, *aux;

583:       PetscCall(MatGetLocalSize(Ad, NULL, &dc));
584:       PetscCall(MatGetLocalSize(Ao, NULL, &oc));
585:       PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present");
586:       PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
587:       PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
588:       for (i = 0; i < dc / bs; i++) aux[i] = i + stc / bs;
589:       for (i = 0; i < oc / bs; i++) aux[i + dc / bs] = (ismpiaij ? garray[i * bs] / bs : garray[i]);
590:       PetscCall(ISCreateBlock(comm, bs, (dc + oc) / bs, aux, PETSC_OWN_POINTER, &is));
591:     } else {
592:       PetscCall(ISCreateBlock(comm, 1, 0, NULL, PETSC_OWN_POINTER, &is));
593:     }
594:     PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
595:     PetscCall(ISDestroy(&is));
596:     break;
597:   default:
598:     SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Unsupported l2g disassembling type %d", mode);
599:   }
600:   PetscFunctionReturn(PETSC_SUCCESS);
601: }

603: PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
604: {
605:   Mat                    lA, Ad, Ao, B = NULL;
606:   ISLocalToGlobalMapping rl2g, cl2g;
607:   IS                     is;
608:   MPI_Comm               comm;
609:   void                  *ptrs[2];
610:   const char            *names[2] = {"_convert_csr_aux", "_convert_csr_data"};
611:   const PetscInt        *garray;
612:   PetscScalar           *dd, *od, *aa, *data;
613:   const PetscInt        *di, *dj, *oi, *oj;
614:   const PetscInt        *odi, *odj, *ooi, *ooj;
615:   PetscInt              *aux, *ii, *jj;
616:   PetscInt               rbs, cbs, lc, dr, dc, oc, str, stc, nnz, i, jd, jo, cum;
617:   PetscBool              flg, ismpiaij, ismpibaij, was_inplace = PETSC_FALSE, cong;
618:   PetscMPIInt            size;

620:   PetscFunctionBegin;
621:   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
622:   PetscCallMPI(MPI_Comm_size(comm, &size));
623:   if (size == 1) {
624:     PetscCall(MatConvert_SeqXAIJ_IS(A, type, reuse, newmat));
625:     PetscFunctionReturn(PETSC_SUCCESS);
626:   }
627:   PetscCall(MatGetBlockSizes(A, &rbs, &cbs));
628:   PetscCall(MatHasCongruentLayouts(A, &cong));
629:   if (reuse != MAT_REUSE_MATRIX && cong && rbs == cbs) {
630:     PetscCall(MatMPIXAIJComputeLocalToGlobalMapping_Private(A, &rl2g));
631:     PetscCall(MatCreate(comm, &B));
632:     PetscCall(MatSetType(B, MATIS));
633:     PetscCall(MatSetSizes(B, A->rmap->n, A->rmap->n, A->rmap->N, A->rmap->N));
634:     PetscCall(MatSetLocalToGlobalMapping(B, rl2g, rl2g));
635:     PetscCall(MatSetBlockSizes(B, rbs, rbs));
636:     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
637:     if (reuse == MAT_INPLACE_MATRIX) was_inplace = PETSC_TRUE;
638:     reuse = MAT_REUSE_MATRIX;
639:   }
640:   if (reuse == MAT_REUSE_MATRIX) {
641:     Mat            *newlA, lA;
642:     IS              rows, cols;
643:     const PetscInt *ridx, *cidx;
644:     PetscInt        nr, nc;

646:     if (!B) B = *newmat;
647:     PetscCall(MatISGetLocalToGlobalMapping(B, &rl2g, &cl2g));
648:     PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &ridx));
649:     PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &cidx));
650:     PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &nr));
651:     PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &nc));
652:     PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &rbs));
653:     PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &cbs));
654:     PetscCall(ISCreateBlock(comm, rbs, nr / rbs, ridx, PETSC_USE_POINTER, &rows));
655:     if (rl2g != cl2g) {
656:       PetscCall(ISCreateBlock(comm, cbs, nc / cbs, cidx, PETSC_USE_POINTER, &cols));
657:     } else {
658:       PetscCall(PetscObjectReference((PetscObject)rows));
659:       cols = rows;
660:     }
661:     PetscCall(MatISGetLocalMat(B, &lA));
662:     PetscCall(MatCreateSubMatrices(A, 1, &rows, &cols, MAT_INITIAL_MATRIX, &newlA));
663:     PetscCall(MatConvert(newlA[0], MATSEQAIJ, MAT_INPLACE_MATRIX, &newlA[0]));
664:     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &ridx));
665:     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &cidx));
666:     PetscCall(ISDestroy(&rows));
667:     PetscCall(ISDestroy(&cols));
668:     if (!lA->preallocated) { /* first time */
669:       PetscCall(MatDuplicate(newlA[0], MAT_COPY_VALUES, &lA));
670:       PetscCall(MatISSetLocalMat(B, lA));
671:       PetscCall(PetscObjectDereference((PetscObject)lA));
672:     }
673:     PetscCall(MatCopy(newlA[0], lA, SAME_NONZERO_PATTERN));
674:     PetscCall(MatDestroySubMatrices(1, &newlA));
675:     PetscCall(MatISScaleDisassembling_Private(B));
676:     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
677:     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
678:     if (was_inplace) PetscCall(MatHeaderReplace(A, &B));
679:     else *newmat = B;
680:     PetscFunctionReturn(PETSC_SUCCESS);
681:   }
682:   /* general case, just compress out the column space */
683:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
684:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
685:   if (ismpiaij) {
686:     cbs = 1; /* We cannot guarantee the off-process matrix will respect the column block size */
687:     PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
688:   } else if (ismpibaij) {
689:     PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
690:     PetscCall(MatConvert(Ad, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ad));
691:     PetscCall(MatConvert(Ao, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ao));
692:   } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
693:   PetscCall(MatSeqAIJGetArray(Ad, &dd));
694:   PetscCall(MatSeqAIJGetArray(Ao, &od));

696:   /* access relevant information from MPIAIJ */
697:   PetscCall(MatGetOwnershipRange(A, &str, NULL));
698:   PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
699:   PetscCall(MatGetLocalSize(Ad, &dr, &dc));
700:   PetscCall(MatGetLocalSize(Ao, NULL, &oc));
701:   PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present");

703:   PetscCall(MatGetRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &di, &dj, &flg));
704:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
705:   PetscCall(MatGetRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &oi, &oj, &flg));
706:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
707:   nnz = di[dr] + oi[dr];
708:   /* store original pointers to be restored later */
709:   odi = di;
710:   odj = dj;
711:   ooi = oi;
712:   ooj = oj;

714:   /* generate l2g maps for rows and cols */
715:   PetscCall(ISCreateStride(comm, dr / rbs, str / rbs, 1, &is));
716:   if (rbs > 1) {
717:     IS is2;

719:     PetscCall(ISGetLocalSize(is, &i));
720:     PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
721:     PetscCall(ISCreateBlock(comm, rbs, i, aux, PETSC_COPY_VALUES, &is2));
722:     PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
723:     PetscCall(ISDestroy(&is));
724:     is = is2;
725:   }
726:   PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
727:   PetscCall(ISDestroy(&is));
728:   if (dr) {
729:     PetscCall(PetscMalloc1((dc + oc) / cbs, &aux));
730:     for (i = 0; i < dc / cbs; i++) aux[i] = i + stc / cbs;
731:     for (i = 0; i < oc / cbs; i++) aux[i + dc / cbs] = garray[i];
732:     PetscCall(ISCreateBlock(comm, cbs, (dc + oc) / cbs, aux, PETSC_OWN_POINTER, &is));
733:     lc = dc + oc;
734:   } else {
735:     PetscCall(ISCreateBlock(comm, cbs, 0, NULL, PETSC_OWN_POINTER, &is));
736:     lc = 0;
737:   }
738:   PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
739:   PetscCall(ISDestroy(&is));

741:   /* create MATIS object */
742:   PetscCall(MatCreate(comm, &B));
743:   PetscCall(MatSetSizes(B, dr, dc, PETSC_DECIDE, PETSC_DECIDE));
744:   PetscCall(MatSetType(B, MATIS));
745:   PetscCall(MatSetBlockSizes(B, rbs, cbs));
746:   PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
747:   PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
748:   PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

750:   /* merge local matrices */
751:   PetscCall(PetscMalloc1(nnz + dr + 1, &aux));
752:   PetscCall(PetscMalloc1(nnz, &data));
753:   ii  = aux;
754:   jj  = aux + dr + 1;
755:   aa  = data;
756:   *ii = *(di++) + *(oi++);
757:   for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
758:     for (; jd < *di; jd++) {
759:       *jj++ = *dj++;
760:       *aa++ = *dd++;
761:     }
762:     for (; jo < *oi; jo++) {
763:       *jj++ = *oj++ + dc;
764:       *aa++ = *od++;
765:     }
766:     *(++ii) = *(di++) + *(oi++);
767:   }
768:   for (; cum < dr; cum++) *(++ii) = nnz;

770:   PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &odi, &odj, &flg));
771:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
772:   PetscCall(MatRestoreRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &ooi, &ooj, &flg));
773:   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
774:   PetscCall(MatSeqAIJRestoreArray(Ad, &dd));
775:   PetscCall(MatSeqAIJRestoreArray(Ao, &od));

777:   ii = aux;
778:   jj = aux + dr + 1;
779:   aa = data;
780:   PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, lc, ii, jj, aa, &lA));

782:   /* create containers to destroy the data */
783:   ptrs[0] = aux;
784:   ptrs[1] = data;
785:   for (i = 0; i < 2; i++) PetscCall(PetscObjectContainerCompose((PetscObject)lA, names[i], ptrs[i], PetscContainerUserDestroyDefault));
786:   if (ismpibaij) { /* destroy converted local matrices */
787:     PetscCall(MatDestroy(&Ad));
788:     PetscCall(MatDestroy(&Ao));
789:   }

791:   /* finalize matrix */
792:   PetscCall(MatISSetLocalMat(B, lA));
793:   PetscCall(MatDestroy(&lA));
794:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
795:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
796:   if (reuse == MAT_INPLACE_MATRIX) {
797:     PetscCall(MatHeaderReplace(A, &B));
798:   } else *newmat = B;
799:   PetscFunctionReturn(PETSC_SUCCESS);
800: }

802: PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
803: {
804:   Mat                  **nest, *snest, **rnest, lA, B;
805:   IS                    *iscol, *isrow, *islrow, *islcol;
806:   ISLocalToGlobalMapping rl2g, cl2g;
807:   MPI_Comm               comm;
808:   PetscInt              *lr, *lc, *l2gidxs;
809:   PetscInt               i, j, nr, nc, rbs, cbs;
810:   PetscBool              convert, lreuse, *istrans;
811:   PetscBool3             allow_repeated = PETSC_BOOL3_UNKNOWN;

813:   PetscFunctionBegin;
814:   PetscCall(MatNestGetSubMats(A, &nr, &nc, &nest));
815:   lreuse = PETSC_FALSE;
816:   rnest  = NULL;
817:   if (reuse == MAT_REUSE_MATRIX) {
818:     PetscBool ismatis, isnest;

820:     PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis));
821:     PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_USER, "Cannot reuse matrix of type %s", ((PetscObject)*newmat)->type_name);
822:     PetscCall(MatISGetLocalMat(*newmat, &lA));
823:     PetscCall(PetscObjectTypeCompare((PetscObject)lA, MATNEST, &isnest));
824:     if (isnest) {
825:       PetscCall(MatNestGetSubMats(lA, &i, &j, &rnest));
826:       lreuse = (PetscBool)(i == nr && j == nc);
827:       if (!lreuse) rnest = NULL;
828:     }
829:   }
830:   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
831:   PetscCall(PetscCalloc2(nr, &lr, nc, &lc));
832:   PetscCall(PetscCalloc6(nr, &isrow, nc, &iscol, nr, &islrow, nc, &islcol, nr * nc, &snest, nr * nc, &istrans));
833:   PetscCall(MatNestGetISs(A, isrow, iscol));
834:   for (i = 0; i < nr; i++) {
835:     for (j = 0; j < nc; j++) {
836:       PetscBool ismatis, sallow;
837:       PetscInt  l1, l2, lb1, lb2, ij = i * nc + j;

839:       /* Null matrix pointers are allowed in MATNEST */
840:       if (!nest[i][j]) continue;

842:       /* Nested matrices should be of type MATIS */
843:       PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATTRANSPOSEVIRTUAL, &istrans[ij]));
844:       if (istrans[ij]) {
845:         Mat T, lT;
846:         PetscCall(MatTransposeGetMat(nest[i][j], &T));
847:         PetscCall(PetscObjectTypeCompare((PetscObject)T, MATIS, &ismatis));
848:         PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") (transposed) is not of type MATIS", i, j);
849:         PetscCall(MatISGetAllowRepeated(T, &sallow));
850:         PetscCall(MatISGetLocalMat(T, &lT));
851:         PetscCall(MatCreateTranspose(lT, &snest[ij]));
852:       } else {
853:         PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATIS, &ismatis));
854:         PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") is not of type MATIS", i, j);
855:         PetscCall(MatISGetAllowRepeated(nest[i][j], &sallow));
856:         PetscCall(MatISGetLocalMat(nest[i][j], &snest[ij]));
857:       }
858:       if (allow_repeated == PETSC_BOOL3_UNKNOWN) allow_repeated = PetscBoolToBool3(sallow);
859:       PetscCheck(sallow == PetscBool3ToBool(allow_repeated), comm, PETSC_ERR_SUP, "Cannot mix repeated and non repeated maps");

861:       /* Check compatibility of local sizes */
862:       PetscCall(MatGetSize(snest[ij], &l1, &l2));
863:       PetscCall(MatGetBlockSizes(snest[ij], &lb1, &lb2));
864:       if (!l1 || !l2) continue;
865:       PetscCheck(!lr[i] || l1 == lr[i], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lr[i], l1);
866:       PetscCheck(!lc[j] || l2 == lc[j], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lc[j], l2);
867:       lr[i] = l1;
868:       lc[j] = l2;

870:       /* check compatibility for local matrix reusage */
871:       if (rnest && !rnest[i][j] != !snest[ij]) lreuse = PETSC_FALSE;
872:     }
873:   }

875:   if (PetscDefined(USE_DEBUG)) {
876:     /* Check compatibility of l2g maps for rows */
877:     for (i = 0; i < nr; i++) {
878:       rl2g = NULL;
879:       for (j = 0; j < nc; j++) {
880:         PetscInt n1, n2;

882:         if (!nest[i][j]) continue;
883:         if (istrans[i * nc + j]) {
884:           Mat T;

886:           PetscCall(MatTransposeGetMat(nest[i][j], &T));
887:           PetscCall(MatISGetLocalToGlobalMapping(T, NULL, &cl2g));
888:         } else {
889:           PetscCall(MatISGetLocalToGlobalMapping(nest[i][j], &cl2g, NULL));
890:         }
891:         PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1));
892:         if (!n1) continue;
893:         if (!rl2g) {
894:           rl2g = cl2g;
895:         } else {
896:           const PetscInt *idxs1, *idxs2;
897:           PetscBool       same;

899:           PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2));
900:           PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, n1, n2);
901:           PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1));
902:           PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2));
903:           PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same));
904:           PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1));
905:           PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2));
906:           PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap", i, j);
907:         }
908:       }
909:     }
910:     /* Check compatibility of l2g maps for columns */
911:     for (i = 0; i < nc; i++) {
912:       rl2g = NULL;
913:       for (j = 0; j < nr; j++) {
914:         PetscInt n1, n2;

916:         if (!nest[j][i]) continue;
917:         if (istrans[j * nc + i]) {
918:           Mat T;

920:           PetscCall(MatTransposeGetMat(nest[j][i], &T));
921:           PetscCall(MatISGetLocalToGlobalMapping(T, &cl2g, NULL));
922:         } else {
923:           PetscCall(MatISGetLocalToGlobalMapping(nest[j][i], NULL, &cl2g));
924:         }
925:         PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1));
926:         if (!n1) continue;
927:         if (!rl2g) {
928:           rl2g = cl2g;
929:         } else {
930:           const PetscInt *idxs1, *idxs2;
931:           PetscBool       same;

933:           PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2));
934:           PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, j, i, n1, n2);
935:           PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1));
936:           PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2));
937:           PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same));
938:           PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1));
939:           PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2));
940:           PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap", j, i);
941:         }
942:       }
943:     }
944:   }

946:   B = NULL;
947:   if (reuse != MAT_REUSE_MATRIX) {
948:     PetscInt stl;

950:     /* Create l2g map for the rows of the new matrix and index sets for the local MATNEST */
951:     for (i = 0, stl = 0; i < nr; i++) stl += lr[i];
952:     PetscCall(PetscMalloc1(stl, &l2gidxs));
953:     for (i = 0, stl = 0; i < nr; i++) {
954:       Mat             usedmat;
955:       Mat_IS         *matis;
956:       const PetscInt *idxs;

958:       /* local IS for local NEST */
959:       PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i]));

961:       /* l2gmap */
962:       j       = 0;
963:       usedmat = nest[i][j];
964:       while (!usedmat && j < nc - 1) usedmat = nest[i][++j];
965:       PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid row mat");

967:       if (istrans[i * nc + j]) {
968:         Mat T;
969:         PetscCall(MatTransposeGetMat(usedmat, &T));
970:         usedmat = T;
971:       }
972:       matis = (Mat_IS *)usedmat->data;
973:       PetscCall(ISGetIndices(isrow[i], &idxs));
974:       if (istrans[i * nc + j]) {
975:         PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
976:         PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
977:       } else {
978:         PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
979:         PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
980:       }
981:       PetscCall(ISRestoreIndices(isrow[i], &idxs));
982:       stl += lr[i];
983:     }
984:     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &rl2g));

986:     /* Create l2g map for columns of the new matrix and index sets for the local MATNEST */
987:     for (i = 0, stl = 0; i < nc; i++) stl += lc[i];
988:     PetscCall(PetscMalloc1(stl, &l2gidxs));
989:     for (i = 0, stl = 0; i < nc; i++) {
990:       Mat             usedmat;
991:       Mat_IS         *matis;
992:       const PetscInt *idxs;

994:       /* local IS for local NEST */
995:       PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i]));

997:       /* l2gmap */
998:       j       = 0;
999:       usedmat = nest[j][i];
1000:       while (!usedmat && j < nr - 1) usedmat = nest[++j][i];
1001:       PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid column mat");
1002:       if (istrans[j * nc + i]) {
1003:         Mat T;
1004:         PetscCall(MatTransposeGetMat(usedmat, &T));
1005:         usedmat = T;
1006:       }
1007:       matis = (Mat_IS *)usedmat->data;
1008:       PetscCall(ISGetIndices(iscol[i], &idxs));
1009:       if (istrans[j * nc + i]) {
1010:         PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1011:         PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1012:       } else {
1013:         PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1014:         PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1015:       }
1016:       PetscCall(ISRestoreIndices(iscol[i], &idxs));
1017:       stl += lc[i];
1018:     }
1019:     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &cl2g));

1021:     /* Create MATIS */
1022:     PetscCall(MatCreate(comm, &B));
1023:     PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
1024:     PetscCall(MatGetBlockSizes(A, &rbs, &cbs));
1025:     PetscCall(MatSetBlockSizes(B, rbs, cbs));
1026:     PetscCall(MatSetType(B, MATIS));
1027:     PetscCall(MatISSetLocalMatType(B, MATNEST));
1028:     PetscCall(MatISSetAllowRepeated(B, PetscBool3ToBool(allow_repeated)));
1029:     { /* hack : avoid setup of scatters */
1030:       Mat_IS *matis     = (Mat_IS *)B->data;
1031:       matis->islocalref = PETSC_TRUE;
1032:     }
1033:     PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
1034:     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
1035:     PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
1036:     PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA));
1037:     PetscCall(MatNestSetVecType(lA, VECNEST));
1038:     for (i = 0; i < nr * nc; i++) {
1039:       if (istrans[i]) PetscCall(MatDestroy(&snest[i]));
1040:     }
1041:     PetscCall(MatISSetLocalMat(B, lA));
1042:     PetscCall(MatDestroy(&lA));
1043:     { /* hack : setup of scatters done here */
1044:       Mat_IS *matis = (Mat_IS *)B->data;

1046:       matis->islocalref = PETSC_FALSE;
1047:       PetscCall(MatISSetUpScatters_Private(B));
1048:     }
1049:     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
1050:     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
1051:     if (reuse == MAT_INPLACE_MATRIX) {
1052:       PetscCall(MatHeaderReplace(A, &B));
1053:     } else {
1054:       *newmat = B;
1055:     }
1056:   } else {
1057:     if (lreuse) {
1058:       PetscCall(MatISGetLocalMat(*newmat, &lA));
1059:       for (i = 0; i < nr; i++) {
1060:         for (j = 0; j < nc; j++) {
1061:           if (snest[i * nc + j]) {
1062:             PetscCall(MatNestSetSubMat(lA, i, j, snest[i * nc + j]));
1063:             if (istrans[i * nc + j]) PetscCall(MatDestroy(&snest[i * nc + j]));
1064:           }
1065:         }
1066:       }
1067:     } else {
1068:       PetscInt stl;
1069:       for (i = 0, stl = 0; i < nr; i++) {
1070:         PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i]));
1071:         stl += lr[i];
1072:       }
1073:       for (i = 0, stl = 0; i < nc; i++) {
1074:         PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i]));
1075:         stl += lc[i];
1076:       }
1077:       PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA));
1078:       for (i = 0; i < nr * nc; i++) {
1079:         if (istrans[i]) PetscCall(MatDestroy(&snest[i]));
1080:       }
1081:       PetscCall(MatISSetLocalMat(*newmat, lA));
1082:       PetscCall(MatDestroy(&lA));
1083:     }
1084:     PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
1085:     PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
1086:   }

1088:   /* Optionally convert the local MATNEST matrix to AIJ format */
1089:   convert = PETSC_FALSE;
1090:   PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)A)->prefix, "-mat_is_convert_local_nest", &convert, NULL));
1091:   if (convert) {
1092:     Mat              M;
1093:     MatISLocalFields lf;

1095:     PetscCall(MatISGetLocalMat(*newmat, &lA));
1096:     PetscCall(MatConvert(lA, MATAIJ, MAT_INITIAL_MATRIX, &M));
1097:     PetscCall(MatISSetLocalMat(*newmat, M));
1098:     PetscCall(MatDestroy(&M));

1100:     /* attach local fields to the matrix */
1101:     PetscCall(PetscNew(&lf));
1102:     PetscCall(PetscMalloc2(nr, &lf->rf, nc, &lf->cf));
1103:     for (i = 0; i < nr; i++) {
1104:       PetscInt n, st;

1106:       PetscCall(ISGetLocalSize(islrow[i], &n));
1107:       PetscCall(ISStrideGetInfo(islrow[i], &st, NULL));
1108:       PetscCall(ISCreateStride(comm, n, st, 1, &lf->rf[i]));
1109:     }
1110:     for (i = 0; i < nc; i++) {
1111:       PetscInt n, st;

1113:       PetscCall(ISGetLocalSize(islcol[i], &n));
1114:       PetscCall(ISStrideGetInfo(islcol[i], &st, NULL));
1115:       PetscCall(ISCreateStride(comm, n, st, 1, &lf->cf[i]));
1116:     }
1117:     lf->nr = nr;
1118:     lf->nc = nc;
1119:     PetscCall(PetscObjectContainerCompose((PetscObject)*newmat, "_convert_nest_lfields", lf, MatISContainerDestroyFields_Private));
1120:   }

1122:   /* Free workspace */
1123:   for (i = 0; i < nr; i++) PetscCall(ISDestroy(&islrow[i]));
1124:   for (i = 0; i < nc; i++) PetscCall(ISDestroy(&islcol[i]));
1125:   PetscCall(PetscFree6(isrow, iscol, islrow, islcol, snest, istrans));
1126:   PetscCall(PetscFree2(lr, lc));
1127:   PetscFunctionReturn(PETSC_SUCCESS);
1128: }

1130: static PetscErrorCode MatDiagonalScale_IS(Mat A, Vec l, Vec r)
1131: {
1132:   Mat_IS            *matis = (Mat_IS *)A->data;
1133:   Vec                ll, rr;
1134:   const PetscScalar *Y, *X;
1135:   PetscScalar       *x, *y;

1137:   PetscFunctionBegin;
1138:   if (l) {
1139:     ll = matis->y;
1140:     PetscCall(VecGetArrayRead(l, &Y));
1141:     PetscCall(VecGetArray(ll, &y));
1142:     PetscCall(PetscSFBcastBegin(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE));
1143:   } else {
1144:     ll = NULL;
1145:   }
1146:   if (r) {
1147:     rr = matis->x;
1148:     PetscCall(VecGetArrayRead(r, &X));
1149:     PetscCall(VecGetArray(rr, &x));
1150:     PetscCall(PetscSFBcastBegin(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE));
1151:   } else {
1152:     rr = NULL;
1153:   }
1154:   if (ll) {
1155:     PetscCall(PetscSFBcastEnd(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE));
1156:     PetscCall(VecRestoreArrayRead(l, &Y));
1157:     PetscCall(VecRestoreArray(ll, &y));
1158:   }
1159:   if (rr) {
1160:     PetscCall(PetscSFBcastEnd(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE));
1161:     PetscCall(VecRestoreArrayRead(r, &X));
1162:     PetscCall(VecRestoreArray(rr, &x));
1163:   }
1164:   PetscCall(MatDiagonalScale(matis->A, ll, rr));
1165:   PetscFunctionReturn(PETSC_SUCCESS);
1166: }

1168: static PetscErrorCode MatGetInfo_IS(Mat A, MatInfoType flag, MatInfo *ginfo)
1169: {
1170:   Mat_IS        *matis = (Mat_IS *)A->data;
1171:   MatInfo        info;
1172:   PetscLogDouble isend[6], irecv[6];
1173:   PetscInt       bs;

1175:   PetscFunctionBegin;
1176:   PetscCall(MatGetBlockSize(A, &bs));
1177:   if (matis->A->ops->getinfo) {
1178:     PetscCall(MatGetInfo(matis->A, MAT_LOCAL, &info));
1179:     isend[0] = info.nz_used;
1180:     isend[1] = info.nz_allocated;
1181:     isend[2] = info.nz_unneeded;
1182:     isend[3] = info.memory;
1183:     isend[4] = info.mallocs;
1184:   } else {
1185:     isend[0] = 0.;
1186:     isend[1] = 0.;
1187:     isend[2] = 0.;
1188:     isend[3] = 0.;
1189:     isend[4] = 0.;
1190:   }
1191:   isend[5] = matis->A->num_ass;
1192:   if (flag == MAT_LOCAL) {
1193:     ginfo->nz_used      = isend[0];
1194:     ginfo->nz_allocated = isend[1];
1195:     ginfo->nz_unneeded  = isend[2];
1196:     ginfo->memory       = isend[3];
1197:     ginfo->mallocs      = isend[4];
1198:     ginfo->assemblies   = isend[5];
1199:   } else if (flag == MAT_GLOBAL_MAX) {
1200:     PetscCallMPI(MPIU_Allreduce(isend, irecv, 6, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)A)));

1202:     ginfo->nz_used      = irecv[0];
1203:     ginfo->nz_allocated = irecv[1];
1204:     ginfo->nz_unneeded  = irecv[2];
1205:     ginfo->memory       = irecv[3];
1206:     ginfo->mallocs      = irecv[4];
1207:     ginfo->assemblies   = irecv[5];
1208:   } else if (flag == MAT_GLOBAL_SUM) {
1209:     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)A)));

1211:     ginfo->nz_used      = irecv[0];
1212:     ginfo->nz_allocated = irecv[1];
1213:     ginfo->nz_unneeded  = irecv[2];
1214:     ginfo->memory       = irecv[3];
1215:     ginfo->mallocs      = irecv[4];
1216:     ginfo->assemblies   = A->num_ass;
1217:   }
1218:   ginfo->block_size        = bs;
1219:   ginfo->fill_ratio_given  = 0;
1220:   ginfo->fill_ratio_needed = 0;
1221:   ginfo->factor_mallocs    = 0;
1222:   PetscFunctionReturn(PETSC_SUCCESS);
1223: }

1225: static PetscErrorCode MatTranspose_IS(Mat A, MatReuse reuse, Mat *B)
1226: {
1227:   Mat C, lC, lA;

1229:   PetscFunctionBegin;
1230:   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1231:   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
1232:     ISLocalToGlobalMapping rl2g, cl2g;
1233:     PetscBool              allow_repeated;

1235:     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
1236:     PetscCall(MatSetSizes(C, A->cmap->n, A->rmap->n, A->cmap->N, A->rmap->N));
1237:     PetscCall(MatSetBlockSizes(C, PetscAbs(A->cmap->bs), PetscAbs(A->rmap->bs)));
1238:     PetscCall(MatSetType(C, MATIS));
1239:     PetscCall(MatISGetAllowRepeated(A, &allow_repeated));
1240:     PetscCall(MatISSetAllowRepeated(C, allow_repeated));
1241:     PetscCall(MatGetLocalToGlobalMapping(A, &rl2g, &cl2g));
1242:     PetscCall(MatSetLocalToGlobalMapping(C, cl2g, rl2g));
1243:   } else C = *B;

1245:   /* perform local transposition */
1246:   PetscCall(MatISGetLocalMat(A, &lA));
1247:   PetscCall(MatTranspose(lA, MAT_INITIAL_MATRIX, &lC));
1248:   PetscCall(MatSetLocalToGlobalMapping(lC, lA->cmap->mapping, lA->rmap->mapping));
1249:   PetscCall(MatISSetLocalMat(C, lC));
1250:   PetscCall(MatDestroy(&lC));

1252:   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
1253:     *B = C;
1254:   } else {
1255:     PetscCall(MatHeaderMerge(A, &C));
1256:   }
1257:   PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
1258:   PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
1259:   PetscFunctionReturn(PETSC_SUCCESS);
1260: }

1262: static PetscErrorCode MatDiagonalSet_IS(Mat A, Vec D, InsertMode insmode)
1263: {
1264:   Mat_IS *is = (Mat_IS *)A->data;

1266:   PetscFunctionBegin;
1267:   PetscCheck(!is->allow_repeated || insmode == ADD_VALUES, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "INSERT_VALUES with repeated entries not supported");
1268:   if (D) { /* MatShift_IS passes D = NULL */
1269:     PetscCall(VecScatterBegin(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD));
1270:     PetscCall(VecScatterEnd(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD));
1271:   }
1272:   PetscCall(VecPointwiseDivide(is->y, is->y, is->counter));
1273:   PetscCall(MatDiagonalSet(is->A, is->y, insmode));
1274:   PetscFunctionReturn(PETSC_SUCCESS);
1275: }

1277: static PetscErrorCode MatShift_IS(Mat A, PetscScalar a)
1278: {
1279:   Mat_IS *is = (Mat_IS *)A->data;

1281:   PetscFunctionBegin;
1282:   PetscCall(VecSet(is->y, a));
1283:   PetscCall(MatDiagonalSet_IS(A, NULL, ADD_VALUES));
1284:   PetscFunctionReturn(PETSC_SUCCESS);
1285: }

1287: static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1288: {
1289:   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];

1291:   PetscFunctionBegin;
1292:   PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n);
1293:   PetscCall(ISLocalToGlobalMappingApply(A->rmap->mapping, m, rows, rows_l));
1294:   PetscCall(ISLocalToGlobalMappingApply(A->cmap->mapping, n, cols, cols_l));
1295:   PetscCall(MatSetValuesLocal_IS(A, m, rows_l, n, cols_l, values, addv));
1296:   PetscFunctionReturn(PETSC_SUCCESS);
1297: }

1299: static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1300: {
1301:   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];

1303:   PetscFunctionBegin;
1304:   PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column block indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n);
1305:   PetscCall(ISLocalToGlobalMappingApplyBlock(A->rmap->mapping, m, rows, rows_l));
1306:   PetscCall(ISLocalToGlobalMappingApplyBlock(A->cmap->mapping, n, cols, cols_l));
1307:   PetscCall(MatSetValuesBlockedLocal_IS(A, m, rows_l, n, cols_l, values, addv));
1308:   PetscFunctionReturn(PETSC_SUCCESS);
1309: }

1311: static PetscErrorCode MatCreateSubMatrix_IS(Mat mat, IS irow, IS icol, MatReuse scall, Mat *newmat)
1312: {
1313:   Mat             locmat, newlocmat;
1314:   Mat_IS         *newmatis;
1315:   const PetscInt *idxs;
1316:   PetscInt        i, m, n;

1318:   PetscFunctionBegin;
1319:   if (scall == MAT_REUSE_MATRIX) {
1320:     PetscBool ismatis;

1322:     PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis));
1323:     PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Not of MATIS type");
1324:     newmatis = (Mat_IS *)(*newmat)->data;
1325:     PetscCheck(newmatis->getsub_ris, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local row IS");
1326:     PetscCheck(newmatis->getsub_cis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local col IS");
1327:   }
1328:   /* irow and icol may not have duplicate entries */
1329:   if (PetscDefined(USE_DEBUG)) {
1330:     Vec                rtest, ltest;
1331:     const PetscScalar *array;

1333:     PetscCall(MatCreateVecs(mat, &ltest, &rtest));
1334:     PetscCall(ISGetLocalSize(irow, &n));
1335:     PetscCall(ISGetIndices(irow, &idxs));
1336:     for (i = 0; i < n; i++) PetscCall(VecSetValue(rtest, idxs[i], 1.0, ADD_VALUES));
1337:     PetscCall(VecAssemblyBegin(rtest));
1338:     PetscCall(VecAssemblyEnd(rtest));
1339:     PetscCall(VecGetLocalSize(rtest, &n));
1340:     PetscCall(VecGetOwnershipRange(rtest, &m, NULL));
1341:     PetscCall(VecGetArrayRead(rtest, &array));
1342:     for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! Irow may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i]));
1343:     PetscCall(VecRestoreArrayRead(rtest, &array));
1344:     PetscCall(ISRestoreIndices(irow, &idxs));
1345:     PetscCall(ISGetLocalSize(icol, &n));
1346:     PetscCall(ISGetIndices(icol, &idxs));
1347:     for (i = 0; i < n; i++) PetscCall(VecSetValue(ltest, idxs[i], 1.0, ADD_VALUES));
1348:     PetscCall(VecAssemblyBegin(ltest));
1349:     PetscCall(VecAssemblyEnd(ltest));
1350:     PetscCall(VecGetLocalSize(ltest, &n));
1351:     PetscCall(VecGetOwnershipRange(ltest, &m, NULL));
1352:     PetscCall(VecGetArrayRead(ltest, &array));
1353:     for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! Icol may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i]));
1354:     PetscCall(VecRestoreArrayRead(ltest, &array));
1355:     PetscCall(ISRestoreIndices(icol, &idxs));
1356:     PetscCall(VecDestroy(&rtest));
1357:     PetscCall(VecDestroy(&ltest));
1358:   }
1359:   if (scall == MAT_INITIAL_MATRIX) {
1360:     Mat_IS                *matis = (Mat_IS *)mat->data;
1361:     ISLocalToGlobalMapping rl2g;
1362:     IS                     is;
1363:     PetscInt              *lidxs, *lgidxs, *newgidxs;
1364:     PetscInt               ll, newloc, irbs, icbs, arbs, acbs, rbs, cbs;
1365:     PetscBool              cong;
1366:     MPI_Comm               comm;

1368:     PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
1369:     PetscCall(MatGetBlockSizes(mat, &arbs, &acbs));
1370:     PetscCall(ISGetBlockSize(irow, &irbs));
1371:     PetscCall(ISGetBlockSize(icol, &icbs));
1372:     rbs = arbs == irbs ? irbs : 1;
1373:     cbs = acbs == icbs ? icbs : 1;
1374:     PetscCall(ISGetLocalSize(irow, &m));
1375:     PetscCall(ISGetLocalSize(icol, &n));
1376:     PetscCall(MatCreate(comm, newmat));
1377:     PetscCall(MatSetType(*newmat, MATIS));
1378:     PetscCall(MatISSetAllowRepeated(*newmat, matis->allow_repeated));
1379:     PetscCall(MatSetSizes(*newmat, m, n, PETSC_DECIDE, PETSC_DECIDE));
1380:     PetscCall(MatSetBlockSizes(*newmat, rbs, cbs));
1381:     /* communicate irow to their owners in the layout */
1382:     PetscCall(ISGetIndices(irow, &idxs));
1383:     PetscCall(PetscLayoutMapLocal(mat->rmap, m, idxs, &ll, &lidxs, &lgidxs));
1384:     PetscCall(ISRestoreIndices(irow, &idxs));
1385:     PetscCall(PetscArrayzero(matis->sf_rootdata, matis->sf->nroots));
1386:     for (i = 0; i < ll; i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i] + 1;
1387:     PetscCall(PetscFree(lidxs));
1388:     PetscCall(PetscFree(lgidxs));
1389:     PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1390:     PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1391:     for (i = 0, newloc = 0; i < matis->sf->nleaves; i++)
1392:       if (matis->sf_leafdata[i]) newloc++;
1393:     PetscCall(PetscMalloc1(newloc, &newgidxs));
1394:     PetscCall(PetscMalloc1(newloc, &lidxs));
1395:     for (i = 0, newloc = 0; i < matis->sf->nleaves; i++)
1396:       if (matis->sf_leafdata[i]) {
1397:         lidxs[newloc]      = i;
1398:         newgidxs[newloc++] = matis->sf_leafdata[i] - 1;
1399:       }
1400:     PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is));
1401:     PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
1402:     PetscCall(ISLocalToGlobalMappingSetBlockSize(rl2g, rbs));
1403:     PetscCall(ISDestroy(&is));
1404:     /* local IS to extract the local submatrix */
1405:     newmatis = (Mat_IS *)(*newmat)->data;
1406:     PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_ris));
1407:     PetscCall(MatHasCongruentLayouts(mat, &cong));
1408:     if (cong && irow == icol && matis->csf == matis->sf) {
1409:       PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, rl2g));
1410:       PetscCall(PetscObjectReference((PetscObject)newmatis->getsub_ris));
1411:       newmatis->getsub_cis = newmatis->getsub_ris;
1412:     } else {
1413:       ISLocalToGlobalMapping cl2g;

1415:       /* communicate icol to their owners in the layout */
1416:       PetscCall(ISGetIndices(icol, &idxs));
1417:       PetscCall(PetscLayoutMapLocal(mat->cmap, n, idxs, &ll, &lidxs, &lgidxs));
1418:       PetscCall(ISRestoreIndices(icol, &idxs));
1419:       PetscCall(PetscArrayzero(matis->csf_rootdata, matis->csf->nroots));
1420:       for (i = 0; i < ll; i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i] + 1;
1421:       PetscCall(PetscFree(lidxs));
1422:       PetscCall(PetscFree(lgidxs));
1423:       PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE));
1424:       PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE));
1425:       for (i = 0, newloc = 0; i < matis->csf->nleaves; i++)
1426:         if (matis->csf_leafdata[i]) newloc++;
1427:       PetscCall(PetscMalloc1(newloc, &newgidxs));
1428:       PetscCall(PetscMalloc1(newloc, &lidxs));
1429:       for (i = 0, newloc = 0; i < matis->csf->nleaves; i++)
1430:         if (matis->csf_leafdata[i]) {
1431:           lidxs[newloc]      = i;
1432:           newgidxs[newloc++] = matis->csf_leafdata[i] - 1;
1433:         }
1434:       PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is));
1435:       PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
1436:       PetscCall(ISLocalToGlobalMappingSetBlockSize(cl2g, cbs));
1437:       PetscCall(ISDestroy(&is));
1438:       /* local IS to extract the local submatrix */
1439:       PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_cis));
1440:       PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, cl2g));
1441:       PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
1442:     }
1443:     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
1444:   } else {
1445:     PetscCall(MatISGetLocalMat(*newmat, &newlocmat));
1446:   }
1447:   PetscCall(MatISGetLocalMat(mat, &locmat));
1448:   newmatis = (Mat_IS *)(*newmat)->data;
1449:   PetscCall(MatCreateSubMatrix(locmat, newmatis->getsub_ris, newmatis->getsub_cis, scall, &newlocmat));
1450:   if (scall == MAT_INITIAL_MATRIX) {
1451:     PetscCall(MatISSetLocalMat(*newmat, newlocmat));
1452:     PetscCall(MatDestroy(&newlocmat));
1453:   }
1454:   PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
1455:   PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
1456:   PetscFunctionReturn(PETSC_SUCCESS);
1457: }

1459: static PetscErrorCode MatCopy_IS(Mat A, Mat B, MatStructure str)
1460: {
1461:   Mat_IS   *a = (Mat_IS *)A->data, *b;
1462:   PetscBool ismatis;

1464:   PetscFunctionBegin;
1465:   PetscCall(PetscObjectTypeCompare((PetscObject)B, MATIS, &ismatis));
1466:   PetscCheck(ismatis, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not yet implemented for this matrix type");
1467:   b = (Mat_IS *)B->data;
1468:   PetscCall(MatCopy(a->A, b->A, str));
1469:   PetscCall(PetscObjectStateIncrease((PetscObject)B));
1470:   PetscFunctionReturn(PETSC_SUCCESS);
1471: }

1473: static PetscErrorCode MatMissingDiagonal_IS(Mat A, PetscBool *missing, PetscInt *d)
1474: {
1475:   Vec                v;
1476:   const PetscScalar *array;
1477:   PetscInt           i, n;

1479:   PetscFunctionBegin;
1480:   *missing = PETSC_FALSE;
1481:   PetscCall(MatCreateVecs(A, NULL, &v));
1482:   PetscCall(MatGetDiagonal(A, v));
1483:   PetscCall(VecGetLocalSize(v, &n));
1484:   PetscCall(VecGetArrayRead(v, &array));
1485:   for (i = 0; i < n; i++)
1486:     if (array[i] == 0.) break;
1487:   PetscCall(VecRestoreArrayRead(v, &array));
1488:   PetscCall(VecDestroy(&v));
1489:   if (i != n) *missing = PETSC_TRUE;
1490:   if (d) {
1491:     *d = -1;
1492:     if (*missing) {
1493:       PetscInt rstart;
1494:       PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
1495:       *d = i + rstart;
1496:     }
1497:   }
1498:   PetscFunctionReturn(PETSC_SUCCESS);
1499: }

1501: static PetscErrorCode MatISSetUpSF_IS(Mat B)
1502: {
1503:   Mat_IS         *matis = (Mat_IS *)B->data;
1504:   const PetscInt *gidxs;
1505:   PetscInt        nleaves;

1507:   PetscFunctionBegin;
1508:   if (matis->sf) PetscFunctionReturn(PETSC_SUCCESS);
1509:   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->sf));
1510:   PetscCall(ISLocalToGlobalMappingGetIndices(matis->rmapping, &gidxs));
1511:   PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nleaves));
1512:   PetscCall(PetscSFSetGraphLayout(matis->sf, B->rmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs));
1513:   PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->rmapping, &gidxs));
1514:   PetscCall(PetscMalloc2(matis->sf->nroots, &matis->sf_rootdata, matis->sf->nleaves, &matis->sf_leafdata));
1515:   if (matis->rmapping != matis->cmapping) { /* setup SF for columns */
1516:     PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nleaves));
1517:     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->csf));
1518:     PetscCall(ISLocalToGlobalMappingGetIndices(matis->cmapping, &gidxs));
1519:     PetscCall(PetscSFSetGraphLayout(matis->csf, B->cmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs));
1520:     PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->cmapping, &gidxs));
1521:     PetscCall(PetscMalloc2(matis->csf->nroots, &matis->csf_rootdata, matis->csf->nleaves, &matis->csf_leafdata));
1522:   } else {
1523:     matis->csf          = matis->sf;
1524:     matis->csf_leafdata = matis->sf_leafdata;
1525:     matis->csf_rootdata = matis->sf_rootdata;
1526:   }
1527:   PetscFunctionReturn(PETSC_SUCCESS);
1528: }

1530: /*@
1531:   MatISGetAllowRepeated - Get the flag to allow repeated entries in the local to global map

1533:   Not Collective

1535:   Input Parameter:
1536: . A - the matrix

1538:   Output Parameter:
1539: . flg - the boolean flag

1541:   Level: intermediate

1543: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatSetLocalToGlobalMapping()`, `MatISSetAllowRepeated()`
1544: @*/
1545: PetscErrorCode MatISGetAllowRepeated(Mat A, PetscBool *flg)
1546: {
1547:   PetscBool ismatis;

1549:   PetscFunctionBegin;
1551:   PetscAssertPointer(flg, 2);
1552:   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis));
1553:   PetscCheck(ismatis, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)A)->type_name);
1554:   *flg = ((Mat_IS *)A->data)->allow_repeated;
1555:   PetscFunctionReturn(PETSC_SUCCESS);
1556: }

1558: /*@
1559:   MatISSetAllowRepeated - Set the flag to allow repeated entries in the local to global map

1561:   Logically Collective

1563:   Input Parameters:
1564: + A   - the matrix
1565: - flg - the boolean flag

1567:   Level: intermediate

1569:   Notes:
1570:   The default value is `PETSC_FALSE`.
1571:   When called AFTER `MatSetLocalToGlobalMapping()`, it will recreate the local matrices
1572:   if `flg` differs from the previously set value.
1573:   Specifically, when `flg` is true it just recreates the local matrices, while when
1574:   `flg` is false it assembles the local matrices, summing up repeated entries.
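
  Example Usage:
  A minimal sketch (the communicator `comm`, the sizes, and the `ISLocalToGlobalMapping` `map`
  containing repeated indices are assumed to be created elsewhere):
.vb
  Mat A;

  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, m, n, M, N));
  PetscCall(MatSetType(A, MATIS));
  PetscCall(MatISSetAllowRepeated(A, PETSC_TRUE)); /* keep repeated entries in the local-to-global map */
  PetscCall(MatSetLocalToGlobalMapping(A, map, map));
.ve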

1576: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatSetLocalToGlobalMapping()`, `MatISGetAllowRepeated()`
1577: @*/
1578: PetscErrorCode MatISSetAllowRepeated(Mat A, PetscBool flg)
1579: {
1580:   PetscFunctionBegin;
1584:   PetscTryMethod(A, "MatISSetAllowRepeated_C", (Mat, PetscBool), (A, flg));
1585:   PetscFunctionReturn(PETSC_SUCCESS);
1586: }

1588: static PetscErrorCode MatISSetAllowRepeated_IS(Mat A, PetscBool flg)
1589: {
1590:   Mat_IS                *matis = (Mat_IS *)A->data;
1591:   Mat                    lA    = NULL;
1592:   ISLocalToGlobalMapping lrmap, lcmap;

1594:   PetscFunctionBegin;
1595:   if (flg == matis->allow_repeated) PetscFunctionReturn(PETSC_SUCCESS);
1596:   if (!matis->A) { /* matrix has not been preallocated yet */
1597:     matis->allow_repeated = flg;
1598:     PetscFunctionReturn(PETSC_SUCCESS);
1599:   }
1600:   PetscCheck(!matis->islocalref, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not implemented for local references");
1601:   if (matis->allow_repeated) { /* we will assemble the old local matrix if needed */
1602:     lA = matis->A;
1603:     PetscCall(PetscObjectReference((PetscObject)lA));
1604:   }
1605:   /* When flg is true, we only recreate the local matrix */
1606:   matis->allow_repeated = flg;
1607:   PetscCall(MatSetLocalToGlobalMapping(A, A->rmap->mapping, A->cmap->mapping));
1608:   if (lA) { /* assemble previous local matrix if needed */
1609:     Mat nA = matis->A;

1611:     PetscCall(MatGetLocalToGlobalMapping(nA, &lrmap, &lcmap));
1612:     if (!lrmap && !lcmap) {
1613:       PetscCall(MatISSetLocalMat(A, lA));
1614:     } else {
1615:       Mat            P = NULL, R = NULL;
1616:       MatProductType ptype;

1618:       if (lrmap == lcmap) {
1619:         ptype = MATPRODUCT_PtAP;
1620:         PetscCall(MatCreateFromISLocalToGlobalMapping(lcmap, nA, PETSC_TRUE, PETSC_FALSE, NULL, &P));
1621:         PetscCall(MatProductCreate(lA, P, NULL, &nA));
1622:       } else {
1623:         if (lcmap) PetscCall(MatCreateFromISLocalToGlobalMapping(lcmap, nA, PETSC_TRUE, PETSC_FALSE, NULL, &P));
1624:         if (lrmap) PetscCall(MatCreateFromISLocalToGlobalMapping(lrmap, nA, PETSC_FALSE, PETSC_TRUE, NULL, &R));
1625:         if (R && P) {
1626:           ptype = MATPRODUCT_ABC;
1627:           PetscCall(MatProductCreate(R, lA, P, &nA));
1628:         } else if (R) {
1629:           ptype = MATPRODUCT_AB;
1630:           PetscCall(MatProductCreate(R, lA, NULL, &nA));
1631:         } else {
1632:           ptype = MATPRODUCT_AB;
1633:           PetscCall(MatProductCreate(lA, P, NULL, &nA));
1634:         }
1635:       }
1636:       PetscCall(MatProductSetType(nA, ptype));
1637:       PetscCall(MatProductSetFromOptions(nA));
1638:       PetscCall(MatProductSymbolic(nA));
1639:       PetscCall(MatProductNumeric(nA));
1640:       PetscCall(MatProductClear(nA));
1641:       PetscCall(MatConvert(nA, matis->lmattype, MAT_INPLACE_MATRIX, &nA));
1642:       PetscCall(MatISSetLocalMat(A, nA));
1643:       PetscCall(MatDestroy(&nA));
1644:       PetscCall(MatDestroy(&P));
1645:       PetscCall(MatDestroy(&R));
1646:     }
1647:   }
1648:   PetscCall(MatDestroy(&lA));
1649:   PetscFunctionReturn(PETSC_SUCCESS);
1650: }

1652: /*@
1653:   MatISStoreL2L - Store local-to-local operators during the Galerkin process of computing `MatPtAP()`

1655:   Logically Collective

1657:   Input Parameters:
1658: + A     - the matrix
1659: - store - the boolean flag

1661:   Level: advanced
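
  Example Usage:
  A minimal sketch (`A` is assumed to be an assembled `MATIS` matrix and `P` a compatible
  prolongation matrix, both created elsewhere):
.vb
  Mat C;

  PetscCall(MatISStoreL2L(A, PETSC_TRUE)); /* request storage of the local Galerkin operators */
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C));
.ve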

1663: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatPtAP()`
1664: @*/
1665: PetscErrorCode MatISStoreL2L(Mat A, PetscBool store)
1666: {
1667:   PetscFunctionBegin;
1671:   PetscTryMethod(A, "MatISStoreL2L_C", (Mat, PetscBool), (A, store));
1672:   PetscFunctionReturn(PETSC_SUCCESS);
1673: }

1675: static PetscErrorCode MatISStoreL2L_IS(Mat A, PetscBool store)
1676: {
1677:   Mat_IS *matis = (Mat_IS *)A->data;

1679:   PetscFunctionBegin;
1680:   matis->storel2l = store;
1681:   if (!store) PetscCall(PetscObjectCompose((PetscObject)A, "_MatIS_PtAP_l2l", NULL));
1682:   PetscFunctionReturn(PETSC_SUCCESS);
1683: }

1685: /*@
1686:   MatISFixLocalEmpty - Compress out zero local rows from the local matrices

1688:   Logically Collective

1690:   Input Parameters:
1691: + A   - the matrix
1692: - fix - the boolean flag

1694:   Level: advanced

1696:   Note:
1697:   When `fix` is `PETSC_TRUE`, new local matrices and l2g maps are generated during the final assembly process.
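
  Example Usage:
  A minimal sketch (`A` is assumed to be a `MATIS` matrix whose values are still to be inserted):
.vb
  PetscCall(MatISFixLocalEmpty(A, PETSC_TRUE)); /* request compression of zero local rows */
  /* ... MatSetValuesLocal() calls ... */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); /* new local matrices and l2g maps are generated here */
.ve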

1699: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatAssemblyEnd()`, `MAT_FINAL_ASSEMBLY`
1700: @*/
1701: PetscErrorCode MatISFixLocalEmpty(Mat A, PetscBool fix)
1702: {
1703:   PetscFunctionBegin;
1707:   PetscTryMethod(A, "MatISFixLocalEmpty_C", (Mat, PetscBool), (A, fix));
1708:   PetscFunctionReturn(PETSC_SUCCESS);
1709: }

1711: static PetscErrorCode MatISFixLocalEmpty_IS(Mat A, PetscBool fix)
1712: {
1713:   Mat_IS *matis = (Mat_IS *)A->data;

1715:   PetscFunctionBegin;
1716:   matis->locempty = fix;
1717:   PetscFunctionReturn(PETSC_SUCCESS);
1718: }

1720: /*@
1721:   MatISSetPreallocation - Preallocates memory for a `MATIS` parallel matrix.

1723:   Collective

1725:   Input Parameters:
1726: + B     - the matrix
1727: . d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1728:            (same value is used for all local rows)
1729: . d_nnz - array containing the number of nonzeros in the various rows of the
1730:            DIAGONAL portion of the local submatrix (possibly different for each row)
1731:            or `NULL`, if `d_nz` is used to specify the nonzero structure.
1732:            The size of this array is equal to the number of local rows, i.e `m`.
1733:            For matrices that will be factored, you must leave room for (and set)
1734:            the diagonal entry even if it is zero.
1735: . o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1736:            submatrix (same value is used for all local rows).
1737: - o_nnz - array containing the number of nonzeros in the various rows of the
1738:            OFF-DIAGONAL portion of the local submatrix (possibly different for
1739:            each row) or `NULL`, if `o_nz` is used to specify the nonzero
1740:            structure. The size of this array is equal to the number
1741:            of local rows, i.e `m`.

1743:    If the *_nnz parameter is given, then the *_nz parameter is ignored.

1745:   Level: intermediate

1747:   Note:
1748:   This function has the same interface as the `MATMPIAIJ` preallocation routine in order to simplify the transition
1749:   from the assembled format to the unassembled one. It overestimates the preallocation of `MATIS` local
1750:   matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
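
  Example Usage:
  A minimal sketch preallocating roughly `nz` nonzeros per row (the communicator `comm`, the sizes,
  the mappings `rl2g` and `cl2g`, and `nz` are assumed to be available; the split between diagonal
  and off-diagonal blocks refers to the global layout, as for `MatMPIAIJSetPreallocation()`):
.vb
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, n, M, N));
  PetscCall(MatSetType(B, MATIS));
  PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
  PetscCall(MatISSetPreallocation(B, nz, NULL, nz, NULL));
.ve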

1752: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatMPIAIJSetPreallocation()`, `MatISGetLocalMat()`, `MATIS`
1753: @*/
1754: PetscErrorCode MatISSetPreallocation(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
1755: {
1756:   PetscFunctionBegin;
1759:   PetscTryMethod(B, "MatISSetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, d_nz, d_nnz, o_nz, o_nnz));
1760:   PetscFunctionReturn(PETSC_SUCCESS);
1761: }

1763: /* this is used by DMDA */
1764: PETSC_EXTERN PetscErrorCode MatISSetPreallocation_IS(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
1765: {
1766:   Mat_IS  *matis = (Mat_IS *)B->data;
1767:   PetscInt bs, i, nlocalcols;

1769:   PetscFunctionBegin;
1770:   PetscCall(MatSetUp(B));
1771:   if (!d_nnz)
1772:     for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nz;
1773:   else
1774:     for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nnz[i];

1776:   if (!o_nnz)
1777:     for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nz;
1778:   else
1779:     for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nnz[i];

1781:   PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1782:   PetscCall(MatGetSize(matis->A, NULL, &nlocalcols));
1783:   PetscCall(MatGetBlockSize(matis->A, &bs));
1784:   PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));

1786:   for (i = 0; i < matis->sf->nleaves; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols);
1787:   PetscCall(MatSeqAIJSetPreallocation(matis->A, 0, matis->sf_leafdata));
1788: #if defined(PETSC_HAVE_HYPRE)
1789:   PetscCall(MatHYPRESetPreallocation(matis->A, 0, matis->sf_leafdata, 0, NULL));
1790: #endif

1792:   for (i = 0; i < matis->sf->nleaves / bs; i++) {
1793:     PetscInt b;

1795:     matis->sf_leafdata[i] = matis->sf_leafdata[i * bs] / bs;
1796:     for (b = 1; b < bs; b++) matis->sf_leafdata[i] = PetscMax(matis->sf_leafdata[i], matis->sf_leafdata[i * bs + b] / bs);
1797:   }
1798:   PetscCall(MatSeqBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));

1800:   nlocalcols /= bs;
1801:   for (i = 0; i < matis->sf->nleaves / bs; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols - i);
1802:   PetscCall(MatSeqSBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));

1804:   /* for other matrix types */
1805:   PetscCall(MatSetUp(matis->A));
1806:   PetscFunctionReturn(PETSC_SUCCESS);
1807: }

1809: PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
1810: {
1811:   Mat_IS         *matis = (Mat_IS *)A->data;
1812:   PetscInt       *my_dnz, *my_onz, *dnz, *onz, *mat_ranges, *row_ownership;
1813:   const PetscInt *global_indices_r, *global_indices_c;
1814:   PetscInt        i, j, bs, rows, cols;
1815:   PetscInt        lrows, lcols;
1816:   PetscInt        local_rows, local_cols;
1817:   PetscMPIInt     size;
1818:   PetscBool       isdense, issbaij;

1820:   PetscFunctionBegin;
1821:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1822:   PetscCall(MatGetSize(A, &rows, &cols));
1823:   PetscCall(MatGetBlockSize(A, &bs));
1824:   PetscCall(MatGetSize(matis->A, &local_rows, &local_cols));
1825:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQDENSE, &isdense));
1826:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
1827:   PetscCall(ISLocalToGlobalMappingGetIndices(matis->rmapping, &global_indices_r));
1828:   if (matis->rmapping != matis->cmapping) {
1829:     PetscCall(ISLocalToGlobalMappingGetIndices(matis->cmapping, &global_indices_c));
1830:   } else global_indices_c = global_indices_r;

1832:   if (issbaij) PetscCall(MatGetRowUpperTriangular(matis->A));
1833:   /*
1834:      An SF reduce is needed to sum up properly on shared rows.
1835:      Note that, in general, the preallocation is not exact, since it overestimates the number of nonzeros
1836:   */
1837:   PetscCall(MatGetLocalSize(A, &lrows, &lcols));
1838:   MatPreallocateBegin(PetscObjectComm((PetscObject)A), lrows, lcols, dnz, onz);
1839:   /* All processes need to compute entire row ownership */
1840:   PetscCall(PetscMalloc1(rows, &row_ownership));
1841:   PetscCall(MatGetOwnershipRanges(A, (const PetscInt **)&mat_ranges));
1842:   for (i = 0; i < size; i++) {
1843:     for (j = mat_ranges[i]; j < mat_ranges[i + 1]; j++) row_ownership[j] = i;
1844:   }
1845:   PetscCall(MatGetOwnershipRangesColumn(A, (const PetscInt **)&mat_ranges));

1847:   /*
1848:      my_dnz and my_onz contain the exact contribution to the preallocation from each local matrix;
1849:      they will then be summed up properly. This way, the preallocation is always sufficient
1850:   */
1851:   PetscCall(PetscCalloc2(local_rows, &my_dnz, local_rows, &my_onz));
1852:   /* preallocation as a MATAIJ */
1853:   if (isdense) { /* special case for dense local matrices */
1854:     for (i = 0; i < local_rows; i++) {
1855:       PetscInt owner = row_ownership[global_indices_r[i]];
1856:       for (j = 0; j < local_cols; j++) {
1857:         PetscInt index_col = global_indices_c[j];
1858:         if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
1859:           my_dnz[i] += 1;
1860:         } else { /* offdiag block */
1861:           my_onz[i] += 1;
1862:         }
1863:       }
1864:     }
1865:   } else if (matis->A->ops->getrowij) {
1866:     const PetscInt *ii, *jj, *jptr;
1867:     PetscBool       done;
1868:     PetscCall(MatGetRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &local_rows, &ii, &jj, &done));
1869:     PetscCheck(done, PetscObjectComm((PetscObject)matis->A), PETSC_ERR_PLIB, "Error in MatGetRowIJ");
1870:     jptr = jj;
1871:     for (i = 0; i < local_rows; i++) {
1872:       PetscInt index_row = global_indices_r[i];
1873:       for (j = 0; j < ii[i + 1] - ii[i]; j++, jptr++) {
1874:         PetscInt owner     = row_ownership[index_row];
1875:         PetscInt index_col = global_indices_c[*jptr];
1876:         if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
1877:           my_dnz[i] += 1;
1878:         } else { /* offdiag block */
1879:           my_onz[i] += 1;
1880:         }
1881:         /* same as before, interchanging rows and cols */
1882:         if (issbaij && index_col != index_row) {
1883:           owner = row_ownership[index_col];
1884:           if (index_row > mat_ranges[owner] - 1 && index_row < mat_ranges[owner + 1]) {
1885:             my_dnz[*jptr] += 1;
1886:           } else {
1887:             my_onz[*jptr] += 1;
1888:           }
1889:         }
1890:       }
1891:     }
1892:     PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &local_rows, &ii, &jj, &done));
1893:     PetscCheck(done, PetscObjectComm((PetscObject)matis->A), PETSC_ERR_PLIB, "Error in MatRestoreRowIJ");
1894:   } else { /* loop over rows and use MatGetRow */
1895:     for (i = 0; i < local_rows; i++) {
1896:       const PetscInt *cols;
1897:       PetscInt        ncols, index_row = global_indices_r[i];
1898:       PetscCall(MatGetRow(matis->A, i, &ncols, &cols, NULL));
1899:       for (j = 0; j < ncols; j++) {
1900:         PetscInt owner     = row_ownership[index_row];
1901:         PetscInt index_col = global_indices_c[cols[j]];
1902:         if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
1903:           my_dnz[i] += 1;
1904:         } else { /* offdiag block */
1905:           my_onz[i] += 1;
1906:         }
1907:         /* same as before, interchanging rows and cols */
1908:         if (issbaij && index_col != index_row) {
1909:           owner = row_ownership[index_col];
1910:           if (index_row > mat_ranges[owner] - 1 && index_row < mat_ranges[owner + 1]) {
1911:             my_dnz[cols[j]] += 1;
1912:           } else {
1913:             my_onz[cols[j]] += 1;
1914:           }
1915:         }
1916:       }
1917:       PetscCall(MatRestoreRow(matis->A, i, &ncols, &cols, NULL));
1918:     }
1919:   }
1920:   if (global_indices_c != global_indices_r) PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->cmapping, &global_indices_c));
1921:   PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->rmapping, &global_indices_r));
1922:   PetscCall(PetscFree(row_ownership));

1924:   /* Reduce my_dnz and my_onz */
1925:   if (maxreduce) {
1926:     PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_dnz, dnz, MPI_MAX));
1927:     PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_onz, onz, MPI_MAX));
1928:     PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_dnz, dnz, MPI_MAX));
1929:     PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_onz, onz, MPI_MAX));
1930:   } else {
1931:     PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_dnz, dnz, MPI_SUM));
1932:     PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_onz, onz, MPI_SUM));
1933:     PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_dnz, dnz, MPI_SUM));
1934:     PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_onz, onz, MPI_SUM));
1935:   }
1936:   PetscCall(PetscFree2(my_dnz, my_onz));

1938:   /* Resize preallocation if overestimated */
1939:   for (i = 0; i < lrows; i++) {
1940:     dnz[i] = PetscMin(dnz[i], lcols);
1941:     onz[i] = PetscMin(onz[i], cols - lcols);
1942:   }

1944:   /* Set preallocation */
1945:   PetscCall(MatSetBlockSizesFromMats(B, A, A));
1946:   PetscCall(MatSeqAIJSetPreallocation(B, 0, dnz));
1947:   PetscCall(MatMPIAIJSetPreallocation(B, 0, dnz, 0, onz));
1948:   for (i = 0; i < lrows; i += bs) {
1949:     PetscInt b, d = dnz[i], o = onz[i];

1951:     for (b = 1; b < bs; b++) {
1952:       d = PetscMax(d, dnz[i + b]);
1953:       o = PetscMax(o, onz[i + b]);
1954:     }
1955:     dnz[i / bs] = PetscMin(d / bs + d % bs, lcols / bs);
1956:     onz[i / bs] = PetscMin(o / bs + o % bs, (cols - lcols) / bs);
1957:   }
1958:   PetscCall(MatSeqBAIJSetPreallocation(B, bs, 0, dnz));
1959:   PetscCall(MatMPIBAIJSetPreallocation(B, bs, 0, dnz, 0, onz));
1960:   PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, dnz, 0, onz));
1961:   MatPreallocateEnd(dnz, onz);
1962:   if (issbaij) PetscCall(MatRestoreRowUpperTriangular(matis->A));
1963:   PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
1964:   PetscFunctionReturn(PETSC_SUCCESS);
1965: }

1967: PETSC_INTERN PetscErrorCode MatConvert_IS_XAIJ(Mat mat, MatType mtype, MatReuse reuse, Mat *M)
1968: {
1969:   Mat_IS            *matis = (Mat_IS *)mat->data;
1970:   Mat                local_mat, MT;
1971:   PetscInt           rbs, cbs, rows, cols, lrows, lcols;
1972:   PetscInt           local_rows, local_cols;
1973:   PetscBool          isseqdense, isseqsbaij, isseqaij, isseqbaij;
1974:   PetscMPIInt        size;
1975:   const PetscScalar *array;

1977:   PetscFunctionBegin;
1978:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
1979:   if (size == 1 && mat->rmap->N == matis->A->rmap->N && mat->cmap->N == matis->A->cmap->N && !matis->allow_repeated) {
1980:     Mat      B;
1981:     IS       irows = NULL, icols = NULL;
1982:     PetscInt rbs, cbs;

1984:     PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
1985:     PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
1986:     if (reuse != MAT_REUSE_MATRIX) { /* check if l2g maps are one-to-one */
1987:       IS              rows, cols;
1988:       const PetscInt *ridxs, *cidxs;
1989:       PetscInt        i, nw;
1990:       PetscBT         work;

1992:       PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->rmapping, &ridxs));
1993:       PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nw));
1994:       nw = nw / rbs;
1995:       PetscCall(PetscBTCreate(nw, &work));
1996:       for (i = 0; i < nw; i++) PetscCall(PetscBTSet(work, ridxs[i]));
1997:       for (i = 0; i < nw; i++)
1998:         if (!PetscBTLookup(work, i)) break;
1999:       if (i == nw) {
2000:         PetscCall(ISCreateBlock(PETSC_COMM_SELF, rbs, nw, ridxs, PETSC_USE_POINTER, &rows));
2001:         PetscCall(ISSetPermutation(rows));
2002:         PetscCall(ISInvertPermutation(rows, PETSC_DECIDE, &irows));
2003:         PetscCall(ISDestroy(&rows));
2004:       }
2005:       PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->rmapping, &ridxs));
2006:       PetscCall(PetscBTDestroy(&work));
2007:       if (irows && matis->rmapping != matis->cmapping) {
2008:         PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->cmapping, &cidxs));
2009:         PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nw));
2010:         nw = nw / cbs;
2011:         PetscCall(PetscBTCreate(nw, &work));
2012:         for (i = 0; i < nw; i++) PetscCall(PetscBTSet(work, cidxs[i]));
2013:         for (i = 0; i < nw; i++)
2014:           if (!PetscBTLookup(work, i)) break;
2015:         if (i == nw) {
2016:           PetscCall(ISCreateBlock(PETSC_COMM_SELF, cbs, nw, cidxs, PETSC_USE_POINTER, &cols));
2017:           PetscCall(ISSetPermutation(cols));
2018:           PetscCall(ISInvertPermutation(cols, PETSC_DECIDE, &icols));
2019:           PetscCall(ISDestroy(&cols));
2020:         }
2021:         PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->cmapping, &cidxs));
2022:         PetscCall(PetscBTDestroy(&work));
2023:       } else if (irows) {
2024:         PetscCall(PetscObjectReference((PetscObject)irows));
2025:         icols = irows;
2026:       }
2027:     } else {
2028:       PetscCall(PetscObjectQuery((PetscObject)*M, "_MatIS_IS_XAIJ_irows", (PetscObject *)&irows));
2029:       PetscCall(PetscObjectQuery((PetscObject)*M, "_MatIS_IS_XAIJ_icols", (PetscObject *)&icols));
2030:       if (irows) PetscCall(PetscObjectReference((PetscObject)irows));
2031:       if (icols) PetscCall(PetscObjectReference((PetscObject)icols));
2032:     }
2033:     if (!irows || !icols) {
2034:       PetscCall(ISDestroy(&icols));
2035:       PetscCall(ISDestroy(&irows));
2036:       goto general_assembly;
2037:     }
2038:     PetscCall(MatConvert(matis->A, mtype, MAT_INITIAL_MATRIX, &B));
2039:     if (reuse != MAT_INPLACE_MATRIX) {
2040:       PetscCall(MatCreateSubMatrix(B, irows, icols, reuse, M));
2041:       PetscCall(PetscObjectCompose((PetscObject)*M, "_MatIS_IS_XAIJ_irows", (PetscObject)irows));
2042:       PetscCall(PetscObjectCompose((PetscObject)*M, "_MatIS_IS_XAIJ_icols", (PetscObject)icols));
2043:     } else {
2044:       Mat C;

2046:       PetscCall(MatCreateSubMatrix(B, irows, icols, MAT_INITIAL_MATRIX, &C));
2047:       PetscCall(MatHeaderReplace(mat, &C));
2048:     }
2049:     PetscCall(MatDestroy(&B));
2050:     PetscCall(ISDestroy(&icols));
2051:     PetscCall(ISDestroy(&irows));
2052:     PetscFunctionReturn(PETSC_SUCCESS);
2053:   }
2054: general_assembly:
2055:   PetscCall(MatGetSize(mat, &rows, &cols));
2056:   PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
2057:   PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
2058:   PetscCall(MatGetLocalSize(mat, &lrows, &lcols));
2059:   PetscCall(MatGetSize(matis->A, &local_rows, &local_cols));
2060:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQDENSE, &isseqdense));
2061:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQAIJ, &isseqaij));
2062:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQBAIJ, &isseqbaij));
2063:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &isseqsbaij));
2064:   PetscCheck(isseqdense || isseqaij || isseqbaij || isseqsbaij, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)matis->A)->type_name);
2065:   if (PetscDefined(USE_DEBUG)) {
2066:     PetscBool lb[4], bb[4];

2068:     lb[0] = isseqdense;
2069:     lb[1] = isseqaij;
2070:     lb[2] = isseqbaij;
2071:     lb[3] = isseqsbaij;
2072:     PetscCallMPI(MPIU_Allreduce(lb, bb, 4, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
2073:     PetscCheck(bb[0] || bb[1] || bb[2] || bb[3], PETSC_COMM_SELF, PETSC_ERR_SUP, "Local matrices must have the same type");
2074:   }

2076:   if (reuse != MAT_REUSE_MATRIX) {
2077:     PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &MT));
2078:     PetscCall(MatSetSizes(MT, lrows, lcols, rows, cols));
2079:     PetscCall(MatSetType(MT, mtype));
2080:     PetscCall(MatSetBlockSizes(MT, rbs, cbs));
2081:     PetscCall(MatISSetMPIXAIJPreallocation_Private(mat, MT, PETSC_FALSE));
2082:   } else {
2083:     PetscInt mrbs, mcbs, mrows, mcols, mlrows, mlcols;

2085:     /* some checks */
2086:     MT = *M;
2087:     PetscCall(MatGetBlockSizes(MT, &mrbs, &mcbs));
2088:     PetscCall(MatGetSize(MT, &mrows, &mcols));
2089:     PetscCall(MatGetLocalSize(MT, &mlrows, &mlcols));
2090:     PetscCheck(mrows == rows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", rows, mrows);
2091:     PetscCheck(mcols == cols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", cols, mcols);
2092:     PetscCheck(mlrows == lrows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", lrows, mlrows);
2093:     PetscCheck(mlcols == lcols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", lcols, mlcols);
2094:     PetscCheck(mrbs == rbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong row block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", rbs, mrbs);
2095:     PetscCheck(mcbs == cbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong col block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", cbs, mcbs);
2096:     PetscCall(MatZeroEntries(MT));
2097:   }

2099:   if (isseqsbaij || isseqbaij) {
2100:     PetscCall(MatConvert(matis->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &local_mat));
2101:     isseqaij = PETSC_TRUE;
2102:   } else {
2103:     PetscCall(PetscObjectReference((PetscObject)matis->A));
2104:     local_mat = matis->A;
2105:   }

2107:   /* Set values */
2108:   PetscCall(MatSetLocalToGlobalMapping(MT, matis->rmapping, matis->cmapping));
2109:   if (isseqdense) { /* special case for dense local matrices */
2110:     PetscInt i, *dummy;

2112:     PetscCall(PetscMalloc1(PetscMax(local_rows, local_cols), &dummy));
2113:     for (i = 0; i < PetscMax(local_rows, local_cols); i++) dummy[i] = i;
2114:     PetscCall(MatSetOption(MT, MAT_ROW_ORIENTED, PETSC_FALSE));
2115:     PetscCall(MatDenseGetArrayRead(local_mat, &array));
2116:     PetscCall(MatSetValuesLocal(MT, local_rows, dummy, local_cols, dummy, array, ADD_VALUES));
2117:     PetscCall(MatDenseRestoreArrayRead(local_mat, &array));
2118:     PetscCall(PetscFree(dummy));
2119:   } else if (isseqaij) {
2120:     const PetscInt *blocks;
2121:     PetscInt        i, nvtxs, *xadj, *adjncy, nb;
2122:     PetscBool       done;
2123:     PetscScalar    *sarray;

2125:     PetscCall(MatGetRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &done));
2126:     PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatGetRowIJ");
2127:     PetscCall(MatSeqAIJGetArray(local_mat, &sarray));
2128:     PetscCall(MatGetVariableBlockSizes(local_mat, &nb, &blocks));
2129:     if (nb) { /* speed up assembly for special blocked matrices (used by BDDC) */
2130:       PetscInt sum;

2132:       for (i = 0, sum = 0; i < nb; i++) sum += blocks[i];
2133:       if (sum == nvtxs) {
2134:         PetscInt r;

2136:         for (i = 0, r = 0; i < nb; i++) {
2137:           PetscAssert(blocks[i] == xadj[r + 1] - xadj[r], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid block sizes prescribed for block %" PetscInt_FMT ": expected %" PetscInt_FMT ", got %" PetscInt_FMT, i, blocks[i], xadj[r + 1] - xadj[r]);
2138:           PetscCall(MatSetValuesLocal(MT, blocks[i], adjncy + xadj[r], blocks[i], adjncy + xadj[r], sarray + xadj[r], ADD_VALUES));
2139:           r += blocks[i];
2140:         }
2141:       } else {
2142:         for (i = 0; i < nvtxs; i++) PetscCall(MatSetValuesLocal(MT, 1, &i, xadj[i + 1] - xadj[i], adjncy + xadj[i], sarray + xadj[i], ADD_VALUES));
2143:       }
2144:     } else {
2145:       for (i = 0; i < nvtxs; i++) PetscCall(MatSetValuesLocal(MT, 1, &i, xadj[i + 1] - xadj[i], PetscSafePointerPlusOffset(adjncy, xadj[i]), PetscSafePointerPlusOffset(sarray, xadj[i]), ADD_VALUES));
2146:     }
2147:     PetscCall(MatRestoreRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &done));
2148:     PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatRestoreRowIJ");
2149:     PetscCall(MatSeqAIJRestoreArray(local_mat, &sarray));
2150:   } else { /* very basic values insertion for all other matrix types */
2151:     for (PetscInt i = 0; i < local_rows; i++) {
2152:       PetscInt        j;
2153:       const PetscInt *local_indices_cols;

2155:       PetscCall(MatGetRow(local_mat, i, &j, &local_indices_cols, &array));
2156:       PetscCall(MatSetValuesLocal(MT, 1, &i, j, local_indices_cols, array, ADD_VALUES));
2157:       PetscCall(MatRestoreRow(local_mat, i, &j, &local_indices_cols, &array));
2158:     }
2159:   }
2160:   PetscCall(MatDestroy(&local_mat));
2161:   PetscCall(MatAssemblyBegin(MT, MAT_FINAL_ASSEMBLY));
2162:   PetscCall(MatAssemblyEnd(MT, MAT_FINAL_ASSEMBLY));
2163:   if (isseqdense) PetscCall(MatSetOption(MT, MAT_ROW_ORIENTED, PETSC_TRUE));
2164:   if (reuse == MAT_INPLACE_MATRIX) {
2165:     PetscCall(MatHeaderReplace(mat, &MT));
2166:   } else if (reuse == MAT_INITIAL_MATRIX) {
2167:     *M = MT;
2168:   }
2169:   PetscFunctionReturn(PETSC_SUCCESS);
2170: }

2172: static PetscErrorCode MatDuplicate_IS(Mat mat, MatDuplicateOption op, Mat *newmat)
2173: {
2174:   Mat_IS  *matis = (Mat_IS *)mat->data;
2175:   PetscInt rbs, cbs, m, n, M, N;
2176:   Mat      B, localmat;

2178:   PetscFunctionBegin;
2179:   PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &rbs));
2180:   PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &cbs));
2181:   PetscCall(MatGetSize(mat, &M, &N));
2182:   PetscCall(MatGetLocalSize(mat, &m, &n));
2183:   PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
2184:   PetscCall(MatSetSizes(B, m, n, M, N));
2185:   PetscCall(MatSetBlockSize(B, rbs == cbs ? rbs : 1));
2186:   PetscCall(MatSetType(B, MATIS));
2187:   PetscCall(MatISSetLocalMatType(B, matis->lmattype));
2188:   PetscCall(MatISSetAllowRepeated(B, matis->allow_repeated));
2189:   PetscCall(MatSetLocalToGlobalMapping(B, mat->rmap->mapping, mat->cmap->mapping));
2190:   PetscCall(MatDuplicate(matis->A, op, &localmat));
2191:   PetscCall(MatSetLocalToGlobalMapping(localmat, matis->A->rmap->mapping, matis->A->cmap->mapping));
2192:   PetscCall(MatISSetLocalMat(B, localmat));
2193:   PetscCall(MatDestroy(&localmat));
2194:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2195:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2196:   *newmat = B;
2197:   PetscFunctionReturn(PETSC_SUCCESS);
2198: }

2200: static PetscErrorCode MatIsHermitian_IS(Mat A, PetscReal tol, PetscBool *flg)
2201: {
2202:   Mat_IS   *matis = (Mat_IS *)A->data;
2203:   PetscBool local_sym;

2205:   PetscFunctionBegin;
2206:   PetscCall(MatIsHermitian(matis->A, tol, &local_sym));
2207:   PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2208:   PetscFunctionReturn(PETSC_SUCCESS);
2209: }

2211: static PetscErrorCode MatIsSymmetric_IS(Mat A, PetscReal tol, PetscBool *flg)
2212: {
2213:   Mat_IS   *matis = (Mat_IS *)A->data;
2214:   PetscBool local_sym;

2216:   PetscFunctionBegin;
2217:   if (matis->rmapping != matis->cmapping) {
2218:     *flg = PETSC_FALSE;
2219:     PetscFunctionReturn(PETSC_SUCCESS);
2220:   }
2221:   PetscCall(MatIsSymmetric(matis->A, tol, &local_sym));
2222:   PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2223:   PetscFunctionReturn(PETSC_SUCCESS);
2224: }

2226: static PetscErrorCode MatIsStructurallySymmetric_IS(Mat A, PetscBool *flg)
2227: {
2228:   Mat_IS   *matis = (Mat_IS *)A->data;
2229:   PetscBool local_sym;

2231:   PetscFunctionBegin;
2232:   if (matis->rmapping != matis->cmapping) {
2233:     *flg = PETSC_FALSE;
2234:     PetscFunctionReturn(PETSC_SUCCESS);
2235:   }
2236:   PetscCall(MatIsStructurallySymmetric(matis->A, &local_sym));
2237:   PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2238:   PetscFunctionReturn(PETSC_SUCCESS);
2239: }

2241: static PetscErrorCode MatDestroy_IS(Mat A)
2242: {
2243:   Mat_IS *b = (Mat_IS *)A->data;

2245:   PetscFunctionBegin;
2246:   PetscCall(PetscFree(b->bdiag));
2247:   PetscCall(PetscFree(b->lmattype));
2248:   PetscCall(MatDestroy(&b->A));
2249:   PetscCall(VecScatterDestroy(&b->cctx));
2250:   PetscCall(VecScatterDestroy(&b->rctx));
2251:   PetscCall(VecDestroy(&b->x));
2252:   PetscCall(VecDestroy(&b->y));
2253:   PetscCall(VecDestroy(&b->counter));
2254:   PetscCall(ISDestroy(&b->getsub_ris));
2255:   PetscCall(ISDestroy(&b->getsub_cis));
2256:   if (b->sf != b->csf) {
2257:     PetscCall(PetscSFDestroy(&b->csf));
2258:     PetscCall(PetscFree2(b->csf_rootdata, b->csf_leafdata));
2259:   } else b->csf = NULL;
2260:   PetscCall(PetscSFDestroy(&b->sf));
2261:   PetscCall(PetscFree2(b->sf_rootdata, b->sf_leafdata));
2262:   PetscCall(ISLocalToGlobalMappingDestroy(&b->rmapping));
2263:   PetscCall(ISLocalToGlobalMappingDestroy(&b->cmapping));
2264:   PetscCall(MatDestroy(&b->dA));
2265:   PetscCall(MatDestroy(&b->assembledA));
2266:   PetscCall(PetscFree(A->data));
2267:   PetscCall(PetscObjectChangeTypeName((PetscObject)A, NULL));
2268:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", NULL));
2269:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", NULL));
2270:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", NULL));
2271:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISRestoreLocalMat_C", NULL));
2272:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", NULL));
2273:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", NULL));
2274:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", NULL));
2275:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", NULL));
2276:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", NULL));
2277:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", NULL));
2278:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", NULL));
2279:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", NULL));
2280:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", NULL));
2281:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", NULL));
2282:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", NULL));
2283:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", NULL));
2284:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
2285:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
2286:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetAllowRepeated_C", NULL));
2287:   PetscFunctionReturn(PETSC_SUCCESS);
2288: }

2290: static PetscErrorCode MatMult_IS(Mat A, Vec x, Vec y)
2291: {
2292:   Mat_IS     *is   = (Mat_IS *)A->data;
2293:   PetscScalar zero = 0.0;

2295:   PetscFunctionBegin;
2296:   /*  scatter the global vector x into the local work vector */
2297:   PetscCall(VecScatterBegin(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD));
2298:   PetscCall(VecScatterEnd(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD));

2300:   /* multiply the local matrix */
2301:   PetscCall(MatMult(is->A, is->x, is->y));

2303:   /* scatter product back into global vector */
2304:   PetscCall(VecSet(y, zero));
2305:   PetscCall(VecScatterBegin(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE));
2306:   PetscCall(VecScatterEnd(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE));
2307:   PetscFunctionReturn(PETSC_SUCCESS);
2308: }

2310: static PetscErrorCode MatMultAdd_IS(Mat A, Vec v1, Vec v2, Vec v3)
2311: {
2312:   Vec temp_vec;

2314:   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
2315:   if (v3 != v2) {
2316:     PetscCall(MatMult(A, v1, v3));
2317:     PetscCall(VecAXPY(v3, 1.0, v2));
2318:   } else {
2319:     PetscCall(VecDuplicate(v2, &temp_vec));
2320:     PetscCall(MatMult(A, v1, temp_vec));
2321:     PetscCall(VecAXPY(temp_vec, 1.0, v2));
2322:     PetscCall(VecCopy(temp_vec, v3));
2323:     PetscCall(VecDestroy(&temp_vec));
2324:   }
2325:   PetscFunctionReturn(PETSC_SUCCESS);
2326: }

2328: static PetscErrorCode MatMultTranspose_IS(Mat A, Vec y, Vec x)
2329: {
2330:   Mat_IS *is = (Mat_IS *)A->data;

2332:   PetscFunctionBegin;
2333:   /*  scatter the global vector y into the local work vector */
2334:   PetscCall(VecScatterBegin(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD));
2335:   PetscCall(VecScatterEnd(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD));

2337:   /* multiply the local matrix */
2338:   PetscCall(MatMultTranspose(is->A, is->y, is->x));

2340:   /* scatter product back into global vector */
2341:   PetscCall(VecSet(x, 0));
2342:   PetscCall(VecScatterBegin(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE));
2343:   PetscCall(VecScatterEnd(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE));
2344:   PetscFunctionReturn(PETSC_SUCCESS);
2345: }

2347: static PetscErrorCode MatMultTransposeAdd_IS(Mat A, Vec v1, Vec v2, Vec v3)
2348: {
2349:   Vec temp_vec;

2351:   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
2352:   if (v3 != v2) {
2353:     PetscCall(MatMultTranspose(A, v1, v3));
2354:     PetscCall(VecAXPY(v3, 1.0, v2));
2355:   } else {
2356:     PetscCall(VecDuplicate(v2, &temp_vec));
2357:     PetscCall(MatMultTranspose(A, v1, temp_vec));
2358:     PetscCall(VecAXPY(temp_vec, 1.0, v2));
2359:     PetscCall(VecCopy(temp_vec, v3));
2360:     PetscCall(VecDestroy(&temp_vec));
2361:   }
2362:   PetscFunctionReturn(PETSC_SUCCESS);
2363: }

2365: static PetscErrorCode MatView_IS(Mat A, PetscViewer viewer)
2366: {
2367:   Mat_IS                *a = (Mat_IS *)A->data;
2368:   PetscViewer            sviewer;
2369:   PetscBool              isascii, isbinary, viewl2g = PETSC_FALSE, native;
2370:   PetscViewerFormat      format;
2371:   ISLocalToGlobalMapping rmap, cmap;

2373:   PetscFunctionBegin;
2374:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2375:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2376:   PetscCall(PetscViewerGetFormat(viewer, &format));
2377:   native = (PetscBool)(format == PETSC_VIEWER_NATIVE);
2378:   if (native) {
2379:     rmap = A->rmap->mapping;
2380:     cmap = A->cmap->mapping;
2381:   } else {
2382:     rmap = a->rmapping;
2383:     cmap = a->cmapping;
2384:   }
2385:   if (isascii) {
2386:     if (format == PETSC_VIEWER_ASCII_INFO) PetscFunctionReturn(PETSC_SUCCESS);
2387:     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL || format == PETSC_VIEWER_ASCII_MATLAB) viewl2g = PETSC_TRUE;
2388:   } else if (isbinary) {
2389:     PetscInt    tr[6], nr, nc;
2390:     char        lmattype[64] = {'\0'};
2391:     PetscMPIInt size;
2392:     PetscBool   skipHeader;
2393:     IS          is;

2395:     PetscCall(PetscViewerSetUp(viewer));
2396:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
2397:     tr[0] = MAT_FILE_CLASSID;
2398:     tr[1] = A->rmap->N;
2399:     tr[2] = A->cmap->N;
2400:     tr[3] = -size; /* AIJ stores nnz here */
2401:     tr[4] = (PetscInt)(rmap == cmap);
2402:     tr[5] = a->allow_repeated;
2403:     PetscCall(PetscSNPrintf(lmattype, sizeof(lmattype), "%s", a->lmattype));

2405:     PetscCall(PetscViewerBinaryWrite(viewer, tr, PETSC_STATIC_ARRAY_LENGTH(tr), PETSC_INT));
2406:     PetscCall(PetscViewerBinaryWrite(viewer, lmattype, sizeof(lmattype), PETSC_CHAR));

2408:     /* first dump the l2g info (we need the header for proper loading on a different number of processes) */
2409:     PetscCall(PetscViewerBinaryGetSkipHeader(viewer, &skipHeader));
2410:     PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_FALSE));
2411:     PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
2412:     if (cmap != rmap) PetscCall(ISLocalToGlobalMappingView(cmap, viewer));

2414:     /* then the sizes of the local matrices */
2415:     PetscCall(MatGetSize(a->A, &nr, &nc));
2416:     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), 1, &nr, PETSC_USE_POINTER, &is));
2417:     PetscCall(ISView(is, viewer));
2418:     PetscCall(ISDestroy(&is));
2419:     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), 1, &nc, PETSC_USE_POINTER, &is));
2420:     PetscCall(ISView(is, viewer));
2421:     PetscCall(ISDestroy(&is));
2422:     PetscCall(PetscViewerBinarySetSkipHeader(viewer, skipHeader));
2423:   }
2424:   if (format == PETSC_VIEWER_ASCII_MATLAB) {
2425:     char        name[64];
2426:     PetscMPIInt size, rank;

2428:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
2429:     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)viewer), &rank));
2430:     if (size > 1) PetscCall(PetscSNPrintf(name, sizeof(name), "lmat_%d", rank));
2431:     else PetscCall(PetscSNPrintf(name, sizeof(name), "lmat"));
2432:     PetscCall(PetscObjectSetName((PetscObject)a->A, name));
2433:   }

2435:   /* Dump the local matrices */
2436:   if (isbinary) { /* PetscViewerGetSubViewer does not work in parallel */
2437:     PetscBool   isaij;
2438:     PetscInt    nr, nc;
2439:     Mat         lA, B;
2440:     Mat_MPIAIJ *b;

2442:     /* We create a temporary MPIAIJ matrix that stores the unassembled operator */
2443:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)a->A, MATAIJ, &isaij));
2444:     if (!isaij) PetscCall(MatConvert(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &lA));
2445:     else {
2446:       PetscCall(PetscObjectReference((PetscObject)a->A));
2447:       lA = a->A;
2448:     }
2449:     PetscCall(MatCreate(PetscObjectComm((PetscObject)viewer), &B));
2450:     PetscCall(MatSetType(B, MATMPIAIJ));
2451:     PetscCall(MatGetSize(lA, &nr, &nc));
2452:     PetscCall(MatSetSizes(B, nr, nc, PETSC_DECIDE, PETSC_DECIDE));
2453:     PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));

2455:     b = (Mat_MPIAIJ *)B->data;
2456:     PetscCall(MatDestroy(&b->A));
2457:     b->A = lA;

2459:     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2460:     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2461:     PetscCall(MatView(B, viewer));
2462:     PetscCall(MatDestroy(&B));
2463:   } else {
2464:     PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
2465:     PetscCall(MatView(a->A, sviewer));
2466:     PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
2467:   }

2469:   /* with ASCII, we dump the l2gmaps at the end */
2470:   if (viewl2g) {
2471:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
2472:       PetscCall(PetscObjectSetName((PetscObject)rmap, "row"));
2473:       PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
2474:       PetscCall(PetscObjectSetName((PetscObject)cmap, "col"));
2475:       PetscCall(ISLocalToGlobalMappingView(cmap, viewer));
2476:     } else {
2477:       PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
2478:       PetscCall(ISLocalToGlobalMappingView(cmap, viewer));
2479:     }
2480:   }
2481:   PetscFunctionReturn(PETSC_SUCCESS);
2482: }

2484: static PetscErrorCode MatLoad_IS(Mat A, PetscViewer viewer)
2485: {
2486:   ISLocalToGlobalMapping rmap, cmap;
2487:   MPI_Comm               comm = PetscObjectComm((PetscObject)A);
2488:   PetscBool              isbinary, samel, allow, isbaij;
2489:   PetscInt               tr[6], M, N, nr, nc, Asize, isn;
2490:   const PetscInt        *idx;
2491:   PetscMPIInt            size;
2492:   char                   lmattype[64];
2493:   Mat                    dA, lA;
2494:   IS                     is;

2496:   PetscFunctionBegin;
2497:   PetscCheckSameComm(A, 1, viewer, 2);
2498:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2499:   PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Invalid viewer of type %s", ((PetscObject)viewer)->type_name);

2501:   PetscCall(PetscViewerBinaryRead(viewer, tr, PETSC_STATIC_ARRAY_LENGTH(tr), NULL, PETSC_INT));
2502:   PetscCheck(tr[0] == MAT_FILE_CLASSID, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a matrix next in file");
2503:   PetscCheck(tr[1] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2504:   PetscCheck(tr[2] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2505:   PetscCheck(tr[3] < 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2506:   PetscCheck(tr[4] == 0 || tr[4] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2507:   PetscCheck(tr[5] == 0 || tr[5] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2508:   M     = tr[1];
2509:   N     = tr[2];
2510:   Asize = -tr[3];
2511:   samel = (PetscBool)tr[4];
2512:   allow = (PetscBool)tr[5];
2513:   PetscCall(PetscViewerBinaryRead(viewer, lmattype, sizeof(lmattype), NULL, PETSC_CHAR));

2515:   /* if we are loading from a larger set of processes, allow repeated entries */
2516:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
2517:   if (Asize > size) allow = PETSC_TRUE;

2519:   /* set global sizes if not set already */
2520:   if (A->rmap->N < 0) A->rmap->N = M;
2521:   if (A->cmap->N < 0) A->cmap->N = N;
2522:   PetscCall(PetscLayoutSetUp(A->rmap));
2523:   PetscCall(PetscLayoutSetUp(A->cmap));
2524:   PetscCheck(M == A->rmap->N, comm, PETSC_ERR_ARG_SIZ, "Matrix rows should be %" PetscInt_FMT ", found %" PetscInt_FMT, M, A->rmap->N);
2525:   PetscCheck(N == A->cmap->N, comm, PETSC_ERR_ARG_SIZ, "Matrix columns should be %" PetscInt_FMT ", found %" PetscInt_FMT, N, A->cmap->N);

2527:   /* load l2g maps */
2528:   PetscCall(ISLocalToGlobalMappingCreate(comm, 0, 0, NULL, PETSC_USE_POINTER, &rmap));
2529:   PetscCall(ISLocalToGlobalMappingLoad(rmap, viewer));
2530:   if (!samel) {
2531:     PetscCall(ISLocalToGlobalMappingCreate(comm, 0, 0, NULL, PETSC_USE_POINTER, &cmap));
2532:     PetscCall(ISLocalToGlobalMappingLoad(cmap, viewer));
2533:   } else {
2534:     PetscCall(PetscObjectReference((PetscObject)rmap));
2535:     cmap = rmap;
2536:   }

2538:   /* load sizes of local matrices */
2539:   PetscCall(ISCreate(comm, &is));
2540:   PetscCall(ISSetType(is, ISGENERAL));
2541:   PetscCall(ISLoad(is, viewer));
2542:   PetscCall(ISGetLocalSize(is, &isn));
2543:   PetscCall(ISGetIndices(is, &idx));
2544:   nr = 0;
2545:   for (PetscInt i = 0; i < isn; i++) nr += idx[i];
2546:   PetscCall(ISRestoreIndices(is, &idx));
2547:   PetscCall(ISDestroy(&is));
2548:   PetscCall(ISCreate(comm, &is));
2549:   PetscCall(ISSetType(is, ISGENERAL));
2550:   PetscCall(ISLoad(is, viewer));
2551:   PetscCall(ISGetLocalSize(is, &isn));
2552:   PetscCall(ISGetIndices(is, &idx));
2553:   nc = 0;
2554:   for (PetscInt i = 0; i < isn; i++) nc += idx[i];
2555:   PetscCall(ISRestoreIndices(is, &idx));
2556:   PetscCall(ISDestroy(&is));

2558:   /* now load the unassembled operator */
2559:   PetscCall(MatCreate(comm, &dA));
2560:   PetscCall(MatSetType(dA, MATMPIAIJ));
2561:   PetscCall(MatSetSizes(dA, nr, nc, PETSC_DECIDE, PETSC_DECIDE));
2562:   PetscCall(MatLoad(dA, viewer));
2563:   PetscCall(MatMPIAIJGetSeqAIJ(dA, &lA, NULL, NULL));
2564:   PetscCall(PetscObjectReference((PetscObject)lA));
2565:   PetscCall(MatDestroy(&dA));

2567:   /* and convert to the desired format */
2568:   PetscCall(PetscStrcmpAny(lmattype, &isbaij, MATSBAIJ, MATSEQSBAIJ, ""));
2569:   if (isbaij) PetscCall(MatSetOption(lA, MAT_SYMMETRIC, PETSC_TRUE));
2570:   PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));

2572:   /* assemble the MATIS object */
2573:   PetscCall(MatISSetAllowRepeated(A, allow));
2574:   PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap));
2575:   PetscCall(MatISSetLocalMat(A, lA));
2576:   PetscCall(MatDestroy(&lA));
2577:   PetscCall(ISLocalToGlobalMappingDestroy(&rmap));
2578:   PetscCall(ISLocalToGlobalMappingDestroy(&cmap));
2579:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
2580:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
2581:   PetscFunctionReturn(PETSC_SUCCESS);
2582: }
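
/*
   Usage sketch (illustrative, not part of the original source): loading a MATIS that was
   previously written with MatView() on a binary viewer; the file name "matis.dat" is a
   placeholder.

     Mat         A;
     PetscViewer viewer;

     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matis.dat", FILE_MODE_READ, &viewer));
     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetType(A, MATIS));
     PetscCall(MatLoad(A, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/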

2584: static PetscErrorCode MatInvertBlockDiagonal_IS(Mat mat, const PetscScalar **values)
2585: {
2586:   Mat_IS            *is = (Mat_IS *)mat->data;
2587:   MPI_Datatype       nodeType;
2588:   const PetscScalar *lv;
2589:   PetscInt           bs;

2591:   PetscFunctionBegin;
2592:   PetscCall(MatGetBlockSize(mat, &bs));
2593:   PetscCall(MatSetBlockSize(is->A, bs));
2594:   PetscCall(MatInvertBlockDiagonal(is->A, &lv));
2595:   if (!is->bdiag) PetscCall(PetscMalloc1(bs * mat->rmap->n, &is->bdiag));
2596:   PetscCallMPI(MPI_Type_contiguous((PetscMPIInt)bs, MPIU_SCALAR, &nodeType));
2597:   PetscCallMPI(MPI_Type_commit(&nodeType));
2598:   PetscCall(PetscSFReduceBegin(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
2599:   PetscCall(PetscSFReduceEnd(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
2600:   PetscCallMPI(MPI_Type_free(&nodeType));
2601:   if (values) *values = is->bdiag;
2602:   PetscFunctionReturn(PETSC_SUCCESS);
2603: }

2605: static PetscErrorCode MatISSetUpScatters_Private(Mat A)
2606: {
2607:   Vec             cglobal, rglobal;
2608:   IS              from;
2609:   Mat_IS         *is = (Mat_IS *)A->data;
2610:   PetscScalar     sum;
2611:   const PetscInt *garray;
2612:   PetscInt        nr, rbs, nc, cbs;
2613:   VecType         rtype;

2615:   PetscFunctionBegin;
2616:   PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
2617:   PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
2618:   PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
2619:   PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
2620:   PetscCall(VecDestroy(&is->x));
2621:   PetscCall(VecDestroy(&is->y));
2622:   PetscCall(VecDestroy(&is->counter));
2623:   PetscCall(VecScatterDestroy(&is->rctx));
2624:   PetscCall(VecScatterDestroy(&is->cctx));
2625:   PetscCall(MatCreateVecs(is->A, &is->x, &is->y));
2626:   PetscCall(VecBindToCPU(is->y, PETSC_TRUE));
2627:   PetscCall(VecGetRootType_Private(is->y, &rtype));
2628:   PetscCall(PetscFree(A->defaultvectype));
2629:   PetscCall(PetscStrallocpy(rtype, &A->defaultvectype));
2630:   PetscCall(MatCreateVecs(A, &cglobal, &rglobal));
2631:   PetscCall(VecBindToCPU(rglobal, PETSC_TRUE));
2632:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &garray));
2633:   PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), rbs, nr / rbs, garray, PETSC_USE_POINTER, &from));
2634:   PetscCall(VecScatterCreate(rglobal, from, is->y, NULL, &is->rctx));
2635:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &garray));
2636:   PetscCall(ISDestroy(&from));
2637:   if (is->rmapping != is->cmapping) {
2638:     PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &garray));
2639:     PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), cbs, nc / cbs, garray, PETSC_USE_POINTER, &from));
2640:     PetscCall(VecScatterCreate(cglobal, from, is->x, NULL, &is->cctx));
2641:     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &garray));
2642:     PetscCall(ISDestroy(&from));
2643:   } else {
2644:     PetscCall(PetscObjectReference((PetscObject)is->rctx));
2645:     is->cctx = is->rctx;
2646:   }
2647:   PetscCall(VecDestroy(&cglobal));

2649:   /* interface counter vector (local) */
2650:   PetscCall(VecDuplicate(is->y, &is->counter));
2651:   PetscCall(VecBindToCPU(is->counter, PETSC_TRUE));
2652:   PetscCall(VecSet(is->y, 1.));
2653:   PetscCall(VecScatterBegin(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
2654:   PetscCall(VecScatterEnd(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
2655:   PetscCall(VecScatterBegin(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
2656:   PetscCall(VecScatterEnd(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
2657:   PetscCall(VecBindToCPU(is->y, PETSC_FALSE));
2658:   PetscCall(VecBindToCPU(is->counter, PETSC_FALSE));

2660:   /* special functions for block-diagonal matrices */
2661:   PetscCall(VecSum(rglobal, &sum));
2662:   A->ops->invertblockdiagonal = NULL;
2663:   if ((PetscInt)(PetscRealPart(sum)) == A->rmap->N && A->rmap->N == A->cmap->N && is->rmapping == is->cmapping) A->ops->invertblockdiagonal = MatInvertBlockDiagonal_IS;
2664:   PetscCall(VecDestroy(&rglobal));

2666:   /* setup SF for general purpose shared indices based communications */
2667:   PetscCall(MatISSetUpSF_IS(A));
2668:   PetscFunctionReturn(PETSC_SUCCESS);
2669: }

2671: static PetscErrorCode MatISFilterL2GMap(Mat A, ISLocalToGlobalMapping map, ISLocalToGlobalMapping *nmap, ISLocalToGlobalMapping *lmap)
2672: {
2673:   Mat_IS                    *matis = (Mat_IS *)A->data;
2674:   IS                         is;
2675:   ISLocalToGlobalMappingType l2gtype;
2676:   const PetscInt            *idxs;
2677:   PetscHSetI                 ht;
2678:   PetscInt                  *nidxs;
2679:   PetscInt                   i, n, bs, c;
2680:   PetscBool                  flg[] = {PETSC_FALSE, PETSC_FALSE};

2682:   PetscFunctionBegin;
2683:   PetscCall(ISLocalToGlobalMappingGetSize(map, &n));
2684:   PetscCall(ISLocalToGlobalMappingGetBlockSize(map, &bs));
2685:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(map, &idxs));
2686:   PetscCall(PetscHSetICreate(&ht));
2687:   PetscCall(PetscMalloc1(n / bs, &nidxs));
2688:   for (i = 0, c = 0; i < n / bs; i++) {
2689:     PetscBool missing = PETSC_TRUE;
2690:     if (idxs[i] < 0) {
2691:       flg[0] = PETSC_TRUE;
2692:       continue;
2693:     }
2694:     if (!matis->allow_repeated) PetscCall(PetscHSetIQueryAdd(ht, idxs[i], &missing));
2695:     if (!missing) flg[1] = PETSC_TRUE;
2696:     else nidxs[c++] = idxs[i];
2697:   }
2698:   PetscCall(PetscHSetIDestroy(&ht));
2699:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, flg, 2, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
2700:   if (!flg[0] && !flg[1]) { /* Entries are all non-negative and unique */
2701:     *nmap = NULL;
2702:     *lmap = NULL;
2703:     PetscCall(PetscFree(nidxs));
2704:     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
2705:     PetscFunctionReturn(PETSC_SUCCESS);
2706:   }

2708:   /* New l2g map without negative indices (and repeated indices if not allowed) */
2709:   PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, c, nidxs, PETSC_USE_POINTER, &is));
2710:   PetscCall(ISLocalToGlobalMappingCreateIS(is, nmap));
2711:   PetscCall(ISDestroy(&is));
2712:   PetscCall(ISLocalToGlobalMappingGetType(map, &l2gtype));
2713:   PetscCall(ISLocalToGlobalMappingSetType(*nmap, l2gtype));

2715:   /* New local l2g map for repeated indices if not allowed */
2716:   PetscCall(ISGlobalToLocalMappingApplyBlock(*nmap, IS_GTOLM_MASK, n / bs, idxs, NULL, nidxs));
2717:   PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, n / bs, nidxs, PETSC_USE_POINTER, &is));
2718:   PetscCall(ISLocalToGlobalMappingCreateIS(is, lmap));
2719:   PetscCall(ISDestroy(&is));
2720:   PetscCall(PetscFree(nidxs));
2721:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
2722:   PetscFunctionReturn(PETSC_SUCCESS);
2723: }

2725: static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2726: {
2727:   Mat_IS                *is            = (Mat_IS *)A->data;
2728:   ISLocalToGlobalMapping localrmapping = NULL, localcmapping = NULL;
2729:   PetscInt               nr, rbs, nc, cbs;
2730:   PetscBool              cong, freem[] = {PETSC_FALSE, PETSC_FALSE};

2732:   PetscFunctionBegin;
2733:   if (rmapping) PetscCheckSameComm(A, 1, rmapping, 2);
2734:   if (cmapping) PetscCheckSameComm(A, 1, cmapping, 3);

2736:   PetscCall(ISLocalToGlobalMappingDestroy(&is->rmapping));
2737:   PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
2738:   PetscCall(PetscLayoutSetUp(A->rmap));
2739:   PetscCall(PetscLayoutSetUp(A->cmap));
2740:   PetscCall(MatHasCongruentLayouts(A, &cong));

2742:   /* If NULL, local space matches global space */
2743:   if (!rmapping) {
2744:     IS is;

2746:     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->N, 0, 1, &is));
2747:     PetscCall(ISLocalToGlobalMappingCreateIS(is, &rmapping));
2748:     if (A->rmap->bs > 0) PetscCall(ISLocalToGlobalMappingSetBlockSize(rmapping, A->rmap->bs));
2749:     PetscCall(ISDestroy(&is));
2750:     freem[0] = PETSC_TRUE;
2751:     if (!cmapping && cong && A->rmap->bs == A->cmap->bs) cmapping = rmapping;
2752:   } else if (!is->islocalref) { /* check if the l2g map has negative or repeated entries */
2753:     PetscCall(MatISFilterL2GMap(A, rmapping, &is->rmapping, &localrmapping));
2754:     if (rmapping == cmapping) {
2755:       PetscCall(PetscObjectReference((PetscObject)is->rmapping));
2756:       is->cmapping = is->rmapping;
2757:       PetscCall(PetscObjectReference((PetscObject)localrmapping));
2758:       localcmapping = localrmapping;
2759:     }
2760:   }
2761:   if (!cmapping) {
2762:     IS is;

2764:     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->N, 0, 1, &is));
2765:     PetscCall(ISLocalToGlobalMappingCreateIS(is, &cmapping));
2766:     if (A->cmap->bs > 0) PetscCall(ISLocalToGlobalMappingSetBlockSize(cmapping, A->cmap->bs));
2767:     PetscCall(ISDestroy(&is));
2768:     freem[1] = PETSC_TRUE;
2769:   } else if (cmapping != rmapping && !is->islocalref) { /* check if the l2g map has negative or repeated entries */
2770:     PetscCall(MatISFilterL2GMap(A, cmapping, &is->cmapping, &localcmapping));
2771:   }
2772:   if (!is->rmapping) {
2773:     PetscCall(PetscObjectReference((PetscObject)rmapping));
2774:     is->rmapping = rmapping;
2775:   }
2776:   if (!is->cmapping) {
2777:     PetscCall(PetscObjectReference((PetscObject)cmapping));
2778:     is->cmapping = cmapping;
2779:   }

2781:   /* Clean up */
2782:   is->lnnzstate = 0;
2783:   PetscCall(MatDestroy(&is->dA));
2784:   PetscCall(MatDestroy(&is->assembledA));
2785:   PetscCall(MatDestroy(&is->A));
2786:   if (is->csf != is->sf) {
2787:     PetscCall(PetscSFDestroy(&is->csf));
2788:     PetscCall(PetscFree2(is->csf_rootdata, is->csf_leafdata));
2789:   } else is->csf = NULL;
2790:   PetscCall(PetscSFDestroy(&is->sf));
2791:   PetscCall(PetscFree2(is->sf_rootdata, is->sf_leafdata));
2792:   PetscCall(PetscFree(is->bdiag));

2794:   /* check if the two mappings are actually the same for square matrices since MATIS has some optimization for this case
2795:      (DOLFIN passes 2 different objects) */
2796:   PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
2797:   PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
2798:   PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
2799:   PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
2800:   if (is->rmapping != is->cmapping && cong) {
2801:     PetscBool same = PETSC_FALSE;
2802:     if (nr == nc && cbs == rbs) {
2803:       const PetscInt *idxs1, *idxs2;

2805:       PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &idxs1));
2806:       PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &idxs2));
2807:       PetscCall(PetscArraycmp(idxs1, idxs2, nr / rbs, &same));
2808:       PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &idxs1));
2809:       PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &idxs2));
2810:     }
2811:     PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &same, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2812:     if (same) {
2813:       PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
2814:       PetscCall(PetscObjectReference((PetscObject)is->rmapping));
2815:       is->cmapping = is->rmapping;
2816:     }
2817:   }
2818:   PetscCall(PetscLayoutSetBlockSize(A->rmap, rbs));
2819:   PetscCall(PetscLayoutSetBlockSize(A->cmap, cbs));
2820:   /* Pass the user defined maps to the layout */
2821:   PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->rmap, rmapping));
2822:   PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->cmap, cmapping));
2823:   if (freem[0]) PetscCall(ISLocalToGlobalMappingDestroy(&rmapping));
2824:   if (freem[1]) PetscCall(ISLocalToGlobalMappingDestroy(&cmapping));

2826:   /* Create the local matrix A */
2827:   PetscCall(MatCreate(PETSC_COMM_SELF, &is->A));
2828:   PetscCall(MatSetType(is->A, is->lmattype));
2829:   PetscCall(MatSetSizes(is->A, nr, nc, nr, nc));
2830:   PetscCall(MatSetBlockSizes(is->A, rbs, cbs));
2831:   PetscCall(MatSetOptionsPrefix(is->A, "is_"));
2832:   PetscCall(MatAppendOptionsPrefix(is->A, ((PetscObject)A)->prefix));
2833:   PetscCall(PetscLayoutSetUp(is->A->rmap));
2834:   PetscCall(PetscLayoutSetUp(is->A->cmap));
2835:   PetscCall(MatSetLocalToGlobalMapping(is->A, localrmapping, localcmapping));
2836:   PetscCall(ISLocalToGlobalMappingDestroy(&localrmapping));
2837:   PetscCall(ISLocalToGlobalMappingDestroy(&localcmapping));

2839:   /* setup scatters and local vectors for MatMult */
2840:   if (!is->islocalref) PetscCall(MatISSetUpScatters_Private(A));
2841:   A->preallocated = PETSC_TRUE;
2842:   PetscFunctionReturn(PETSC_SUCCESS);
2843: }

2845: static PetscErrorCode MatSetUp_IS(Mat A)
2846: {
2847:   ISLocalToGlobalMapping rmap, cmap;

2849:   PetscFunctionBegin;
2850:   PetscCall(MatGetLocalToGlobalMapping(A, &rmap, &cmap));
2851:   if (!rmap && !cmap) PetscCall(MatSetLocalToGlobalMapping(A, NULL, NULL));
2852:   PetscFunctionReturn(PETSC_SUCCESS);
2853: }

2855: static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2856: {
2857:   Mat_IS  *is = (Mat_IS *)mat->data;
2858:   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];

2860:   PetscFunctionBegin;
        /* rows_l/cols_l are fixed-size stack buffers: guard against overflowing them */
        PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n);
2861:   PetscCall(ISGlobalToLocalMappingApply(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l));
2862:   if (m != n || rows != cols || is->cmapping != is->rmapping) {
2863:     PetscCall(ISGlobalToLocalMappingApply(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l));
2864:     PetscCall(MatSetValues(is->A, m, rows_l, n, cols_l, values, addv));
2865:   } else {
2866:     PetscCall(MatSetValues(is->A, m, rows_l, m, rows_l, values, addv));
2867:   }
2868:   PetscFunctionReturn(PETSC_SUCCESS);
2869: }

2871: static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2872: {
2873:   Mat_IS  *is = (Mat_IS *)mat->data;
2874:   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];

2876:   PetscFunctionBegin;
        /* rows_l/cols_l are fixed-size stack buffers: guard against overflowing them */
        PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column block indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n);
2877:   PetscCall(ISGlobalToLocalMappingApplyBlock(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l));
2878:   if (m != n || rows != cols || is->cmapping != is->rmapping) {
2879:     PetscCall(ISGlobalToLocalMappingApplyBlock(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l));
2880:     PetscCall(MatSetValuesBlocked(is->A, m, rows_l, n, cols_l, values, addv));
2881:   } else {
2882:     PetscCall(MatSetValuesBlocked(is->A, m, rows_l, m, rows_l, values, addv));
2883:   }
2884:   PetscFunctionReturn(PETSC_SUCCESS);
2885: }

2887: static PetscErrorCode MatSetValuesLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2888: {
2889:   Mat_IS *is = (Mat_IS *)A->data;

2891:   PetscFunctionBegin;
2892:   if (is->A->rmap->mapping || is->A->cmap->mapping) {
2893:     PetscCall(MatSetValuesLocal(is->A, m, rows, n, cols, values, addv));
2894:   } else {
2895:     PetscCall(MatSetValues(is->A, m, rows, n, cols, values, addv));
2896:   }
2897:   PetscFunctionReturn(PETSC_SUCCESS);
2898: }

2900: static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2901: {
2902:   Mat_IS *is = (Mat_IS *)A->data;

2904:   PetscFunctionBegin;
2905:   if (is->A->rmap->mapping || is->A->cmap->mapping) {
2906:     PetscCall(MatSetValuesBlockedLocal(is->A, m, rows, n, cols, values, addv));
2907:   } else {
2908:     PetscCall(MatSetValuesBlocked(is->A, m, rows, n, cols, values, addv));
2909:   }
2910:   PetscFunctionReturn(PETSC_SUCCESS);
2911: }

2913: static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, PetscBool columns)
2914: {
2915:   Mat_IS *is = (Mat_IS *)A->data;

2917:   PetscFunctionBegin;
2918:   if (!n) {
2919:     is->pure_neumann = PETSC_TRUE;
2920:   } else {
2921:     PetscInt i;
2922:     is->pure_neumann = PETSC_FALSE;

2924:     if (columns) {
2925:       PetscCall(MatZeroRowsColumns(is->A, n, rows, diag, NULL, NULL));
2926:     } else {
2927:       PetscCall(MatZeroRows(is->A, n, rows, diag, NULL, NULL));
2928:     }
2929:     if (diag != 0.) {
2930:       const PetscScalar *array;
2931:       PetscCall(VecGetArrayRead(is->counter, &array));
2932:       for (i = 0; i < n; i++) PetscCall(MatSetValue(is->A, rows[i], rows[i], diag / (array[rows[i]]), INSERT_VALUES));
2933:       PetscCall(VecRestoreArrayRead(is->counter, &array));
2934:     }
2935:     PetscCall(MatAssemblyBegin(is->A, MAT_FINAL_ASSEMBLY));
2936:     PetscCall(MatAssemblyEnd(is->A, MAT_FINAL_ASSEMBLY));
2937:   }
2938:   PetscFunctionReturn(PETSC_SUCCESS);
2939: }

2941: static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b, PetscBool columns)
2942: {
2943:   Mat_IS   *matis = (Mat_IS *)A->data;
2944:   PetscInt  nr, nl, len, i;
2945:   PetscInt *lrows;

2947:   PetscFunctionBegin;
2948:   if (PetscUnlikelyDebug(columns || diag != 0. || (x && b))) {
2949:     PetscBool cong;

2951:     PetscCall(PetscLayoutCompare(A->rmap, A->cmap, &cong));
2952:     cong = (PetscBool)(cong && matis->sf == matis->csf);
2953:     PetscCheck(cong || !columns, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Columns can be zeroed if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
2954:     PetscCheck(cong || diag == 0., PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
2955:     PetscCheck(cong || !x || !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "A->rmap and A->cmap need to be congruent, and the l2g maps must be the same");
2956:   }
2957:   /* get locally owned rows */
2958:   PetscCall(PetscLayoutMapLocal(A->rmap, n, rows, &len, &lrows, NULL));
2959:   /* fix right-hand side if needed */
2960:   if (x && b) {
2961:     const PetscScalar *xx;
2962:     PetscScalar       *bb;

2964:     PetscCall(VecGetArrayRead(x, &xx));
2965:     PetscCall(VecGetArray(b, &bb));
2966:     for (i = 0; i < len; ++i) bb[lrows[i]] = diag * xx[lrows[i]];
2967:     PetscCall(VecRestoreArrayRead(x, &xx));
2968:     PetscCall(VecRestoreArray(b, &bb));
2969:   }
2970:   /* get rows associated to the local matrices */
2971:   PetscCall(MatGetSize(matis->A, &nl, NULL));
2972:   PetscCall(PetscArrayzero(matis->sf_leafdata, nl));
2973:   PetscCall(PetscArrayzero(matis->sf_rootdata, A->rmap->n));
2974:   for (i = 0; i < len; i++) matis->sf_rootdata[lrows[i]] = 1;
2975:   PetscCall(PetscFree(lrows));
2976:   PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
2977:   PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
2978:   PetscCall(PetscMalloc1(nl, &lrows));
2979:   for (i = 0, nr = 0; i < nl; i++)
2980:     if (matis->sf_leafdata[i]) lrows[nr++] = i;
2981:   PetscCall(MatISZeroRowsColumnsLocal_Private(A, nr, lrows, diag, columns));
2982:   PetscCall(PetscFree(lrows));
2983:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
2984:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
2985:   PetscFunctionReturn(PETSC_SUCCESS);
2986: }

2988: static PetscErrorCode MatZeroRows_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2989: {
2990:   PetscFunctionBegin;
2991:   PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_FALSE));
2992:   PetscFunctionReturn(PETSC_SUCCESS);
2993: }

2995: static PetscErrorCode MatZeroRowsColumns_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2996: {
2997:   PetscFunctionBegin;
2998:   PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_TRUE));
2999:   PetscFunctionReturn(PETSC_SUCCESS);
3000: }

3002: static PetscErrorCode MatAssemblyBegin_IS(Mat A, MatAssemblyType type)
3003: {
3004:   Mat_IS *is = (Mat_IS *)A->data;

3006:   PetscFunctionBegin;
3007:   PetscCall(MatAssemblyBegin(is->A, type));
3008:   PetscFunctionReturn(PETSC_SUCCESS);
3009: }

3011: static PetscErrorCode MatAssemblyEnd_IS(Mat A, MatAssemblyType type)
3012: {
3013:   Mat_IS   *is = (Mat_IS *)A->data;
3014:   PetscBool lnnz;

3016:   PetscFunctionBegin;
3017:   PetscCall(MatAssemblyEnd(is->A, type));
3018:   /* fix for local empty rows/cols */
3019:   if (is->locempty && type == MAT_FINAL_ASSEMBLY) {
3020:     Mat                    newlA;
3021:     ISLocalToGlobalMapping rl2g, cl2g;
3022:     IS                     nzr, nzc;
3023:     PetscInt               nr, nc, nnzr, nnzc;
3024:     PetscBool              lnewl2g, newl2g;

3026:     PetscCall(MatGetSize(is->A, &nr, &nc));
3027:     PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_FALSE, PETSC_SMALL, &nzr));
3028:     if (!nzr) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nr, 0, 1, &nzr));
3029:     PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_TRUE, PETSC_SMALL, &nzc));
3030:     if (!nzc) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nc, 0, 1, &nzc));
3031:     PetscCall(ISGetSize(nzr, &nnzr));
3032:     PetscCall(ISGetSize(nzc, &nnzc));
3033:     if (nnzr != nr || nnzc != nc) { /* need new global l2g map */
3034:       lnewl2g = PETSC_TRUE;
3035:       PetscCallMPI(MPIU_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));

3037:       /* extract valid submatrix */
3038:       PetscCall(MatCreateSubMatrix(is->A, nzr, nzc, MAT_INITIAL_MATRIX, &newlA));
3039:     } else { /* local matrix fully populated */
3040:       lnewl2g = PETSC_FALSE;
3041:       PetscCallMPI(MPIU_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
3042:       PetscCall(PetscObjectReference((PetscObject)is->A));
3043:       newlA = is->A;
3044:     }

3046:     /* attach new global l2g map if needed */
3047:     if (newl2g) {
3048:       IS              zr, zc;
3049:       const PetscInt *ridxs, *cidxs, *zridxs, *zcidxs;
3050:       PetscInt       *nidxs, i;

3052:       PetscCall(ISComplement(nzr, 0, nr, &zr));
3053:       PetscCall(ISComplement(nzc, 0, nc, &zc));
3054:       PetscCall(PetscMalloc1(PetscMax(nr, nc), &nidxs));
3055:       PetscCall(ISLocalToGlobalMappingGetIndices(is->rmapping, &ridxs));
3056:       PetscCall(ISLocalToGlobalMappingGetIndices(is->cmapping, &cidxs));
3057:       PetscCall(ISGetIndices(zr, &zridxs));
3058:       PetscCall(ISGetIndices(zc, &zcidxs));
3059:       PetscCall(ISGetLocalSize(zr, &nnzr));
3060:       PetscCall(ISGetLocalSize(zc, &nnzc));

3062:       PetscCall(PetscArraycpy(nidxs, ridxs, nr));
3063:       for (i = 0; i < nnzr; i++) nidxs[zridxs[i]] = -1;
3064:       PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nr, nidxs, PETSC_COPY_VALUES, &rl2g));
3065:       PetscCall(PetscArraycpy(nidxs, cidxs, nc));
3066:       for (i = 0; i < nnzc; i++) nidxs[zcidxs[i]] = -1;
3067:       PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nc, nidxs, PETSC_COPY_VALUES, &cl2g));

3069:       PetscCall(ISRestoreIndices(zr, &zridxs));
3070:       PetscCall(ISRestoreIndices(zc, &zcidxs));
3071:       PetscCall(ISLocalToGlobalMappingRestoreIndices(is->rmapping, &ridxs));
3072:       PetscCall(ISLocalToGlobalMappingRestoreIndices(is->cmapping, &cidxs));
3073:       PetscCall(ISDestroy(&nzr));
3074:       PetscCall(ISDestroy(&nzc));
3075:       PetscCall(ISDestroy(&zr));
3076:       PetscCall(ISDestroy(&zc));
3077:       PetscCall(PetscFree(nidxs));
3078:       PetscCall(MatSetLocalToGlobalMapping(A, rl2g, cl2g));
3079:       PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
3080:       PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
3081:     }
3082:     PetscCall(MatISSetLocalMat(A, newlA));
3083:     PetscCall(MatDestroy(&newlA));
3084:     PetscCall(ISDestroy(&nzr));
3085:     PetscCall(ISDestroy(&nzc));
3086:     is->locempty = PETSC_FALSE;
3087:   }
3088:   lnnz          = (PetscBool)(is->A->nonzerostate == is->lnnzstate);
3089:   is->lnnzstate = is->A->nonzerostate;
3090:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &lnnz, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
3091:   if (!lnnz) A->nonzerostate++;
3092:   PetscFunctionReturn(PETSC_SUCCESS);
3093: }

3095: static PetscErrorCode MatISGetLocalMat_IS(Mat mat, Mat *local)
3096: {
3097:   Mat_IS *is = (Mat_IS *)mat->data;

3099:   PetscFunctionBegin;
3100:   *local = is->A;
3101:   PetscFunctionReturn(PETSC_SUCCESS);
3102: }

3104: static PetscErrorCode MatISRestoreLocalMat_IS(Mat mat, Mat *local)
3105: {
3106:   PetscFunctionBegin;
3107:   *local = NULL;
3108:   PetscFunctionReturn(PETSC_SUCCESS);
3109: }

3111: /*@
3112:   MatISGetLocalMat - Gets the local matrix stored inside a `MATIS` matrix.

3114:   Not Collective.

3116:   Input Parameter:
3117: . mat - the matrix

3119:   Output Parameter:
3120: . local - the local matrix

3122:   Level: intermediate

3124:   Notes:
3125:   This can be called if you have precomputed the nonzero structure of the
3126:   matrix and want to provide it to the inner matrix object to improve the performance
3127:   of the `MatSetValues()` operation.

3129:   Call `MatISRestoreLocalMat()` when finished with the local matrix.

3131: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISRestoreLocalMat()`
3132: @*/
3133: PetscErrorCode MatISGetLocalMat(Mat mat, Mat *local)
3134: {
3135:   PetscFunctionBegin;
3137:   PetscAssertPointer(local, 2);
3138:   PetscUseMethod(mat, "MatISGetLocalMat_C", (Mat, Mat *), (mat, local));
3139:   PetscFunctionReturn(PETSC_SUCCESS);
3140: }
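
/*
   Usage sketch (illustrative, not part of the original source): preallocating the inner
   matrix through MatISGetLocalMat(). A is assumed to be an already set up MATIS whose
   local matrix is a MATSEQAIJ with at most 9 nonzeros per row.

     Mat lA;

     PetscCall(MatISGetLocalMat(A, &lA));
     PetscCall(MatSeqAIJSetPreallocation(lA, 9, NULL));
     PetscCall(MatISRestoreLocalMat(A, &lA));
*/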

3142: /*@
3143:   MatISRestoreLocalMat - Restores the local matrix obtained with `MatISGetLocalMat()`

3145:   Not Collective.

3147:   Input Parameters:
3148: + mat   - the matrix
3149: - local - the local matrix

3151:   Level: intermediate

3153: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISGetLocalMat()`
3154: @*/
3155: PetscErrorCode MatISRestoreLocalMat(Mat mat, Mat *local)
3156: {
3157:   PetscFunctionBegin;
3159:   PetscAssertPointer(local, 2);
3160:   PetscUseMethod(mat, "MatISRestoreLocalMat_C", (Mat, Mat *), (mat, local));
3161:   PetscFunctionReturn(PETSC_SUCCESS);
3162: }

3164: static PetscErrorCode MatISSetLocalMatType_IS(Mat mat, MatType mtype)
3165: {
3166:   Mat_IS *is = (Mat_IS *)mat->data;

3168:   PetscFunctionBegin;
3169:   if (is->A) PetscCall(MatSetType(is->A, mtype));
3170:   PetscCall(PetscFree(is->lmattype));
3171:   PetscCall(PetscStrallocpy(mtype, &is->lmattype));
3172:   PetscFunctionReturn(PETSC_SUCCESS);
3173: }

3175: /*@
3176:   MatISSetLocalMatType - Specifies the type of the local matrix stored inside the `MATIS` object

3178:   Logically Collective.

3180:   Input Parameters:
3181: + mat   - the matrix
3182: - mtype - the local matrix type

3184:   Level: intermediate

3186: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetType()`, `MatType`
3187: @*/
3188: PetscErrorCode MatISSetLocalMatType(Mat mat, MatType mtype)
3189: {
3190:   PetscFunctionBegin;
3192:   PetscUseMethod(mat, "MatISSetLocalMatType_C", (Mat, MatType), (mat, mtype));
3193:   PetscFunctionReturn(PETSC_SUCCESS);
3194: }
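
/*
   Usage sketch (illustrative): selecting a symmetric storage format for the local
   matrices of a MATIS A; MATSEQSBAIJ is just one possible choice.

     PetscCall(MatISSetLocalMatType(A, MATSEQSBAIJ));
*/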

3196: static PetscErrorCode MatISSetLocalMat_IS(Mat mat, Mat local)
3197: {
3198:   Mat_IS   *is = (Mat_IS *)mat->data;
3199:   PetscInt  nrows, ncols, orows, ocols;
3200:   MatType   mtype, otype;
3201:   PetscBool sametype = PETSC_TRUE;

3203:   PetscFunctionBegin;
3204:   if (is->A && !is->islocalref) {
3205:     PetscCall(MatGetSize(is->A, &orows, &ocols));
3206:     PetscCall(MatGetSize(local, &nrows, &ncols));
3207:     PetscCheck(orows == nrows && ocols == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local MATIS matrix should be of size %" PetscInt_FMT "x%" PetscInt_FMT " (passed a %" PetscInt_FMT "x%" PetscInt_FMT " matrix)", orows, ocols, nrows, ncols);
3208:     PetscCall(MatGetType(local, &mtype));
3209:     PetscCall(MatGetType(is->A, &otype));
3210:     PetscCall(PetscStrcmp(mtype, otype, &sametype));
3211:   }
3212:   PetscCall(PetscObjectReference((PetscObject)local));
3213:   PetscCall(MatDestroy(&is->A));
3214:   is->A = local;
3215:   PetscCall(MatGetType(is->A, &mtype));
3216:   PetscCall(MatISSetLocalMatType(mat, mtype));
3217:   if (!sametype && !is->islocalref) PetscCall(MatISSetUpScatters_Private(mat));
3218:   is->lnnzstate = 0;
3219:   PetscFunctionReturn(PETSC_SUCCESS);
3220: }

3222: /*@
3223:   MatISSetLocalMat - Replaces the local matrix stored inside a `MATIS` object.

3225:   Not Collective

3227:   Input Parameters:
3228: + mat   - the matrix
3229: - local - the local matrix

3231:   Level: intermediate

3233: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISSetLocalMatType`, `MatISGetLocalMat()`
3234: @*/
3235: PetscErrorCode MatISSetLocalMat(Mat mat, Mat local)
3236: {
3237:   PetscFunctionBegin;
3240:   PetscUseMethod(mat, "MatISSetLocalMat_C", (Mat, Mat), (mat, local));
3241:   PetscFunctionReturn(PETSC_SUCCESS);
3242: }
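
/*
   Usage sketch (illustrative): replacing the local matrix of a MATIS A with a sequential
   matrix assembled independently; nl_rows and nl_cols are placeholders for the local sizes
   dictated by the local-to-global maps already set on A.

     Mat lA;

     PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, nl_rows, nl_cols, 9, NULL, &lA));
     // ... fill lA with MatSetValues() and assemble it ...
     PetscCall(MatISSetLocalMat(A, lA));
     PetscCall(MatDestroy(&lA));
*/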

3244: static PetscErrorCode MatZeroEntries_IS(Mat A)
3245: {
3246:   Mat_IS *a = (Mat_IS *)A->data;

3248:   PetscFunctionBegin;
3249:   PetscCall(MatZeroEntries(a->A));
3250:   PetscFunctionReturn(PETSC_SUCCESS);
3251: }

3253: static PetscErrorCode MatScale_IS(Mat A, PetscScalar a)
3254: {
3255:   Mat_IS *is = (Mat_IS *)A->data;

3257:   PetscFunctionBegin;
3258:   PetscCall(MatScale(is->A, a));
3259:   PetscFunctionReturn(PETSC_SUCCESS);
3260: }

3262: static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
3263: {
3264:   Mat_IS *is = (Mat_IS *)A->data;

3266:   PetscFunctionBegin;
3267:   /* get diagonal of the local matrix */
3268:   PetscCall(MatGetDiagonal(is->A, is->y));

3270:   /* scatter diagonal back into global vector */
3271:   PetscCall(VecSet(v, 0));
3272:   PetscCall(VecScatterBegin(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE));
3273:   PetscCall(VecScatterEnd(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE));
3274:   PetscFunctionReturn(PETSC_SUCCESS);
3275: }

3277: static PetscErrorCode MatSetOption_IS(Mat A, MatOption op, PetscBool flg)
3278: {
3279:   Mat_IS *a = (Mat_IS *)A->data;

3281:   PetscFunctionBegin;
3282:   PetscCall(MatSetOption(a->A, op, flg));
3283:   PetscFunctionReturn(PETSC_SUCCESS);
3284: }

3286: static PetscErrorCode MatAXPY_IS(Mat Y, PetscScalar a, Mat X, MatStructure str)
3287: {
3288:   Mat_IS *y = (Mat_IS *)Y->data;
3289:   Mat_IS *x;

3291:   PetscFunctionBegin;
3292:   if (PetscDefined(USE_DEBUG)) {
3293:     PetscBool ismatis;
3294:     PetscCall(PetscObjectTypeCompare((PetscObject)X, MATIS, &ismatis));
3295:     PetscCheck(ismatis, PetscObjectComm((PetscObject)Y), PETSC_ERR_SUP, "Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
3296:   }
3297:   x = (Mat_IS *)X->data;
3298:   PetscCall(MatAXPY(y->A, a, x->A, str));
3299:   PetscFunctionReturn(PETSC_SUCCESS);
3300: }

3302: static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A, IS row, IS col, Mat *submat)
3303: {
3304:   Mat                    lA;
3305:   Mat_IS                *matis = (Mat_IS *)A->data;
3306:   ISLocalToGlobalMapping rl2g, cl2g;
3307:   IS                     is;
3308:   const PetscInt        *rg, *rl;
3309:   PetscInt               nrg;
3310:   PetscInt               N, M, nrl, i, *idxs;

3312:   PetscFunctionBegin;
3313:   PetscCall(ISLocalToGlobalMappingGetIndices(A->rmap->mapping, &rg));
3314:   PetscCall(ISGetLocalSize(row, &nrl));
3315:   PetscCall(ISGetIndices(row, &rl));
3316:   PetscCall(ISLocalToGlobalMappingGetSize(A->rmap->mapping, &nrg));
3317:   if (PetscDefined(USE_DEBUG)) {
3318:     for (i = 0; i < nrl; i++) PetscCheck(rl[i] < nrg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local row index %" PetscInt_FMT " -> %" PetscInt_FMT " greater than the maximum possible %" PetscInt_FMT, i, rl[i], nrg);
3319:   }
3320:   PetscCall(PetscMalloc1(nrg, &idxs));
3321:   /* map from [0,nrl) to row */
3322:   for (i = 0; i < nrl; i++) idxs[i] = rl[i];
3323:   for (i = nrl; i < nrg; i++) idxs[i] = -1;
3324:   PetscCall(ISRestoreIndices(row, &rl));
3325:   PetscCall(ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping, &rg));
3326:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), nrg, idxs, PETSC_OWN_POINTER, &is));
3327:   PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
3328:   PetscCall(ISDestroy(&is));
3329:   /* compute new l2g map for columns */
3330:   if (col != row || matis->rmapping != matis->cmapping || matis->A->rmap->mapping != matis->A->cmap->mapping) {
3331:     const PetscInt *cg, *cl;
3332:     PetscInt        ncg;
3333:     PetscInt        ncl;

3335:     PetscCall(ISLocalToGlobalMappingGetIndices(A->cmap->mapping, &cg));
3336:     PetscCall(ISGetLocalSize(col, &ncl));
3337:     PetscCall(ISGetIndices(col, &cl));
3338:     PetscCall(ISLocalToGlobalMappingGetSize(A->cmap->mapping, &ncg));
3339:     if (PetscDefined(USE_DEBUG)) {
3340:       for (i = 0; i < ncl; i++) PetscCheck(cl[i] < ncg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local column index %" PetscInt_FMT " -> %" PetscInt_FMT " greater than the maximum possible %" PetscInt_FMT, i, cl[i], ncg);
3341:     }
3342:     PetscCall(PetscMalloc1(ncg, &idxs));
3343:     /* map from [0,ncl) to col */
3344:     for (i = 0; i < ncl; i++) idxs[i] = cl[i];
3345:     for (i = ncl; i < ncg; i++) idxs[i] = -1;
3346:     PetscCall(ISRestoreIndices(col, &cl));
3347:     PetscCall(ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping, &cg));
3348:     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), ncg, idxs, PETSC_OWN_POINTER, &is));
3349:     PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
3350:     PetscCall(ISDestroy(&is));
3351:   } else {
3352:     PetscCall(PetscObjectReference((PetscObject)rl2g));
3353:     cl2g = rl2g;
3354:   }
3355:   /* create the MATIS submatrix */
3356:   PetscCall(MatGetSize(A, &M, &N));
3357:   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), submat));
3358:   PetscCall(MatSetSizes(*submat, PETSC_DECIDE, PETSC_DECIDE, M, N));
3359:   PetscCall(MatSetType(*submat, MATIS));
3360:   matis             = (Mat_IS *)((*submat)->data);
3361:   matis->islocalref = PETSC_TRUE;
3362:   PetscCall(MatSetLocalToGlobalMapping(*submat, rl2g, cl2g));
3363:   PetscCall(MatISGetLocalMat(A, &lA));
3364:   PetscCall(MatISSetLocalMat(*submat, lA));
3365:   PetscCall(MatSetUp(*submat));
3366:   PetscCall(MatAssemblyBegin(*submat, MAT_FINAL_ASSEMBLY));
3367:   PetscCall(MatAssemblyEnd(*submat, MAT_FINAL_ASSEMBLY));
3368:   PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
3369:   PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

3371:   /* remove unsupported ops */
3372:   PetscCall(PetscMemzero((*submat)->ops, sizeof(struct _MatOps)));
3373:   (*submat)->ops->destroy               = MatDestroy_IS;
3374:   (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
3375:   (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
3376:   (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
3377:   (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
3378:   PetscFunctionReturn(PETSC_SUCCESS);
3379: }

3381: static PetscErrorCode MatSetFromOptions_IS(Mat A, PetscOptionItems *PetscOptionsObject)
3382: {
3383:   Mat_IS   *a = (Mat_IS *)A->data;
3384:   char      type[256];
3385:   PetscBool flg;

3387:   PetscFunctionBegin;
3388:   PetscOptionsHeadBegin(PetscOptionsObject, "MATIS options");
3389:   PetscCall(PetscOptionsDeprecated("-matis_keepassembled", "-mat_is_keepassembled", "3.21", NULL));
3390:   PetscCall(PetscOptionsDeprecated("-matis_fixempty", "-mat_is_fixempty", "3.21", NULL));
3391:   PetscCall(PetscOptionsDeprecated("-matis_storel2l", "-mat_is_storel2l", "3.21", NULL));
3392:   PetscCall(PetscOptionsDeprecated("-matis_localmat_type", "-mat_is_localmat_type", "3.21", NULL));
3393:   PetscCall(PetscOptionsBool("-mat_is_keepassembled", "Store an assembled version if needed", NULL, a->keepassembled, &a->keepassembled, NULL));
3394:   PetscCall(PetscOptionsBool("-mat_is_fixempty", "Fix local matrices in case of empty local rows/columns", "MatISFixLocalEmpty", a->locempty, &a->locempty, NULL));
3395:   PetscCall(PetscOptionsBool("-mat_is_storel2l", "Store local-to-local matrices generated from PtAP operations", "MatISStoreL2L", a->storel2l, &a->storel2l, NULL));
3396:   PetscCall(PetscOptionsBool("-mat_is_allow_repeated", "Allow local repeated entries", "MatISSetAllowRepeated", a->allow_repeated, &a->allow_repeated, NULL));
3397:   PetscCall(PetscOptionsFList("-mat_is_localmat_type", "Matrix type", "MatISSetLocalMatType", MatList, a->lmattype, type, 256, &flg));
3398:   if (flg) PetscCall(MatISSetLocalMatType(A, type));
3399:   if (a->A) PetscCall(MatSetFromOptions(a->A));
3400:   PetscOptionsHeadEnd();
3401:   PetscFunctionReturn(PETSC_SUCCESS);
3402: }

3404: /*@
3405:   MatCreateIS - Creates a "process" unassembled matrix, that is, one assembled on each process but not across processes.

3407:   Collective.

3409:   Input Parameters:
3410: + comm - MPI communicator that will share the matrix
3411: . bs   - block size of the matrix
3412: . m    - local size of left vector used in matrix vector products
3413: . n    - local size of right vector used in matrix vector products
3414: . M    - global size of left vector used in matrix vector products
3415: . N    - global size of right vector used in matrix vector products
3416: . rmap - local to global map for rows
3417: - cmap - local to global map for cols

3419:   Output Parameter:
3420: . A - the resulting matrix

3422:   Level: intermediate

3424:   Notes:
3425:   `m` and `n` are NOT related to the size of the map; they represent the size of the local parts of the distributed vectors
3426:   used in `MatMult()` operations. The local sizes of `rmap` and `cmap` define the size of the local matrices.

3428:   If `rmap` (`cmap`) is `NULL`, then the local row (column) space matches the global space.

3430: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetLocalToGlobalMapping()`
3431: @*/
3432: PetscErrorCode MatCreateIS(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, ISLocalToGlobalMapping rmap, ISLocalToGlobalMapping cmap, Mat *A)
3433: {
3434:   PetscFunctionBegin;
3435:   PetscCall(MatCreate(comm, A));
3436:   PetscCall(MatSetSizes(*A, m, n, M, N));
3437:   if (bs > 0) PetscCall(MatSetBlockSize(*A, bs));
3438:   PetscCall(MatSetType(*A, MATIS));
3439:   PetscCall(MatSetLocalToGlobalMapping(*A, rmap, cmap));
3440:   PetscFunctionReturn(PETSC_SUCCESS);
3441: }
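
/*
   Usage sketch (illustrative): creating a MATIS from a user-provided local-to-global map.
   n_local_dofs, local_to_global[], M and N are placeholders for the subdomain size, the
   subdomain numbering and the global sizes computed by the application.

     ISLocalToGlobalMapping map;
     Mat                    A;

     PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD, 1, n_local_dofs, local_to_global, PETSC_COPY_VALUES, &map));
     PetscCall(MatCreateIS(PETSC_COMM_WORLD, 1, PETSC_DECIDE, PETSC_DECIDE, M, N, map, map, &A));
     PetscCall(ISLocalToGlobalMappingDestroy(&map));
*/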

3443: static PetscErrorCode MatHasOperation_IS(Mat A, MatOperation op, PetscBool *has)
3444: {
3445:   Mat_IS      *a              = (Mat_IS *)A->data;
3446:   MatOperation tobefiltered[] = {MATOP_MULT_ADD, MATOP_MULT_TRANSPOSE_ADD, MATOP_GET_DIAGONAL_BLOCK, MATOP_INCREASE_OVERLAP};

3448:   PetscFunctionBegin;
3449:   *has = PETSC_FALSE;
3450:   if (!((void **)A->ops)[op] || !a->A) PetscFunctionReturn(PETSC_SUCCESS);
3451:   *has = PETSC_TRUE;
3452:   for (PetscInt i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(tobefiltered); i++)
3453:     if (op == tobefiltered[i]) PetscFunctionReturn(PETSC_SUCCESS);
3454:   PetscCall(MatHasOperation(a->A, op, has));
3455:   PetscFunctionReturn(PETSC_SUCCESS);
3456: }

3458: static PetscErrorCode MatSetValuesCOO_IS(Mat A, const PetscScalar v[], InsertMode imode)
3459: {
3460:   Mat_IS *a = (Mat_IS *)A->data;

3462:   PetscFunctionBegin;
3463:   PetscCall(MatSetValuesCOO(a->A, v, imode));
3464:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
3465:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
3466:   PetscFunctionReturn(PETSC_SUCCESS);
3467: }

3469: static PetscErrorCode MatSetPreallocationCOOLocal_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
3470: {
3471:   Mat_IS *a = (Mat_IS *)A->data;

3473:   PetscFunctionBegin;
3474:   PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
3475:   if (a->A->rmap->mapping || a->A->cmap->mapping) {
3476:     PetscCall(MatSetPreallocationCOOLocal(a->A, ncoo, coo_i, coo_j));
3477:   } else {
3478:     PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
3479:   }
3480:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
3481:   A->preallocated = PETSC_TRUE;
3482:   PetscFunctionReturn(PETSC_SUCCESS);
3483: }

3485: static PetscErrorCode MatSetPreallocationCOO_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
3486: {
3487:   Mat_IS *a = (Mat_IS *)A->data;

3489:   PetscFunctionBegin;
3490:   PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
3491:   PetscCheck(ncoo <= PETSC_INT_MAX, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
3492:   PetscCall(ISGlobalToLocalMappingApply(a->rmapping, IS_GTOLM_MASK, (PetscInt)ncoo, coo_i, NULL, coo_i));
3493:   PetscCall(ISGlobalToLocalMappingApply(a->cmapping, IS_GTOLM_MASK, (PetscInt)ncoo, coo_j, NULL, coo_j));
3494:   PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
3495:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
3496:   A->preallocated = PETSC_TRUE;
3497:   PetscFunctionReturn(PETSC_SUCCESS);
3498: }
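
/*
   Usage sketch (illustrative): COO assembly with a MATIS A whose local-to-global maps are
   already set; ncoo, coo_i[], coo_j[] (global numbering) and vals[] are placeholders for
   application data.

     PetscCall(MatSetPreallocationCOO(A, ncoo, coo_i, coo_j));
     PetscCall(MatSetValuesCOO(A, vals, ADD_VALUES));
*/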

3500: static PetscErrorCode MatISGetAssembled_Private(Mat A, Mat *tA)
3501: {
3502:   Mat_IS          *a = (Mat_IS *)A->data;
3503:   PetscObjectState Astate, aAstate       = PETSC_INT_MIN;
3504:   PetscObjectState Annzstate, aAnnzstate = PETSC_INT_MIN;

3506:   PetscFunctionBegin;
3507:   PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
3508:   Annzstate = A->nonzerostate;
3509:   if (a->assembledA) {
3510:     PetscCall(PetscObjectStateGet((PetscObject)a->assembledA, &aAstate));
3511:     aAnnzstate = a->assembledA->nonzerostate;
3512:   }
3513:   if (aAnnzstate != Annzstate) PetscCall(MatDestroy(&a->assembledA));
3514:   if (Astate != aAstate || !a->assembledA) {
3515:     MatType     aAtype;
3516:     PetscMPIInt size;
3517:     PetscInt    rbs, cbs, bs;

3519:     /* the assembled form is only used as temporary storage for parallel operations
3520:        such as MatCreateSubMatrices(), so do not waste device memory on it */
3521:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
3522:     PetscCall(ISLocalToGlobalMappingGetBlockSize(a->cmapping, &cbs));
3523:     PetscCall(ISLocalToGlobalMappingGetBlockSize(a->rmapping, &rbs));
3524:     bs = rbs == cbs ? rbs : 1;
3525:     if (a->assembledA) PetscCall(MatGetType(a->assembledA, &aAtype));
3526:     else if (size > 1) aAtype = bs > 1 ? MATMPIBAIJ : MATMPIAIJ;
3527:     else aAtype = bs > 1 ? MATSEQBAIJ : MATSEQAIJ;

3529:     PetscCall(MatConvert(A, aAtype, a->assembledA ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &a->assembledA));
3530:     PetscCall(PetscObjectStateSet((PetscObject)a->assembledA, Astate));
3531:     a->assembledA->nonzerostate = Annzstate;
3532:   }
3533:   PetscCall(PetscObjectReference((PetscObject)a->assembledA));
3534:   *tA = a->assembledA;
3535:   if (!a->keepassembled) PetscCall(MatDestroy(&a->assembledA));
3536:   PetscFunctionReturn(PETSC_SUCCESS);
3537: }

3539: static PetscErrorCode MatISRestoreAssembled_Private(Mat A, Mat *tA)
3540: {
3541:   PetscFunctionBegin;
3542:   PetscCall(MatDestroy(tA));
3543:   *tA = NULL;
3544:   PetscFunctionReturn(PETSC_SUCCESS);
3545: }

3547: static PetscErrorCode MatGetDiagonalBlock_IS(Mat A, Mat *dA)
3548: {
3549:   Mat_IS          *a = (Mat_IS *)A->data;
3550:   PetscObjectState Astate, dAstate = PETSC_INT_MIN;

3552:   PetscFunctionBegin;
3553:   PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
3554:   if (a->dA) PetscCall(PetscObjectStateGet((PetscObject)a->dA, &dAstate));
3555:   if (Astate != dAstate) {
3556:     Mat     tA;
3557:     MatType ltype;

3559:     PetscCall(MatDestroy(&a->dA));
3560:     PetscCall(MatISGetAssembled_Private(A, &tA));
3561:     PetscCall(MatGetDiagonalBlock(tA, &a->dA));
3562:     PetscCall(MatPropagateSymmetryOptions(tA, a->dA));
3563:     PetscCall(MatGetType(a->A, &ltype));
3564:     PetscCall(MatConvert(a->dA, ltype, MAT_INPLACE_MATRIX, &a->dA));
3565:     PetscCall(PetscObjectReference((PetscObject)a->dA));
3566:     PetscCall(MatISRestoreAssembled_Private(A, &tA));
3567:     PetscCall(PetscObjectStateSet((PetscObject)a->dA, Astate));
3568:   }
3569:   *dA = a->dA;
3570:   PetscFunctionReturn(PETSC_SUCCESS);
3571: }

3573: static PetscErrorCode MatCreateSubMatrices_IS(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse reuse, Mat *submat[])
3574: {
3575:   Mat tA;

3577:   PetscFunctionBegin;
3578:   PetscCall(MatISGetAssembled_Private(A, &tA));
3579:   PetscCall(MatCreateSubMatrices(tA, n, irow, icol, reuse, submat));
3580:   /* MatCreateSubMatrices_MPIAIJ is a mess at the moment */
3581: #if 0
3582:   {
3583:     Mat_IS    *a = (Mat_IS*)A->data;
3584:     MatType   ltype;
3585:     VecType   vtype;
3586:     char      *flg;

3588:     PetscCall(MatGetType(a->A,&ltype));
3589:     PetscCall(MatGetVecType(a->A,&vtype));
3590:     PetscCall(PetscStrstr(vtype,"cuda",&flg));
3591:     if (!flg) PetscCall(PetscStrstr(vtype,"hip",&flg));
3592:     if (!flg) PetscCall(PetscStrstr(vtype,"kokkos",&flg));
3593:     if (flg) {
3594:       for (PetscInt i = 0; i < n; i++) {
3595:         Mat sA = (*submat)[i];

3597:         PetscCall(MatConvert(sA,ltype,MAT_INPLACE_MATRIX,&sA));
3598:         (*submat)[i] = sA;
3599:       }
3600:     }
3601:   }
3602: #endif
3603:   PetscCall(MatISRestoreAssembled_Private(A, &tA));
3604:   PetscFunctionReturn(PETSC_SUCCESS);
3605: }

3607: static PetscErrorCode MatIncreaseOverlap_IS(Mat A, PetscInt n, IS is[], PetscInt ov)
3608: {
3609:   Mat tA;

3611:   PetscFunctionBegin;
3612:   PetscCall(MatISGetAssembled_Private(A, &tA));
3613:   PetscCall(MatIncreaseOverlap(tA, n, is, ov));
3614:   PetscCall(MatISRestoreAssembled_Private(A, &tA));
3615:   PetscFunctionReturn(PETSC_SUCCESS);
3616: }

3618: /*@
3619:   MatISGetLocalToGlobalMapping - Gets the local-to-global numbering of the `MATIS` object

3621:   Not Collective

3623:   Input Parameter:
3624: . A - the matrix

3626:   Output Parameters:
3627: + rmapping - row mapping
3628: - cmapping - column mapping

3630:   Level: advanced

3632:   Note:
3633:   The returned maps can differ from the ones used to construct the `MATIS` object, since they will not contain negative or repeated indices.

3635: .seealso: [](ch_matrices), `Mat`, `MatIS`, `MatSetLocalToGlobalMapping()`
3636: @*/
3637: PetscErrorCode MatISGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
3638: {
3639:   PetscFunctionBegin;
3642:   if (rmapping) PetscAssertPointer(rmapping, 2);
3643:   if (cmapping) PetscAssertPointer(cmapping, 3);
3644:   PetscUseMethod(A, "MatISGetLocalToGlobalMapping_C", (Mat, ISLocalToGlobalMapping *, ISLocalToGlobalMapping *), (A, rmapping, cmapping));
3645:   PetscFunctionReturn(PETSC_SUCCESS);
3646: }
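
/*
   Usage sketch (illustrative): querying the (possibly filtered) maps of a MATIS A and
   their sizes, which equal the sizes of the local matrix.

     ISLocalToGlobalMapping rl2g, cl2g;
     PetscInt               nr, nc;

     PetscCall(MatISGetLocalToGlobalMapping(A, &rl2g, &cl2g));
     PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &nr));
     PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &nc));
*/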

3648: static PetscErrorCode MatISGetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping *r, ISLocalToGlobalMapping *c)
3649: {
3650:   Mat_IS *a = (Mat_IS *)A->data;

3652:   PetscFunctionBegin;
3653:   if (r) *r = a->rmapping;
3654:   if (c) *c = a->cmapping;
3655:   PetscFunctionReturn(PETSC_SUCCESS);
3656: }

3658: static PetscErrorCode MatSetBlockSizes_IS(Mat A, PetscInt rbs, PetscInt cbs)
3659: {
3660:   Mat_IS *a = (Mat_IS *)A->data;

3662:   PetscFunctionBegin;
3663:   if (a->A) PetscCall(MatSetBlockSizes(a->A, rbs, cbs));
3664:   PetscFunctionReturn(PETSC_SUCCESS);
3665: }

3667: /*MC
3668:   MATIS - MATIS = "is" - A matrix type to be used for non-overlapping domain decomposition methods (e.g. `PCBDDC` or `KSPFETIDP`).
3669:   This stores the matrices in globally unassembled form and the parallel matrix vector product is handled "implicitly".

3671:   Options Database Keys:
3672: + -mat_type is           - Set the matrix type to `MATIS`.
3673: . -mat_is_allow_repeated - Allow repeated entries in the local part of the local to global maps.
3674: . -mat_is_fixempty       - Fix local matrices in case of empty local rows/columns.
3675: - -mat_is_storel2l       - Store the local-to-local operators generated by the Galerkin process of `MatPtAP()`.

3677:   Level: intermediate

3679:   Notes:
3680:   Options for the inner matrix are given with the `-is_` prefix, e.g. `-is_mat_xxx`

3682:   You must call `MatSetLocalToGlobalMapping()` before using this matrix type.

3684:   You can do matrix preallocation on the local matrix after you obtain it with
3685:   `MatISGetLocalMat()`; otherwise, you could use `MatISSetPreallocation()` or `MatXAIJSetPreallocation()`

3687: .seealso: [](ch_matrices), `Mat`, `MATIS`, `Mat`, `MatISGetLocalMat()`, `MatSetLocalToGlobalMapping()`, `MatISSetPreallocation()`, `MatCreateIS()`, `PCBDDC`, `KSPFETIDP`
3688: M*/
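
/*
   Assembly sketch (illustrative, following the notes above): a typical element loop with
   a MATIS A whose local-to-global map has already been set; ne, ndof_el, element_dofs[]
   and element_mat[] are placeholders for application data.

     for (PetscInt e = 0; e < ne; e++) PetscCall(MatSetValuesLocal(A, ndof_el, element_dofs[e], ndof_el, element_dofs[e], element_mat[e], ADD_VALUES));
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/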
3689: PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
3690: {
3691:   Mat_IS *a;

3693:   PetscFunctionBegin;
3694:   PetscCall(PetscNew(&a));
3695:   PetscCall(PetscStrallocpy(MATAIJ, &a->lmattype));
3696:   A->data = (void *)a;

3698:   /* matrix ops */
3699:   PetscCall(PetscMemzero(A->ops, sizeof(struct _MatOps)));
3700:   A->ops->mult                    = MatMult_IS;
3701:   A->ops->multadd                 = MatMultAdd_IS;
3702:   A->ops->multtranspose           = MatMultTranspose_IS;
3703:   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
3704:   A->ops->destroy                 = MatDestroy_IS;
3705:   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
3706:   A->ops->setvalues               = MatSetValues_IS;
3707:   A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
3708:   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
3709:   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
3710:   A->ops->zerorows                = MatZeroRows_IS;
3711:   A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
3712:   A->ops->assemblybegin           = MatAssemblyBegin_IS;
3713:   A->ops->assemblyend             = MatAssemblyEnd_IS;
3714:   A->ops->view                    = MatView_IS;
3715:   A->ops->load                    = MatLoad_IS;
3716:   A->ops->zeroentries             = MatZeroEntries_IS;
3717:   A->ops->scale                   = MatScale_IS;
3718:   A->ops->getdiagonal             = MatGetDiagonal_IS;
3719:   A->ops->setoption               = MatSetOption_IS;
3720:   A->ops->ishermitian             = MatIsHermitian_IS;
3721:   A->ops->issymmetric             = MatIsSymmetric_IS;
3722:   A->ops->isstructurallysymmetric = MatIsStructurallySymmetric_IS;
3723:   A->ops->duplicate               = MatDuplicate_IS;
3724:   A->ops->missingdiagonal         = MatMissingDiagonal_IS;
3725:   A->ops->copy                    = MatCopy_IS;
3726:   A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
3727:   A->ops->createsubmatrix         = MatCreateSubMatrix_IS;
3728:   A->ops->axpy                    = MatAXPY_IS;
3729:   A->ops->diagonalset             = MatDiagonalSet_IS;
3730:   A->ops->shift                   = MatShift_IS;
3731:   A->ops->transpose               = MatTranspose_IS;
3732:   A->ops->getinfo                 = MatGetInfo_IS;
3733:   A->ops->diagonalscale           = MatDiagonalScale_IS;
3734:   A->ops->setfromoptions          = MatSetFromOptions_IS;
3735:   A->ops->setup                   = MatSetUp_IS;
3736:   A->ops->hasoperation            = MatHasOperation_IS;
3737:   A->ops->getdiagonalblock        = MatGetDiagonalBlock_IS;
3738:   A->ops->createsubmatrices       = MatCreateSubMatrices_IS;
3739:   A->ops->increaseoverlap         = MatIncreaseOverlap_IS;
3740:   A->ops->setblocksizes           = MatSetBlockSizes_IS;

3742:   /* special MATIS functions */
3743:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", MatISSetLocalMatType_IS));
3744:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", MatISGetLocalMat_IS));
3745:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISRestoreLocalMat_C", MatISRestoreLocalMat_IS));
3746:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", MatISSetLocalMat_IS));
3747:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", MatISSetPreallocation_IS));
3748:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetAllowRepeated_C", MatISSetAllowRepeated_IS));
3749:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", MatISStoreL2L_IS));
3750:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", MatISFixLocalEmpty_IS));
3751:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", MatISGetLocalToGlobalMapping_IS));
3752:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", MatConvert_IS_XAIJ));
3753:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", MatConvert_IS_XAIJ));
3754:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", MatConvert_IS_XAIJ));
3755:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", MatConvert_IS_XAIJ));
3756:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", MatConvert_IS_XAIJ));
3757:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", MatConvert_IS_XAIJ));
3758:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", MatConvert_IS_XAIJ));
3759:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", MatSetPreallocationCOOLocal_IS));
3760:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_IS));
3761:   PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATIS));
3762:   PetscFunctionReturn(PETSC_SUCCESS);
3763: }