Actual source code: matis.c
1: /*
2: Creates a matrix class for use with Neumann-Neumann type preconditioners.
3: This stores the matrices in globally unassembled form. Each processor
4: assembles only its local Neumann problem and the parallel matrix-vector
5: product is handled "implicitly".
7: Currently this allows for only one subdomain per processor.
8: */
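/* Illustrative usage sketch (not part of this source file): a MATIS matrix is
   created from a local-to-global mapping and assembled through local indices.
   Names such as comm, nlocal, global_indices, M, N, dnz, onz, nr, nc, rows,
   cols and vals below are placeholders for user data.

     ISLocalToGlobalMapping l2g;
     Mat                    A;

     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nlocal, global_indices, PETSC_COPY_VALUES, &l2g));
     PetscCall(MatCreateIS(comm, 1, PETSC_DECIDE, PETSC_DECIDE, M, N, l2g, l2g, &A));
     PetscCall(ISLocalToGlobalMappingDestroy(&l2g));
     PetscCall(MatISSetPreallocation(A, dnz, NULL, onz, NULL));
     PetscCall(MatSetValuesLocal(A, nr, rows, nc, cols, vals, ADD_VALUES));
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/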
10: #include <petsc/private/matisimpl.h>
11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
12: #include <petsc/private/sfimpl.h>
13: #include <petsc/private/vecimpl.h>
14: #include <petsc/private/hashseti.h>
16: #define MATIS_MAX_ENTRIES_INSERTION 2048
18: /* copied from src/mat/impls/localref/mlocalref.c */
19: #define IndexSpaceGet(buf, nrow, ncol, irowm, icolm) \
20: do { \
21: if (nrow + ncol > (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) { \
22: PetscCall(PetscMalloc2(nrow, &irowm, ncol, &icolm)); \
23: } else { \
24: irowm = &buf[0]; \
25: icolm = &buf[nrow]; \
26: } \
27: } while (0)
29: #define IndexSpaceRestore(buf, nrow, ncol, irowm, icolm) \
30: do { \
31: if (nrow + ncol > (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) PetscCall(PetscFree2(irowm, icolm)); \
32: } while (0)
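/* For example, the callers below declare buf as PetscInt buf[2 * MATIS_MAX_ENTRIES_INSERTION]:
   insertions with nrow + ncol <= 4096 alias irowm and icolm into disjoint parts of the
   stack buffer, while larger insertions fall back to PetscMalloc2 and the matching
   IndexSpaceRestore frees the heap allocation. */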
34: static void BlockIndicesExpand(PetscInt n, const PetscInt idx[], PetscInt bs, PetscInt idxm[])
35: {
36: for (PetscInt i = 0; i < n; i++) {
37: for (PetscInt j = 0; j < bs; j++) idxm[i * bs + j] = idx[i] * bs + j;
38: }
39: }
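/* For example, with n = 2, idx = {3, 7} and bs = 2, BlockIndicesExpand fills
   idxm = {6, 7, 14, 15}: each block index is expanded into its bs consecutive
   point indices. */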
41: static PetscErrorCode MatSetValuesLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
42: static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
43: static PetscErrorCode MatISSetUpScatters_Private(Mat);
45: static PetscErrorCode MatISContainerDestroyPtAP_Private(void **ptr)
46: {
47: MatISPtAP ptap = (MatISPtAP)*ptr;
49: PetscFunctionBegin;
50: PetscCall(MatDestroySubMatrices(ptap->ris1 ? 2 : 1, &ptap->lP));
51: PetscCall(ISDestroy(&ptap->cis0));
52: PetscCall(ISDestroy(&ptap->cis1));
53: PetscCall(ISDestroy(&ptap->ris0));
54: PetscCall(ISDestroy(&ptap->ris1));
55: PetscCall(PetscFree(ptap));
56: PetscFunctionReturn(PETSC_SUCCESS);
57: }
59: static PetscErrorCode MatPtAPNumeric_IS_XAIJ(Mat A, Mat P, Mat C)
60: {
61: MatISPtAP ptap;
62: Mat_IS *matis = (Mat_IS *)A->data;
63: Mat lA, lC;
64: MatReuse reuse;
65: IS ris[2], cis[2];
66: PetscContainer c;
67: PetscInt n;
69: PetscFunctionBegin;
70: PetscCall(PetscObjectQuery((PetscObject)C, "_MatIS_PtAP", (PetscObject *)&c));
71: PetscCheck(c, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Missing PtAP information");
72: PetscCall(PetscContainerGetPointer(c, (void **)&ptap));
73: ris[0] = ptap->ris0;
74: ris[1] = ptap->ris1;
75: cis[0] = ptap->cis0;
76: cis[1] = ptap->cis1;
77: n = ptap->ris1 ? 2 : 1;
78: reuse = ptap->lP ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
79: PetscCall(MatCreateSubMatrices(P, n, ris, cis, reuse, &ptap->lP));
81: PetscCall(MatISGetLocalMat(A, &lA));
82: PetscCall(MatISGetLocalMat(C, &lC));
83: if (ptap->ris1) { /* unsymmetric A mapping */
84: Mat lPt;
86: PetscCall(MatTranspose(ptap->lP[1], MAT_INITIAL_MATRIX, &lPt));
87: PetscCall(MatMatMatMult(lPt, lA, ptap->lP[0], reuse, ptap->fill, &lC));
88: if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)A, "_MatIS_PtAP_l2l", (PetscObject)lPt));
89: PetscCall(MatDestroy(&lPt));
90: } else {
91: PetscCall(MatPtAP(lA, ptap->lP[0], reuse, ptap->fill, &lC));
92: if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP_l2l", (PetscObject)ptap->lP[0]));
93: }
94: if (reuse == MAT_INITIAL_MATRIX) {
95: PetscCall(MatISSetLocalMat(C, lC));
96: PetscCall(MatDestroy(&lC));
97: }
98: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
99: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
100: PetscFunctionReturn(PETSC_SUCCESS);
101: }
103: static PetscErrorCode MatGetNonzeroColumnsLocal_Private(Mat PT, IS *cis)
104: {
105: Mat Po, Pd;
106: IS zd, zo;
107: const PetscInt *garray;
108: PetscInt *aux, i, bs;
109: PetscInt dc, stc, oc, ctd, cto;
110: PetscBool ismpiaij, ismpibaij, isseqaij, isseqbaij;
111: MPI_Comm comm;
113: PetscFunctionBegin;
115: PetscAssertPointer(cis, 2);
116: PetscCall(PetscObjectGetComm((PetscObject)PT, &comm));
117: bs = 1;
118: PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIAIJ, &ismpiaij));
119: PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIBAIJ, &ismpibaij));
120: PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATSEQAIJ, &isseqaij));
121: PetscCall(PetscObjectTypeCompare((PetscObject)PT, MATSEQBAIJ, &isseqbaij));
122: if (isseqaij || isseqbaij) {
123: Pd = PT;
124: Po = NULL;
125: garray = NULL;
126: } else if (ismpiaij) {
127: PetscCall(MatMPIAIJGetSeqAIJ(PT, &Pd, &Po, &garray));
128: } else if (ismpibaij) {
129: PetscCall(MatMPIBAIJGetSeqBAIJ(PT, &Pd, &Po, &garray));
130: PetscCall(MatGetBlockSize(PT, &bs));
131: } else SETERRQ(comm, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)PT)->type_name);
133: /* identify any null columns in Pd or Po */
134: /* We use a tolerance comparison since it may happen that, with geometric multigrid,
135: some of the columns are not exactly zero, but only very close to zero */
136: zo = zd = NULL;
137: if (Po) PetscCall(MatFindNonzeroRowsOrCols_Basic(Po, PETSC_TRUE, PETSC_SMALL, &zo));
138: PetscCall(MatFindNonzeroRowsOrCols_Basic(Pd, PETSC_TRUE, PETSC_SMALL, &zd));
140: PetscCall(MatGetLocalSize(PT, NULL, &dc));
141: PetscCall(MatGetOwnershipRangeColumn(PT, &stc, NULL));
142: if (Po) PetscCall(MatGetLocalSize(Po, NULL, &oc));
143: else oc = 0;
144: PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
145: if (zd) {
146: const PetscInt *idxs;
147: PetscInt nz;
149: /* this will throw an error if bs is not valid */
150: PetscCall(ISSetBlockSize(zd, bs));
151: PetscCall(ISGetLocalSize(zd, &nz));
152: PetscCall(ISGetIndices(zd, &idxs));
153: ctd = nz / bs;
154: for (i = 0; i < ctd; i++) aux[i] = (idxs[bs * i] + stc) / bs;
155: PetscCall(ISRestoreIndices(zd, &idxs));
156: } else {
157: ctd = dc / bs;
158: for (i = 0; i < ctd; i++) aux[i] = i + stc / bs;
159: }
160: if (zo) {
161: const PetscInt *idxs;
162: PetscInt nz;
164: /* this will throw an error if bs is not valid */
165: PetscCall(ISSetBlockSize(zo, bs));
166: PetscCall(ISGetLocalSize(zo, &nz));
167: PetscCall(ISGetIndices(zo, &idxs));
168: cto = nz / bs;
169: for (i = 0; i < cto; i++) aux[i + ctd] = garray[idxs[bs * i] / bs];
170: PetscCall(ISRestoreIndices(zo, &idxs));
171: } else {
172: cto = oc / bs;
173: for (i = 0; i < cto; i++) aux[i + ctd] = garray[i];
174: }
175: PetscCall(ISCreateBlock(comm, bs, ctd + cto, aux, PETSC_OWN_POINTER, cis));
176: PetscCall(ISDestroy(&zd));
177: PetscCall(ISDestroy(&zo));
178: PetscFunctionReturn(PETSC_SUCCESS);
179: }
181: static PetscErrorCode MatPtAPSymbolic_IS_XAIJ(Mat A, Mat P, PetscReal fill, Mat C)
182: {
183: Mat PT, lA;
184: MatISPtAP ptap;
185: ISLocalToGlobalMapping Crl2g, Ccl2g, rl2g, cl2g;
186: PetscContainer c;
187: MatType lmtype;
188: const PetscInt *garray;
189: PetscInt ibs, N, dc;
190: MPI_Comm comm;
192: PetscFunctionBegin;
193: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
194: PetscCall(MatSetType(C, MATIS));
195: PetscCall(MatISGetLocalMat(A, &lA));
196: PetscCall(MatGetType(lA, &lmtype));
197: PetscCall(MatISSetLocalMatType(C, lmtype));
198: PetscCall(MatGetSize(P, NULL, &N));
199: PetscCall(MatGetLocalSize(P, NULL, &dc));
200: PetscCall(MatSetSizes(C, dc, dc, N, N));
201: /* Not sure about this
202: PetscCall(MatGetBlockSizes(P,NULL,&ibs));
203: PetscCall(MatSetBlockSize(*C,ibs));
204: */
206: PetscCall(PetscNew(&ptap));
207: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &c));
208: PetscCall(PetscContainerSetPointer(c, ptap));
209: PetscCall(PetscContainerSetCtxDestroy(c, MatISContainerDestroyPtAP_Private));
210: PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP", (PetscObject)c));
211: PetscCall(PetscContainerDestroy(&c));
212: ptap->fill = fill;
214: PetscCall(MatISGetLocalToGlobalMapping(A, &rl2g, &cl2g));
216: PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &ibs));
217: PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &N));
218: PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &garray));
219: PetscCall(ISCreateBlock(comm, ibs, N / ibs, garray, PETSC_COPY_VALUES, &ptap->ris0));
220: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &garray));
222: PetscCall(MatCreateSubMatrix(P, ptap->ris0, NULL, MAT_INITIAL_MATRIX, &PT));
223: PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis0));
224: PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis0, &Ccl2g));
225: PetscCall(MatDestroy(&PT));
227: Crl2g = NULL;
228: if (rl2g != cl2g) { /* unsymmetric A mapping */
229: PetscBool same, lsame = PETSC_FALSE;
230: PetscInt N1, ibs1;
232: PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &N1));
233: PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &ibs1));
234: PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &garray));
235: PetscCall(ISCreateBlock(comm, ibs, N1 / ibs, garray, PETSC_COPY_VALUES, &ptap->ris1));
236: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &garray));
237: if (ibs1 == ibs && N1 == N) { /* check if the l2gmaps are the same */
238: const PetscInt *i1, *i2;
240: PetscCall(ISBlockGetIndices(ptap->ris0, &i1));
241: PetscCall(ISBlockGetIndices(ptap->ris1, &i2));
242: PetscCall(PetscArraycmp(i1, i2, N, &lsame));
243: }
244: PetscCallMPI(MPIU_Allreduce(&lsame, &same, 1, MPI_C_BOOL, MPI_LAND, comm));
245: if (same) {
246: PetscCall(ISDestroy(&ptap->ris1));
247: } else {
248: PetscCall(MatCreateSubMatrix(P, ptap->ris1, NULL, MAT_INITIAL_MATRIX, &PT));
249: PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis1));
250: PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis1, &Crl2g));
251: PetscCall(MatDestroy(&PT));
252: }
253: }
254: /* Not sure about this
255: if (!Crl2g) {
256: PetscCall(MatGetBlockSize(C,&ibs));
257: PetscCall(ISLocalToGlobalMappingSetBlockSize(Ccl2g,ibs));
258: }
259: */
260: PetscCall(MatSetLocalToGlobalMapping(C, Crl2g ? Crl2g : Ccl2g, Ccl2g));
261: PetscCall(ISLocalToGlobalMappingDestroy(&Crl2g));
262: PetscCall(ISLocalToGlobalMappingDestroy(&Ccl2g));
264: C->ops->ptapnumeric = MatPtAPNumeric_IS_XAIJ;
265: PetscFunctionReturn(PETSC_SUCCESS);
266: }
268: static PetscErrorCode MatProductSymbolic_PtAP_IS_XAIJ(Mat C)
269: {
270: Mat_Product *product = C->product;
271: Mat A = product->A, P = product->B;
272: PetscReal fill = product->fill;
274: PetscFunctionBegin;
275: PetscCall(MatPtAPSymbolic_IS_XAIJ(A, P, fill, C));
276: C->ops->productnumeric = MatProductNumeric_PtAP;
277: PetscFunctionReturn(PETSC_SUCCESS);
278: }
280: static PetscErrorCode MatProductSetFromOptions_IS_XAIJ_PtAP(Mat C)
281: {
282: PetscFunctionBegin;
283: C->ops->productsymbolic = MatProductSymbolic_PtAP_IS_XAIJ;
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: PETSC_INTERN PetscErrorCode MatProductSetFromOptions_IS_XAIJ(Mat C)
288: {
289: Mat_Product *product = C->product;
291: PetscFunctionBegin;
292: if (product->type == MATPRODUCT_PtAP) PetscCall(MatProductSetFromOptions_IS_XAIJ_PtAP(C));
293: PetscFunctionReturn(PETSC_SUCCESS);
294: }
296: static PetscErrorCode MatISContainerDestroyFields_Private(void **ptr)
297: {
298: MatISLocalFields lf = (MatISLocalFields)*ptr;
299: PetscInt i;
301: PetscFunctionBegin;
302: for (i = 0; i < lf->nr; i++) PetscCall(ISDestroy(&lf->rf[i]));
303: for (i = 0; i < lf->nc; i++) PetscCall(ISDestroy(&lf->cf[i]));
304: PetscCall(PetscFree2(lf->rf, lf->cf));
305: PetscCall(PetscFree(lf));
306: PetscFunctionReturn(PETSC_SUCCESS);
307: }
309: static PetscErrorCode MatConvert_SeqXAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
310: {
311: Mat B, lB;
313: PetscFunctionBegin;
314: if (reuse != MAT_REUSE_MATRIX) {
315: ISLocalToGlobalMapping rl2g, cl2g;
316: PetscInt bs;
317: IS is;
319: PetscCall(MatGetBlockSize(A, &bs));
320: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->n / bs, 0, 1, &is));
321: if (bs > 1) {
322: IS is2;
323: PetscInt i, *aux;
325: PetscCall(ISGetLocalSize(is, &i));
326: PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
327: PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
328: PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
329: PetscCall(ISDestroy(&is));
330: is = is2;
331: }
332: PetscCall(ISSetIdentity(is));
333: PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
334: PetscCall(ISDestroy(&is));
335: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->n / bs, 0, 1, &is));
336: if (bs > 1) {
337: IS is2;
338: PetscInt i, *aux;
340: PetscCall(ISGetLocalSize(is, &i));
341: PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
342: PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
343: PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
344: PetscCall(ISDestroy(&is));
345: is = is2;
346: }
347: PetscCall(ISSetIdentity(is));
348: PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
349: PetscCall(ISDestroy(&is));
350: PetscCall(MatCreateIS(PetscObjectComm((PetscObject)A), bs, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N, rl2g, cl2g, &B));
351: PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
352: PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
353: PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &lB));
354: if (reuse == MAT_INITIAL_MATRIX) *newmat = B;
355: } else {
356: B = *newmat;
357: PetscCall(PetscObjectReference((PetscObject)A));
358: lB = A;
359: }
360: PetscCall(MatISSetLocalMat(B, lB));
361: PetscCall(MatDestroy(&lB));
362: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
363: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
364: if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &B));
365: PetscFunctionReturn(PETSC_SUCCESS);
366: }
368: static PetscErrorCode MatISScaleDisassembling_Private(Mat A)
369: {
370: Mat_IS *matis = (Mat_IS *)A->data;
371: PetscScalar *aa;
372: const PetscInt *ii, *jj;
373: PetscInt i, n, m;
374: PetscInt *ecount, **eneighs;
375: PetscBool flg;
377: PetscFunctionBegin;
378: PetscCall(MatGetRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
379: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
380: PetscCall(ISLocalToGlobalMappingGetNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
381: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %" PetscInt_FMT " != %" PetscInt_FMT, m, n);
382: PetscCall(MatSeqAIJGetArray(matis->A, &aa));
383: for (i = 0; i < n; i++) {
384: if (ecount[i] > 1) {
385: PetscInt j;
387: for (j = ii[i]; j < ii[i + 1]; j++) {
388: PetscInt i2 = jj[j], p, p2;
389: PetscReal scal = 0.0;
391: for (p = 0; p < ecount[i]; p++) {
392: for (p2 = 0; p2 < ecount[i2]; p2++) {
393: if (eneighs[i][p] == eneighs[i2][p2]) {
394: scal += 1.0;
395: break;
396: }
397: }
398: }
399: if (scal) aa[j] /= scal;
400: }
401: }
402: }
403: PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
404: PetscCall(MatSeqAIJRestoreArray(matis->A, &aa));
405: PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
406: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
407: PetscFunctionReturn(PETSC_SUCCESS);
408: }
410: typedef enum {
411: MAT_IS_DISASSEMBLE_L2G_NATURAL,
412: MAT_IS_DISASSEMBLE_L2G_MAT,
413: MAT_IS_DISASSEMBLE_L2G_ND
414: } MatISDisassemblel2gType;
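/* How the local-to-global map for disassembling is chosen (see the switch below):
   NATURAL derives it from the diagonal/off-diagonal column layout of the
   MPIAIJ/MPIBAIJ matrix, MAT reuses the mapping already attached to the matrix,
   and ND builds overlapping subdomains from a nested-dissection partitioning of
   the adjacency graph. */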
416: static PetscErrorCode MatMPIXAIJComputeLocalToGlobalMapping_Private(Mat A, ISLocalToGlobalMapping *l2g)
417: {
418: Mat Ad, Ao;
419: IS is, ndmap, ndsub;
420: MPI_Comm comm;
421: const PetscInt *garray, *ndmapi;
422: PetscInt bs, i, cnt, nl, *ncount, *ndmapc;
423: PetscBool ismpiaij, ismpibaij;
424: const char *const MatISDisassemblel2gTypes[] = {"NATURAL", "MAT", "ND", "MatISDisassemblel2gType", "MAT_IS_DISASSEMBLE_L2G_", NULL};
425: MatISDisassemblel2gType mode = MAT_IS_DISASSEMBLE_L2G_NATURAL;
426: MatPartitioning part;
427: PetscSF sf;
428: PetscObject dm;
430: PetscFunctionBegin;
431: PetscOptionsBegin(PetscObjectComm((PetscObject)A), ((PetscObject)A)->prefix, "MatIS l2g disassembling options", "Mat");
432: PetscCall(PetscOptionsEnum("-mat_is_disassemble_l2g_type", "Type of local-to-global mapping to be used for disassembling", "MatISDisassemblel2gType", MatISDisassemblel2gTypes, (PetscEnum)mode, (PetscEnum *)&mode, NULL));
433: PetscOptionsEnd();
434: if (mode == MAT_IS_DISASSEMBLE_L2G_MAT) {
435: PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
436: PetscFunctionReturn(PETSC_SUCCESS);
437: }
438: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
439: PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
440: PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
441: PetscCall(MatGetBlockSize(A, &bs));
442: switch (mode) {
443: case MAT_IS_DISASSEMBLE_L2G_ND:
444: PetscCall(MatPartitioningCreate(comm, &part));
445: PetscCall(MatPartitioningSetAdjacency(part, A));
446: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part, ((PetscObject)A)->prefix));
447: PetscCall(MatPartitioningSetFromOptions(part));
448: PetscCall(MatPartitioningApplyND(part, &ndmap));
449: PetscCall(MatPartitioningDestroy(&part));
450: PetscCall(ISBuildTwoSided(ndmap, NULL, &ndsub));
451: PetscCall(MatMPIAIJSetUseScalableIncreaseOverlap(A, PETSC_TRUE));
452: PetscCall(MatIncreaseOverlap(A, 1, &ndsub, 1));
453: PetscCall(ISLocalToGlobalMappingCreateIS(ndsub, l2g));
455: /* it may happen that a separator node is not properly shared */
456: PetscCall(ISLocalToGlobalMappingGetNodeInfo(*l2g, &nl, &ncount, NULL));
457: PetscCall(PetscSFCreate(comm, &sf));
458: PetscCall(ISLocalToGlobalMappingGetIndices(*l2g, &garray));
459: PetscCall(PetscSFSetGraphLayout(sf, A->rmap, nl, NULL, PETSC_OWN_POINTER, garray));
460: PetscCall(ISLocalToGlobalMappingRestoreIndices(*l2g, &garray));
461: PetscCall(PetscCalloc1(A->rmap->n, &ndmapc));
462: PetscCall(PetscSFReduceBegin(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
463: PetscCall(PetscSFReduceEnd(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
464: PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(*l2g, NULL, &ncount, NULL));
465: PetscCall(ISGetIndices(ndmap, &ndmapi));
466: for (i = 0, cnt = 0; i < A->rmap->n; i++)
467: if (ndmapi[i] < 0 && ndmapc[i] < 2) cnt++;
469: PetscCallMPI(MPIU_Allreduce(&cnt, &i, 1, MPIU_INT, MPI_MAX, comm));
470: if (i) { /* we detected isolated separator nodes */
471: Mat A2, A3;
472: IS *workis, is2;
473: PetscScalar *vals;
474: PetscInt gcnt = i, *dnz, *onz, j, *lndmapi;
475: ISLocalToGlobalMapping ll2g;
476: PetscBool flg;
477: const PetscInt *ii, *jj;
479: /* communicate global ids of separators */
480: MatPreallocateBegin(comm, A->rmap->n, A->cmap->n, dnz, onz);
481: for (i = 0, cnt = 0; i < A->rmap->n; i++) dnz[i] = ndmapi[i] < 0 ? i + A->rmap->rstart : -1;
483: PetscCall(PetscMalloc1(nl, &lndmapi));
484: PetscCall(PetscSFBcastBegin(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));
486: /* compute adjacency of isolated separator nodes */
487: PetscCall(PetscMalloc1(gcnt, &workis));
488: for (i = 0, cnt = 0; i < A->rmap->n; i++) {
489: if (ndmapi[i] < 0 && ndmapc[i] < 2) PetscCall(ISCreateStride(comm, 1, i + A->rmap->rstart, 1, &workis[cnt++]));
490: }
491: for (i = cnt; i < gcnt; i++) PetscCall(ISCreateStride(comm, 0, 0, 1, &workis[i]));
492: for (i = 0; i < gcnt; i++) {
493: PetscCall(PetscObjectSetName((PetscObject)workis[i], "ISOLATED"));
494: PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
495: }
497: /* no communications since all the ISes correspond to locally owned rows */
498: PetscCall(MatIncreaseOverlap(A, gcnt, workis, 1));
500: /* end communication of global ids of separators */
501: PetscCall(PetscSFBcastEnd(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));
503: /* communicate new layers: create a matrix and transpose it */
504: PetscCall(PetscArrayzero(dnz, A->rmap->n));
505: PetscCall(PetscArrayzero(onz, A->rmap->n));
506: for (i = 0, j = 0; i < A->rmap->n; i++) {
507: if (ndmapi[i] < 0 && ndmapc[i] < 2) {
508: const PetscInt *idxs;
509: PetscInt s;
511: PetscCall(ISGetLocalSize(workis[j], &s));
512: PetscCall(ISGetIndices(workis[j], &idxs));
513: PetscCall(MatPreallocateSet(i + A->rmap->rstart, s, idxs, dnz, onz));
514: j++;
515: }
516: }
517: PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);
519: for (i = 0; i < gcnt; i++) {
520: PetscCall(PetscObjectSetName((PetscObject)workis[i], "EXTENDED"));
521: PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
522: }
524: for (i = 0, j = 0; i < A->rmap->n; i++) j = PetscMax(j, dnz[i] + onz[i]);
525: PetscCall(PetscMalloc1(j, &vals));
526: for (i = 0; i < j; i++) vals[i] = 1.0;
528: PetscCall(MatCreate(comm, &A2));
529: PetscCall(MatSetType(A2, MATMPIAIJ));
530: PetscCall(MatSetSizes(A2, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
531: PetscCall(MatMPIAIJSetPreallocation(A2, 0, dnz, 0, onz));
532: PetscCall(MatSetOption(A2, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
533: for (i = 0, j = 0; i < A2->rmap->n; i++) {
534: PetscInt row = i + A2->rmap->rstart, s = dnz[i] + onz[i];
535: const PetscInt *idxs;
537: if (s) {
538: PetscCall(ISGetIndices(workis[j], &idxs));
539: PetscCall(MatSetValues(A2, 1, &row, s, idxs, vals, INSERT_VALUES));
540: PetscCall(ISRestoreIndices(workis[j], &idxs));
541: j++;
542: }
543: }
544: PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);
545: PetscCall(PetscFree(vals));
546: PetscCall(MatAssemblyBegin(A2, MAT_FINAL_ASSEMBLY));
547: PetscCall(MatAssemblyEnd(A2, MAT_FINAL_ASSEMBLY));
548: PetscCall(MatTranspose(A2, MAT_INPLACE_MATRIX, &A2));
550: /* extract submatrix corresponding to the coupling "owned separators" x "isolated separators" */
551: for (i = 0, j = 0; i < nl; i++)
552: if (lndmapi[i] >= 0) lndmapi[j++] = lndmapi[i];
553: PetscCall(ISCreateGeneral(comm, j, lndmapi, PETSC_USE_POINTER, &is));
554: PetscCall(MatMPIAIJGetLocalMatCondensed(A2, MAT_INITIAL_MATRIX, &is, NULL, &A3));
555: PetscCall(ISDestroy(&is));
556: PetscCall(MatDestroy(&A2));
558: /* extend the local-to-global map to include connected isolated separators */
559: PetscCall(PetscObjectQuery((PetscObject)A3, "_petsc_GetLocalMatCondensed_iscol", (PetscObject *)&is));
560: PetscCheck(is, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing column map");
561: PetscCall(ISLocalToGlobalMappingCreateIS(is, &ll2g));
562: PetscCall(MatGetRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
563: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
564: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ii[i], jj, PETSC_COPY_VALUES, &is));
565: PetscCall(MatRestoreRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
566: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
567: PetscCall(ISLocalToGlobalMappingApplyIS(ll2g, is, &is2));
568: PetscCall(ISDestroy(&is));
569: PetscCall(ISLocalToGlobalMappingDestroy(&ll2g));
571: /* add new nodes to the local-to-global map */
572: PetscCall(ISLocalToGlobalMappingDestroy(l2g));
573: PetscCall(ISExpand(ndsub, is2, &is));
574: PetscCall(ISDestroy(&is2));
575: PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
576: PetscCall(ISDestroy(&is));
578: PetscCall(MatDestroy(&A3));
579: PetscCall(PetscFree(lndmapi));
580: MatPreallocateEnd(dnz, onz);
581: for (i = 0; i < gcnt; i++) PetscCall(ISDestroy(&workis[i]));
582: PetscCall(PetscFree(workis));
583: }
584: PetscCall(ISRestoreIndices(ndmap, &ndmapi));
585: PetscCall(PetscSFDestroy(&sf));
586: PetscCall(PetscFree(ndmapc));
587: PetscCall(ISDestroy(&ndmap));
588: PetscCall(ISDestroy(&ndsub));
589: PetscCall(ISLocalToGlobalMappingSetBlockSize(*l2g, bs));
590: PetscCall(ISLocalToGlobalMappingViewFromOptions(*l2g, NULL, "-mat_is_nd_l2g_view"));
591: break;
592: case MAT_IS_DISASSEMBLE_L2G_NATURAL:
593: PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_dm", &dm));
594: if (dm) { /* if a matrix comes from a DM, we can most likely use its l2gmap, if present */
595: PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
596: PetscCall(PetscObjectReference((PetscObject)*l2g));
597: if (*l2g) PetscFunctionReturn(PETSC_SUCCESS);
598: }
599: if (ismpiaij) {
600: PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
601: } else if (ismpibaij) {
602: PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
603: } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
604: if (A->rmap->n) {
605: PetscInt dc, oc, stc, *aux;
607: PetscCall(MatGetLocalSize(Ad, NULL, &dc));
608: PetscCall(MatGetLocalSize(Ao, NULL, &oc));
609: PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present");
610: PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
611: PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
612: for (i = 0; i < dc / bs; i++) aux[i] = i + stc / bs;
613: for (i = 0; i < oc / bs; i++) aux[i + dc / bs] = (ismpiaij ? garray[i * bs] / bs : garray[i]);
614: PetscCall(ISCreateBlock(comm, bs, (dc + oc) / bs, aux, PETSC_OWN_POINTER, &is));
615: } else {
616: PetscCall(ISCreateBlock(comm, 1, 0, NULL, PETSC_OWN_POINTER, &is));
617: }
618: PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
619: PetscCall(ISDestroy(&is));
620: break;
621: default:
622: SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Unsupported l2g disassembling type %d", mode);
623: }
624: PetscFunctionReturn(PETSC_SUCCESS);
625: }
627: PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
628: {
629: Mat lA, Ad, Ao, B = NULL;
630: ISLocalToGlobalMapping rl2g, cl2g;
631: IS is;
632: MPI_Comm comm;
633: void *ptrs[2];
634: const char *names[2] = {"_convert_csr_aux", "_convert_csr_data"};
635: const PetscInt *garray;
636: PetscScalar *dd, *od, *aa, *data;
637: const PetscInt *di, *dj, *oi, *oj;
638: const PetscInt *odi, *odj, *ooi, *ooj;
639: PetscInt *aux, *ii, *jj;
640: PetscInt rbs, cbs, lc, dr, dc, oc, str, stc, nnz, i, jd, jo, cum;
641: PetscBool flg, ismpiaij, ismpibaij, was_inplace = PETSC_FALSE, cong;
642: PetscMPIInt size;
644: PetscFunctionBegin;
645: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
646: PetscCallMPI(MPI_Comm_size(comm, &size));
647: if (size == 1) {
648: PetscCall(MatConvert_SeqXAIJ_IS(A, type, reuse, newmat));
649: PetscFunctionReturn(PETSC_SUCCESS);
650: }
651: PetscCall(MatGetBlockSizes(A, &rbs, &cbs));
652: PetscCall(MatHasCongruentLayouts(A, &cong));
653: if (reuse != MAT_REUSE_MATRIX && cong && rbs == cbs) {
654: PetscCall(MatMPIXAIJComputeLocalToGlobalMapping_Private(A, &rl2g));
655: PetscCall(MatCreate(comm, &B));
656: PetscCall(MatSetType(B, MATIS));
657: PetscCall(MatSetSizes(B, A->rmap->n, A->rmap->n, A->rmap->N, A->rmap->N));
658: PetscCall(MatSetLocalToGlobalMapping(B, rl2g, rl2g));
659: PetscCall(MatSetBlockSizes(B, rbs, rbs));
660: PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
661: if (reuse == MAT_INPLACE_MATRIX) was_inplace = PETSC_TRUE;
662: reuse = MAT_REUSE_MATRIX;
663: }
664: if (reuse == MAT_REUSE_MATRIX) {
665: Mat *newlA, lA;
666: IS rows, cols;
667: const PetscInt *ridx, *cidx;
668: PetscInt nr, nc;
670: if (!B) B = *newmat;
671: PetscCall(MatISGetLocalToGlobalMapping(B, &rl2g, &cl2g));
672: PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &ridx));
673: PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &cidx));
674: PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &nr));
675: PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &nc));
676: PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &rbs));
677: PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &cbs));
678: PetscCall(ISCreateBlock(comm, rbs, nr / rbs, ridx, PETSC_USE_POINTER, &rows));
679: if (rl2g != cl2g) {
680: PetscCall(ISCreateBlock(comm, cbs, nc / cbs, cidx, PETSC_USE_POINTER, &cols));
681: } else {
682: PetscCall(PetscObjectReference((PetscObject)rows));
683: cols = rows;
684: }
685: PetscCall(MatISGetLocalMat(B, &lA));
686: PetscCall(MatCreateSubMatrices(A, 1, &rows, &cols, MAT_INITIAL_MATRIX, &newlA));
687: PetscCall(MatConvert(newlA[0], MATSEQAIJ, MAT_INPLACE_MATRIX, &newlA[0]));
688: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &ridx));
689: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &cidx));
690: PetscCall(ISDestroy(&rows));
691: PetscCall(ISDestroy(&cols));
692: if (!lA->preallocated) { /* first time */
693: PetscCall(MatDuplicate(newlA[0], MAT_COPY_VALUES, &lA));
694: PetscCall(MatISSetLocalMat(B, lA));
695: PetscCall(PetscObjectDereference((PetscObject)lA));
696: }
697: PetscCall(MatCopy(newlA[0], lA, SAME_NONZERO_PATTERN));
698: PetscCall(MatDestroySubMatrices(1, &newlA));
699: PetscCall(MatISScaleDisassembling_Private(B));
700: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
701: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
702: if (was_inplace) PetscCall(MatHeaderReplace(A, &B));
703: else *newmat = B;
704: PetscFunctionReturn(PETSC_SUCCESS);
705: }
706: /* general case, just compress out the column space */
707: PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
708: PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
709: if (ismpiaij) {
710: cbs = 1; /* We cannot guarantee the off-process matrix will respect the column block size */
711: PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
712: } else if (ismpibaij) {
713: PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
714: PetscCall(MatConvert(Ad, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ad));
715: PetscCall(MatConvert(Ao, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ao));
716: } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
717: PetscCall(MatSeqAIJGetArray(Ad, &dd));
718: PetscCall(MatSeqAIJGetArray(Ao, &od));
720: /* access relevant information from MPIAIJ */
721: PetscCall(MatGetOwnershipRange(A, &str, NULL));
722: PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
723: PetscCall(MatGetLocalSize(Ad, &dr, &dc));
724: PetscCall(MatGetLocalSize(Ao, NULL, &oc));
725: PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present");
727: PetscCall(MatGetRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &di, &dj, &flg));
728: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
729: PetscCall(MatGetRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &oi, &oj, &flg));
730: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
731: nnz = di[dr] + oi[dr];
732: /* store original pointers to be restored later */
733: odi = di;
734: odj = dj;
735: ooi = oi;
736: ooj = oj;
738: /* generate l2g maps for rows and cols */
739: PetscCall(ISCreateStride(comm, dr / rbs, str / rbs, 1, &is));
740: if (rbs > 1) {
741: IS is2;
743: PetscCall(ISGetLocalSize(is, &i));
744: PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
745: PetscCall(ISCreateBlock(comm, rbs, i, aux, PETSC_COPY_VALUES, &is2));
746: PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
747: PetscCall(ISDestroy(&is));
748: is = is2;
749: }
750: PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
751: PetscCall(ISDestroy(&is));
752: if (dr) {
753: PetscCall(PetscMalloc1((dc + oc) / cbs, &aux));
754: for (i = 0; i < dc / cbs; i++) aux[i] = i + stc / cbs;
755: for (i = 0; i < oc / cbs; i++) aux[i + dc / cbs] = garray[i];
756: PetscCall(ISCreateBlock(comm, cbs, (dc + oc) / cbs, aux, PETSC_OWN_POINTER, &is));
757: lc = dc + oc;
758: } else {
759: PetscCall(ISCreateBlock(comm, cbs, 0, NULL, PETSC_OWN_POINTER, &is));
760: lc = 0;
761: }
762: PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
763: PetscCall(ISDestroy(&is));
765: /* create MATIS object */
766: PetscCall(MatCreate(comm, &B));
767: PetscCall(MatSetSizes(B, dr, dc, PETSC_DECIDE, PETSC_DECIDE));
768: PetscCall(MatSetType(B, MATIS));
769: PetscCall(MatSetBlockSizes(B, rbs, cbs));
770: PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
771: PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
772: PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
774: /* merge local matrices */
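/* The local matrix is the row-wise merge of the diagonal (Ad) and off-diagonal (Ao)
   blocks: local columns [0, dc) come from Ad, while Ao column indices are shifted
   by dc so that they address the ghost columns listed in garray (e.g. an Ao entry
   in its local column 2 ends up in local column dc + 2). */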
775: PetscCall(PetscMalloc1(nnz + dr + 1, &aux));
776: PetscCall(PetscMalloc1(nnz, &data));
777: ii = aux;
778: jj = aux + dr + 1;
779: aa = data;
780: *ii = *(di++) + *(oi++);
781: for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
782: for (; jd < *di; jd++) {
783: *jj++ = *dj++;
784: *aa++ = *dd++;
785: }
786: for (; jo < *oi; jo++) {
787: *jj++ = *oj++ + dc;
788: *aa++ = *od++;
789: }
790: *(++ii) = *(di++) + *(oi++);
791: }
792: for (; cum < dr; cum++) *(++ii) = nnz;
794: PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &odi, &odj, &flg));
795: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
796: PetscCall(MatRestoreRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &ooi, &ooj, &flg));
797: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
798: PetscCall(MatSeqAIJRestoreArray(Ad, &dd));
799: PetscCall(MatSeqAIJRestoreArray(Ao, &od));
801: ii = aux;
802: jj = aux + dr + 1;
803: aa = data;
804: PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, lc, ii, jj, aa, &lA));
806: /* create containers to destroy the data */
807: ptrs[0] = aux;
808: ptrs[1] = data;
809: for (i = 0; i < 2; i++) PetscCall(PetscObjectContainerCompose((PetscObject)lA, names[i], ptrs[i], PetscCtxDestroyDefault));
810: if (ismpibaij) { /* destroy converted local matrices */
811: PetscCall(MatDestroy(&Ad));
812: PetscCall(MatDestroy(&Ao));
813: }
815: /* finalize matrix */
816: PetscCall(MatISSetLocalMat(B, lA));
817: PetscCall(MatDestroy(&lA));
818: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
819: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
820: if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &B));
821: else *newmat = B;
822: PetscFunctionReturn(PETSC_SUCCESS);
823: }
825: PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
826: {
827: Mat **nest, *snest, **rnest, lA, B;
828: IS *iscol, *isrow, *islrow, *islcol;
829: ISLocalToGlobalMapping rl2g, cl2g;
830: MPI_Comm comm;
831: PetscInt *lr, *lc, *l2gidxs;
832: PetscInt i, j, nr, nc, rbs, cbs;
833: PetscBool convert, lreuse, *istrans;
834: PetscBool3 allow_repeated = PETSC_BOOL3_UNKNOWN;
836: PetscFunctionBegin;
837: PetscCall(MatNestGetSubMats(A, &nr, &nc, &nest));
838: lreuse = PETSC_FALSE;
839: rnest = NULL;
840: if (reuse == MAT_REUSE_MATRIX) {
841: PetscBool ismatis, isnest;
843: PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis));
844: PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_USER, "Cannot reuse matrix of type %s", ((PetscObject)*newmat)->type_name);
845: PetscCall(MatISGetLocalMat(*newmat, &lA));
846: PetscCall(PetscObjectTypeCompare((PetscObject)lA, MATNEST, &isnest));
847: if (isnest) {
848: PetscCall(MatNestGetSubMats(lA, &i, &j, &rnest));
849: lreuse = (PetscBool)(i == nr && j == nc);
850: if (!lreuse) rnest = NULL;
851: }
852: }
853: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
854: PetscCall(PetscCalloc2(nr, &lr, nc, &lc));
855: PetscCall(PetscCalloc6(nr, &isrow, nc, &iscol, nr, &islrow, nc, &islcol, nr * nc, &snest, nr * nc, &istrans));
856: PetscCall(MatNestGetISs(A, isrow, iscol));
857: for (i = 0; i < nr; i++) {
858: for (j = 0; j < nc; j++) {
859: PetscBool ismatis, sallow;
860: PetscInt l1, l2, lb1, lb2, ij = i * nc + j;
862: /* Null matrix pointers are allowed in MATNEST */
863: if (!nest[i][j]) continue;
865: /* Nested matrices should be of type MATIS */
866: PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATTRANSPOSEVIRTUAL, &istrans[ij]));
867: if (istrans[ij]) {
868: Mat T, lT;
869: PetscCall(MatTransposeGetMat(nest[i][j], &T));
870: PetscCall(PetscObjectTypeCompare((PetscObject)T, MATIS, &ismatis));
871: PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") (transposed) is not of type MATIS", i, j);
872: PetscCall(MatISGetAllowRepeated(T, &sallow));
873: PetscCall(MatISGetLocalMat(T, &lT));
874: PetscCall(MatCreateTranspose(lT, &snest[ij]));
875: } else {
876: PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATIS, &ismatis));
877: PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") is not of type MATIS", i, j);
878: PetscCall(MatISGetAllowRepeated(nest[i][j], &sallow));
879: PetscCall(MatISGetLocalMat(nest[i][j], &snest[ij]));
880: }
881: if (allow_repeated == PETSC_BOOL3_UNKNOWN) allow_repeated = PetscBoolToBool3(sallow);
882: PetscCheck(sallow == PetscBool3ToBool(allow_repeated), comm, PETSC_ERR_SUP, "Cannot mix repeated and non repeated maps");
884: /* Check compatibility of local sizes */
885: PetscCall(MatGetSize(snest[ij], &l1, &l2));
886: PetscCall(MatGetBlockSizes(snest[ij], &lb1, &lb2));
887: if (!l1 || !l2) continue;
888: PetscCheck(!lr[i] || l1 == lr[i], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lr[i], l1);
889: PetscCheck(!lc[j] || l2 == lc[j], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lc[j], l2);
890: lr[i] = l1;
891: lc[j] = l2;
893: /* check compatibility for local matrix reuse */
894: if (rnest && !rnest[i][j] != !snest[ij]) lreuse = PETSC_FALSE;
895: }
896: }
898: if (PetscDefined(USE_DEBUG)) {
899: /* Check compatibility of l2g maps for rows */
900: for (i = 0; i < nr; i++) {
901: rl2g = NULL;
902: for (j = 0; j < nc; j++) {
903: PetscInt n1, n2;
905: if (!nest[i][j]) continue;
906: if (istrans[i * nc + j]) {
907: Mat T;
909: PetscCall(MatTransposeGetMat(nest[i][j], &T));
910: PetscCall(MatISGetLocalToGlobalMapping(T, NULL, &cl2g));
911: } else {
912: PetscCall(MatISGetLocalToGlobalMapping(nest[i][j], &cl2g, NULL));
913: }
914: PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1));
915: if (!n1) continue;
916: if (!rl2g) {
917: rl2g = cl2g;
918: } else {
919: const PetscInt *idxs1, *idxs2;
920: PetscBool same;
922: PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2));
923: PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, n1, n2);
924: PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1));
925: PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2));
926: PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same));
927: PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1));
928: PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2));
929: PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap", i, j);
930: }
931: }
932: }
933: /* Check compatibility of l2g maps for columns */
934: for (i = 0; i < nc; i++) {
935: rl2g = NULL;
936: for (j = 0; j < nr; j++) {
937: PetscInt n1, n2;
939: if (!nest[j][i]) continue;
940: if (istrans[j * nc + i]) {
941: Mat T;
943: PetscCall(MatTransposeGetMat(nest[j][i], &T));
944: PetscCall(MatISGetLocalToGlobalMapping(T, &cl2g, NULL));
945: } else {
946: PetscCall(MatISGetLocalToGlobalMapping(nest[j][i], NULL, &cl2g));
947: }
948: PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1));
949: if (!n1) continue;
950: if (!rl2g) {
951: rl2g = cl2g;
952: } else {
953: const PetscInt *idxs1, *idxs2;
954: PetscBool same;
956: PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2));
957: PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, j, i, n1, n2);
958: PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1));
959: PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2));
960: PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same));
961: PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1));
962: PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2));
963: PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap", j, i);
964: }
965: }
966: }
967: }
969: B = NULL;
970: if (reuse != MAT_REUSE_MATRIX) {
971: PetscInt stl;
973: /* Create l2g map for the rows of the new matrix and index sets for the local MATNEST */
974: for (i = 0, stl = 0; i < nr; i++) stl += lr[i];
975: PetscCall(PetscMalloc1(stl, &l2gidxs));
976: for (i = 0, stl = 0; i < nr; i++) {
977: Mat usedmat;
978: Mat_IS *matis;
979: const PetscInt *idxs;
981: /* local IS for local NEST */
982: PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i]));
984: /* l2gmap */
985: j = 0;
986: usedmat = nest[i][j];
987: while (!usedmat && j < nc - 1) usedmat = nest[i][++j];
988: PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid row mat");
990: if (istrans[i * nc + j]) {
991: Mat T;
992: PetscCall(MatTransposeGetMat(usedmat, &T));
993: usedmat = T;
994: }
995: matis = (Mat_IS *)usedmat->data;
996: PetscCall(ISGetIndices(isrow[i], &idxs));
997: if (istrans[i * nc + j]) {
998: PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
999: PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1000: } else {
1001: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1002: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1003: }
1004: PetscCall(ISRestoreIndices(isrow[i], &idxs));
1005: stl += lr[i];
1006: }
1007: PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &rl2g));
1009: /* Create l2g map for columns of the new matrix and index sets for the local MATNEST */
1010: for (i = 0, stl = 0; i < nc; i++) stl += lc[i];
1011: PetscCall(PetscMalloc1(stl, &l2gidxs));
1012: for (i = 0, stl = 0; i < nc; i++) {
1013: Mat usedmat;
1014: Mat_IS *matis;
1015: const PetscInt *idxs;
1017: /* local IS for local NEST */
1018: PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i]));
1020: /* l2gmap */
1021: j = 0;
1022: usedmat = nest[j][i];
1023: while (!usedmat && j < nr - 1) usedmat = nest[++j][i];
1024: PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid column mat");
1025: if (istrans[j * nc + i]) {
1026: Mat T;
1027: PetscCall(MatTransposeGetMat(usedmat, &T));
1028: usedmat = T;
1029: }
1030: matis = (Mat_IS *)usedmat->data;
1031: PetscCall(ISGetIndices(iscol[i], &idxs));
1032: if (istrans[j * nc + i]) {
1033: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1034: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1035: } else {
1036: PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1037: PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
1038: }
1039: PetscCall(ISRestoreIndices(iscol[i], &idxs));
1040: stl += lc[i];
1041: }
1042: PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &cl2g));
1044: /* Create MATIS */
1045: PetscCall(MatCreate(comm, &B));
1046: PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
1047: PetscCall(MatGetBlockSizes(A, &rbs, &cbs));
1048: PetscCall(MatSetBlockSizes(B, rbs, cbs));
1049: PetscCall(MatSetType(B, MATIS));
1050: PetscCall(MatISSetLocalMatType(B, MATNEST));
1051: PetscCall(MatISSetAllowRepeated(B, PetscBool3ToBool(allow_repeated)));
1052: { /* hack: avoid setup of scatters */
1053: Mat_IS *matis = (Mat_IS *)B->data;
1054: matis->islocalref = B;
1055: }
1056: PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
1057: PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
1058: PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
1059: PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA));
1060: PetscCall(MatNestSetVecType(lA, VECNEST));
1061: for (i = 0; i < nr * nc; i++) {
1062: if (istrans[i]) PetscCall(MatDestroy(&snest[i]));
1063: }
1064: PetscCall(MatISSetLocalMat(B, lA));
1065: PetscCall(MatDestroy(&lA));
1066: { /* hack: setup of scatters done here */
1067: Mat_IS *matis = (Mat_IS *)B->data;
1069: matis->islocalref = NULL;
1070: PetscCall(MatISSetUpScatters_Private(B));
1071: }
1072: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
1073: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
1074: if (reuse == MAT_INPLACE_MATRIX) {
1075: PetscCall(MatHeaderReplace(A, &B));
1076: } else {
1077: *newmat = B;
1078: }
1079: } else {
1080: if (lreuse) {
1081: PetscCall(MatISGetLocalMat(*newmat, &lA));
1082: for (i = 0; i < nr; i++) {
1083: for (j = 0; j < nc; j++) {
1084: if (snest[i * nc + j]) {
1085: PetscCall(MatNestSetSubMat(lA, i, j, snest[i * nc + j]));
1086: if (istrans[i * nc + j]) PetscCall(MatDestroy(&snest[i * nc + j]));
1087: }
1088: }
1089: }
1090: } else {
1091: PetscInt stl;
1092: for (i = 0, stl = 0; i < nr; i++) {
1093: PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i]));
1094: stl += lr[i];
1095: }
1096: for (i = 0, stl = 0; i < nc; i++) {
1097: PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i]));
1098: stl += lc[i];
1099: }
1100: PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA));
1101: for (i = 0; i < nr * nc; i++) {
1102: if (istrans[i]) PetscCall(MatDestroy(&snest[i]));
1103: }
1104: PetscCall(MatISSetLocalMat(*newmat, lA));
1105: PetscCall(MatDestroy(&lA));
1106: }
1107: PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
1108: PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
1109: }
1111: /* Optionally convert the local matrix from MATNEST to AIJ format */
1112: convert = PETSC_FALSE;
1113: PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)A)->prefix, "-mat_is_convert_local_nest", &convert, NULL));
1114: if (convert) {
1115: Mat M;
1116: MatISLocalFields lf;
1118: PetscCall(MatISGetLocalMat(*newmat, &lA));
1119: PetscCall(MatConvert(lA, MATAIJ, MAT_INITIAL_MATRIX, &M));
1120: PetscCall(MatISSetLocalMat(*newmat, M));
1121: PetscCall(MatDestroy(&M));
1123: /* attach local fields to the matrix */
1124: PetscCall(PetscNew(&lf));
1125: PetscCall(PetscMalloc2(nr, &lf->rf, nc, &lf->cf));
1126: for (i = 0; i < nr; i++) {
1127: PetscInt n, st;
1129: PetscCall(ISGetLocalSize(islrow[i], &n));
1130: PetscCall(ISStrideGetInfo(islrow[i], &st, NULL));
1131: PetscCall(ISCreateStride(comm, n, st, 1, &lf->rf[i]));
1132: }
1133: for (i = 0; i < nc; i++) {
1134: PetscInt n, st;
1136: PetscCall(ISGetLocalSize(islcol[i], &n));
1137: PetscCall(ISStrideGetInfo(islcol[i], &st, NULL));
1138: PetscCall(ISCreateStride(comm, n, st, 1, &lf->cf[i]));
1139: }
1140: lf->nr = nr;
1141: lf->nc = nc;
1142: PetscCall(PetscObjectContainerCompose((PetscObject)*newmat, "_convert_nest_lfields", lf, MatISContainerDestroyFields_Private));
1143: }
1145: /* Free workspace */
1146: for (i = 0; i < nr; i++) PetscCall(ISDestroy(&islrow[i]));
1147: for (i = 0; i < nc; i++) PetscCall(ISDestroy(&islcol[i]));
1148: PetscCall(PetscFree6(isrow, iscol, islrow, islcol, snest, istrans));
1149: PetscCall(PetscFree2(lr, lc));
1150: PetscFunctionReturn(PETSC_SUCCESS);
1151: }
1153: static PetscErrorCode MatDiagonalScale_IS(Mat A, Vec l, Vec r)
1154: {
1155: Mat_IS *matis = (Mat_IS *)A->data;
1156: Vec ll, rr;
1157: const PetscScalar *Y, *X;
1158: PetscScalar *x, *y;
1160: PetscFunctionBegin;
1161: if (l) {
1162: ll = matis->y;
1163: PetscCall(VecGetArrayRead(l, &Y));
1164: PetscCall(VecGetArray(ll, &y));
1165: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE));
1166: } else {
1167: ll = NULL;
1168: }
1169: if (r) {
1170: rr = matis->x;
1171: PetscCall(VecGetArrayRead(r, &X));
1172: PetscCall(VecGetArray(rr, &x));
1173: PetscCall(PetscSFBcastBegin(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE));
1174: } else {
1175: rr = NULL;
1176: }
1177: if (ll) {
1178: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE));
1179: PetscCall(VecRestoreArrayRead(l, &Y));
1180: PetscCall(VecRestoreArray(ll, &y));
1181: }
1182: if (rr) {
1183: PetscCall(PetscSFBcastEnd(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE));
1184: PetscCall(VecRestoreArrayRead(r, &X));
1185: PetscCall(VecRestoreArray(rr, &x));
1186: }
1187: PetscCall(MatDiagonalScale(matis->A, ll, rr));
1188: PetscFunctionReturn(PETSC_SUCCESS);
1189: }
1191: static PetscErrorCode MatGetInfo_IS(Mat A, MatInfoType flag, MatInfo *ginfo)
1192: {
1193: Mat_IS *matis = (Mat_IS *)A->data;
1194: MatInfo info;
1195: PetscLogDouble isend[6], irecv[6];
1196: PetscInt bs;
1198: PetscFunctionBegin;
1199: PetscCall(MatGetBlockSize(A, &bs));
1200: if (matis->A->ops->getinfo) {
1201: PetscCall(MatGetInfo(matis->A, MAT_LOCAL, &info));
1202: isend[0] = info.nz_used;
1203: isend[1] = info.nz_allocated;
1204: isend[2] = info.nz_unneeded;
1205: isend[3] = info.memory;
1206: isend[4] = info.mallocs;
1207: } else {
1208: isend[0] = 0.;
1209: isend[1] = 0.;
1210: isend[2] = 0.;
1211: isend[3] = 0.;
1212: isend[4] = 0.;
1213: }
1214: isend[5] = matis->A->num_ass;
1215: if (flag == MAT_LOCAL) {
1216: ginfo->nz_used = isend[0];
1217: ginfo->nz_allocated = isend[1];
1218: ginfo->nz_unneeded = isend[2];
1219: ginfo->memory = isend[3];
1220: ginfo->mallocs = isend[4];
1221: ginfo->assemblies = isend[5];
1222: } else if (flag == MAT_GLOBAL_MAX) {
1223: PetscCallMPI(MPIU_Allreduce(isend, irecv, 6, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)A)));
1225: ginfo->nz_used = irecv[0];
1226: ginfo->nz_allocated = irecv[1];
1227: ginfo->nz_unneeded = irecv[2];
1228: ginfo->memory = irecv[3];
1229: ginfo->mallocs = irecv[4];
1230: ginfo->assemblies = irecv[5];
1231: } else if (flag == MAT_GLOBAL_SUM) {
1232: PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)A)));
1234: ginfo->nz_used = irecv[0];
1235: ginfo->nz_allocated = irecv[1];
1236: ginfo->nz_unneeded = irecv[2];
1237: ginfo->memory = irecv[3];
1238: ginfo->mallocs = irecv[4];
1239: ginfo->assemblies = A->num_ass;
1240: }
1241: ginfo->block_size = bs;
1242: ginfo->fill_ratio_given = 0;
1243: ginfo->fill_ratio_needed = 0;
1244: ginfo->factor_mallocs = 0;
1245: PetscFunctionReturn(PETSC_SUCCESS);
1246: }
1248: static PetscErrorCode MatTranspose_IS(Mat A, MatReuse reuse, Mat *B)
1249: {
1250: Mat C, lC, lA;
1252: PetscFunctionBegin;
1253: if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1254: if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
1255: ISLocalToGlobalMapping rl2g, cl2g;
1256: PetscBool allow_repeated;
1258: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
1259: PetscCall(MatSetSizes(C, A->cmap->n, A->rmap->n, A->cmap->N, A->rmap->N));
1260: PetscCall(MatSetBlockSizes(C, A->cmap->bs, A->rmap->bs));
1261: PetscCall(MatSetType(C, MATIS));
1262: PetscCall(MatISGetAllowRepeated(A, &allow_repeated));
1263: PetscCall(MatISSetAllowRepeated(C, allow_repeated));
1264: PetscCall(MatGetLocalToGlobalMapping(A, &rl2g, &cl2g));
1265: PetscCall(MatSetLocalToGlobalMapping(C, cl2g, rl2g));
1266: } else C = *B;
1268: /* perform local transposition */
1269: PetscCall(MatISGetLocalMat(A, &lA));
1270: PetscCall(MatTranspose(lA, MAT_INITIAL_MATRIX, &lC));
1271: PetscCall(MatSetLocalToGlobalMapping(lC, lA->cmap->mapping, lA->rmap->mapping));
1272: PetscCall(MatISSetLocalMat(C, lC));
1273: PetscCall(MatDestroy(&lC));
1275: if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
1276: *B = C;
1277: } else {
1278: PetscCall(MatHeaderMerge(A, &C));
1279: }
1280: PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
1281: PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
1282: PetscFunctionReturn(PETSC_SUCCESS);
1283: }
1285: static PetscErrorCode MatDiagonalSet_IS(Mat A, Vec D, InsertMode insmode)
1286: {
1287: Mat_IS *is = (Mat_IS *)A->data;
1289: PetscFunctionBegin;
1290: PetscCheck(!is->allow_repeated || insmode == ADD_VALUES, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "INSERT_VALUES with repeated entries not supported");
1291: if (D) { /* MatShift_IS passes D = NULL */
1292: PetscCall(VecScatterBegin(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD));
1293: PetscCall(VecScatterEnd(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD));
1294: }
1295: PetscCall(VecPointwiseDivide(is->y, is->y, is->counter));
1296: PetscCall(MatDiagonalSet(is->A, is->y, insmode));
1297: PetscFunctionReturn(PETSC_SUCCESS);
1298: }
1300: static PetscErrorCode MatShift_IS(Mat A, PetscScalar a)
1301: {
1302: Mat_IS *is = (Mat_IS *)A->data;
1304: PetscFunctionBegin;
1305: PetscCall(VecSet(is->y, a));
1306: PetscCall(MatDiagonalSet_IS(A, NULL, ADD_VALUES));
1307: PetscFunctionReturn(PETSC_SUCCESS);
1308: }
1310: static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1311: {
1312: PetscInt buf[2 * MATIS_MAX_ENTRIES_INSERTION], *rows_l = NULL, *cols_l = NULL;
1314: PetscFunctionBegin;
1315: IndexSpaceGet(buf, m, n, rows_l, cols_l);
1316: PetscCall(ISLocalToGlobalMappingApply(A->rmap->mapping, m, rows, rows_l));
1317: PetscCall(ISLocalToGlobalMappingApply(A->cmap->mapping, n, cols, cols_l));
1318: PetscCall(MatSetValuesLocal_IS(A, m, rows_l, n, cols_l, values, addv));
1319: IndexSpaceRestore(buf, m, n, rows_l, cols_l);
1320: PetscFunctionReturn(PETSC_SUCCESS);
1321: }
1323: static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1324: {
1325: PetscInt buf[2 * MATIS_MAX_ENTRIES_INSERTION], *rows_l = NULL, *cols_l = NULL, rbs, cbs;
1327: PetscFunctionBegin;
1328: /* We cannot guarantee the local matrix will have the same block size as the original matrix */
1329: PetscCall(ISLocalToGlobalMappingGetBlockSize(A->rmap->mapping, &rbs));
1330: PetscCall(ISLocalToGlobalMappingGetBlockSize(A->cmap->mapping, &cbs));
1331: IndexSpaceGet(buf, m * rbs, n * cbs, rows_l, cols_l);
1332: BlockIndicesExpand(m, rows, rbs, rows_l);
1333: BlockIndicesExpand(n, cols, cbs, cols_l);
1334: PetscCall(ISLocalToGlobalMappingApply(A->rmap->mapping, m * rbs, rows_l, rows_l));
1335: PetscCall(ISLocalToGlobalMappingApply(A->cmap->mapping, n * cbs, cols_l, cols_l));
1336: PetscCall(MatSetValuesLocal_IS(A, m * rbs, rows_l, n * cbs, cols_l, values, addv));
1337: IndexSpaceRestore(buf, m * rbs, n * cbs, rows_l, cols_l);
1338: PetscFunctionReturn(PETSC_SUCCESS);
1339: }
1341: static PetscErrorCode MatZeroRowsLocal_SubMat_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1342: {
1343: PetscInt *rows_l;
1344: Mat_IS *is = (Mat_IS *)A->data;
1346: PetscFunctionBegin;
1347: PetscCall(PetscMalloc1(n, &rows_l));
1348: PetscCall(ISLocalToGlobalMappingApply(A->rmap->mapping, n, rows, rows_l));
1349: PetscCall(MatZeroRowsLocal(is->islocalref, n, rows_l, diag, x, b));
1350: PetscCall(PetscFree(rows_l));
1351: PetscFunctionReturn(PETSC_SUCCESS);
1352: }
1354: static PetscErrorCode MatZeroRowsColumnsLocal_SubMat_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1355: {
1356: PetscInt *rows_l;
1357: Mat_IS *is = (Mat_IS *)A->data;
1359: PetscFunctionBegin;
1360: PetscCall(PetscMalloc1(n, &rows_l));
1361: PetscCall(ISLocalToGlobalMappingApply(A->rmap->mapping, n, rows, rows_l));
1362: PetscCall(MatZeroRowsColumnsLocal(is->islocalref, n, rows_l, diag, x, b));
1363: PetscCall(PetscFree(rows_l));
1364: PetscFunctionReturn(PETSC_SUCCESS);
1365: }
1367: static PetscErrorCode MatCreateSubMatrix_IS(Mat mat, IS irow, IS icol, MatReuse scall, Mat *newmat)
1368: {
1369: Mat locmat, newlocmat;
1370: Mat_IS *newmatis;
1371: const PetscInt *idxs;
1372: PetscInt i, m, n;
1374: PetscFunctionBegin;
1375: if (scall == MAT_REUSE_MATRIX) {
1376: PetscBool ismatis;
1378: PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis));
1379: PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Not of MATIS type");
1380: newmatis = (Mat_IS *)(*newmat)->data;
1381: PetscCheck(newmatis->getsub_ris, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local row IS");
1382: PetscCheck(newmatis->getsub_cis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local col IS");
1383: }
1384: /* irow and icol must not have duplicate entries */
1385: if (PetscDefined(USE_DEBUG)) {
1386: Vec rtest, ltest;
1387: const PetscScalar *array;
1389: PetscCall(MatCreateVecs(mat, <est, &rtest));
1390: PetscCall(ISGetLocalSize(irow, &n));
1391: PetscCall(ISGetIndices(irow, &idxs));
1392: for (i = 0; i < n; i++) PetscCall(VecSetValue(rtest, idxs[i], 1.0, ADD_VALUES));
1393: PetscCall(VecAssemblyBegin(rtest));
1394: PetscCall(VecAssemblyEnd(rtest));
1395: PetscCall(VecGetLocalSize(rtest, &n));
1396: PetscCall(VecGetOwnershipRange(rtest, &m, NULL));
1397: PetscCall(VecGetArrayRead(rtest, &array));
1398: for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! Irow may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i]));
1399: PetscCall(VecRestoreArrayRead(rtest, &array));
1400: PetscCall(ISRestoreIndices(irow, &idxs));
1401: PetscCall(ISGetLocalSize(icol, &n));
1402: PetscCall(ISGetIndices(icol, &idxs));
1403: for (i = 0; i < n; i++) PetscCall(VecSetValue(ltest, idxs[i], 1.0, ADD_VALUES));
1404: PetscCall(VecAssemblyBegin(ltest));
1405: PetscCall(VecAssemblyEnd(ltest));
1406: PetscCall(VecGetLocalSize(ltest, &n));
1407: PetscCall(VecGetOwnershipRange(ltest, &m, NULL));
1408: PetscCall(VecGetArrayRead(ltest, &array));
1409: for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! Icol may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i]));
1410: PetscCall(VecRestoreArrayRead(ltest, &array));
1411: PetscCall(ISRestoreIndices(icol, &idxs));
1412: PetscCall(VecDestroy(&rtest));
1413:     PetscCall(VecDestroy(&ltest));
1414: }
1415: if (scall == MAT_INITIAL_MATRIX) {
1416: Mat_IS *matis = (Mat_IS *)mat->data;
1417: ISLocalToGlobalMapping rl2g;
1418: IS is;
1419: PetscInt *lidxs, *lgidxs, *newgidxs;
1420: PetscInt ll, newloc, irbs, icbs, arbs, acbs, rbs, cbs;
1421: PetscBool cong;
1422: MPI_Comm comm;
1424: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
1425: PetscCall(MatGetBlockSizes(mat, &arbs, &acbs));
1426: PetscCall(ISGetBlockSize(irow, &irbs));
1427: PetscCall(ISGetBlockSize(icol, &icbs));
1428: rbs = arbs == irbs ? irbs : 1;
1429: cbs = acbs == icbs ? icbs : 1;
1430: PetscCall(ISGetLocalSize(irow, &m));
1431: PetscCall(ISGetLocalSize(icol, &n));
1432: PetscCall(MatCreate(comm, newmat));
1433: PetscCall(MatSetType(*newmat, MATIS));
1434: PetscCall(MatISSetAllowRepeated(*newmat, matis->allow_repeated));
1435: PetscCall(MatSetSizes(*newmat, m, n, PETSC_DECIDE, PETSC_DECIDE));
1436: PetscCall(MatSetBlockSizes(*newmat, rbs, cbs));
1438:     /* communicate the irow indices to their owners in the row layout */
1438: PetscCall(ISGetIndices(irow, &idxs));
1439: PetscCall(PetscLayoutMapLocal(mat->rmap, m, idxs, &ll, &lidxs, &lgidxs));
1440: PetscCall(ISRestoreIndices(irow, &idxs));
1441: PetscCall(PetscArrayzero(matis->sf_rootdata, matis->sf->nroots));
1442: for (i = 0; i < ll; i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i] + 1;
1443: PetscCall(PetscFree(lidxs));
1444: PetscCall(PetscFree(lgidxs));
1445: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1446: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1447: for (i = 0, newloc = 0; i < matis->sf->nleaves; i++)
1448: if (matis->sf_leafdata[i]) newloc++;
1449: PetscCall(PetscMalloc1(newloc, &newgidxs));
1450: PetscCall(PetscMalloc1(newloc, &lidxs));
1451: for (i = 0, newloc = 0; i < matis->sf->nleaves; i++)
1452: if (matis->sf_leafdata[i]) {
1453: lidxs[newloc] = i;
1454: newgidxs[newloc++] = matis->sf_leafdata[i] - 1;
1455: }
1456: PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is));
1457: PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
1458: PetscCall(ISLocalToGlobalMappingSetBlockSize(rl2g, rbs));
1459: PetscCall(ISDestroy(&is));
1460:     /* local IS used to extract the local submatrix */
1461: newmatis = (Mat_IS *)(*newmat)->data;
1462: PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_ris));
1463: PetscCall(MatHasCongruentLayouts(mat, &cong));
1464: if (cong && irow == icol && matis->csf == matis->sf) {
1465: PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, rl2g));
1466: PetscCall(PetscObjectReference((PetscObject)newmatis->getsub_ris));
1467: newmatis->getsub_cis = newmatis->getsub_ris;
1468: } else {
1469: ISLocalToGlobalMapping cl2g;
1471:       /* communicate the icol indices to their owners in the column layout */
1472: PetscCall(ISGetIndices(icol, &idxs));
1473: PetscCall(PetscLayoutMapLocal(mat->cmap, n, idxs, &ll, &lidxs, &lgidxs));
1474: PetscCall(ISRestoreIndices(icol, &idxs));
1475: PetscCall(PetscArrayzero(matis->csf_rootdata, matis->csf->nroots));
1476: for (i = 0; i < ll; i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i] + 1;
1477: PetscCall(PetscFree(lidxs));
1478: PetscCall(PetscFree(lgidxs));
1479: PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE));
1480: PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE));
1481: for (i = 0, newloc = 0; i < matis->csf->nleaves; i++)
1482: if (matis->csf_leafdata[i]) newloc++;
1483: PetscCall(PetscMalloc1(newloc, &newgidxs));
1484: PetscCall(PetscMalloc1(newloc, &lidxs));
1485: for (i = 0, newloc = 0; i < matis->csf->nleaves; i++)
1486: if (matis->csf_leafdata[i]) {
1487: lidxs[newloc] = i;
1488: newgidxs[newloc++] = matis->csf_leafdata[i] - 1;
1489: }
1490: PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is));
1491: PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
1492: PetscCall(ISLocalToGlobalMappingSetBlockSize(cl2g, cbs));
1493: PetscCall(ISDestroy(&is));
1494:       /* local IS used to extract the local submatrix */
1495: PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_cis));
1496: PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, cl2g));
1497: PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
1498: }
1499: PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
1500: } else {
1501: PetscCall(MatISGetLocalMat(*newmat, &newlocmat));
1502: }
1503: PetscCall(MatISGetLocalMat(mat, &locmat));
1504: newmatis = (Mat_IS *)(*newmat)->data;
1505: PetscCall(MatCreateSubMatrix(locmat, newmatis->getsub_ris, newmatis->getsub_cis, scall, &newlocmat));
1506: if (scall == MAT_INITIAL_MATRIX) {
1507: PetscCall(MatISSetLocalMat(*newmat, newlocmat));
1508: PetscCall(MatDestroy(&newlocmat));
1509: }
1510: PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
1511: PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
1512: PetscFunctionReturn(PETSC_SUCCESS);
1513: }
1515: static PetscErrorCode MatCopy_IS(Mat A, Mat B, MatStructure str)
1516: {
1517: Mat_IS *a = (Mat_IS *)A->data, *b;
1518: PetscBool ismatis;
1520: PetscFunctionBegin;
1521: PetscCall(PetscObjectTypeCompare((PetscObject)B, MATIS, &ismatis));
1522:   PetscCheck(ismatis, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not yet implemented for this matrix type");
1523: b = (Mat_IS *)B->data;
1524: PetscCall(MatCopy(a->A, b->A, str));
1525: PetscCall(PetscObjectStateIncrease((PetscObject)B));
1526: PetscFunctionReturn(PETSC_SUCCESS);
1527: }
1529: static PetscErrorCode MatMissingDiagonal_IS(Mat A, PetscBool *missing, PetscInt *d)
1530: {
1531: Vec v;
1532: const PetscScalar *array;
1533: PetscInt i, n;
1535: PetscFunctionBegin;
1536: *missing = PETSC_FALSE;
1537: PetscCall(MatCreateVecs(A, NULL, &v));
1538: PetscCall(MatGetDiagonal(A, v));
1539: PetscCall(VecGetLocalSize(v, &n));
1540: PetscCall(VecGetArrayRead(v, &array));
1541: for (i = 0; i < n; i++)
1542: if (array[i] == 0.) break;
1543: PetscCall(VecRestoreArrayRead(v, &array));
1544: PetscCall(VecDestroy(&v));
1545: if (i != n) *missing = PETSC_TRUE;
1546: if (d) {
1547: *d = -1;
1548: if (*missing) {
1549: PetscInt rstart;
1550: PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
1551: *d = i + rstart;
1552: }
1553: }
1554: PetscFunctionReturn(PETSC_SUCCESS);
1555: }
1557: static PetscErrorCode MatISSetUpSF_IS(Mat B)
1558: {
1559: Mat_IS *matis = (Mat_IS *)B->data;
1560: const PetscInt *gidxs;
1561: PetscInt nleaves;
1563: PetscFunctionBegin;
1564: if (matis->sf) PetscFunctionReturn(PETSC_SUCCESS);
1565: PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->sf));
1566: PetscCall(ISLocalToGlobalMappingGetIndices(matis->rmapping, &gidxs));
1567: PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nleaves));
1568: PetscCall(PetscSFSetGraphLayout(matis->sf, B->rmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs));
1569: PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->rmapping, &gidxs));
1570: PetscCall(PetscMalloc2(matis->sf->nroots, &matis->sf_rootdata, matis->sf->nleaves, &matis->sf_leafdata));
1571: if (matis->rmapping != matis->cmapping) { /* setup SF for columns */
1572: PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nleaves));
1573: PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->csf));
1574: PetscCall(ISLocalToGlobalMappingGetIndices(matis->cmapping, &gidxs));
1575: PetscCall(PetscSFSetGraphLayout(matis->csf, B->cmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs));
1576: PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->cmapping, &gidxs));
1577: PetscCall(PetscMalloc2(matis->csf->nroots, &matis->csf_rootdata, matis->csf->nleaves, &matis->csf_leafdata));
1578: } else {
1579: matis->csf = matis->sf;
1580: matis->csf_leafdata = matis->sf_leafdata;
1581: matis->csf_rootdata = matis->sf_rootdata;
1582: }
1583: PetscFunctionReturn(PETSC_SUCCESS);
1584: }
1586: /*@
1587: MatISGetAllowRepeated - Get the flag to allow repeated entries in the local to global map
1589: Not Collective
1591: Input Parameter:
1592: . A - the matrix
1594: Output Parameter:
1595: . flg - the boolean flag
1597: Level: intermediate
1599: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatSetLocalToGlobalMapping()`, `MatISSetAllowRepeated()`
1600: @*/
1601: PetscErrorCode MatISGetAllowRepeated(Mat A, PetscBool *flg)
1602: {
1603: PetscBool ismatis;
1605: PetscFunctionBegin;
1607: PetscAssertPointer(flg, 2);
1608: PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis));
1609: PetscCheck(ismatis, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)A)->type_name);
1610: *flg = ((Mat_IS *)A->data)->allow_repeated;
1611: PetscFunctionReturn(PETSC_SUCCESS);
1612: }
1614: /*@
1615: MatISSetAllowRepeated - Set the flag to allow repeated entries in the local to global map
1617: Logically Collective
1619: Input Parameters:
1620: + A - the matrix
1621: - flg - the boolean flag
1623: Level: intermediate
1625: Notes:
1626: The default value is `PETSC_FALSE`.
1627: When called after `MatSetLocalToGlobalMapping()`, it recreates the local matrices
1628: if `flg` differs from the previously set value.
1629: Specifically, when `flg` is `PETSC_TRUE` it only recreates the local matrices, while when
1630: `flg` is `PETSC_FALSE` it assembles the local matrices, summing up repeated entries.
1632: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatSetLocalToGlobalMapping()`, `MatISGetAllowRepeated()`
1633: @*/
1634: PetscErrorCode MatISSetAllowRepeated(Mat A, PetscBool flg)
1635: {
1636: PetscFunctionBegin;
1640: PetscTryMethod(A, "MatISSetAllowRepeated_C", (Mat, PetscBool), (A, flg));
1641: PetscFunctionReturn(PETSC_SUCCESS);
1642: }
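/*
   Illustrative usage sketch (not part of the original source): create a small MATIS,
   allow repeated entries in the local-to-global map, and query the flag back with
   MatISGetAllowRepeated(). The communicator, sizes, and index data are placeholders
   chosen only for this example; the block is guarded out so it does not affect compilation.
*/
#if 0
static PetscErrorCode ExampleMatISAllowRepeated(MPI_Comm comm)
{
  Mat                    A;
  ISLocalToGlobalMapping l2g;
  PetscBool              flg;
  PetscInt               idx[] = {0, 1, 2, 2}; /* index 2 appears twice on purpose */

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, 4, idx, PETSC_COPY_VALUES, &l2g));
  PetscCall(MatCreate(comm, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 3, 3));
  PetscCall(MatSetType(A, MATIS));
  PetscCall(MatISSetAllowRepeated(A, PETSC_TRUE)); /* keep the repeated index in the local space */
  PetscCall(MatSetLocalToGlobalMapping(A, l2g, l2g));
  PetscCall(MatISGetAllowRepeated(A, &flg)); /* flg is now PETSC_TRUE */
  PetscCall(ISLocalToGlobalMappingDestroy(&l2g));
  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif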
1644: static PetscErrorCode MatISSetAllowRepeated_IS(Mat A, PetscBool flg)
1645: {
1646: Mat_IS *matis = (Mat_IS *)A->data;
1647: Mat lA = NULL;
1648: ISLocalToGlobalMapping lrmap, lcmap;
1650: PetscFunctionBegin;
1651: if (flg == matis->allow_repeated) PetscFunctionReturn(PETSC_SUCCESS);
1652: if (!matis->A) { /* matrix has not been preallocated yet */
1653: matis->allow_repeated = flg;
1654: PetscFunctionReturn(PETSC_SUCCESS);
1655: }
1656: PetscCheck(!matis->islocalref, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not implemented for local references");
1657: if (matis->allow_repeated) { /* we will assemble the old local matrix if needed */
1658: lA = matis->A;
1659: PetscCall(PetscObjectReference((PetscObject)lA));
1660: }
1661:   /* If flg is PETSC_TRUE, we only recreate the local matrix */
1662: matis->allow_repeated = flg;
1663: PetscCall(MatSetLocalToGlobalMapping(A, A->rmap->mapping, A->cmap->mapping));
1664: if (lA) { /* assemble previous local matrix if needed */
1665: Mat nA = matis->A;
1667: PetscCall(MatGetLocalToGlobalMapping(nA, &lrmap, &lcmap));
1668: if (!lrmap && !lcmap) {
1669: PetscCall(MatISSetLocalMat(A, lA));
1670: } else {
1671: Mat P = NULL, R = NULL;
1672: MatProductType ptype;
1674: if (lrmap == lcmap) {
1675: ptype = MATPRODUCT_PtAP;
1676: PetscCall(MatCreateFromISLocalToGlobalMapping(lcmap, nA, PETSC_TRUE, PETSC_FALSE, NULL, &P));
1677: PetscCall(MatProductCreate(lA, P, NULL, &nA));
1678: } else {
1679: if (lcmap) PetscCall(MatCreateFromISLocalToGlobalMapping(lcmap, nA, PETSC_TRUE, PETSC_FALSE, NULL, &P));
1680: if (lrmap) PetscCall(MatCreateFromISLocalToGlobalMapping(lrmap, nA, PETSC_FALSE, PETSC_TRUE, NULL, &R));
1681: if (R && P) {
1682: ptype = MATPRODUCT_ABC;
1683: PetscCall(MatProductCreate(R, lA, P, &nA));
1684: } else if (R) {
1685: ptype = MATPRODUCT_AB;
1686: PetscCall(MatProductCreate(R, lA, NULL, &nA));
1687: } else {
1688: ptype = MATPRODUCT_AB;
1689: PetscCall(MatProductCreate(lA, P, NULL, &nA));
1690: }
1691: }
1692: PetscCall(MatProductSetType(nA, ptype));
1693: PetscCall(MatProductSetFromOptions(nA));
1694: PetscCall(MatProductSymbolic(nA));
1695: PetscCall(MatProductNumeric(nA));
1696: PetscCall(MatProductClear(nA));
1697: PetscCall(MatConvert(nA, matis->lmattype, MAT_INPLACE_MATRIX, &nA));
1698: PetscCall(MatISSetLocalMat(A, nA));
1699: PetscCall(MatDestroy(&nA));
1700: PetscCall(MatDestroy(&P));
1701: PetscCall(MatDestroy(&R));
1702: }
1703: }
1704: PetscCall(MatDestroy(&lA));
1705: PetscFunctionReturn(PETSC_SUCCESS);
1706: }
1708: /*@
1709: MatISStoreL2L - Store local-to-local operators during the Galerkin process of computing `MatPtAP()`
1711: Logically Collective
1713: Input Parameters:
1714: + A - the matrix
1715: - store - the boolean flag
1717: Level: advanced
1719: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatPtAP()`
1720: @*/
1721: PetscErrorCode MatISStoreL2L(Mat A, PetscBool store)
1722: {
1723: PetscFunctionBegin;
1727: PetscTryMethod(A, "MatISStoreL2L_C", (Mat, PetscBool), (A, store));
1728: PetscFunctionReturn(PETSC_SUCCESS);
1729: }
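/*
   Illustrative usage sketch (not part of the original source): request that the
   local-to-local operators computed during the Galerkin product be kept for later
   reuse. A is assumed to be a MATIS and P a prolongation with compatible layouts;
   the block is guarded out so it does not affect compilation.
*/
#if 0
static PetscErrorCode ExampleMatISStoreL2L(Mat A, Mat P)
{
  Mat C;

  PetscFunctionBegin;
  PetscCall(MatISStoreL2L(A, PETSC_TRUE)); /* keep the local-to-local operators */
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C));
  /* ... use C; the local-to-local operators built above remain available for reuse */
  PetscCall(MatDestroy(&C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif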
1731: static PetscErrorCode MatISStoreL2L_IS(Mat A, PetscBool store)
1732: {
1733: Mat_IS *matis = (Mat_IS *)A->data;
1735: PetscFunctionBegin;
1736: matis->storel2l = store;
1737: if (!store) PetscCall(PetscObjectCompose((PetscObject)A, "_MatIS_PtAP_l2l", NULL));
1738: PetscFunctionReturn(PETSC_SUCCESS);
1739: }
1741: /*@
1742: MatISFixLocalEmpty - Compress out zero local rows from the local matrices
1744: Logically Collective
1746: Input Parameters:
1747: + A - the matrix
1748: - fix - the boolean flag
1750: Level: advanced
1752: Note:
1753: When `fix` is `PETSC_TRUE`, new local matrices and l2g maps are generated during the final assembly process.
1755: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatAssemblyEnd()`, `MAT_FINAL_ASSEMBLY`
1756: @*/
1757: PetscErrorCode MatISFixLocalEmpty(Mat A, PetscBool fix)
1758: {
1759: PetscFunctionBegin;
1763: PetscTryMethod(A, "MatISFixLocalEmpty_C", (Mat, PetscBool), (A, fix));
1764: PetscFunctionReturn(PETSC_SUCCESS);
1765: }
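/*
   Illustrative usage sketch (not part of the original source): ask MATIS to compress
   out zero local rows; the compression happens during the final assembly, so the flag
   must be set before MatAssemblyBegin/End(). A is assumed to be an already populated
   MATIS; the block is guarded out so it does not affect compilation.
*/
#if 0
static PetscErrorCode ExampleMatISFixLocalEmpty(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatISFixLocalEmpty(A, PETSC_TRUE));
  /* new local matrices and l2g maps are generated here if empty local rows are present */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif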
1767: static PetscErrorCode MatISFixLocalEmpty_IS(Mat A, PetscBool fix)
1768: {
1769: Mat_IS *matis = (Mat_IS *)A->data;
1771: PetscFunctionBegin;
1772: matis->locempty = fix;
1773: PetscFunctionReturn(PETSC_SUCCESS);
1774: }
1776: /*@
1777: MatISSetPreallocation - Preallocates memory for a `MATIS` parallel matrix.
1779: Collective
1781: Input Parameters:
1782: + B - the matrix
1783: . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
1784: (same value is used for all local rows)
1785: . d_nnz - array containing the number of nonzeros in the various rows of the
1786: DIAGONAL portion of the local submatrix (possibly different for each row)
1787: or `NULL`, if `d_nz` is used to specify the nonzero structure.
1788:          The size of this array is equal to the number of local rows, i.e. `m`.
1789: For matrices that will be factored, you must leave room for (and set)
1790: the diagonal entry even if it is zero.
1791: . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
1792: submatrix (same value is used for all local rows).
1793: - o_nnz - array containing the number of nonzeros in the various rows of the
1794: OFF-DIAGONAL portion of the local submatrix (possibly different for
1795: each row) or `NULL`, if `o_nz` is used to specify the nonzero
1796: structure. The size of this array is equal to the number
1797:          of local rows, i.e. `m`.
1799: If the *_nnz parameter is given then the *_nz parameter is ignored
1801: Level: intermediate
1803: Note:
1804: This function has the same interface as the `MATMPIAIJ` preallocation routine in order to simplify the transition
1805:   from the assembled format to the unassembled one. It overestimates the preallocation of `MATIS` local
1806: matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
1808: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateIS()`, `MatMPIAIJSetPreallocation()`, `MatISGetLocalMat()`, `MATIS`
1809: @*/
1810: PetscErrorCode MatISSetPreallocation(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
1811: {
1812: PetscFunctionBegin;
1815: PetscTryMethod(B, "MatISSetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, d_nz, d_nnz, o_nz, o_nnz));
1816: PetscFunctionReturn(PETSC_SUCCESS);
1817: }
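/*
   Illustrative usage sketch (not part of the original source): preallocate a MATIS
   with the MATMPIAIJ-style interface and then insert values. B is assumed to already
   be of type MATIS with its local-to-global mappings set; d_nnz and o_nnz are arrays
   of length equal to the number of local rows. The block is guarded out so it does
   not affect compilation.
*/
#if 0
static PetscErrorCode ExampleMatISPreallocate(Mat B, const PetscInt d_nnz[], const PetscInt o_nnz[])
{
  PetscFunctionBegin;
  PetscCall(MatISSetPreallocation(B, 0, d_nnz, 0, o_nnz)); /* *_nz values are ignored since *_nnz is given */
  /* values can now be inserted, e.g. with MatSetValuesLocal(), then assembled as usual */
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif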
1819: static PetscErrorCode MatISSetPreallocation_IS(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
1820: {
1821: Mat_IS *matis = (Mat_IS *)B->data;
1822: PetscInt bs, i, nlocalcols;
1824: PetscFunctionBegin;
1825: PetscCall(MatSetUp(B));
1826: if (!d_nnz)
1827: for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nz;
1828: else
1829: for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nnz[i];
1831: if (!o_nnz)
1832: for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nz;
1833: else
1834: for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nnz[i];
1836: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1837: PetscCall(MatGetSize(matis->A, NULL, &nlocalcols));
1838: PetscCall(MatGetBlockSize(matis->A, &bs));
1839: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1841: for (i = 0; i < matis->sf->nleaves; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols);
1842: PetscCall(MatSeqAIJSetPreallocation(matis->A, 0, matis->sf_leafdata));
1843: #if defined(PETSC_HAVE_HYPRE)
1844: PetscCall(MatHYPRESetPreallocation(matis->A, 0, matis->sf_leafdata, 0, NULL));
1845: #endif
1847: for (i = 0; i < matis->sf->nleaves / bs; i++) {
1848: PetscInt b;
1850: matis->sf_leafdata[i] = matis->sf_leafdata[i * bs] / bs;
1851: for (b = 1; b < bs; b++) matis->sf_leafdata[i] = PetscMax(matis->sf_leafdata[i], matis->sf_leafdata[i * bs + b] / bs);
1852: }
1853: PetscCall(MatSeqBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));
1855: nlocalcols /= bs;
1856: for (i = 0; i < matis->sf->nleaves / bs; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols - i);
1857: PetscCall(MatSeqSBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));
1859: /* for other matrix types */
1860: PetscCall(MatSetUp(matis->A));
1861: PetscFunctionReturn(PETSC_SUCCESS);
1862: }
1864: PETSC_INTERN PetscErrorCode MatConvert_IS_XAIJ(Mat mat, MatType mtype, MatReuse reuse, Mat *M)
1865: {
1866: Mat_IS *matis = (Mat_IS *)mat->data;
1867: Mat local_mat = NULL, MT;
1868: PetscInt rbs, cbs, rows, cols, lrows, lcols;
1869: PetscInt local_rows, local_cols;
1870: PetscBool isseqdense, isseqsbaij, isseqaij, isseqbaij;
1871: PetscMPIInt size;
1872: const PetscScalar *array;
1874: PetscFunctionBegin;
1875: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
1876: if (size == 1 && mat->rmap->N == matis->A->rmap->N && mat->cmap->N == matis->A->cmap->N && !matis->allow_repeated) {
1877: Mat B;
1878: IS irows = NULL, icols = NULL;
1879: PetscInt rbs, cbs;
1881: PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
1882: PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
1883: if (reuse != MAT_REUSE_MATRIX) { /* check if l2g maps are one-to-one */
1884: IS rows, cols;
1885: const PetscInt *ridxs, *cidxs;
1886: PetscInt i, nw;
1887: PetscBT work;
1889: PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->rmapping, &ridxs));
1890: PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nw));
1891: nw = nw / rbs;
1892: PetscCall(PetscBTCreate(nw, &work));
1893: for (i = 0; i < nw; i++) PetscCall(PetscBTSet(work, ridxs[i]));
1894: for (i = 0; i < nw; i++)
1895: if (!PetscBTLookup(work, i)) break;
1896: if (i == nw) {
1897: PetscCall(ISCreateBlock(PETSC_COMM_SELF, rbs, nw, ridxs, PETSC_USE_POINTER, &rows));
1898: PetscCall(ISSetPermutation(rows));
1899: PetscCall(ISInvertPermutation(rows, PETSC_DECIDE, &irows));
1900: PetscCall(ISDestroy(&rows));
1901: }
1902: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->rmapping, &ridxs));
1903: PetscCall(PetscBTDestroy(&work));
1904: if (irows && matis->rmapping != matis->cmapping) {
1905: PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->cmapping, &cidxs));
1906: PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nw));
1907: nw = nw / cbs;
1908: PetscCall(PetscBTCreate(nw, &work));
1909: for (i = 0; i < nw; i++) PetscCall(PetscBTSet(work, cidxs[i]));
1910: for (i = 0; i < nw; i++)
1911: if (!PetscBTLookup(work, i)) break;
1912: if (i == nw) {
1913: PetscCall(ISCreateBlock(PETSC_COMM_SELF, cbs, nw, cidxs, PETSC_USE_POINTER, &cols));
1914: PetscCall(ISSetPermutation(cols));
1915: PetscCall(ISInvertPermutation(cols, PETSC_DECIDE, &icols));
1916: PetscCall(ISDestroy(&cols));
1917: }
1918: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->cmapping, &cidxs));
1919: PetscCall(PetscBTDestroy(&work));
1920: } else if (irows) {
1921: PetscCall(PetscObjectReference((PetscObject)irows));
1922: icols = irows;
1923: }
1924: } else {
1925: PetscCall(PetscObjectQuery((PetscObject)*M, "_MatIS_IS_XAIJ_irows", (PetscObject *)&irows));
1926: PetscCall(PetscObjectQuery((PetscObject)*M, "_MatIS_IS_XAIJ_icols", (PetscObject *)&icols));
1927: if (irows) PetscCall(PetscObjectReference((PetscObject)irows));
1928: if (icols) PetscCall(PetscObjectReference((PetscObject)icols));
1929: }
1930: if (!irows || !icols) {
1931: PetscCall(ISDestroy(&icols));
1932: PetscCall(ISDestroy(&irows));
1933: goto general_assembly;
1934: }
1935: PetscCall(MatConvert(matis->A, mtype, MAT_INITIAL_MATRIX, &B));
1936: if (reuse != MAT_INPLACE_MATRIX) {
1937: PetscCall(MatCreateSubMatrix(B, irows, icols, reuse, M));
1938: PetscCall(PetscObjectCompose((PetscObject)*M, "_MatIS_IS_XAIJ_irows", (PetscObject)irows));
1939: PetscCall(PetscObjectCompose((PetscObject)*M, "_MatIS_IS_XAIJ_icols", (PetscObject)icols));
1940: } else {
1941: Mat C;
1943: PetscCall(MatCreateSubMatrix(B, irows, icols, MAT_INITIAL_MATRIX, &C));
1944: PetscCall(MatHeaderReplace(mat, &C));
1945: }
1946: PetscCall(MatDestroy(&B));
1947: PetscCall(ISDestroy(&icols));
1948: PetscCall(ISDestroy(&irows));
1949: PetscFunctionReturn(PETSC_SUCCESS);
1950: }
1951: general_assembly:
1952: PetscCall(MatGetSize(mat, &rows, &cols));
1953: PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
1954: PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
1955: PetscCall(MatGetLocalSize(mat, &lrows, &lcols));
1956: PetscCall(MatGetSize(matis->A, &local_rows, &local_cols));
1957: PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQDENSE, &isseqdense));
1958: PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQAIJ, &isseqaij));
1959: PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQBAIJ, &isseqbaij));
1960: PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &isseqsbaij));
1961: PetscCheck(isseqdense || isseqaij || isseqbaij || isseqsbaij, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)matis->A)->type_name);
1962: if (PetscDefined(USE_DEBUG)) {
1963: PetscBool lb[4], bb[4];
1965: lb[0] = isseqdense;
1966: lb[1] = isseqaij;
1967: lb[2] = isseqbaij;
1968: lb[3] = isseqsbaij;
1969: PetscCallMPI(MPIU_Allreduce(lb, bb, 4, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
1970: PetscCheck(bb[0] || bb[1] || bb[2] || bb[3], PETSC_COMM_SELF, PETSC_ERR_SUP, "Local matrices must have the same type");
1971: }
1973: if (reuse != MAT_REUSE_MATRIX) {
1974: PetscCount ncoo;
1975: PetscInt *coo_i, *coo_j;
1977: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &MT));
1978: PetscCall(MatSetSizes(MT, lrows, lcols, rows, cols));
1979: PetscCall(MatSetType(MT, mtype));
1980: PetscCall(MatSetBlockSizes(MT, rbs, cbs));
1981: if (!isseqaij && !isseqdense) {
1982: PetscCall(MatConvert(matis->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &local_mat));
1983: } else {
1984: PetscCall(PetscObjectReference((PetscObject)matis->A));
1985: local_mat = matis->A;
1986: }
1987: PetscCall(MatSetLocalToGlobalMapping(MT, matis->rmapping, matis->cmapping));
1988: if (isseqdense) {
1989: PetscInt nr, nc;
1991: PetscCall(MatGetSize(local_mat, &nr, &nc));
1992: ncoo = nr * nc;
1993: PetscCall(PetscMalloc2(ncoo, &coo_i, ncoo, &coo_j));
1994: for (PetscInt j = 0; j < nc; j++) {
1995: for (PetscInt i = 0; i < nr; i++) {
1996: coo_i[j * nr + i] = i;
1997: coo_j[j * nr + i] = j;
1998: }
1999: }
2000: } else {
2001: const PetscInt *ii, *jj;
2002: PetscInt nr;
2003: PetscBool done;
2005: PetscCall(MatGetRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nr, &ii, &jj, &done));
2006: PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatGetRowIJ");
2007: ncoo = ii[nr];
2008: PetscCall(PetscMalloc2(ncoo, &coo_i, ncoo, &coo_j));
2009: PetscCall(PetscArraycpy(coo_j, jj, ncoo));
2010: for (PetscInt i = 0; i < nr; i++) {
2011: for (PetscInt j = ii[i]; j < ii[i + 1]; j++) coo_i[j] = i;
2012: }
2013: PetscCall(MatRestoreRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nr, &ii, &jj, &done));
2014: PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatRestoreRowIJ");
2015: }
2016: PetscCall(MatSetPreallocationCOOLocal(MT, ncoo, coo_i, coo_j));
2017: PetscCall(PetscFree2(coo_i, coo_j));
2018: } else {
2019: PetscInt mrbs, mcbs, mrows, mcols, mlrows, mlcols;
2021: /* some checks */
2022: MT = *M;
2023: PetscCall(MatGetBlockSizes(MT, &mrbs, &mcbs));
2024: PetscCall(MatGetSize(MT, &mrows, &mcols));
2025: PetscCall(MatGetLocalSize(MT, &mlrows, &mlcols));
2026: PetscCheck(mrows == rows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", rows, mrows);
2027: PetscCheck(mcols == cols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", cols, mcols);
2028: PetscCheck(mlrows == lrows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", lrows, mlrows);
2029: PetscCheck(mlcols == lcols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", lcols, mlcols);
2030: PetscCheck(mrbs == rbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong row block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", rbs, mrbs);
2031: PetscCheck(mcbs == cbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong col block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", cbs, mcbs);
2032: PetscCall(MatZeroEntries(MT));
2033: if (!isseqaij && !isseqdense) {
2034: PetscCall(MatConvert(matis->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &local_mat));
2035: } else {
2036: PetscCall(PetscObjectReference((PetscObject)matis->A));
2037: local_mat = matis->A;
2038: }
2039: }
2041: /* Set values */
2042: if (isseqdense) {
2043: PetscCall(MatDenseGetArrayRead(local_mat, &array));
2044: PetscCall(MatSetValuesCOO(MT, array, INSERT_VALUES));
2045: PetscCall(MatDenseRestoreArrayRead(local_mat, &array));
2046: } else {
2047: PetscCall(MatSeqAIJGetArrayRead(local_mat, &array));
2048: PetscCall(MatSetValuesCOO(MT, array, INSERT_VALUES));
2049: PetscCall(MatSeqAIJRestoreArrayRead(local_mat, &array));
2050: }
2051: PetscCall(MatDestroy(&local_mat));
2052: PetscCall(MatAssemblyBegin(MT, MAT_FINAL_ASSEMBLY));
2053: PetscCall(MatAssemblyEnd(MT, MAT_FINAL_ASSEMBLY));
2054: if (reuse == MAT_INPLACE_MATRIX) {
2055: PetscCall(MatHeaderReplace(mat, &MT));
2056: } else if (reuse == MAT_INITIAL_MATRIX) {
2057: *M = MT;
2058: }
2059: PetscFunctionReturn(PETSC_SUCCESS);
2060: }
2062: static PetscErrorCode MatDuplicate_IS(Mat mat, MatDuplicateOption op, Mat *newmat)
2063: {
2064: Mat_IS *matis = (Mat_IS *)mat->data;
2065: PetscInt rbs, cbs, m, n, M, N;
2066: Mat B, localmat;
2068: PetscFunctionBegin;
2069: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &rbs));
2070: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &cbs));
2071: PetscCall(MatGetSize(mat, &M, &N));
2072: PetscCall(MatGetLocalSize(mat, &m, &n));
2073: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
2074: PetscCall(MatSetSizes(B, m, n, M, N));
2075: PetscCall(MatSetBlockSize(B, rbs == cbs ? rbs : 1));
2076: PetscCall(MatSetType(B, MATIS));
2077: PetscCall(MatISSetLocalMatType(B, matis->lmattype));
2078: PetscCall(MatISSetAllowRepeated(B, matis->allow_repeated));
2079: PetscCall(MatSetLocalToGlobalMapping(B, mat->rmap->mapping, mat->cmap->mapping));
2080: PetscCall(MatDuplicate(matis->A, op, &localmat));
2081: PetscCall(MatSetLocalToGlobalMapping(localmat, matis->A->rmap->mapping, matis->A->cmap->mapping));
2082: PetscCall(MatISSetLocalMat(B, localmat));
2083: PetscCall(MatDestroy(&localmat));
2084: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2085: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2086: *newmat = B;
2087: PetscFunctionReturn(PETSC_SUCCESS);
2088: }
2090: static PetscErrorCode MatIsHermitian_IS(Mat A, PetscReal tol, PetscBool *flg)
2091: {
2092: Mat_IS *matis = (Mat_IS *)A->data;
2093: PetscBool local_sym;
2095: PetscFunctionBegin;
2096: PetscCall(MatIsHermitian(matis->A, tol, &local_sym));
2097: PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2098: PetscFunctionReturn(PETSC_SUCCESS);
2099: }
2101: static PetscErrorCode MatIsSymmetric_IS(Mat A, PetscReal tol, PetscBool *flg)
2102: {
2103: Mat_IS *matis = (Mat_IS *)A->data;
2104: PetscBool local_sym;
2106: PetscFunctionBegin;
2107: if (matis->rmapping != matis->cmapping) {
2108: *flg = PETSC_FALSE;
2109: PetscFunctionReturn(PETSC_SUCCESS);
2110: }
2111: PetscCall(MatIsSymmetric(matis->A, tol, &local_sym));
2112: PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2113: PetscFunctionReturn(PETSC_SUCCESS);
2114: }
2116: static PetscErrorCode MatIsStructurallySymmetric_IS(Mat A, PetscBool *flg)
2117: {
2118: Mat_IS *matis = (Mat_IS *)A->data;
2119: PetscBool local_sym;
2121: PetscFunctionBegin;
2122: if (matis->rmapping != matis->cmapping) {
2123: *flg = PETSC_FALSE;
2124: PetscFunctionReturn(PETSC_SUCCESS);
2125: }
2126: PetscCall(MatIsStructurallySymmetric(matis->A, &local_sym));
2127: PetscCallMPI(MPIU_Allreduce(&local_sym, flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2128: PetscFunctionReturn(PETSC_SUCCESS);
2129: }
2131: static PetscErrorCode MatDestroy_IS(Mat A)
2132: {
2133: Mat_IS *b = (Mat_IS *)A->data;
2135: PetscFunctionBegin;
2136: PetscCall(PetscFree(b->bdiag));
2137: PetscCall(PetscFree(b->lmattype));
2138: PetscCall(MatDestroy(&b->A));
2139: PetscCall(VecScatterDestroy(&b->cctx));
2140: PetscCall(VecScatterDestroy(&b->rctx));
2141: PetscCall(VecDestroy(&b->x));
2142: PetscCall(VecDestroy(&b->y));
2143: PetscCall(VecDestroy(&b->counter));
2144: PetscCall(ISDestroy(&b->getsub_ris));
2145: PetscCall(ISDestroy(&b->getsub_cis));
2146: if (b->sf != b->csf) {
2147: PetscCall(PetscSFDestroy(&b->csf));
2148: PetscCall(PetscFree2(b->csf_rootdata, b->csf_leafdata));
2149: } else b->csf = NULL;
2150: PetscCall(PetscSFDestroy(&b->sf));
2151: PetscCall(PetscFree2(b->sf_rootdata, b->sf_leafdata));
2152: PetscCall(ISLocalToGlobalMappingDestroy(&b->rmapping));
2153: PetscCall(ISLocalToGlobalMappingDestroy(&b->cmapping));
2154: PetscCall(MatDestroy(&b->dA));
2155: PetscCall(MatDestroy(&b->assembledA));
2156: PetscCall(PetscFree(A->data));
2157: PetscCall(PetscObjectChangeTypeName((PetscObject)A, NULL));
2158: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", NULL));
2159: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", NULL));
2160: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", NULL));
2161: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISRestoreLocalMat_C", NULL));
2162: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", NULL));
2163: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", NULL));
2164: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", NULL));
2165: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", NULL));
2166: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", NULL));
2167: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", NULL));
2168: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", NULL));
2169: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", NULL));
2170: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", NULL));
2171: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", NULL));
2172: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", NULL));
2173: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", NULL));
2174: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
2175: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
2176: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetAllowRepeated_C", NULL));
2177: PetscFunctionReturn(PETSC_SUCCESS);
2178: }
2180: static PetscErrorCode MatMult_IS(Mat A, Vec x, Vec y)
2181: {
2182: Mat_IS *is = (Mat_IS *)A->data;
2183: PetscScalar zero = 0.0;
2185: PetscFunctionBegin;
2186: /* scatter the global vector x into the local work vector */
2187: PetscCall(VecScatterBegin(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD));
2188: PetscCall(VecScatterEnd(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD));
2190: /* multiply the local matrix */
2191: PetscCall(MatMult(is->A, is->x, is->y));
2193:   /* scatter the product back into the global vector */
2194: PetscCall(VecSet(y, zero));
2195: PetscCall(VecScatterBegin(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE));
2196: PetscCall(VecScatterEnd(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE));
2197: PetscFunctionReturn(PETSC_SUCCESS);
2198: }
2200: static PetscErrorCode MatMultAdd_IS(Mat A, Vec v1, Vec v2, Vec v3)
2201: {
2202: Vec temp_vec;
2204:   PetscFunctionBegin; /* v3 = v2 + A * v1 */
2205: if (v3 != v2) {
2206: PetscCall(MatMult(A, v1, v3));
2207: PetscCall(VecAXPY(v3, 1.0, v2));
2208: } else {
2209: PetscCall(VecDuplicate(v2, &temp_vec));
2210: PetscCall(MatMult(A, v1, temp_vec));
2211: PetscCall(VecAXPY(temp_vec, 1.0, v2));
2212: PetscCall(VecCopy(temp_vec, v3));
2213: PetscCall(VecDestroy(&temp_vec));
2214: }
2215: PetscFunctionReturn(PETSC_SUCCESS);
2216: }
2218: static PetscErrorCode MatMultTranspose_IS(Mat A, Vec y, Vec x)
2219: {
2220: Mat_IS *is = (Mat_IS *)A->data;
2222: PetscFunctionBegin;
2223:   /* scatter the global vector y into the local work vector */
2224: PetscCall(VecScatterBegin(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD));
2225: PetscCall(VecScatterEnd(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD));
2227: /* multiply the local matrix */
2228: PetscCall(MatMultTranspose(is->A, is->y, is->x));
2230: /* scatter product back into global vector */
2231: PetscCall(VecSet(x, 0));
2232: PetscCall(VecScatterBegin(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE));
2233: PetscCall(VecScatterEnd(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE));
2234: PetscFunctionReturn(PETSC_SUCCESS);
2235: }
2237: static PetscErrorCode MatMultTransposeAdd_IS(Mat A, Vec v1, Vec v2, Vec v3)
2238: {
2239: Vec temp_vec;
2241:   PetscFunctionBegin; /* v3 = v2 + A' * v1 */
2242: if (v3 != v2) {
2243: PetscCall(MatMultTranspose(A, v1, v3));
2244: PetscCall(VecAXPY(v3, 1.0, v2));
2245: } else {
2246: PetscCall(VecDuplicate(v2, &temp_vec));
2247: PetscCall(MatMultTranspose(A, v1, temp_vec));
2248: PetscCall(VecAXPY(temp_vec, 1.0, v2));
2249: PetscCall(VecCopy(temp_vec, v3));
2250: PetscCall(VecDestroy(&temp_vec));
2251: }
2252: PetscFunctionReturn(PETSC_SUCCESS);
2253: }
2255: static PetscErrorCode ISLocalToGlobalMappingView_Multi(ISLocalToGlobalMapping mapping, PetscInt lsize, PetscInt gsize, const PetscInt vblocks[], PetscViewer viewer)
2256: {
2257: PetscInt tr[3], n;
2258: const PetscInt *indices;
2260: PetscFunctionBegin;
2261: tr[0] = IS_LTOGM_FILE_CLASSID;
2262: tr[1] = 1;
2263: tr[2] = gsize;
2264: PetscCall(PetscViewerBinaryWrite(viewer, tr, 3, PETSC_INT));
2265: PetscCall(PetscViewerBinaryWriteAll(viewer, vblocks, lsize, PETSC_DETERMINE, PETSC_DETERMINE, PETSC_INT));
2266: PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
2267: PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &indices));
2268: PetscCall(PetscViewerBinaryWriteAll(viewer, indices, n, PETSC_DETERMINE, PETSC_DETERMINE, PETSC_INT));
2269: PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &indices));
2270: PetscFunctionReturn(PETSC_SUCCESS);
2271: }
2273: static PetscErrorCode MatView_IS(Mat A, PetscViewer viewer)
2274: {
2275: Mat_IS *a = (Mat_IS *)A->data;
2276: PetscViewer sviewer;
2277: PetscBool isascii, isbinary, viewl2g = PETSC_FALSE, native;
2278: PetscViewerFormat format;
2279: ISLocalToGlobalMapping rmap, cmap;
2281: PetscFunctionBegin;
2282: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2283: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2284: PetscCall(PetscViewerGetFormat(viewer, &format));
2285: native = (PetscBool)(format == PETSC_VIEWER_NATIVE);
2286: if (native) {
2287: rmap = A->rmap->mapping;
2288: cmap = A->cmap->mapping;
2289: } else {
2290: rmap = a->rmapping;
2291: cmap = a->cmapping;
2292: }
2293: if (isascii) {
2294: if (format == PETSC_VIEWER_ASCII_INFO) PetscFunctionReturn(PETSC_SUCCESS);
2295: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL || format == PETSC_VIEWER_ASCII_MATLAB) viewl2g = PETSC_TRUE;
2296: } else if (isbinary) {
2297: PetscInt tr[6], nr, nc, lsize = 0;
2298: char lmattype[64] = {'\0'};
2299: PetscMPIInt size;
2300: PetscBool skipHeader, vbs = PETSC_FALSE;
2301: IS is;
2302: const PetscInt *vblocks = NULL;
2304: PetscCall(PetscViewerSetUp(viewer));
2305: PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)A)->prefix, "-mat_is_view_variableblocksizes", &vbs, NULL));
2306: if (vbs) {
2307: PetscCall(MatGetVariableBlockSizes(a->A, &lsize, &vblocks));
2308: PetscCall(PetscMPIIntCast(lsize, &size));
2309: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &size, 1, MPI_INT, MPI_SUM, PetscObjectComm((PetscObject)viewer)));
2310: } else {
2311: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
2312: }
2313: tr[0] = MAT_FILE_CLASSID;
2314: tr[1] = A->rmap->N;
2315: tr[2] = A->cmap->N;
2316: tr[3] = -size; /* AIJ stores nnz here */
2317: tr[4] = (PetscInt)(rmap == cmap);
2318: tr[5] = a->allow_repeated;
2319: PetscCall(PetscSNPrintf(lmattype, sizeof(lmattype), "%s", a->lmattype));
2321: PetscCall(PetscViewerBinaryWrite(viewer, tr, PETSC_STATIC_ARRAY_LENGTH(tr), PETSC_INT));
2322: PetscCall(PetscViewerBinaryWrite(viewer, lmattype, sizeof(lmattype), PETSC_CHAR));
2324:     /* first dump the l2g info (we need the header for proper loading on a different number of processes) */
2325: PetscCall(PetscViewerBinaryGetSkipHeader(viewer, &skipHeader));
2326: PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_FALSE));
2327: if (vbs) {
2328: PetscCall(ISLocalToGlobalMappingView_Multi(rmap, lsize, size, vblocks, viewer));
2329: if (cmap != rmap) PetscCall(ISLocalToGlobalMappingView_Multi(cmap, lsize, size, vblocks, viewer));
2330: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), lsize, vblocks, PETSC_USE_POINTER, &is));
2331: PetscCall(ISView(is, viewer));
2332: PetscCall(ISView(is, viewer));
2333: PetscCall(ISDestroy(&is));
2334: } else {
2335: PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
2336: if (cmap != rmap) PetscCall(ISLocalToGlobalMappingView(cmap, viewer));
2338: /* then the sizes of the local matrices */
2339: PetscCall(MatGetSize(a->A, &nr, &nc));
2340: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), 1, &nr, PETSC_USE_POINTER, &is));
2341: PetscCall(ISView(is, viewer));
2342: PetscCall(ISDestroy(&is));
2343: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), 1, &nc, PETSC_USE_POINTER, &is));
2344: PetscCall(ISView(is, viewer));
2345: PetscCall(ISDestroy(&is));
2346: }
2347: PetscCall(PetscViewerBinarySetSkipHeader(viewer, skipHeader));
2348: }
2349: if (format == PETSC_VIEWER_ASCII_MATLAB) {
2350: char name[64];
2351: PetscMPIInt size, rank;
2353: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
2354: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)viewer), &rank));
2355: if (size > 1) PetscCall(PetscSNPrintf(name, sizeof(name), "lmat_%d", rank));
2356: else PetscCall(PetscSNPrintf(name, sizeof(name), "lmat"));
2357: PetscCall(PetscObjectSetName((PetscObject)a->A, name));
2358: }
2360: /* Dump the local matrices */
2361: if (isbinary) { /* ViewerGetSubViewer does not work in parallel */
2362: PetscBool isaij;
2363: PetscInt nr, nc;
2364: Mat lA, B;
2365: Mat_MPIAIJ *b;
2367: /* We create a temporary MPIAIJ matrix that stores the unassembled operator */
2368: PetscCall(PetscObjectBaseTypeCompare((PetscObject)a->A, MATAIJ, &isaij));
2369: if (!isaij) PetscCall(MatConvert(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &lA));
2370: else {
2371: PetscCall(PetscObjectReference((PetscObject)a->A));
2372: lA = a->A;
2373: }
2374: PetscCall(MatCreate(PetscObjectComm((PetscObject)viewer), &B));
2375: PetscCall(MatSetType(B, MATMPIAIJ));
2376: PetscCall(MatGetSize(lA, &nr, &nc));
2377: PetscCall(MatSetSizes(B, nr, nc, PETSC_DECIDE, PETSC_DECIDE));
2378: PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
2380: b = (Mat_MPIAIJ *)B->data;
2381: PetscCall(MatDestroy(&b->A));
2382: b->A = lA;
2384: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2385: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2386: PetscCall(MatView(B, viewer));
2387: PetscCall(MatDestroy(&B));
2388: } else {
2389: PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
2390: PetscCall(MatView(a->A, sviewer));
2391: PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
2392: }
2394:   /* with ASCII, we dump the l2g maps at the end */
2395: if (viewl2g) {
2396: if (format == PETSC_VIEWER_ASCII_MATLAB) {
2397: PetscCall(PetscObjectSetName((PetscObject)rmap, "row"));
2398: PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
2399: PetscCall(PetscObjectSetName((PetscObject)cmap, "col"));
2400: PetscCall(ISLocalToGlobalMappingView(cmap, viewer));
2401: } else {
2402: PetscCall(ISLocalToGlobalMappingView(rmap, viewer));
2403: PetscCall(ISLocalToGlobalMappingView(cmap, viewer));
2404: }
2405: }
2406: PetscFunctionReturn(PETSC_SUCCESS);
2407: }
2409: static PetscErrorCode ISLocalToGlobalMappingHasRepeatedLocal_Private(ISLocalToGlobalMapping map, PetscBool *has)
2410: {
2411: const PetscInt *idxs;
2412: PetscHSetI ht;
2413: PetscInt n, bs;
2415: PetscFunctionBegin;
2416: PetscCall(ISLocalToGlobalMappingGetSize(map, &n));
2417: PetscCall(ISLocalToGlobalMappingGetBlockSize(map, &bs));
2418: PetscCall(ISLocalToGlobalMappingGetBlockIndices(map, &idxs));
2419: PetscCall(PetscHSetICreate(&ht));
2420: *has = PETSC_FALSE;
2421: for (PetscInt i = 0; i < n / bs; i++) {
2422: PetscBool missing = PETSC_TRUE;
2423: if (idxs[i] < 0) continue;
2424: PetscCall(PetscHSetIQueryAdd(ht, idxs[i], &missing));
2425: if (!missing) {
2426: *has = PETSC_TRUE;
2427: break;
2428: }
2429: }
2430: PetscCall(PetscHSetIDestroy(&ht));
2431: PetscFunctionReturn(PETSC_SUCCESS);
2432: }
2434: static PetscErrorCode MatLoad_IS(Mat A, PetscViewer viewer)
2435: {
2436: ISLocalToGlobalMapping rmap, cmap;
2437: MPI_Comm comm = PetscObjectComm((PetscObject)A);
2438: PetscBool isbinary, samel, allow, isbaij;
2439: PetscInt tr[6], M, N, nr, nc, Asize, isn;
2440: const PetscInt *idx;
2441: PetscMPIInt size;
2442: char lmattype[64];
2443: Mat dA, lA;
2444: IS is;
2446: PetscFunctionBegin;
2447: PetscCheckSameComm(A, 1, viewer, 2);
2448: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2449: PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Invalid viewer of type %s", ((PetscObject)viewer)->type_name);
2451: PetscCall(PetscViewerBinaryRead(viewer, tr, PETSC_STATIC_ARRAY_LENGTH(tr), NULL, PETSC_INT));
2452: PetscCheck(tr[0] == MAT_FILE_CLASSID, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a matrix next in file");
2453:   PetscCheck(tr[1] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2454:   PetscCheck(tr[2] >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2455:   PetscCheck(tr[3] < 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2456:   PetscCheck(tr[4] == 0 || tr[4] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2457:   PetscCheck(tr[5] == 0 || tr[5] == 1, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not an IS matrix next in file");
2458: M = tr[1];
2459: N = tr[2];
2460: Asize = -tr[3];
2461: samel = (PetscBool)tr[4];
2462: allow = (PetscBool)tr[5];
2463: PetscCall(PetscViewerBinaryRead(viewer, lmattype, sizeof(lmattype), NULL, PETSC_CHAR));
2465: /* if we are loading from a larger set of processes, allow repeated entries */
2466: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
2467: if (Asize > size) allow = PETSC_TRUE;
2469: /* set global sizes if not set already */
2470: if (A->rmap->N < 0) A->rmap->N = M;
2471: if (A->cmap->N < 0) A->cmap->N = N;
2472: PetscCall(PetscLayoutSetUp(A->rmap));
2473: PetscCall(PetscLayoutSetUp(A->cmap));
2474: PetscCheck(M == A->rmap->N, comm, PETSC_ERR_ARG_SIZ, "Matrix rows should be %" PetscInt_FMT ", found %" PetscInt_FMT, M, A->rmap->N);
2475: PetscCheck(N == A->cmap->N, comm, PETSC_ERR_ARG_SIZ, "Matrix columns should be %" PetscInt_FMT ", found %" PetscInt_FMT, N, A->cmap->N);
2477: /* load l2g maps */
2478: PetscCall(ISLocalToGlobalMappingCreate(comm, 0, 0, NULL, PETSC_USE_POINTER, &rmap));
2479: PetscCall(ISLocalToGlobalMappingLoad(rmap, viewer));
2480: if (!samel) {
2481: PetscCall(ISLocalToGlobalMappingCreate(comm, 0, 0, NULL, PETSC_USE_POINTER, &cmap));
2482: PetscCall(ISLocalToGlobalMappingLoad(cmap, viewer));
2483: } else {
2484: PetscCall(PetscObjectReference((PetscObject)rmap));
2485: cmap = rmap;
2486: }
2488: /* load sizes of local matrices */
2489: PetscCall(ISCreate(comm, &is));
2490: PetscCall(ISSetType(is, ISGENERAL));
2491: PetscCall(ISLoad(is, viewer));
2492: PetscCall(ISGetLocalSize(is, &isn));
2493: PetscCall(ISGetIndices(is, &idx));
2494: nr = 0;
2495: for (PetscInt i = 0; i < isn; i++) nr += idx[i];
2496: PetscCall(ISRestoreIndices(is, &idx));
2497: PetscCall(ISDestroy(&is));
2498: PetscCall(ISCreate(comm, &is));
2499: PetscCall(ISSetType(is, ISGENERAL));
2500: PetscCall(ISLoad(is, viewer));
2501: PetscCall(ISGetLocalSize(is, &isn));
2502: PetscCall(ISGetIndices(is, &idx));
2503: nc = 0;
2504: for (PetscInt i = 0; i < isn; i++) nc += idx[i];
2505: PetscCall(ISRestoreIndices(is, &idx));
2506: PetscCall(ISDestroy(&is));
2508: /* now load the unassembled operator */
2509: PetscCall(MatCreate(comm, &dA));
2510: PetscCall(MatSetType(dA, MATMPIAIJ));
2511: PetscCall(MatSetSizes(dA, nr, nc, PETSC_DECIDE, PETSC_DECIDE));
2512: PetscCall(MatLoad(dA, viewer));
2513: PetscCall(MatMPIAIJGetSeqAIJ(dA, &lA, NULL, NULL));
2514: PetscCall(PetscObjectReference((PetscObject)lA));
2515: PetscCall(MatDestroy(&dA));
2517: /* and convert to the desired format */
2518: PetscCall(PetscStrcmpAny(lmattype, &isbaij, MATSBAIJ, MATSEQSBAIJ, ""));
2519: if (isbaij) PetscCall(MatSetOption(lA, MAT_SYMMETRIC, PETSC_TRUE));
2520: PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
2522: /* check if we actually have repeated entries */
2523: if (allow) {
2524: PetscBool rhas, chas, hasrepeated;
2526: PetscCall(ISLocalToGlobalMappingHasRepeatedLocal_Private(rmap, &rhas));
2527: if (rmap != cmap) PetscCall(ISLocalToGlobalMappingHasRepeatedLocal_Private(cmap, &chas));
2528: else chas = rhas;
2529: hasrepeated = (PetscBool)(rhas || chas);
2530: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &hasrepeated, 1, MPI_C_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
2531: if (!hasrepeated) allow = PETSC_FALSE;
2532: }
2534: /* assemble the MATIS object */
2535: PetscCall(MatISSetAllowRepeated(A, allow));
2536: PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap));
2537: PetscCall(MatISSetLocalMat(A, lA));
2538: PetscCall(MatDestroy(&lA));
2539: PetscCall(ISLocalToGlobalMappingDestroy(&rmap));
2540: PetscCall(ISLocalToGlobalMappingDestroy(&cmap));
2541: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
2542: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
2543: PetscFunctionReturn(PETSC_SUCCESS);
2544: }
2546: static PetscErrorCode MatInvertBlockDiagonal_IS(Mat mat, const PetscScalar **values)
2547: {
2548: Mat_IS *is = (Mat_IS *)mat->data;
2549: MPI_Datatype nodeType;
2550: const PetscScalar *lv;
2551: PetscInt bs;
2552: PetscMPIInt mbs;
2554: PetscFunctionBegin;
2555: PetscCall(MatGetBlockSize(mat, &bs));
2556: PetscCall(MatSetBlockSize(is->A, bs));
2557: PetscCall(MatInvertBlockDiagonal(is->A, &lv));
2558: if (!is->bdiag) PetscCall(PetscMalloc1(bs * mat->rmap->n, &is->bdiag));
2559: PetscCall(PetscMPIIntCast(bs, &mbs));
2560: PetscCallMPI(MPI_Type_contiguous(mbs, MPIU_SCALAR, &nodeType));
2561: PetscCallMPI(MPI_Type_commit(&nodeType));
2562: PetscCall(PetscSFReduceBegin(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
2563: PetscCall(PetscSFReduceEnd(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
2564: PetscCallMPI(MPI_Type_free(&nodeType));
2565: if (values) *values = is->bdiag;
2566: PetscFunctionReturn(PETSC_SUCCESS);
2567: }
2569: static PetscErrorCode MatISSetUpScatters_Private(Mat A)
2570: {
2571: Vec cglobal, rglobal;
2572: IS from;
2573: Mat_IS *is = (Mat_IS *)A->data;
2574: PetscScalar sum;
2575: const PetscInt *garray;
2576: PetscInt nr, rbs, nc, cbs;
2577: VecType rtype;
2579: PetscFunctionBegin;
2580: PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
2581: PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
2582: PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
2583: PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
2584: PetscCall(VecDestroy(&is->x));
2585: PetscCall(VecDestroy(&is->y));
2586: PetscCall(VecDestroy(&is->counter));
2587: PetscCall(VecScatterDestroy(&is->rctx));
2588: PetscCall(VecScatterDestroy(&is->cctx));
2589: PetscCall(MatCreateVecs(is->A, &is->x, &is->y));
2590: PetscCall(VecBindToCPU(is->y, PETSC_TRUE));
2591: PetscCall(VecGetRootType_Private(is->y, &rtype));
2592: PetscCall(PetscFree(A->defaultvectype));
2593: PetscCall(PetscStrallocpy(rtype, &A->defaultvectype));
2594: PetscCall(MatCreateVecs(A, &cglobal, &rglobal));
2595: PetscCall(VecBindToCPU(rglobal, PETSC_TRUE));
2596: PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &garray));
2597: PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), rbs, nr / rbs, garray, PETSC_USE_POINTER, &from));
2598: PetscCall(VecScatterCreate(rglobal, from, is->y, NULL, &is->rctx));
2599: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &garray));
2600: PetscCall(ISDestroy(&from));
2601: if (is->rmapping != is->cmapping) {
2602: PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &garray));
2603: PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), cbs, nc / cbs, garray, PETSC_USE_POINTER, &from));
2604: PetscCall(VecScatterCreate(cglobal, from, is->x, NULL, &is->cctx));
2605: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &garray));
2606: PetscCall(ISDestroy(&from));
2607: } else {
2608: PetscCall(PetscObjectReference((PetscObject)is->rctx));
2609: is->cctx = is->rctx;
2610: }
2611: PetscCall(VecDestroy(&cglobal));
2613: /* interface counter vector (local) */
2614: PetscCall(VecDuplicate(is->y, &is->counter));
2615: PetscCall(VecBindToCPU(is->counter, PETSC_TRUE));
2616: PetscCall(VecSet(is->y, 1.));
2617: PetscCall(VecScatterBegin(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
2618: PetscCall(VecScatterEnd(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
2619: PetscCall(VecScatterBegin(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
2620: PetscCall(VecScatterEnd(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
2621: PetscCall(VecBindToCPU(is->y, PETSC_FALSE));
2622: PetscCall(VecBindToCPU(is->counter, PETSC_FALSE));
2624: /* special functions for block-diagonal matrices */
2625: PetscCall(VecSum(rglobal, &sum));
2626: A->ops->invertblockdiagonal = NULL;
2627: if ((PetscInt)(PetscRealPart(sum)) == A->rmap->N && A->rmap->N == A->cmap->N && is->rmapping == is->cmapping) A->ops->invertblockdiagonal = MatInvertBlockDiagonal_IS;
2628: PetscCall(VecDestroy(&rglobal));
2630:   /* set up the SF for general-purpose communication based on shared indices */
2631: PetscCall(MatISSetUpSF_IS(A));
2632: PetscFunctionReturn(PETSC_SUCCESS);
2633: }
2635: static PetscErrorCode MatISFilterL2GMap(Mat A, ISLocalToGlobalMapping map, ISLocalToGlobalMapping *nmap, ISLocalToGlobalMapping *lmap)
2636: {
2637: Mat_IS *matis = (Mat_IS *)A->data;
2638: IS is;
2639: ISLocalToGlobalMappingType l2gtype;
2640: const PetscInt *idxs;
2641: PetscHSetI ht;
2642: PetscInt *nidxs;
2643: PetscInt i, n, bs, c;
2644: PetscBool flg[] = {PETSC_FALSE, PETSC_FALSE};
2646: PetscFunctionBegin;
2647: PetscCall(ISLocalToGlobalMappingGetSize(map, &n));
2648: PetscCall(ISLocalToGlobalMappingGetBlockSize(map, &bs));
2649: PetscCall(ISLocalToGlobalMappingGetBlockIndices(map, &idxs));
2650: PetscCall(PetscHSetICreate(&ht));
2651: PetscCall(PetscMalloc1(n / bs, &nidxs));
2652: for (i = 0, c = 0; i < n / bs; i++) {
2653: PetscBool missing = PETSC_TRUE;
2654: if (idxs[i] < 0) {
2655: flg[0] = PETSC_TRUE;
2656: continue;
2657: }
2658: if (!matis->allow_repeated) PetscCall(PetscHSetIQueryAdd(ht, idxs[i], &missing));
2659: if (!missing) flg[1] = PETSC_TRUE;
2660: else nidxs[c++] = idxs[i];
2661: }
2662: PetscCall(PetscHSetIDestroy(&ht));
2663: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, flg, 2, MPI_C_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
2664:   if (!flg[0] && !flg[1]) { /* entries are all nonnegative and unique */
2665: *nmap = NULL;
2666: *lmap = NULL;
2667: PetscCall(PetscFree(nidxs));
2668: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
2669: PetscFunctionReturn(PETSC_SUCCESS);
2670: }
2672: /* New l2g map without negative indices (and repeated indices if not allowed) */
2673: PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, c, nidxs, PETSC_USE_POINTER, &is));
2674: PetscCall(ISLocalToGlobalMappingCreateIS(is, nmap));
2675: PetscCall(ISDestroy(&is));
2676: PetscCall(ISLocalToGlobalMappingGetType(map, &l2gtype));
2677: PetscCall(ISLocalToGlobalMappingSetType(*nmap, l2gtype));
2679: /* New local l2g map for repeated indices if not allowed */
2680: PetscCall(ISGlobalToLocalMappingApplyBlock(*nmap, IS_GTOLM_MASK, n / bs, idxs, NULL, nidxs));
2681: PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, n / bs, nidxs, PETSC_USE_POINTER, &is));
2682: PetscCall(ISLocalToGlobalMappingCreateIS(is, lmap));
2683: PetscCall(ISDestroy(&is));
2684: PetscCall(PetscFree(nidxs));
2685: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
2686: PetscFunctionReturn(PETSC_SUCCESS);
2687: }
2689: static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2690: {
2691: Mat_IS *is = (Mat_IS *)A->data;
2692: ISLocalToGlobalMapping localrmapping = NULL, localcmapping = NULL;
2693: PetscInt nr, rbs, nc, cbs;
2694: PetscBool cong, freem[] = {PETSC_FALSE, PETSC_FALSE};
2696: PetscFunctionBegin;
2697: if (rmapping) PetscCheckSameComm(A, 1, rmapping, 2);
2698: if (cmapping) PetscCheckSameComm(A, 1, cmapping, 3);
2700: PetscCall(ISLocalToGlobalMappingDestroy(&is->rmapping));
2701: PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
2702: PetscCall(PetscLayoutSetUp(A->rmap));
2703: PetscCall(PetscLayoutSetUp(A->cmap));
2704: PetscCall(MatHasCongruentLayouts(A, &cong));
2706: /* If NULL, local space matches global space */
2707: if (!rmapping) {
2708: IS is;
2710: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->N, 0, 1, &is));
2711: PetscCall(ISLocalToGlobalMappingCreateIS(is, &rmapping));
2712: PetscCall(ISLocalToGlobalMappingSetBlockSize(rmapping, A->rmap->bs));
2713: PetscCall(ISDestroy(&is));
2714: freem[0] = PETSC_TRUE;
2715: if (!cmapping && cong && A->rmap->bs == A->cmap->bs) cmapping = rmapping;
2716: } else if (!is->islocalref) { /* check if the l2g map has negative or repeated entries */
2717: PetscCall(MatISFilterL2GMap(A, rmapping, &is->rmapping, &localrmapping));
2718: if (rmapping == cmapping) {
2719: PetscCall(PetscObjectReference((PetscObject)is->rmapping));
2720: is->cmapping = is->rmapping;
2721: PetscCall(PetscObjectReference((PetscObject)localrmapping));
2722: localcmapping = localrmapping;
2723: }
2724: }
2725: if (!cmapping) {
2726: IS is;
2728: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->N, 0, 1, &is));
2729: PetscCall(ISLocalToGlobalMappingCreateIS(is, &cmapping));
2730: PetscCall(ISLocalToGlobalMappingSetBlockSize(cmapping, A->cmap->bs));
2731: PetscCall(ISDestroy(&is));
2732: freem[1] = PETSC_TRUE;
2733: } else if (cmapping != rmapping && !is->islocalref) { /* check if the l2g map has negative or repeated entries */
2734: PetscCall(MatISFilterL2GMap(A, cmapping, &is->cmapping, &localcmapping));
2735: }
2736: if (!is->rmapping) {
2737: PetscCall(PetscObjectReference((PetscObject)rmapping));
2738: is->rmapping = rmapping;
2739: }
2740: if (!is->cmapping) {
2741: PetscCall(PetscObjectReference((PetscObject)cmapping));
2742: is->cmapping = cmapping;
2743: }
2745: /* Clean up */
2746: is->lnnzstate = 0;
2747: PetscCall(MatDestroy(&is->dA));
2748: PetscCall(MatDestroy(&is->assembledA));
2749: PetscCall(MatDestroy(&is->A));
2750: if (is->csf != is->sf) {
2751: PetscCall(PetscSFDestroy(&is->csf));
2752: PetscCall(PetscFree2(is->csf_rootdata, is->csf_leafdata));
2753: } else is->csf = NULL;
2754: PetscCall(PetscSFDestroy(&is->sf));
2755: PetscCall(PetscFree2(is->sf_rootdata, is->sf_leafdata));
2756: PetscCall(PetscFree(is->bdiag));
2758: /* check if the two mappings are actually the same for square matrices since MATIS has some optimization for this case
2759: (DOLFIN passes 2 different objects) */
2760: PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
2761: PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
2762: PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
2763: PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
2764: if (is->rmapping != is->cmapping && cong) {
2765: PetscBool same = PETSC_FALSE;
2766: if (nr == nc && cbs == rbs) {
2767: const PetscInt *idxs1, *idxs2;
2769: PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &idxs1));
2770: PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &idxs2));
2771: PetscCall(PetscArraycmp(idxs1, idxs2, nr / rbs, &same));
2772: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &idxs1));
2773: PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &idxs2));
2774: }
2775: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &same, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2776: if (same) {
2777: PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
2778: PetscCall(PetscObjectReference((PetscObject)is->rmapping));
2779: is->cmapping = is->rmapping;
2780: }
2781: }
2782: PetscCall(PetscLayoutSetBlockSize(A->rmap, rbs));
2783: PetscCall(PetscLayoutSetBlockSize(A->cmap, cbs));
2784: /* Pass the user-defined maps to the layout */
2785: PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->rmap, rmapping));
2786: PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->cmap, cmapping));
2787: if (freem[0]) PetscCall(ISLocalToGlobalMappingDestroy(&rmapping));
2788: if (freem[1]) PetscCall(ISLocalToGlobalMappingDestroy(&cmapping));
2790: if (!is->islocalref) {
2791: /* Create the local matrix A */
2792: PetscCall(MatCreate(PETSC_COMM_SELF, &is->A));
2793: PetscCall(MatSetType(is->A, is->lmattype));
2794: PetscCall(MatSetSizes(is->A, nr, nc, nr, nc));
2795: PetscCall(MatSetBlockSizes(is->A, rbs, cbs));
2796: PetscCall(MatSetOptionsPrefix(is->A, "is_"));
2797: PetscCall(MatAppendOptionsPrefix(is->A, ((PetscObject)A)->prefix));
2798: PetscCall(PetscLayoutSetUp(is->A->rmap));
2799: PetscCall(PetscLayoutSetUp(is->A->cmap));
2800: PetscCall(MatSetLocalToGlobalMapping(is->A, localrmapping, localcmapping));
2801: PetscCall(ISLocalToGlobalMappingDestroy(&localrmapping));
2802: PetscCall(ISLocalToGlobalMappingDestroy(&localcmapping));
2803: /* setup scatters and local vectors for MatMult */
2804: PetscCall(MatISSetUpScatters_Private(A));
2805: }
2806: A->preallocated = PETSC_TRUE;
2807: PetscFunctionReturn(PETSC_SUCCESS);
2808: }
2810: static PetscErrorCode MatSetUp_IS(Mat A)
2811: {
2812: Mat_IS *is = (Mat_IS *)A->data;
2813: ISLocalToGlobalMapping rmap, cmap;
2815: PetscFunctionBegin;
2816: if (!is->sf) {
2817: PetscCall(MatGetLocalToGlobalMapping(A, &rmap, &cmap));
2818: PetscCall(MatSetLocalToGlobalMapping(A, rmap, cmap));
2819: }
2820: PetscFunctionReturn(PETSC_SUCCESS);
2821: }
2823: static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2824: {
2825: Mat_IS *is = (Mat_IS *)mat->data;
2826: PetscInt buf[2 * MATIS_MAX_ENTRIES_INSERTION], *rows_l = NULL, *cols_l = NULL;
2828: PetscFunctionBegin;
2829: IndexSpaceGet(buf, m, n, rows_l, cols_l);
2830: PetscCall(ISGlobalToLocalMappingApply(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l));
2831: if (m != n || rows != cols || is->cmapping != is->rmapping) {
2832: PetscCall(ISGlobalToLocalMappingApply(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l));
2833: PetscCall(MatSetValues(is->A, m, rows_l, n, cols_l, values, addv));
2834: } else {
2835: PetscCall(MatSetValues(is->A, m, rows_l, m, rows_l, values, addv));
2836: }
2837: IndexSpaceRestore(buf, m, n, rows_l, cols_l);
2838: PetscFunctionReturn(PETSC_SUCCESS);
2839: }
2841: static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2842: {
2843: Mat_IS *is = (Mat_IS *)mat->data;
2844: PetscInt buf[2 * MATIS_MAX_ENTRIES_INSERTION], *rows_l = NULL, *cols_l = NULL;
2846: PetscFunctionBegin;
2847: IndexSpaceGet(buf, m, n, rows_l, cols_l);
2848: PetscCall(ISGlobalToLocalMappingApplyBlock(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l));
2849: if (m != n || rows != cols || is->cmapping != is->rmapping) {
2850: PetscCall(ISGlobalToLocalMappingApplyBlock(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l));
2851: PetscCall(MatSetValuesBlocked(is->A, m, rows_l, n, cols_l, values, addv));
2852: } else {
2853: PetscCall(MatSetValuesBlocked(is->A, m, rows_l, m, rows_l, values, addv));
2854: }
2855: IndexSpaceRestore(buf, m, n, rows_l, cols_l);
2856: PetscFunctionReturn(PETSC_SUCCESS);
2857: }
2859: static PetscErrorCode MatSetValuesLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2860: {
2861: Mat_IS *is = (Mat_IS *)A->data;
2863: PetscFunctionBegin;
2864: if (is->A->rmap->mapping || is->A->cmap->mapping) {
2865: PetscCall(MatSetValuesLocal(is->A, m, rows, n, cols, values, addv));
2866: } else {
2867: PetscCall(MatSetValues(is->A, m, rows, n, cols, values, addv));
2868: }
2869: PetscFunctionReturn(PETSC_SUCCESS);
2870: }
2872: static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2873: {
2874: Mat_IS *is = (Mat_IS *)A->data;
2876: PetscFunctionBegin;
2877: if (is->A->rmap->mapping || is->A->cmap->mapping) {
2878: PetscCall(MatSetValuesBlockedLocal(is->A, m, rows, n, cols, values, addv));
2879: } else {
2880: PetscCall(MatSetValuesBlocked(is->A, m, rows, n, cols, values, addv));
2881: }
2882: PetscFunctionReturn(PETSC_SUCCESS);
2883: }
2885: static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, PetscBool columns)
2886: {
2887: Mat_IS *is = (Mat_IS *)A->data;
2889: PetscFunctionBegin;
2890: if (!n) PetscFunctionReturn(PETSC_SUCCESS);
2891: is->pure_neumann = PETSC_FALSE;
2893: if (columns) {
2894: PetscCall(MatZeroRowsColumns(is->A, n, rows, diag, NULL, NULL));
2895: } else {
2896: PetscCall(MatZeroRows(is->A, n, rows, diag, NULL, NULL));
2897: }
2898: if (diag != 0.) {
2899: const PetscScalar *array;
2901: PetscCall(VecGetArrayRead(is->counter, &array));
2902: for (PetscInt i = 0; i < n; i++) PetscCall(MatSetValue(is->A, rows[i], rows[i], diag / (array[rows[i]]), INSERT_VALUES));
2903: PetscCall(VecRestoreArrayRead(is->counter, &array));
2904: PetscCall(MatAssemblyBegin(is->A, MAT_FINAL_ASSEMBLY));
2905: PetscCall(MatAssemblyEnd(is->A, MAT_FINAL_ASSEMBLY));
2906: }
2907: PetscFunctionReturn(PETSC_SUCCESS);
2908: }
2910: static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b, PetscBool columns)
2911: {
2912: Mat_IS *matis = (Mat_IS *)A->data;
2913: PetscInt nr, nl, len;
2914: PetscInt *lrows = NULL;
2916: PetscFunctionBegin;
2917: if (PetscUnlikelyDebug(columns || diag != 0. || (x && b))) {
2918: PetscBool cong;
2920: PetscCall(PetscLayoutCompare(A->rmap, A->cmap, &cong));
2921: cong = (PetscBool)(cong && matis->sf == matis->csf);
2922: PetscCheck(cong || !columns, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Columns can be zeroed if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
2923: PetscCheck(cong || diag == 0., PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
2924: PetscCheck(cong || !x || !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "A->rmap and A->cmap need to be congruent, and the l2g maps need to be the same for MATIS");
2925: }
2926: PetscCall(MatGetSize(matis->A, &nl, NULL));
2927: /* get locally owned rows */
2928: PetscCall(PetscLayoutMapLocal(A->rmap, n, rows, &len, &lrows, NULL));
2929: /* fix right-hand side if needed */
2930: if (x && b) {
2931: const PetscScalar *xx;
2932: PetscScalar *bb;
2934: PetscCall(VecGetArrayRead(x, &xx));
2935: PetscCall(VecGetArray(b, &bb));
2936: for (PetscInt i = 0; i < len; ++i) bb[lrows[i]] = diag * xx[lrows[i]];
2937: PetscCall(VecRestoreArrayRead(x, &xx));
2938: PetscCall(VecRestoreArray(b, &bb));
2939: }
2940: /* get rows associated to the local matrices */
2941: PetscCall(PetscArrayzero(matis->sf_leafdata, nl));
2942: PetscCall(PetscArrayzero(matis->sf_rootdata, A->rmap->n));
2943: for (PetscInt i = 0; i < len; i++) matis->sf_rootdata[lrows[i]] = 1;
2944: PetscCall(PetscFree(lrows));
2945: PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
2946: PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
2947: PetscCall(PetscMalloc1(nl, &lrows));
2948: nr = 0;
2949: for (PetscInt i = 0; i < nl; i++)
2950: if (matis->sf_leafdata[i]) lrows[nr++] = i;
2951: PetscCall(MatISZeroRowsColumnsLocal_Private(A, nr, lrows, diag, columns));
2952: PetscCall(PetscFree(lrows));
2953: PetscFunctionReturn(PETSC_SUCCESS);
2954: }
2956: static PetscErrorCode MatZeroRows_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2957: {
2958: PetscFunctionBegin;
2959: PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_FALSE));
2960: PetscFunctionReturn(PETSC_SUCCESS);
2961: }
2963: static PetscErrorCode MatZeroRowsColumns_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2964: {
2965: PetscFunctionBegin;
2966: PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_TRUE));
2967: PetscFunctionReturn(PETSC_SUCCESS);
2968: }
2970: static PetscErrorCode MatAssemblyBegin_IS(Mat A, MatAssemblyType type)
2971: {
2972: Mat_IS *is = (Mat_IS *)A->data;
2974: PetscFunctionBegin;
2975: PetscCall(MatAssemblyBegin(is->A, type));
2976: PetscFunctionReturn(PETSC_SUCCESS);
2977: }
2979: static PetscErrorCode MatAssemblyEnd_IS(Mat A, MatAssemblyType type)
2980: {
2981: Mat_IS *is = (Mat_IS *)A->data;
2982: PetscBool lnnz;
2984: PetscFunctionBegin;
2985: PetscCall(MatAssemblyEnd(is->A, type));
2986: /* fix for local empty rows/cols */
2987: if (is->locempty && type == MAT_FINAL_ASSEMBLY) {
2988: Mat newlA;
2989: ISLocalToGlobalMapping rl2g, cl2g;
2990: IS nzr, nzc;
2991: PetscInt nr, nc, nnzr, nnzc;
2992: PetscBool lnewl2g, newl2g;
2994: PetscCall(MatGetSize(is->A, &nr, &nc));
2995: PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_FALSE, PETSC_SMALL, &nzr));
2996: if (!nzr) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nr, 0, 1, &nzr));
2997: PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_TRUE, PETSC_SMALL, &nzc));
2998: if (!nzc) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nc, 0, 1, &nzc));
2999: PetscCall(ISGetSize(nzr, &nnzr));
3000: PetscCall(ISGetSize(nzc, &nnzc));
3001: if (nnzr != nr || nnzc != nc) { /* need new global l2g map */
3002: lnewl2g = PETSC_TRUE;
3003: PetscCallMPI(MPIU_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
3005: /* extract valid submatrix */
3006: PetscCall(MatCreateSubMatrix(is->A, nzr, nzc, MAT_INITIAL_MATRIX, &newlA));
3007: } else { /* local matrix fully populated */
3008: lnewl2g = PETSC_FALSE;
3009: PetscCallMPI(MPIU_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
3010: PetscCall(PetscObjectReference((PetscObject)is->A));
3011: newlA = is->A;
3012: }
3014: /* attach new global l2g map if needed */
3015: if (newl2g) {
3016: IS zr, zc;
3017: const PetscInt *ridxs, *cidxs, *zridxs, *zcidxs;
3018: PetscInt *nidxs, i;
3020: PetscCall(ISComplement(nzr, 0, nr, &zr));
3021: PetscCall(ISComplement(nzc, 0, nc, &zc));
3022: PetscCall(PetscMalloc1(PetscMax(nr, nc), &nidxs));
3023: PetscCall(ISLocalToGlobalMappingGetIndices(is->rmapping, &ridxs));
3024: PetscCall(ISLocalToGlobalMappingGetIndices(is->cmapping, &cidxs));
3025: PetscCall(ISGetIndices(zr, &zridxs));
3026: PetscCall(ISGetIndices(zc, &zcidxs));
3027: PetscCall(ISGetLocalSize(zr, &nnzr));
3028: PetscCall(ISGetLocalSize(zc, &nnzc));
3030: PetscCall(PetscArraycpy(nidxs, ridxs, nr));
3031: for (i = 0; i < nnzr; i++) nidxs[zridxs[i]] = -1;
3032: PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nr, nidxs, PETSC_COPY_VALUES, &rl2g));
3033: PetscCall(PetscArraycpy(nidxs, cidxs, nc));
3034: for (i = 0; i < nnzc; i++) nidxs[zcidxs[i]] = -1;
3035: PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nc, nidxs, PETSC_COPY_VALUES, &cl2g));
3037: PetscCall(ISRestoreIndices(zr, &zridxs));
3038: PetscCall(ISRestoreIndices(zc, &zcidxs));
3039: PetscCall(ISLocalToGlobalMappingRestoreIndices(is->rmapping, &ridxs));
3040: PetscCall(ISLocalToGlobalMappingRestoreIndices(is->cmapping, &cidxs));
3041: PetscCall(ISDestroy(&nzr));
3042: PetscCall(ISDestroy(&nzc));
3043: PetscCall(ISDestroy(&zr));
3044: PetscCall(ISDestroy(&zc));
3045: PetscCall(PetscFree(nidxs));
3046: PetscCall(MatSetLocalToGlobalMapping(A, rl2g, cl2g));
3047: PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
3048: PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
3049: }
3050: PetscCall(MatISSetLocalMat(A, newlA));
3051: PetscCall(MatDestroy(&newlA));
3052: PetscCall(ISDestroy(&nzr));
3053: PetscCall(ISDestroy(&nzc));
3054: is->locempty = PETSC_FALSE;
3055: }
3056: lnnz = (PetscBool)(is->A->nonzerostate == is->lnnzstate);
3057: is->lnnzstate = is->A->nonzerostate;
3058: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &lnnz, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
3059: if (!lnnz) A->nonzerostate++;
3060: PetscFunctionReturn(PETSC_SUCCESS);
3061: }
3063: static PetscErrorCode MatISGetLocalMat_IS(Mat mat, Mat *local)
3064: {
3065: Mat_IS *is = (Mat_IS *)mat->data;
3067: PetscFunctionBegin;
3068: *local = is->A;
3069: PetscFunctionReturn(PETSC_SUCCESS);
3070: }
3072: static PetscErrorCode MatISRestoreLocalMat_IS(Mat mat, Mat *local)
3073: {
3074: PetscFunctionBegin;
3075: *local = NULL;
3076: PetscFunctionReturn(PETSC_SUCCESS);
3077: }
3079: /*@
3080: MatISGetLocalMat - Gets the local matrix stored inside a `MATIS` matrix.
3082: Not Collective.
3084: Input Parameter:
3085: . mat - the matrix
3087: Output Parameter:
3088: . local - the local matrix
3090: Level: intermediate
3092: Notes:
3093: This can be called if you have precomputed the nonzero structure of the
3094: matrix and want to provide it to the inner matrix object to improve the performance
3095: of the `MatSetValues()` operation.
3097: Call `MatISRestoreLocalMat()` when finished with the local matrix.
3099: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISRestoreLocalMat()`
3100: @*/
3101: PetscErrorCode MatISGetLocalMat(Mat mat, Mat *local)
3102: {
3103: PetscFunctionBegin;
3105: PetscAssertPointer(local, 2);
3106: PetscUseMethod(mat, "MatISGetLocalMat_C", (Mat, Mat *), (mat, local));
3107: PetscFunctionReturn(PETSC_SUCCESS);
3108: }
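/* Editor's note: a minimal usage sketch (not part of the original source). It assumes
   A is an already set-up MATIS matrix whose inner matrix is of type MATSEQAIJ, and that
   nnz[] holds a per-row nonzero count computed by the application:

     Mat lA;

     PetscCall(MatISGetLocalMat(A, &lA));
     PetscCall(MatSeqAIJSetPreallocation(lA, 0, nnz));  // preallocate the inner matrix directly
     PetscCall(MatISRestoreLocalMat(A, &lA));
     // values can now be inserted efficiently, e.g. with MatSetValuesLocal() on A
*/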
3110: /*@
3111: MatISRestoreLocalMat - Restores the local matrix obtained with `MatISGetLocalMat()`
3113: Not Collective.
3115: Input Parameters:
3116: + mat - the matrix
3117: - local - the local matrix
3119: Level: intermediate
3121: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISGetLocalMat()`
3122: @*/
3123: PetscErrorCode MatISRestoreLocalMat(Mat mat, Mat *local)
3124: {
3125: PetscFunctionBegin;
3127: PetscAssertPointer(local, 2);
3128: PetscUseMethod(mat, "MatISRestoreLocalMat_C", (Mat, Mat *), (mat, local));
3129: PetscFunctionReturn(PETSC_SUCCESS);
3130: }
3132: static PetscErrorCode MatISSetLocalMatType_IS(Mat mat, MatType mtype)
3133: {
3134: Mat_IS *is = (Mat_IS *)mat->data;
3136: PetscFunctionBegin;
3137: if (is->A) PetscCall(MatSetType(is->A, mtype));
3138: PetscCall(PetscFree(is->lmattype));
3139: PetscCall(PetscStrallocpy(mtype, &is->lmattype));
3140: PetscFunctionReturn(PETSC_SUCCESS);
3141: }
3143: /*@
3144: MatISSetLocalMatType - Specifies the type of the local matrix stored inside the `MATIS` matrix
3146: Logically Collective.
3148: Input Parameters:
3149: + mat - the matrix
3150: - mtype - the local matrix type
3152: Level: intermediate
3154: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetType()`, `MatType`
3155: @*/
3156: PetscErrorCode MatISSetLocalMatType(Mat mat, MatType mtype)
3157: {
3158: PetscFunctionBegin;
3160: PetscUseMethod(mat, "MatISSetLocalMatType_C", (Mat, MatType), (mat, mtype));
3161: PetscFunctionReturn(PETSC_SUCCESS);
3162: }
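/* Editor's note: a minimal sketch of selecting the inner matrix type programmatically
   (not part of the original source). comm, m, n, M, N and l2g are assumed to be provided
   by the application:

     Mat A;

     PetscCall(MatCreate(comm, &A));
     PetscCall(MatSetSizes(A, m, n, M, N));
     PetscCall(MatSetType(A, MATIS));
     PetscCall(MatISSetLocalMatType(A, MATSEQBAIJ));      // any sequential MatType is possible
     PetscCall(MatSetLocalToGlobalMapping(A, l2g, l2g));  // creates the inner matrix with the chosen type

   The same effect can be obtained at runtime with the -mat_is_localmat_type option.
*/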
3164: static PetscErrorCode MatISSetLocalMat_IS(Mat mat, Mat local)
3165: {
3166: Mat_IS *is = (Mat_IS *)mat->data;
3167: PetscInt nrows, ncols, orows, ocols;
3168: MatType mtype, otype;
3169: PetscBool sametype = PETSC_TRUE;
3171: PetscFunctionBegin;
3172: if (is->A && !is->islocalref) {
3173: PetscCall(MatGetSize(is->A, &orows, &ocols));
3174: PetscCall(MatGetSize(local, &nrows, &ncols));
3175: PetscCheck(orows == nrows && ocols == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local MATIS matrix should be of size %" PetscInt_FMT "x%" PetscInt_FMT " (passed a %" PetscInt_FMT "x%" PetscInt_FMT " matrix)", orows, ocols, nrows, ncols);
3176: PetscCall(MatGetType(local, &mtype));
3177: PetscCall(MatGetType(is->A, &otype));
3178: PetscCall(PetscStrcmp(mtype, otype, &sametype));
3179: }
3180: PetscCall(PetscObjectReference((PetscObject)local));
3181: PetscCall(MatDestroy(&is->A));
3182: is->A = local;
3183: PetscCall(MatGetType(is->A, &mtype));
3184: PetscCall(MatISSetLocalMatType(mat, mtype));
3185: if (!sametype && !is->islocalref) PetscCall(MatISSetUpScatters_Private(mat));
3186: is->lnnzstate = 0;
3187: PetscFunctionReturn(PETSC_SUCCESS);
3188: }
3190: /*@
3191: MatISSetLocalMat - Replaces the local matrix stored inside a `MATIS` object.
3193: Not Collective
3195: Input Parameters:
3196: + mat - the matrix
3197: - local - the local matrix
3199: Level: intermediate
3201: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatISSetLocalMatType`, `MatISGetLocalMat()`
3202: @*/
3203: PetscErrorCode MatISSetLocalMat(Mat mat, Mat local)
3204: {
3205: PetscFunctionBegin;
3208: PetscUseMethod(mat, "MatISSetLocalMat_C", (Mat, Mat), (mat, local));
3209: PetscFunctionReturn(PETSC_SUCCESS);
3210: }
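/* Editor's note: a minimal sketch (not part of the original source) of providing a fully
   assembled local matrix. nl is the assumed size of the local space, i.e. the local size of
   the l2g maps previously set on A; the local sizes must match those of the current inner matrix:

     Mat lA;

     PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, nl, nl, 9, NULL, &lA));
     // ... fill lA in local numbering and assemble it ...
     PetscCall(MatISSetLocalMat(A, lA));
     PetscCall(MatDestroy(&lA));  // the MATIS object keeps its own reference
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/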
3212: static PetscErrorCode MatZeroEntries_IS(Mat A)
3213: {
3214: Mat_IS *a = (Mat_IS *)A->data;
3216: PetscFunctionBegin;
3217: PetscCall(MatZeroEntries(a->A));
3218: PetscFunctionReturn(PETSC_SUCCESS);
3219: }
3221: static PetscErrorCode MatScale_IS(Mat A, PetscScalar a)
3222: {
3223: Mat_IS *is = (Mat_IS *)A->data;
3225: PetscFunctionBegin;
3226: PetscCall(MatScale(is->A, a));
3227: PetscFunctionReturn(PETSC_SUCCESS);
3228: }
3230: static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
3231: {
3232: Mat_IS *is = (Mat_IS *)A->data;
3234: PetscFunctionBegin;
3235: /* get diagonal of the local matrix */
3236: PetscCall(MatGetDiagonal(is->A, is->y));
3238: /* scatter diagonal back into global vector */
3239: PetscCall(VecSet(v, 0));
3240: PetscCall(VecScatterBegin(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE));
3241: PetscCall(VecScatterEnd(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE));
3242: PetscFunctionReturn(PETSC_SUCCESS);
3243: }
3245: static PetscErrorCode MatSetOption_IS(Mat A, MatOption op, PetscBool flg)
3246: {
3247: Mat_IS *a = (Mat_IS *)A->data;
3249: PetscFunctionBegin;
3250: PetscCall(MatSetOption(a->A, op, flg));
3251: PetscFunctionReturn(PETSC_SUCCESS);
3252: }
3254: static PetscErrorCode MatAXPY_IS(Mat Y, PetscScalar a, Mat X, MatStructure str)
3255: {
3256: Mat_IS *y = (Mat_IS *)Y->data;
3257: Mat_IS *x;
3259: PetscFunctionBegin;
3260: if (PetscDefined(USE_DEBUG)) {
3261: PetscBool ismatis;
3262: PetscCall(PetscObjectTypeCompare((PetscObject)X, MATIS, &ismatis));
3263: PetscCheck(ismatis, PetscObjectComm((PetscObject)Y), PETSC_ERR_SUP, "Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
3264: }
3265: x = (Mat_IS *)X->data;
3266: PetscCall(MatAXPY(y->A, a, x->A, str));
3267: PetscFunctionReturn(PETSC_SUCCESS);
3268: }
3270: static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A, IS row, IS col, Mat *submat)
3271: {
3272: Mat lA;
3273: Mat_IS *matis = (Mat_IS *)A->data;
3274: ISLocalToGlobalMapping rl2g, cl2g;
3275: IS is;
3276: const PetscInt *rg, *rl;
3277: PetscInt nrg, rbs, cbs;
3278: PetscInt N, M, nrl, i, *idxs;
3280: PetscFunctionBegin;
3281: PetscCall(ISGetBlockSize(row, &rbs));
3282: PetscCall(ISGetBlockSize(col, &cbs));
3283: PetscCall(ISLocalToGlobalMappingGetIndices(A->rmap->mapping, &rg));
3284: PetscCall(ISGetLocalSize(row, &nrl));
3285: PetscCall(ISGetIndices(row, &rl));
3286: PetscCall(ISLocalToGlobalMappingGetSize(A->rmap->mapping, &nrg));
3287: if (PetscDefined(USE_DEBUG)) {
3288: for (i = 0; i < nrl; i++) PetscCheck(rl[i] < nrg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local row index %" PetscInt_FMT " -> %" PetscInt_FMT " greater than maximum possible %" PetscInt_FMT, i, rl[i], nrg);
3289: }
3290: if (nrg % rbs) nrg = rbs * (nrg / rbs + 1);
3291: PetscCall(PetscMalloc1(nrg, &idxs));
3292: /* map from [0,nrl) to row */
3293: for (i = 0; i < nrl; i++) idxs[i] = rl[i];
3294: for (i = nrl; i < nrg; i++) idxs[i] = -1;
3295: PetscCall(ISRestoreIndices(row, &rl));
3296: PetscCall(ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping, &rg));
3297: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), nrg, idxs, PETSC_OWN_POINTER, &is));
3298: PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
3299: PetscCall(ISLocalToGlobalMappingSetBlockSize(rl2g, rbs));
3300: PetscCall(ISDestroy(&is));
3301: /* compute new l2g map for columns */
3302: if (col != row || matis->rmapping != matis->cmapping || matis->A->rmap->mapping != matis->A->cmap->mapping) {
3303: const PetscInt *cg, *cl;
3304: PetscInt ncg;
3305: PetscInt ncl;
3307: PetscCall(ISLocalToGlobalMappingGetIndices(A->cmap->mapping, &cg));
3308: PetscCall(ISGetLocalSize(col, &ncl));
3309: PetscCall(ISGetIndices(col, &cl));
3310: PetscCall(ISLocalToGlobalMappingGetSize(A->cmap->mapping, &ncg));
3311: if (PetscDefined(USE_DEBUG)) {
3312: for (i = 0; i < ncl; i++) PetscCheck(cl[i] < ncg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local column index %" PetscInt_FMT " -> %" PetscInt_FMT " greater than maximum possible %" PetscInt_FMT, i, cl[i], ncg);
3313: }
3314: if (ncg % cbs) ncg = cbs * (ncg / cbs + 1);
3315: PetscCall(PetscMalloc1(ncg, &idxs));
3316: /* map from [0,ncl) to col */
3317: for (i = 0; i < ncl; i++) idxs[i] = cl[i];
3318: for (i = ncl; i < ncg; i++) idxs[i] = -1;
3319: PetscCall(ISRestoreIndices(col, &cl));
3320: PetscCall(ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping, &cg));
3321: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), ncg, idxs, PETSC_OWN_POINTER, &is));
3322: PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
3323: PetscCall(ISLocalToGlobalMappingSetBlockSize(cl2g, cbs));
3324: PetscCall(ISDestroy(&is));
3325: } else {
3326: PetscCall(PetscObjectReference((PetscObject)rl2g));
3327: cl2g = rl2g;
3328: }
3330: /* create the MATIS submatrix */
3331: PetscCall(MatGetSize(A, &M, &N));
3332: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), submat));
3333: PetscCall(MatSetSizes(*submat, PETSC_DECIDE, PETSC_DECIDE, M, N));
3334: PetscCall(MatSetType(*submat, MATIS));
3335: matis = (Mat_IS *)((*submat)->data);
3336: matis->islocalref = A;
3337: PetscCall(MatSetLocalToGlobalMapping(*submat, rl2g, cl2g));
3338: PetscCall(MatISGetLocalMat(A, &lA));
3339: PetscCall(MatISSetLocalMat(*submat, lA));
3340: PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
3341: PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
3343: /* remove unsupported ops */
3344: PetscCall(PetscMemzero((*submat)->ops, sizeof(struct _MatOps)));
3345: (*submat)->ops->destroy = MatDestroy_IS;
3346: (*submat)->ops->setvalueslocal = MatSetValuesLocal_SubMat_IS;
3347: (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
3348: (*submat)->ops->zerorowslocal = MatZeroRowsLocal_SubMat_IS;
3349: (*submat)->ops->zerorowscolumnslocal = MatZeroRowsColumnsLocal_SubMat_IS;
3350: (*submat)->ops->getlocalsubmatrix = MatGetLocalSubMatrix_IS;
3351: PetscFunctionReturn(PETSC_SUCCESS);
3352: }
3354: static PetscErrorCode MatSetFromOptions_IS(Mat A, PetscOptionItems PetscOptionsObject)
3355: {
3356: Mat_IS *a = (Mat_IS *)A->data;
3357: char type[256];
3358: PetscBool flg;
3360: PetscFunctionBegin;
3361: PetscOptionsHeadBegin(PetscOptionsObject, "MATIS options");
3362: PetscCall(PetscOptionsDeprecated("-matis_keepassembled", "-mat_is_keepassembled", "3.21", NULL));
3363: PetscCall(PetscOptionsDeprecated("-matis_fixempty", "-mat_is_fixempty", "3.21", NULL));
3364: PetscCall(PetscOptionsDeprecated("-matis_storel2l", "-mat_is_storel2l", "3.21", NULL));
3365: PetscCall(PetscOptionsDeprecated("-matis_localmat_type", "-mat_is_localmat_type", "3.21", NULL));
3366: PetscCall(PetscOptionsBool("-mat_is_keepassembled", "Store an assembled version if needed", NULL, a->keepassembled, &a->keepassembled, NULL));
3367: PetscCall(PetscOptionsBool("-mat_is_fixempty", "Fix local matrices in case of empty local rows/columns", "MatISFixLocalEmpty", a->locempty, &a->locempty, NULL));
3368: PetscCall(PetscOptionsBool("-mat_is_storel2l", "Store local-to-local matrices generated from PtAP operations", "MatISStoreL2L", a->storel2l, &a->storel2l, NULL));
3369: PetscCall(PetscOptionsBool("-mat_is_allow_repeated", "Allow local repeated entries", "MatISSetAllowRepeated", a->allow_repeated, &a->allow_repeated, NULL));
3370: PetscCall(PetscOptionsFList("-mat_is_localmat_type", "Matrix type", "MatISSetLocalMatType", MatList, a->lmattype, type, 256, &flg));
3371: if (flg) PetscCall(MatISSetLocalMatType(A, type));
3372: if (a->A) PetscCall(MatSetFromOptions(a->A));
3373: PetscOptionsHeadEnd();
3374: PetscFunctionReturn(PETSC_SUCCESS);
3375: }
3377: /*@
3378: MatCreateIS - Creates a "process" unassembled matrix, i.e. a matrix assembled on each process but not across processes.
3380: Collective.
3382: Input Parameters:
3383: + comm - MPI communicator that will share the matrix
3384: . bs - block size of the matrix
3385: . m - local size of left vector used in matrix vector products
3386: . n - local size of right vector used in matrix vector products
3387: . M - global size of left vector used in matrix vector products
3388: . N - global size of right vector used in matrix vector products
3389: . rmap - local to global map for rows
3390: - cmap - local to global map for cols
3392: Output Parameter:
3393: . A - the resulting matrix
3395: Level: intermediate
3397: Notes:
3398: `m` and `n` are NOT related to the size of the map; they represent the size of the local parts of the distributed vectors
3399: used in `MatMult()` operations. The local sizes of `rmap` and `cmap` define the size of the local matrices.
3401: If `rmap` (`cmap`) is `NULL`, then the local row (column) space matches the global space.
3403: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetLocalToGlobalMapping()`
3404: @*/
3405: PetscErrorCode MatCreateIS(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, ISLocalToGlobalMapping rmap, ISLocalToGlobalMapping cmap, Mat *A)
3406: {
3407: PetscFunctionBegin;
3408: PetscCall(MatCreate(comm, A));
3409: PetscCall(MatSetSizes(*A, m, n, M, N));
3410: if (bs > 0) PetscCall(MatSetBlockSize(*A, bs));
3411: PetscCall(MatSetType(*A, MATIS));
3412: PetscCall(MatSetLocalToGlobalMapping(*A, rmap, cmap));
3413: PetscFunctionReturn(PETSC_SUCCESS);
3414: }
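/* Editor's note: a minimal creation sketch (not part of the original source). nloc local
   dofs with global indices gidx[] and global size M are assumed to be provided by the
   application; the same map is used for rows and columns:

     ISLocalToGlobalMapping l2g;
     Mat                    A;

     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nloc, gidx, PETSC_COPY_VALUES, &l2g));
     PetscCall(MatCreateIS(comm, 1, PETSC_DECIDE, PETSC_DECIDE, M, M, l2g, l2g, &A));
     PetscCall(ISLocalToGlobalMappingDestroy(&l2g));
     // insert element contributions with MatSetValuesLocal(), then call MatAssemblyBegin/End()
*/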
3416: static PetscErrorCode MatHasOperation_IS(Mat A, MatOperation op, PetscBool *has)
3417: {
3418: Mat_IS *a = (Mat_IS *)A->data;
3419: MatOperation tobefiltered[] = {MATOP_MULT_ADD, MATOP_MULT_TRANSPOSE_ADD, MATOP_GET_DIAGONAL_BLOCK, MATOP_INCREASE_OVERLAP};
3421: PetscFunctionBegin;
3422: *has = PETSC_FALSE;
3423: if (!((void **)A->ops)[op] || !a->A) PetscFunctionReturn(PETSC_SUCCESS);
3424: *has = PETSC_TRUE;
3425: for (PetscInt i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(tobefiltered); i++)
3426: if (op == tobefiltered[i]) PetscFunctionReturn(PETSC_SUCCESS);
3427: PetscCall(MatHasOperation(a->A, op, has));
3428: PetscFunctionReturn(PETSC_SUCCESS);
3429: }
3431: static PetscErrorCode MatSetValuesCOO_IS(Mat A, const PetscScalar v[], InsertMode imode)
3432: {
3433: Mat_IS *a = (Mat_IS *)A->data;
3435: PetscFunctionBegin;
3436: PetscCall(MatSetValuesCOO(a->A, v, imode));
3437: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
3438: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
3439: PetscFunctionReturn(PETSC_SUCCESS);
3440: }
3442: static PetscErrorCode MatSetPreallocationCOOLocal_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
3443: {
3444: Mat_IS *a = (Mat_IS *)A->data;
3446: PetscFunctionBegin;
3447: PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
3448: if (a->A->rmap->mapping || a->A->cmap->mapping) {
3449: PetscCall(MatSetPreallocationCOOLocal(a->A, ncoo, coo_i, coo_j));
3450: } else {
3451: PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
3452: }
3453: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
3454: A->preallocated = PETSC_TRUE;
3455: PetscFunctionReturn(PETSC_SUCCESS);
3456: }
3458: static PetscErrorCode MatSetPreallocationCOO_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
3459: {
3460: Mat_IS *a = (Mat_IS *)A->data;
3461: PetscInt ncoo_i;
3463: PetscFunctionBegin;
3464: PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
3465: PetscCall(PetscIntCast(ncoo, &ncoo_i));
3466: PetscCall(ISGlobalToLocalMappingApply(a->rmapping, IS_GTOLM_MASK, ncoo_i, coo_i, NULL, coo_i));
3467: PetscCall(ISGlobalToLocalMappingApply(a->cmapping, IS_GTOLM_MASK, ncoo_i, coo_j, NULL, coo_j));
3468: PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
3469: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
3470: A->preallocated = PETSC_TRUE;
3471: PetscFunctionReturn(PETSC_SUCCESS);
3472: }
3474: static PetscErrorCode MatISGetAssembled_Private(Mat A, Mat *tA)
3475: {
3476: Mat_IS *a = (Mat_IS *)A->data;
3477: PetscObjectState Astate, aAstate = PETSC_INT_MIN;
3478: PetscObjectState Annzstate, aAnnzstate = PETSC_INT_MIN;
3480: PetscFunctionBegin;
3481: PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
3482: Annzstate = A->nonzerostate;
3483: if (a->assembledA) {
3484: PetscCall(PetscObjectStateGet((PetscObject)a->assembledA, &aAstate));
3485: aAnnzstate = a->assembledA->nonzerostate;
3486: }
3487: if (aAnnzstate != Annzstate) PetscCall(MatDestroy(&a->assembledA));
3488: if (Astate != aAstate || !a->assembledA) {
3489: MatType aAtype;
3490: PetscMPIInt size;
3491: PetscInt rbs, cbs, bs;
3493: /* the assembled form is used as temporary storage for parallel operations
3494: such as MatCreateSubMatrices(); do not waste device memory */
3495: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
3496: PetscCall(ISLocalToGlobalMappingGetBlockSize(a->cmapping, &cbs));
3497: PetscCall(ISLocalToGlobalMappingGetBlockSize(a->rmapping, &rbs));
3498: bs = rbs == cbs ? rbs : 1;
3499: if (a->assembledA) PetscCall(MatGetType(a->assembledA, &aAtype));
3500: else if (size > 1) aAtype = bs > 1 ? MATMPIBAIJ : MATMPIAIJ;
3501: else aAtype = bs > 1 ? MATSEQBAIJ : MATSEQAIJ;
3503: PetscCall(MatConvert(A, aAtype, a->assembledA ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &a->assembledA));
3504: PetscCall(PetscObjectStateSet((PetscObject)a->assembledA, Astate));
3505: a->assembledA->nonzerostate = Annzstate;
3506: }
3507: PetscCall(PetscObjectReference((PetscObject)a->assembledA));
3508: *tA = a->assembledA;
3509: if (!a->keepassembled) PetscCall(MatDestroy(&a->assembledA));
3510: PetscFunctionReturn(PETSC_SUCCESS);
3511: }
3513: static PetscErrorCode MatISRestoreAssembled_Private(Mat A, Mat *tA)
3514: {
3515: PetscFunctionBegin;
3516: PetscCall(MatDestroy(tA));
3517: *tA = NULL;
3518: PetscFunctionReturn(PETSC_SUCCESS);
3519: }
3521: static PetscErrorCode MatGetDiagonalBlock_IS(Mat A, Mat *dA)
3522: {
3523: Mat_IS *a = (Mat_IS *)A->data;
3524: PetscObjectState Astate, dAstate = PETSC_INT_MIN;
3526: PetscFunctionBegin;
3527: PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
3528: if (a->dA) PetscCall(PetscObjectStateGet((PetscObject)a->dA, &dAstate));
3529: if (Astate != dAstate) {
3530: Mat tA;
3531: MatType ltype;
3533: PetscCall(MatDestroy(&a->dA));
3534: PetscCall(MatISGetAssembled_Private(A, &tA));
3535: PetscCall(MatGetDiagonalBlock(tA, &a->dA));
3536: PetscCall(MatPropagateSymmetryOptions(tA, a->dA));
3537: PetscCall(MatGetType(a->A, &ltype));
3538: PetscCall(MatConvert(a->dA, ltype, MAT_INPLACE_MATRIX, &a->dA));
3539: PetscCall(PetscObjectReference((PetscObject)a->dA));
3540: PetscCall(MatISRestoreAssembled_Private(A, &tA));
3541: PetscCall(PetscObjectStateSet((PetscObject)a->dA, Astate));
3542: }
3543: *dA = a->dA;
3544: PetscFunctionReturn(PETSC_SUCCESS);
3545: }
3547: static PetscErrorCode MatCreateSubMatrices_IS(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse reuse, Mat *submat[])
3548: {
3549: Mat tA;
3551: PetscFunctionBegin;
3552: PetscCall(MatISGetAssembled_Private(A, &tA));
3553: PetscCall(MatCreateSubMatrices(tA, n, irow, icol, reuse, submat));
3554: /* MatCreateSubMatrices_MPIAIJ is a mess at the moment */
3555: #if 0
3556: {
3557: Mat_IS *a = (Mat_IS*)A->data;
3558: MatType ltype;
3559: VecType vtype;
3560: char *flg;
3562: PetscCall(MatGetType(a->A,&ltype));
3563: PetscCall(MatGetVecType(a->A,&vtype));
3564: PetscCall(PetscStrstr(vtype,"cuda",&flg));
3565: if (!flg) PetscCall(PetscStrstr(vtype,"hip",&flg));
3566: if (!flg) PetscCall(PetscStrstr(vtype,"kokkos",&flg));
3567: if (flg) {
3568: for (PetscInt i = 0; i < n; i++) {
3569: Mat sA = (*submat)[i];
3571: PetscCall(MatConvert(sA,ltype,MAT_INPLACE_MATRIX,&sA));
3572: (*submat)[i] = sA;
3573: }
3574: }
3575: }
3576: #endif
3577: PetscCall(MatISRestoreAssembled_Private(A, &tA));
3578: PetscFunctionReturn(PETSC_SUCCESS);
3579: }
3581: static PetscErrorCode MatIncreaseOverlap_IS(Mat A, PetscInt n, IS is[], PetscInt ov)
3582: {
3583: Mat tA;
3585: PetscFunctionBegin;
3586: PetscCall(MatISGetAssembled_Private(A, &tA));
3587: PetscCall(MatIncreaseOverlap(tA, n, is, ov));
3588: PetscCall(MatISRestoreAssembled_Private(A, &tA));
3589: PetscFunctionReturn(PETSC_SUCCESS);
3590: }
3592: /*@
3593: MatISGetLocalToGlobalMapping - Gets the local-to-global numbering of the `MATIS` object
3595: Not Collective
3597: Input Parameter:
3598: . A - the matrix
3600: Output Parameters:
3601: + rmapping - row mapping
3602: - cmapping - column mapping
3604: Level: advanced
3606: Note:
3607: The returned maps can differ from those used to construct the `MATIS` object: negative indices are always filtered out, and repeated indices as well, unless repeated entries are allowed via `MatISSetAllowRepeated()`.
3609: .seealso: [](ch_matrices), `Mat`, `MATIS`, `MatSetLocalToGlobalMapping()`
3610: @*/
3611: PetscErrorCode MatISGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
3612: {
3613: PetscFunctionBegin;
3616: if (rmapping) PetscAssertPointer(rmapping, 2);
3617: if (cmapping) PetscAssertPointer(cmapping, 3);
3618: PetscUseMethod(A, "MatISGetLocalToGlobalMapping_C", (Mat, ISLocalToGlobalMapping *, ISLocalToGlobalMapping *), (A, rmapping, cmapping));
3619: PetscFunctionReturn(PETSC_SUCCESS);
3620: }
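/* Editor's note: a minimal query sketch (not part of the original source). The returned
   mappings are owned by the matrix and must not be destroyed by the caller:

     ISLocalToGlobalMapping rl2g, cl2g;
     PetscInt               nr, nc;

     PetscCall(MatISGetLocalToGlobalMapping(A, &rl2g, &cl2g));
     PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &nr));
     PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &nc));
     // nr x nc is the size of the local matrix returned by MatISGetLocalMat()
*/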
3622: static PetscErrorCode MatISGetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping *r, ISLocalToGlobalMapping *c)
3623: {
3624: Mat_IS *a = (Mat_IS *)A->data;
3626: PetscFunctionBegin;
3627: if (r) *r = a->rmapping;
3628: if (c) *c = a->cmapping;
3629: PetscFunctionReturn(PETSC_SUCCESS);
3630: }
3632: static PetscErrorCode MatSetBlockSizes_IS(Mat A, PetscInt rbs, PetscInt cbs)
3633: {
3634: Mat_IS *a = (Mat_IS *)A->data;
3636: PetscFunctionBegin;
3637: if (a->A) PetscCall(MatSetBlockSizes(a->A, rbs, cbs));
3638: PetscFunctionReturn(PETSC_SUCCESS);
3639: }
3641: /*MC
3642: MATIS - MATIS = "is" - A matrix type to be used for non-overlapping domain decomposition methods (e.g. `PCBDDC` or `KSPFETIDP`).
3643: This stores the matrices in globally unassembled form and the parallel matrix vector product is handled "implicitly".
3645: Options Database Keys:
3646: + -mat_type is - Set the matrix type to `MATIS`.
3647: . -mat_is_allow_repeated - Allow repeated entries in the local part of the local to global maps.
3648: . -mat_is_fixempty - Fix local matrices in case of empty local rows/columns.
3649: - -mat_is_storel2l - Store the local-to-local operators generated by the Galerkin process of `MatPtAP()`.
3651: Level: intermediate
3653: Notes:
3654: Options for the inner matrix are prefixed with `is_`, i.e. they are given by `-is_mat_xxx`
3656: You must call `MatSetLocalToGlobalMapping()` before using this matrix type.
3658: You can preallocate the local matrix after obtaining it with
3659: `MatISGetLocalMat()`; alternatively, you can use `MatISSetPreallocation()` or `MatXAIJSetPreallocation()`.
3661: .seealso: [](ch_matrices), `Mat`, `MATIS`, `Mat`, `MatISGetLocalMat()`, `MatSetLocalToGlobalMapping()`, `MatISSetPreallocation()`, `MatCreateIS()`, `PCBDDC`, `KSPFETIDP`
3662: M*/
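/* Editor's note: a minimal sketch (not part of the original source) of the construction path
   described in the notes above, i.e. without MatCreateIS(). comm, M and l2g are assumed to be
   provided by the application:

     Mat A;

     PetscCall(MatCreate(comm, &A));
     PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, M));
     PetscCall(MatSetType(A, MATIS));
     PetscCall(MatSetFromOptions(A));                      // e.g. -mat_is_localmat_type, -mat_is_fixempty
     PetscCall(MatSetLocalToGlobalMapping(A, l2g, l2g));   // mandatory before inserting values
*/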
3663: PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
3664: {
3665: Mat_IS *a;
3667: PetscFunctionBegin;
3668: PetscCall(PetscNew(&a));
3669: PetscCall(PetscStrallocpy(MATAIJ, &a->lmattype));
3670: A->data = (void *)a;
3672: /* matrix ops */
3673: PetscCall(PetscMemzero(A->ops, sizeof(struct _MatOps)));
3674: A->ops->mult = MatMult_IS;
3675: A->ops->multadd = MatMultAdd_IS;
3676: A->ops->multtranspose = MatMultTranspose_IS;
3677: A->ops->multtransposeadd = MatMultTransposeAdd_IS;
3678: A->ops->destroy = MatDestroy_IS;
3679: A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
3680: A->ops->setvalues = MatSetValues_IS;
3681: A->ops->setvaluesblocked = MatSetValuesBlocked_IS;
3682: A->ops->setvalueslocal = MatSetValuesLocal_IS;
3683: A->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_IS;
3684: A->ops->zerorows = MatZeroRows_IS;
3685: A->ops->zerorowscolumns = MatZeroRowsColumns_IS;
3686: A->ops->assemblybegin = MatAssemblyBegin_IS;
3687: A->ops->assemblyend = MatAssemblyEnd_IS;
3688: A->ops->view = MatView_IS;
3689: A->ops->load = MatLoad_IS;
3690: A->ops->zeroentries = MatZeroEntries_IS;
3691: A->ops->scale = MatScale_IS;
3692: A->ops->getdiagonal = MatGetDiagonal_IS;
3693: A->ops->setoption = MatSetOption_IS;
3694: A->ops->ishermitian = MatIsHermitian_IS;
3695: A->ops->issymmetric = MatIsSymmetric_IS;
3696: A->ops->isstructurallysymmetric = MatIsStructurallySymmetric_IS;
3697: A->ops->duplicate = MatDuplicate_IS;
3698: A->ops->missingdiagonal = MatMissingDiagonal_IS;
3699: A->ops->copy = MatCopy_IS;
3700: A->ops->getlocalsubmatrix = MatGetLocalSubMatrix_IS;
3701: A->ops->createsubmatrix = MatCreateSubMatrix_IS;
3702: A->ops->axpy = MatAXPY_IS;
3703: A->ops->diagonalset = MatDiagonalSet_IS;
3704: A->ops->shift = MatShift_IS;
3705: A->ops->transpose = MatTranspose_IS;
3706: A->ops->getinfo = MatGetInfo_IS;
3707: A->ops->diagonalscale = MatDiagonalScale_IS;
3708: A->ops->setfromoptions = MatSetFromOptions_IS;
3709: A->ops->setup = MatSetUp_IS;
3710: A->ops->hasoperation = MatHasOperation_IS;
3711: A->ops->getdiagonalblock = MatGetDiagonalBlock_IS;
3712: A->ops->createsubmatrices = MatCreateSubMatrices_IS;
3713: A->ops->increaseoverlap = MatIncreaseOverlap_IS;
3714: A->ops->setblocksizes = MatSetBlockSizes_IS;
3716: /* special MATIS functions */
3717: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", MatISSetLocalMatType_IS));
3718: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", MatISGetLocalMat_IS));
3719: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISRestoreLocalMat_C", MatISRestoreLocalMat_IS));
3720: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", MatISSetLocalMat_IS));
3721: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", MatISSetPreallocation_IS));
3722: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetAllowRepeated_C", MatISSetAllowRepeated_IS));
3723: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", MatISStoreL2L_IS));
3724: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", MatISFixLocalEmpty_IS));
3725: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", MatISGetLocalToGlobalMapping_IS));
3726: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", MatConvert_IS_XAIJ));
3727: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", MatConvert_IS_XAIJ));
3728: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", MatConvert_IS_XAIJ));
3729: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", MatConvert_IS_XAIJ));
3730: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", MatConvert_IS_XAIJ));
3731: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", MatConvert_IS_XAIJ));
3732: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", MatConvert_IS_XAIJ));
3733: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", MatSetPreallocationCOOLocal_IS));
3734: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_IS));
3735: PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATIS));
3736: PetscFunctionReturn(PETSC_SUCCESS);
3737: }