Actual source code: mpiaij.h

#pragma once

#include <../src/mat/impls/aij/seq/aij.h>

typedef struct { /* used by MatCreateMPIAIJSumSeqAIJ for reusing the merged matrix */
  PetscLayout  rowmap;
  PetscInt   **buf_ri, **buf_rj;
  PetscMPIInt *len_s, *len_r, *id_r; /* arrays of length comm->size; lengths and source ranks of the send/recv matrix values */
  PetscMPIInt  nsend, nrecv;
  PetscInt    *bi, *bj;               /* i and j arrays of the local portion of the MPI matrix C (matrix product) - rename to ci, cj! */
  PetscInt    *owners_co, *coi, *coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
} Mat_Merge_SeqsToMPI;

typedef struct {                                /* used by MatPtAPXXX_MPIAIJ_MPIAIJ() and MatMatMultXXX_MPIAIJ_MPIAIJ() */
  PetscInt              *startsj_s, *startsj_r; /* used by MatGetBrowsOfAoCols_MPIAIJ */
  PetscScalar           *bufa;                  /* used by MatGetBrowsOfAoCols_MPIAIJ */
  Mat                    P_loc, P_oth;          /* partial B_seq -- intended to replace B_seq */
  PetscInt              *api, *apj;             /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscScalar           *apv;
  MatReuse               reuse; /* flag to skip MatGetBrowsOfAoCols_MPIAIJ() and MatMPIAIJGetLocalMat() in the first call of MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  PetscScalar           *apa;   /* tmp array to store a row of A*P used in MatMatMult() */
  Mat                    A_loc; /* used by MatTransposeMatMult(), contains api and apj */
  ISLocalToGlobalMapping ltog;  /* mapping from local column indices to global column indices for A_loc */
  Mat                    Pt;    /* used by MatTransposeMatMult(), Pt = P^T */
  Mat                    Rd, Ro, AP_loc, C_loc, C_oth;
  PetscInt               algType; /* implementation algorithm */
  PetscSF                sf;      /* used to communicate the remote part of C */
  PetscInt              *c_othi, *c_rmti;

  Mat_Merge_SeqsToMPI *merge;
} Mat_APMPI;

#if defined(PETSC_USE_CTABLE)
  #define PETSCTABLE PetscHMapI
#else
  #define PETSCTABLE PetscInt *
#endif
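
/*
  Illustrative sketch (not part of this header): PETSCTABLE is the type of the 'colmap' field
  below, which translates global column numbers of off-diagonal entries into local column
  numbers of B. A lookup therefore branches on PETSC_USE_CTABLE, roughly as follows; the
  hash-map accessor and the +1 shift (so that 0 can mean "not present") are shown only as
  assumptions for illustration:

    PetscInt lcol;
  #if defined(PETSC_USE_CTABLE)
    PetscCall(PetscHMapIGetWithDefault(aij->colmap, gcol + 1, 0, &lcol));
    lcol--;
  #else
    lcol = aij->colmap[gcol] - 1;   // dense array indexed directly by the global column
  #endif
*/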

// Shared by MPIAIJ, MPIBAIJ, MPISBAIJ so that we can access common fields in the same way.
#define MPIAIJHEADER \
  Mat         A, B; /* local submatrices: A (diag part), B (off-diag part) */ \
  PetscMPIInt size; /* size of communicator */ \
  PetscMPIInt rank; /* rank of proc in communicator */ \
\
  /* The following variables are used for matrix assembly */ \
  PetscBool    donotstash;        /* if true, off-process entries are dropped */ \
  MPI_Request *send_waits;        /* array of send requests */ \
  MPI_Request *recv_waits;        /* array of receive requests */ \
  PetscInt     nsends, nrecvs;    /* numbers of sends and receives */ \
  MatScalar   *svalues, *rvalues; /* sending and receiving data */ \
  PetscInt     rmax;              /* maximum message length */ \
  PETSCTABLE   colmap;            /* maps global column numbers of off-diagonal entries to local column numbers of B */ \
  PetscInt    *garray;            /* global column numbers of the local columns of B */ \
\
  /* The following variables are used for matrix-vector products */ \
  Vec        lvec;        /* local vector */ \
  VecScatter Mvctx;       /* scatter context for vector */ \
  PetscBool  roworiented; /* if true, row-oriented input, default true */ \
\
  /* The following variables are for MatGetRow() */ \
  PetscInt    *rowindices; /* column indices for row */ \
  PetscScalar *rowvalues;  /* nonzero values in row */ \
  PetscBool    getrowactive

typedef struct {
  MPIAIJHEADER;
  Vec       diag;
  PetscInt *ld; /* number of entries per row left of diagonal block */

  /* Used by device classes */
  void *spptr;

  struct _MatOps cops;
} Mat_MPIAIJ;
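
/*
  Illustrative sketch (not part of this header): an MPIAIJ implementation routine typically
  pulls this struct out of Mat::data and then works on the two sequential blocks, e.g.

    static PetscErrorCode MyRoutine_MPIAIJ(Mat mat)    // hypothetical helper, for illustration
    {
      Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data;
      Mat_SeqAIJ *ad  = (Mat_SeqAIJ *)aij->A->data;    // diagonal block in CSR form
      Mat_SeqAIJ *ao  = (Mat_SeqAIJ *)aij->B->data;    // off-diagonal block, compressed columns

      PetscFunctionBegin;
      /* the global column of the k-th entry in row i of B is aij->garray[ao->j[ao->i[i] + k]] */
      PetscFunctionReturn(PETSC_SUCCESS);
    }
*/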

typedef struct {
  PetscCount   n;                          /* Number of COOs passed to MatSetPreallocationCOO() */
  PetscSF      sf;                         /* SF to send/recv remote values in MatSetValuesCOO() */
  PetscCount   Annz, Bnnz;                 /* Number of entries in diagonal A and off-diagonal B */
  PetscCount   Annz2, Bnnz2;               /* Number of unique remote entries belonging to A and B */
  PetscCount   Atot1, Atot2, Btot1, Btot2; /* Total local (tot1) and remote (tot2) entries (which might contain repeats) belonging to A and B */
  PetscCount  *Ajmap1, *Aperm1;            /* Lengths: [Annz+1], [Atot1]. Local entries to diag */
  PetscCount  *Bjmap1, *Bperm1;            /* Lengths: [Bnnz+1], [Btot1]. Local entries to offdiag */
  PetscCount  *Aimap2, *Ajmap2, *Aperm2;   /* Lengths: [Annz2], [Annz2+1], [Atot2]. Remote entries to diag */
  PetscCount  *Bimap2, *Bjmap2, *Bperm2;   /* Lengths: [Bnnz2], [Bnnz2+1], [Btot2]. Remote entries to offdiag */
  PetscCount  *Cperm1;                     /* [sendlen] Permutation to fill MPI send buffer. 'C' for communication */
  PetscScalar *sendbuf, *recvbuf;          /* Buffers for remote values in MatSetValuesCOO() */
  PetscInt     sendlen, recvlen;           /* Lengths (in units of PetscScalar) of send/recvbuf */
} MatCOOStruct_MPIAIJ;
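
/*
  Illustrative sketch (not part of this header): during MatSetValuesCOO() the locally supplied
  COO values v[] are folded into the diagonal block roughly as

    for (PetscCount k = 0; k < coo->Annz; k++) {   // k-th stored nonzero of the diag block
      PetscScalar sum = 0.0;
      for (PetscCount p = coo->Ajmap1[k]; p < coo->Ajmap1[k + 1]; p++) sum += v[coo->Aperm1[p]]; // repeated COO entries collapse here
      Aa[k] = (imode == INSERT_VALUES) ? sum : Aa[k] + sum;
    }

  with an analogous loop over Bjmap1/Bperm1 for the off-diagonal block; the *imap2/*jmap2/*perm2
  arrays play the same role for the remote values delivered into recvbuf through 'sf'.
*/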

PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat, MatAssemblyType);

PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDisAssemble_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDuplicate_MPIAIJ(Mat, MatDuplicateOption, Mat *);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ_Scalable(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatFDColoringSetUp_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatricesMPI_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_All(Mat, MatCreateSubMatrixOption, MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatView_MPIAIJ(Mat, PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_nonscalable(Mat, IS, IS, PetscInt, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowDist(Mat, IS, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowColDist(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat, MPI_Comm, MatReuse, Mat *);

PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ_Binary(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat);

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_AB_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_RARt_MPIAIJ_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductNumeric_RARt_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, Mat);

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat, Mat, PetscReal, Mat);
#endif
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_MPIDense(Mat, MatType, MatReuse, Mat *);
#if defined(PETSC_HAVE_SCALAPACK)
PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
#endif

PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_PtAP(void *);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(void *);

PETSC_INTERN PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat, Mat, MatReuse, PetscInt **, PetscInt **, MatScalar **, Mat *);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat(Mat, const PetscInt[], const PetscInt[], const PetscScalar[]);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Mat, const PetscInt[], const PetscInt[]);
PETSC_INTERN PetscErrorCode MatSetOption_MPIAIJ(Mat, MatOption, PetscBool);

PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIDense(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat, Mat *);

PETSC_INTERN PetscErrorCode MatSetFromOptions_MPIAIJ(Mat, PetscOptionItems *);
PETSC_INTERN PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]);

#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128) && !defined(PETSC_USE_REAL___FP16)
PETSC_INTERN PetscErrorCode MatLUFactorSymbolic_MPIAIJ_TFS(Mat, IS, IS, const MatFactorInfo *, Mat *);
#endif
PETSC_INTERN PetscErrorCode MatSolve_MPIAIJ(Mat, Vec, Vec);
PETSC_INTERN PetscErrorCode MatILUFactor_MPIAIJ(Mat, IS, IS, const MatFactorInfo *);

PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIX_private(PetscInt, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, PetscInt *);

extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat, Mat *);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat, Vec);

PETSC_INTERN PetscErrorCode MatGetSeqMats_MPIAIJ(Mat, Mat *, Mat *);
PETSC_INTERN PetscErrorCode MatSetSeqMats_MPIAIJ(Mat, IS, IS, IS, MatStructure, Mat, Mat);

PETSC_INTERN PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat, PetscCount, PetscInt[], PetscInt[]);
PETSC_INTERN PetscErrorCode MatResetPreallocationCOO_MPIAIJ(Mat);
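
/*
  Illustrative sketch (not part of this header): the two routines above back the public COO
  assembly interface, which on the user side looks roughly like this (the index and value
  arrays are made-up example data):

    PetscInt    coo_i[] = {0, 0, 1};        // global row indices
    PetscInt    coo_j[] = {0, 2, 1};        // global column indices
    PetscScalar v[]     = {1.0, 2.0, 3.0};  // values, in the same order

    PetscCall(MatSetPreallocationCOO(A, 3, coo_i, coo_j)); // builds the maps in MatCOOStruct_MPIAIJ
    PetscCall(MatSetValuesCOO(A, v, INSERT_VALUES));       // may be called repeatedly with new values
*/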

/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using sparse axpy */
#define AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa) \
  do { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj, _nextp, *_apJ; \
    PetscScalar *_aa, _valtmp, *_pa; \
    _apJ = apj + api[i]; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = ad->j + _ai[i]; \
    _aa  = ad->a + _ai[i]; \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = p_loc->j + _pi[_row]; \
      _pa  = p_loc->a + _pi[_row]; \
      /* perform sparse axpy */ \
      _valtmp = _aa[_j]; \
      _nextp  = 0; \
      for (_k = 0; _nextp < _pnz; _k++) { \
        if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
          apa[_k] += _valtmp * _pa[_nextp++]; \
        } \
      } \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = ao->j + _ai[i]; \
      _aa  = ao->a + _ai[i]; \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = p_oth->j + _pi[_row]; \
        _pa  = p_oth->a + _pi[_row]; \
        /* perform sparse axpy */ \
        _valtmp = _aa[_j]; \
        _nextp  = 0; \
        for (_k = 0; _nextp < _pnz; _k++) { \
          if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
            apa[_k] += _valtmp * _pa[_nextp++]; \
          } \
        } \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  } while (0)
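
/*
  Illustrative sketch (not part of this header): a numeric product kernel typically drives this
  macro row by row against the symbolic pattern (api, apj) computed earlier, roughly as

    for (i = 0; i < am; i++) {                  // am = number of local rows of A
      PetscInt apnz = api[i + 1] - api[i];      // nonzeros in row i of A*P
      for (k = 0; k < apnz; k++) apa[k] = 0.0;  // clear the sparse accumulator
      AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa);
      /* ... copy apa into row i of the result, then move on ... */
    }

  Here apa is indexed by position within row i's nonzero pattern, which keeps the work-space
  size bounded by the longest row of A*P.
*/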

#define AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa) \
  do { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj; \
    PetscScalar *_aa, _valtmp, *_pa; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = PetscSafePointerPlusOffset(ad->j, _ai[i]); \
    _aa  = PetscSafePointerPlusOffset(ad->a, _ai[i]); \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = p_loc->j + _pi[_row]; \
      _pa  = p_loc->a + _pi[_row]; \
      /* perform dense axpy */ \
      _valtmp = _aa[_j]; \
      for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = PetscSafePointerPlusOffset(ao->j, _ai[i]); \
      _aa  = PetscSafePointerPlusOffset(ao->a, _ai[i]); \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = p_oth->j + _pi[_row]; \
        _pa  = p_oth->a + _pi[_row]; \
        /* perform dense axpy */ \
        _valtmp = _aa[_j]; \
        for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  } while (0)
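
/*
  Illustrative sketch (not part of this header): unlike the scalable variant, apa here is a
  dense accumulator indexed directly by the column indices stored in p_loc/p_oth, so a caller
  harvests and resets it through the column list of the result row (names are illustrative):

    AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa);
    for (k = ci[i]; k < ci[i + 1]; k++) {   // ci, cj, ca: CSR arrays of the result's row i
      ca[k]      = apa[cj[k]];              // gather the accumulated value
      apa[cj[k]] = 0.0;                     // and zero the slot for the next row
    }
*/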