Actual source code: mhypre.c

  1: /*
  2:     Creates hypre ijmatrix from PETSc matrix
  3: */

  5: #include <petscpkg_version.h>
  6: #include <petsc/private/petschypre.h>
  7: #include <petscmathypre.h>
  8: #include <petsc/private/matimpl.h>
  9: #include <petsc/private/deviceimpl.h>
 10: #include <../src/mat/impls/hypre/mhypre.h>
 11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
 12: #include <../src/vec/vec/impls/hypre/vhyp.h>
 13: #include <HYPRE.h>
 14: #include <HYPRE_utilities.h>
 15: #include <_hypre_parcsr_ls.h>
 16: #include <_hypre_sstruct_ls.h>
 17: #include <_hypre_utilities.h>

 19: #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
 20:   #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
 21: #endif

 23: #if PETSC_PKG_HYPRE_VERSION_GE(2, 15, 0)
 24:   #define HYPRE_AssumedPartitionCheck() 1
 25: #endif

 27: static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
 28: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
 29: static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
 30: static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
 31: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
 32: static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);

 34: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
 35: {
 36:   PetscInt        i, n_d, n_o;
 37:   const PetscInt *ia_d, *ia_o;
 38:   PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
 39:   HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;

 41:   PetscFunctionBegin;
 42:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 43:     PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
 44:     if (done_d) {
 45:       PetscCall(PetscMalloc1(n_d, &nnz_d));
 46:       for (i = 0; i < n_d; i++) nnz_d[i] = (HYPRE_Int)(ia_d[i + 1] - ia_d[i]);
 47:     }
 48:     PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
 49:   }
 50:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 51:     PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
 52:     if (done_o) {
 53:       PetscCall(PetscMalloc1(n_o, &nnz_o));
 54:       for (i = 0; i < n_o; i++) nnz_o[i] = (HYPRE_Int)(ia_o[i + 1] - ia_o[i]);
 55:     }
 56:     PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
 57:   }
 58:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 59:     if (!done_o) { /* only diagonal part */
 60:       PetscCall(PetscCalloc1(n_d, &nnz_o));
 61:     }
 62: #if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
 63:     { /* If we don't do this, the columns of the matrix will be all zeros! */
 64:       hypre_AuxParCSRMatrix *aux_matrix;
 65:       aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
 66:       hypre_AuxParCSRMatrixDestroy(aux_matrix);
 67:       hypre_IJMatrixTranslator(ij) = NULL;
 68:       PetscCallHYPRE(HYPRE_IJMatrixSetDiagOffdSizes(ij, nnz_d, nnz_o));
 69:       /* it seems they partially fixed it in 2.19.0 */
 70:   #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
 71:       aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
 72:       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
 73:   #endif
 74:     }
 75: #else
 76:     PetscCallHYPRE(HYPRE_IJMatrixSetDiagOffdSizes(ij, nnz_d, nnz_o));
 77: #endif
 78:     PetscCall(PetscFree(nnz_d));
 79:     PetscCall(PetscFree(nnz_o));
 80:   }
 81:   PetscFunctionReturn(PETSC_SUCCESS);
 82: }

 84: static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
 85: {
 86:   HYPRE_Int rstart, rend, cstart, cend;

 88:   PetscFunctionBegin;
 89:   PetscCall(PetscLayoutSetUp(A->rmap));
 90:   PetscCall(PetscLayoutSetUp(A->cmap));
 91:   rstart = (HYPRE_Int)A->rmap->rstart;
 92:   rend   = (HYPRE_Int)A->rmap->rend;
 93:   cstart = (HYPRE_Int)A->cmap->rstart;
 94:   cend   = (HYPRE_Int)A->cmap->rend;
 95:   PetscCall(PetscHYPREInitialize());
 96:   if (hA->ij) {
 97:     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
 98:     PetscCallHYPRE(HYPRE_IJMatrixDestroy(hA->ij));
 99:   }
100:   PetscCallHYPRE(HYPRE_IJMatrixCreate(hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij));
101:   PetscCallHYPRE(HYPRE_IJMatrixSetObjectType(hA->ij, HYPRE_PARCSR));
102:   {
103:     PetscBool       same;
104:     Mat             A_d, A_o;
105:     const PetscInt *colmap;
106:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
107:     if (same) {
108:       PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
109:       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
110:       PetscFunctionReturn(PETSC_SUCCESS);
111:     }
112:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
113:     if (same) {
114:       PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
115:       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
116:       PetscFunctionReturn(PETSC_SUCCESS);
117:     }
118:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
119:     if (same) {
120:       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
121:       PetscFunctionReturn(PETSC_SUCCESS);
122:     }
123:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
124:     if (same) {
125:       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
126:       PetscFunctionReturn(PETSC_SUCCESS);
127:     }
128:   }
129:   PetscFunctionReturn(PETSC_SUCCESS);
130: }

132: static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
133: {
134:   PetscBool flg;

136:   PetscFunctionBegin;
137: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
138:   PetscCallHYPRE(HYPRE_IJMatrixInitialize(ij));
139: #else
140:   PetscCallHYPRE(HYPRE_IJMatrixInitialize_v2(ij, HYPRE_MEMORY_HOST));
141: #endif
142:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
143:   if (flg) {
144:     PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
145:     PetscFunctionReturn(PETSC_SUCCESS);
146:   }
147:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
148:   if (flg) {
149:     PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
150:     PetscFunctionReturn(PETSC_SUCCESS);
151:   }
152:   PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
153:   PetscFunctionReturn(PETSC_SUCCESS);
154: }

/* Copy the CSR pattern (i/j arrays) of a SeqAIJ matrix directly into the diag
   block of an already preallocated HYPRE_PARCSR IJ matrix (values not copied) */
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
  HYPRE_Int              type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* bulk copy vs element-wise conversion */

  PetscFunctionBegin;
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(ij, (void **)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) { /* PetscInt and HYPRE_Int have the same size */
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
    PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
  } else { /* convert each index individually */
    PetscInt i;

    for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  /* the raw CSR was filled directly, so tell hypre no auxiliary assembly structure is needed */
  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy the CSR patterns of the diag (pA->A) and offd (pA->B) blocks of an
   MPIAIJ matrix directly into the diag/offd blocks of an already preallocated
   HYPRE_PARCSR IJ matrix, bypassing hypre's assembly path (values not copied) */
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int)); /* bulk copy vs element-wise conversion */

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(ij, (void **)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /* row pointers of the diagonal block */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  /* 2.16+: diag block keeps the local column indices as-is */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = (HYPRE_Int)pjj[i];
#else
  /* pre-2.16: diag block wants global column indices, so shift by cstart */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  /* row pointers of the off-diagonal block (same local row count as diag) */
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)poffd->i[i];
  }

  /* map PETSc's compressed offd column indices back to global indices via garray */
  jj = (PetscInt *)hoffd->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  PetscCallHYPRE(hypre_CSRMatrixBigInitialize(hoffd));
  jj = (PetscInt *)hoffd->big_j; /* 2.16+ stores offd global indices in big_j */
#endif
  pjj = poffd->j;
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];

  /* the raw CSR was filled directly, so tell hypre no auxiliary assembly structure is needed */
  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATHYPRE matrix to MATIS: build local-to-global maps from the
   parcsr layout and merge the diag/offd CSR blocks into one local SeqAIJ */
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)A->data;
  Mat                    lA;   /* local (per-process) matrix of the MATIS object */
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;
  MatType                lmattype   = NULL;
  PetscBool              freeparcsr = PETSC_FALSE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(mhA->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(mhA->ij, (void **)&hA));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(mhA->ij)) {
    /* Support by copying back on the host and copy to GPU
       Kind of inefficient, but this is the best we can do now */
  #if defined(HYPRE_USING_HIP)
    lmattype = MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    lmattype = MATSEQAIJCUSPARSE;
  #endif
    hA         = hypre_ParCSRMatrixClone_v2(hA, 1, HYPRE_MEMORY_HOST);
    freeparcsr = PETSC_TRUE; /* clone must be destroyed at the end */
  }
#endif
  /* pull sizes, offsets and raw CSR arrays out of the (host) parcsr matrix */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd); /* total local nonzeros (diag + offd) */
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscCall(PetscMalloc1(dc + oc, &aux));
    /* local columns [0,dc) map to the owned global column range, the
       remaining oc entries map to the offd global columns */
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    /* reuse: write into the existing local matrix's CSR arrays after validating sizes */
    PetscInt  nr;
    PetscBool done;
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArrayWrite(lA, &data));
  }
  /* merge local matrices: interleave each row's diag entries (local cols) and
     offd entries (shifted by dc), then sort each row's indices */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    PetscScalar *aold = (PetscScalar *)aa;
    PetscInt    *jold = jj, nc = jd + jo;
    for (; jd < *hdi; jd++) { /* diag entries of this row */
      *jj++ = *hdj++;
      *aa++ = *hdd++;
    }
    for (; jo < *hoi; jo++) { /* offd entries, placed after the dc local columns */
      *jj++ = *hoj++ + dc;
      *aa++ = *hod++;
    }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
  }
  for (; cum < dr; cum++) *(++ii) = nnz; /* trailing empty rows */
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ *a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
    /* hack SeqAIJ */
    a          = (Mat_SeqAIJ *)lA->data;
    a->free_a  = PETSC_TRUE;  /* lA takes ownership of the arrays allocated above */
    a->free_ij = PETSC_TRUE;
    if (lmattype) PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
    PetscCall(MatISSetLocalMat(*B, lA));
    PetscCall(MatDestroy(&lA));
  } else {
    PetscCall(MatSeqAIJRestoreArrayWrite(lA, &data));
  }
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
  if (freeparcsr) PetscCallHYPRE(hypre_ParCSRMatrixDestroy(hA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

373: static PetscErrorCode MatHYPRE_DestroyCOOMat(Mat mat)
374: {
375:   Mat_HYPRE *hA = (Mat_HYPRE *)mat->data;

377:   PetscFunctionBegin;
378:   if (hA->cooMat) { /* If cooMat is present we need to destroy the column indices */
379:     PetscCall(MatDestroy(&hA->cooMat));
380:     if (hA->cooMatAttached) {
381:       hypre_CSRMatrix     *csr;
382:       hypre_ParCSRMatrix  *parcsr;
383:       HYPRE_MemoryLocation mem;

385:       PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
386:       csr = hypre_ParCSRMatrixDiag(parcsr);
387:       if (csr) {
388:         mem = hypre_CSRMatrixMemoryLocation(csr);
389:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
390:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
391:       }
392:       csr = hypre_ParCSRMatrixOffd(parcsr);
393:       if (csr) {
394:         mem = hypre_CSRMatrixMemoryLocation(csr);
395:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
396:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
397:       }
398:     }
399:   }
400:   hA->cooMatAttached = PETSC_FALSE;
401:   PetscFunctionReturn(PETSC_SUCCESS);
402: }

404: static PetscErrorCode MatHYPRE_CreateCOOMat(Mat mat)
405: {
406:   MPI_Comm    comm;
407:   PetscMPIInt size;
408:   PetscLayout rmap, cmap;
409:   Mat_HYPRE  *hmat    = (Mat_HYPRE *)mat->data;
410:   MatType     matType = MATAIJ; /* default type of cooMat */

412:   PetscFunctionBegin;
413:   /* Build an agent matrix cooMat with AIJ format
414:      It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
415:    */
416:   PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
417:   PetscCallMPI(MPI_Comm_size(comm, &size));
418:   PetscCall(PetscLayoutSetUp(mat->rmap));
419:   PetscCall(PetscLayoutSetUp(mat->cmap));
420:   PetscCall(MatGetLayouts(mat, &rmap, &cmap));

422: #if defined(PETSC_HAVE_HYPRE_DEVICE)
423:   if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
424:   #if defined(HYPRE_USING_HIP)
425:     matType = MATAIJHIPSPARSE;
426:   #elif defined(HYPRE_USING_CUDA)
427:     matType = MATAIJCUSPARSE;
428:   #elif defined(HYPRE_USING_SYCL) && defined(PETSC_HAVE_KOKKOS_KERNELS)
429:     matType = MATAIJKOKKOS;
430:   #else
431:     SETERRQ(comm, PETSC_ERR_SUP, "No HYPRE device available. Suggest re-installing with Kokkos Kernels");
432:   #endif
433:   }
434: #endif

436:   /* Do COO preallocation through cooMat */
437:   PetscCall(MatHYPRE_DestroyCOOMat(mat));
438:   PetscCall(MatCreate(comm, &hmat->cooMat));
439:   PetscCall(MatSetType(hmat->cooMat, matType));
440:   PetscCall(MatSetLayouts(hmat->cooMat, rmap, cmap));

442:   /* allocate local matrices if needed */
443:   PetscCall(MatMPIAIJSetPreallocation(hmat->cooMat, 0, NULL, 0, NULL));
444:   PetscFunctionReturn(PETSC_SUCCESS);
445: }

447: /* Attach cooMat data array to hypre matrix.
448:    When AIJCUPMSPARSE will support raw device pointers and not THRUSTARRAY
449:    we should swap the arrays: i.e., attach hypre matrix array to cooMat
450:    This is because hypre should be in charge of handling the memory,
451:    cooMat is only a way to reuse PETSc COO code.
452:    attaching the memory will then be done at MatSetValuesCOO time and it will dynamically
453:    support hypre matrix migrating to host.
454: */
static PetscErrorCode MatHYPRE_AttachCOOMat(Mat mat)
{
  Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
  hypre_CSRMatrix     *diag, *offd;
  hypre_ParCSRMatrix  *parCSR;
  HYPRE_MemoryLocation hmem = HYPRE_MEMORY_HOST;
  PetscMemType         pmem;
  Mat                  A, B; /* seq blocks of cooMat: A = diag, B = offd (NULL in serial) */
  PetscScalar         *a;
  PetscMPIInt          size;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  if (hmat->cooMatAttached) PetscFunctionReturn(PETSC_SUCCESS); /* already aliased; nothing to do */
  PetscCheck(hmat->cooMat->preallocated, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix is not preallocated");
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* Alias cooMat's data array to IJMatrix's */
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hmat->ij, (void **)&parCSR));
  diag = hypre_ParCSRMatrixDiag(parCSR);
  offd = hypre_ParCSRMatrixOffd(parCSR);

  A = (size == 1) ? hmat->cooMat : ((Mat_MPIAIJ *)hmat->cooMat->data)->A;
  B = (size == 1) ? NULL : ((Mat_MPIAIJ *)hmat->cooMat->data)->B;

  PetscCall(PetscObjectSetName((PetscObject)A, "_internal_COO_mat_for_hypre"));
  /* free hypre's diag value array and point it at cooMat's: both sides now see the same values */
  hmem = hypre_CSRMatrixMemoryLocation(diag);
  PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &a, &pmem));
  PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
  PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hmem));
  hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)a;
  hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */

  if (B) { /* parallel case: alias the offd block the same way */
    hmem = hypre_CSRMatrixMemoryLocation(offd);
    PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &a, &pmem));
    PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
    PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hmem));
    hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)a;
    hypre_CSRMatrixOwnsData(offd) = 0;
  }
  hmat->cooMatAttached = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

503: // Build COO's coordinate list i[], j[] based on CSR's i[], j[] arrays and the number of local rows 'n'
504: static PetscErrorCode CSRtoCOO_Private(PetscInt n, const PetscInt ii[], const PetscInt jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
505: {
506:   PetscInt *cooi, *cooj;

508:   PetscFunctionBegin;
509:   *ncoo = ii[n];
510:   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
511:   for (PetscInt i = 0; i < n; i++) {
512:     for (PetscInt j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
513:   }
514:   PetscCall(PetscArraycpy(cooj, jj, *ncoo));
515:   *coo_i = cooi;
516:   *coo_j = cooj;
517:   PetscFunctionReturn(PETSC_SUCCESS);
518: }

520: // Similar to CSRtoCOO_Private, but the CSR's i[], j[] are of type HYPRE_Int
521: static PetscErrorCode CSRtoCOO_HYPRE_Int_Private(PetscInt n, const HYPRE_Int ii[], const HYPRE_Int jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
522: {
523:   PetscInt *cooi, *cooj;

525:   PetscFunctionBegin;
526:   *ncoo = ii[n];
527:   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
528:   for (PetscInt i = 0; i < n; i++) {
529:     for (HYPRE_Int j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
530:   }
531:   for (PetscCount i = 0; i < *ncoo; i++) cooj[i] = jj[i];
532:   *coo_i = cooi;
533:   *coo_j = cooj;
534:   PetscFunctionReturn(PETSC_SUCCESS);
535: }

537: // Build a COO data structure for the seqaij matrix, as if the nonzeros are laid out in the same order as in the CSR
538: static PetscErrorCode MatSeqAIJGetCOO_Private(Mat A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
539: {
540:   PetscInt        n;
541:   const PetscInt *ii, *jj;
542:   PetscBool       done;

544:   PetscFunctionBegin;
545:   PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
546:   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatGetRowIJ");
547:   PetscCall(CSRtoCOO_Private(n, ii, jj, ncoo, coo_i, coo_j));
548:   PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
549:   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatRestoreRowIJ");
550:   PetscFunctionReturn(PETSC_SUCCESS);
551: }

553: // Build a COO data structure for the hypreCSRMatrix, as if the nonzeros are laid out in the same order as in the hypreCSRMatrix
// Build a COO data structure for the hypreCSRMatrix, as if the nonzeros are laid out in the same order as in the hypreCSRMatrix
static PetscErrorCode hypreCSRMatrixGetCOO_Private(hypre_CSRMatrix *A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt             n = hypre_CSRMatrixNumRows(A);
  HYPRE_Int           *ii, *jj;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
  /* NOTE: the braces below are deliberately woven through the #if/#else so the
     same closing brace ends either the else branch (device build) or the plain
     block (host-only build) */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(A);
  if (mem != HYPRE_MEMORY_HOST) { /* device data: stage i/j through host copies */
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(A);
    PetscCall(PetscMalloc2(n + 1, &ii, nnz, &jj));
    hypre_TMemcpy(ii, hypre_CSRMatrixI(A), HYPRE_Int, n + 1, HYPRE_MEMORY_HOST, mem);
    hypre_TMemcpy(jj, hypre_CSRMatrixJ(A), HYPRE_Int, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
#else
  {
#endif
    /* host data: use hypre's arrays in place */
    ii = hypre_CSRMatrixI(A);
    jj = hypre_CSRMatrixJ(A);
  }
  PetscCall(CSRtoCOO_HYPRE_Int_Private(n, ii, jj, ncoo, coo_i, coo_j));
  if (mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree2(ii, jj)); /* free host staging copies */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Insert the values of the hypre CSR matrix H into A via MatSetValuesCOO,
   assuming A was COO-preallocated with the same nonzero ordering as H */
static PetscErrorCode MatSetValuesCOOFromCSRMatrix_Private(Mat A, hypre_CSRMatrix *H)
{
  PetscBool            iscpu = PETSC_TRUE;
  PetscScalar         *a;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(H);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &iscpu));
#endif
  if (iscpu && mem != HYPRE_MEMORY_HOST) { /* host matrix but device-resident hypre values: stage through a host copy */
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(H);
    PetscCall(PetscMalloc1(nnz, &a));
    hypre_TMemcpy(a, hypre_CSRMatrixData(H), PetscScalar, nnz, HYPRE_MEMORY_HOST, mem);
  } else { /* same memory space: use hypre's value array directly */
    a = (PetscScalar *)hypre_CSRMatrixData(H);
  }
  PetscCall(MatSetValuesCOO(A, a, INSERT_VALUES));
  if (iscpu && mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree(a)); /* free staging copy */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a (B)AIJ/SBAIJ matrix to MATHYPRE. (S)BAIJ inputs are first
   converted to AIJ; AIJ inputs are copied into the hypre matrix through the
   COO delegate machinery so values can be refreshed cheaply on reuse. */
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  MPI_Comm     comm = PetscObjectComm((PetscObject)A);
  Mat          M = NULL, dH = NULL, oH = NULL, dA = NULL, oA = NULL; /* dX = diag block, oX = offd block (NULL in serial) */
  PetscBool    ismpiaij, issbaij, isbaij, boundtocpu = PETSC_TRUE;
  Mat_HYPRE   *hA;
  PetscMemType memtype = PETSC_MEMTYPE_HOST;

  PetscFunctionBegin;
  if (PetscDefined(HAVE_HYPRE_DEVICE)) {
    /* make hypre allocate where A's data currently lives */
    PetscCall(MatGetCurrentMemType(A, &memtype));
    PetscCall(PetscHYPREInitialize());
    boundtocpu = PetscMemTypeHost(memtype) ? PETSC_TRUE : PETSC_FALSE;
    PetscCallHYPRE(HYPRE_SetMemoryLocation(boundtocpu ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE));
  }

  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPIBAIJ, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
  if (isbaij || issbaij) { /* handle BAIJ and SBAIJ by going through AIJ first */
    PetscBool ismpi;
    MatType   newtype;

    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
    newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
    if (reuse == MAT_REUSE_MATRIX) {
      PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
      PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else { /* MAT_INPLACE_MATRIX: convert A itself */
      PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
    }
#if defined(PETSC_HAVE_DEVICE)
    (*B)->boundtocpu = boundtocpu;
#endif
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  dA = A;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));

  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    /* build the MATHYPRE matrix M: create the hypre IJ matrix, copy A's pattern, then
       set up the COO delegate with the same pattern so values can be inserted via COO */
    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, MATHYPRE));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));

    hA = (Mat_HYPRE *)M->data;
    PetscCall(MatHYPRE_CreateFromMat(A, hA));
    PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij));

    PetscCall(MatHYPRE_CreateCOOMat(M));

    dH = hA->cooMat;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

    PetscCall(PetscObjectSetName((PetscObject)dH, "_internal_COO_mat_for_hypre"));
    PetscCall(MatSeqAIJGetCOO_Private(dA, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dH, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (oH) {
      /* make oH's column layout match oA's compressed offd width before preallocating */
      PetscCall(PetscLayoutDestroy(&oH->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oH), oA->cmap->n, oA->cmap->n, 1, &oH->cmap));
      PetscCall(MatSeqAIJGetCOO_Private(oA, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oH, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));
    }
    hA->cooMat->assembled = PETSC_TRUE;

    M->preallocated = PETSC_TRUE;
    PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));

    PetscCall(MatHYPRE_AttachCOOMat(M));
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  hA = (Mat_HYPRE *)M->data;
  PetscCheck(hA->cooMat, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");

  dH = hA->cooMat;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

  /* push A's current values into the (attached) COO delegate, which aliases hypre's value arrays */
  PetscScalar *a;
  PetscCall(MatSeqAIJGetCSRAndMemType(dA, NULL, NULL, &a, NULL));
  PetscCall(MatSetValuesCOO(dH, a, INSERT_VALUES));
  if (oH) {
    PetscCall(MatSeqAIJGetCSRAndMemType(oA, NULL, NULL, &a, NULL));
    PetscCall(MatSetValuesCOO(oH, a, INSERT_VALUES));
  }

  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
#if defined(PETSC_HAVE_DEVICE)
  (*B)->boundtocpu = boundtocpu;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Converts a MATHYPRE matrix to a (Seq/MPI)AIJ matrix.

   The hypre diag/offd CSR blocks are first expanded to COO index lists to
   preallocate the PETSc (sub)matrices, then the numerical values are copied
   straight from the hypre CSR arrays through the COO insertion path.
   Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX.
*/
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat                 M, dA = NULL, oA = NULL;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dH, *oH;
  MPI_Comm            comm;
  PetscBool           ismpiaij, isseqaij;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ base types are supported");
  }
  PetscCall(MatHYPREGetParCSR(A, &parcsr));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_ParCSRMatrixMemoryLocation(parcsr)) {
    PetscBool isaij;

    /* the hypre data lives on the device: map a generic AIJ request to the
       matching GPU-enabled AIJ subtype so no host round-trip is forced */
    PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
    if (isaij) {
      PetscMPIInt size;

      PetscCallMPI(MPI_Comm_size(comm, &size));
  #if defined(HYPRE_USING_HIP)
      mtype = size > 1 ? MATMPIAIJHIPSPARSE : MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
      mtype = size > 1 ? MATMPIAIJCUSPARSE : MATSEQAIJCUSPARSE;
  #else
      mtype = size > 1 ? MATMPIAIJ : MATSEQAIJ;
  #endif
    }
  }
#endif
  dH = hypre_ParCSRMatrixDiag(parcsr);
  oH = hypre_ParCSRMatrixOffd(parcsr);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, mtype));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    /* no-op for sequential types; for MPIAIJ it instantiates the diag/offd submatrices */
    PetscCall(MatMPIAIJSetPreallocation(M, 0, NULL, 0, NULL));

    dA = M;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));

    /* preallocate the diagonal block from the hypre diag CSR pattern */
    PetscCall(hypreCSRMatrixGetCOO_Private(dH, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dA, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (ismpiaij) {
      HYPRE_Int nc = hypre_CSRMatrixNumCols(oH);

      /* the off-diagonal block uses hypre's compressed column space of width nc */
      PetscCall(PetscLayoutDestroy(&oA->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oA), nc, nc, 1, &oA->cmap));
      PetscCall(hypreCSRMatrixGetCOO_Private(oH, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oA, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));

      /* garray */
      Mat_MPIAIJ   *aij    = (Mat_MPIAIJ *)M->data;
      HYPRE_BigInt *harray = hypre_ParCSRMatrixColMapOffd(parcsr);
      PetscInt     *garray;

      /* copy (and narrow) hypre's off-process column map into PETSc's garray */
      PetscCall(PetscFree(aij->garray));
      PetscCall(PetscMalloc1(nc, &garray));
      for (HYPRE_Int i = 0; i < nc; i++) garray[i] = (PetscInt)harray[i];
      aij->garray = garray;
      PetscCall(MatSetUpMultiply_MPIAIJ(M));
    }
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  /* copy the numerical values (reuse path lands here directly) */
  dA = M;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
  PetscCall(MatSetValuesCOOFromCSRMatrix_Private(dA, dH));
  if (oA) PetscCall(MatSetValuesCOOFromCSRMatrix_Private(oA, oH));
  M->assembled = PETSC_TRUE;
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Wraps an AIJ matrix (SeqAIJ or MPIAIJ, including the CUSPARSE/HIPSPARSE
   subtypes) in a temporary hypre_ParCSRMatrix that BORROWS the PETSc CSR
   arrays (row/column indices, values, and garray) instead of copying them.
   When sizeof(PetscInt) != sizeof(HYPRE_Int), converted copies of the index
   arrays are allocated here; everything else is shared.

   Must be paired with MatAIJRestoreParCSR_Private(), which detaches the
   borrowed pointers before destroying the hypre wrapper.
*/
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag, *hoffd;
  Mat_SeqAIJ         *diag, *offd;
  PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ismpiaij, isseqaij;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
  PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
  PetscBool           iscuda, iship;
#if defined(PETSC_HAVE_DEVICE) && defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool boundtocpu = A->boundtocpu;
#else
  PetscBool boundtocpu = PETSC_TRUE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
  PetscCall(PetscHYPREInitialize());
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;

    diag = (Mat_SeqAIJ *)a->A->data;
    offd = (Mat_SeqAIJ *)a->B->data;
    if (!boundtocpu && (iscuda || iship)) {
      /* borrow the device CSR index arrays directly (HYPRE_Int sized on these backends) */
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
    } else {
      /* host path: borrow the PetscInt CSR; converted HYPRE_Int copies are made below if !sameint */
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      poi        = offd->i;
      poj        = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
        hoi = (HYPRE_Int *)poi;
        hoj = (HYPRE_Int *)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    /* sequential case: no off-diagonal block */
    diag = (Mat_SeqAIJ *)A->data;
    offd = NULL;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    /* with assumed partition, hypre only needs this process's own [start,end) range */
    PetscCallMPI(MPI_Comm_rank(comm, &myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  /* NOTE(review): the casts assume PetscInt and HYPRE_BigInt have the same size — confirm build configuration */
  tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, (HYPRE_Int)noffd, (HYPRE_Int)dnnz, (HYPRE_Int)onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* the starts arrays belong to PETSc's layouts; hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
  hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
    for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)pdi[i];
    for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)pdj[i];
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = (HYPRE_Int)diag->nz;
  /* the values array is PETSc's; hypre must not free it */
  hypre_CSRMatrixSetDataOwner(hdiag, 0);

  /* set off-diagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
      for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)poi[i];
      for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)poj[i];
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = (HYPRE_Int)offd->nz;
    hypre_CSRMatrixSetDataOwner(hoffd, 0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCallHYPRE(hypre_ParCSRMatrixInitialize_v2(tA, !boundtocpu ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST));
#else
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  PetscCallHYPRE(hypre_ParCSRMatrixInitialize(tA));
  #else
  PetscCallHYPRE(hypre_ParCSRMatrixInitialize_v2(tA, HYPRE_MEMORY_HOST));
  #endif
#endif

  /* MatrixSetRownnz comes after MatrixInitialize, so the first uses the right memory location */
  hypre_CSRMatrixSetRownnz(hdiag);
  if (offd) hypre_CSRMatrixSetRownnz(hoffd);

  /* replace the col map hypre allocated during Initialize with PETSc's garray (borrowed) */
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallHYPRE(hypre_MatvecCommPkgCreate(tA));
  *hA = tA;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Releases a hypre_ParCSRMatrix wrapper obtained from MatAIJGetParCSR_Private().
   Frees only the index arrays that were allocated for the PetscInt->HYPRE_Int
   conversion, then NULLs every borrowed pointer so that
   hypre_ParCSRMatrixDestroy() does not free memory owned by PETSc.
*/
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscBool        iscuda, iship;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
  /* device path always borrowed HYPRE_Int indices, so no conversion copies were made */
  if (iscuda) sameint = PETSC_TRUE;
#elif defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
  if (iship) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  /* detach every borrowed pointer before the destroy below */
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* remember the ownership flags since the coarse-operator build steals them (see below) */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device-capable hypre exposes the RAP kernel directly */
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallHYPRE(hypre_BoomerAMGBuildCoarseOperator(hR, hA, hP, hRAP));
  PetscCallHYPRE(hypre_ParCSRMatrixSetNumNonzeros(*hRAP));
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  /* hand the stolen starts back to their original owners */
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

1034: static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
1035: {
1036:   Mat                 B;
1037:   hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
1038:   Mat_Product        *product = C->product;

1040:   PetscFunctionBegin;
1041:   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1042:   PetscCall(MatAIJGetParCSR_Private(P, &hP));
1043:   PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
1044:   PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));

1046:   PetscCall(MatHeaderMerge(C, &B));
1047:   C->product = product;

1049:   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1050:   PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
1051:   PetscFunctionReturn(PETSC_SUCCESS);
1052: }

1054: PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
1055: {
1056:   PetscFunctionBegin;
1057:   PetscCall(MatSetType(C, MATAIJ));
1058:   C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
1059:   C->ops->productnumeric = MatProductNumeric_PtAP;
1060:   PetscFunctionReturn(PETSC_SUCCESS);
1061: }

1063: static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
1064: {
1065:   Mat                 B;
1066:   Mat_HYPRE          *hP;
1067:   hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
1068:   HYPRE_Int           type;
1069:   MPI_Comm            comm = PetscObjectComm((PetscObject)A);
1070:   PetscBool           ishypre;

1072:   PetscFunctionBegin;
1073:   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
1074:   PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
1075:   hP = (Mat_HYPRE *)P->data;
1076:   PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hP->ij, &type));
1077:   PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1078:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(hP->ij, (void **)&Pparcsr));

1080:   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1081:   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
1082:   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));

1084:   /* create temporary matrix and merge to C */
1085:   PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
1086:   PetscCall(MatHeaderMerge(C, &B));
1087:   PetscFunctionReturn(PETSC_SUCCESS);
1088: }

1090: static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
1091: {
1092:   Mat                 B;
1093:   hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
1094:   Mat_HYPRE          *hA, *hP;
1095:   PetscBool           ishypre;
1096:   HYPRE_Int           type;

1098:   PetscFunctionBegin;
1099:   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
1100:   PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
1101:   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1102:   PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
1103:   hA = (Mat_HYPRE *)A->data;
1104:   hP = (Mat_HYPRE *)P->data;
1105:   PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hA->ij, &type));
1106:   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1107:   PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hP->ij, &type));
1108:   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1109:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&Aparcsr));
1110:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(hP->ij, (void **)&Pparcsr));
1111:   PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
1112:   PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
1113:   PetscCall(MatHeaderMerge(C, &B));
1114:   PetscFunctionReturn(PETSC_SUCCESS);
1115: }

/* calls hypre_ParMatmul
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first
   The product is returned in *hAB; callers take ownership of it
   (see the MatCreateFromParCSR(..., PETSC_OWN_POINTER, ...) call sites) */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device-capable hypre provides a different entry point for the same product */
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1136: static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
1137: {
1138:   Mat                 D;
1139:   hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
1140:   Mat_Product        *product = C->product;

1142:   PetscFunctionBegin;
1143:   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1144:   PetscCall(MatAIJGetParCSR_Private(B, &hB));
1145:   PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
1146:   PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));

1148:   PetscCall(MatHeaderMerge(C, &D));
1149:   C->product = product;

1151:   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1152:   PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
1153:   PetscFunctionReturn(PETSC_SUCCESS);
1154: }

1156: PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
1157: {
1158:   PetscFunctionBegin;
1159:   PetscCall(MatSetType(C, MATAIJ));
1160:   C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
1161:   C->ops->productnumeric = MatProductNumeric_AB;
1162:   PetscFunctionReturn(PETSC_SUCCESS);
1163: }

1165: static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
1166: {
1167:   Mat                 D;
1168:   hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
1169:   Mat_HYPRE          *hA, *hB;
1170:   PetscBool           ishypre;
1171:   HYPRE_Int           type;
1172:   Mat_Product        *product;

1174:   PetscFunctionBegin;
1175:   PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
1176:   PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
1177:   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
1178:   PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
1179:   hA = (Mat_HYPRE *)A->data;
1180:   hB = (Mat_HYPRE *)B->data;
1181:   PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hA->ij, &type));
1182:   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1183:   PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hB->ij, &type));
1184:   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
1185:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&Aparcsr));
1186:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(hB->ij, (void **)&Bparcsr));
1187:   PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
1188:   PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

1190:   /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
1191:   product    = C->product; /* save it from MatHeaderReplace() */
1192:   C->product = NULL;
1193:   PetscCall(MatHeaderReplace(C, &D));
1194:   C->product             = product;
1195:   C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
1196:   C->ops->productnumeric = MatProductNumeric_AB;
1197:   PetscFunctionReturn(PETSC_SUCCESS);
1198: }

1200: PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
1201: {
1202:   Mat                 E;
1203:   hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;

1205:   PetscFunctionBegin;
1206:   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1207:   PetscCall(MatAIJGetParCSR_Private(B, &hB));
1208:   PetscCall(MatAIJGetParCSR_Private(C, &hC));
1209:   PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
1210:   PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
1211:   PetscCall(MatHeaderMerge(D, &E));
1212:   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1213:   PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
1214:   PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
1215:   PetscFunctionReturn(PETSC_SUCCESS);
1216: }

/* Symbolic phase of D = A^T * B * C via hypre: no symbolic work is done
   (hypre computes structure and values together in the numeric phase);
   fill is unused. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase for MATPRODUCT_AB on MATHYPRE: nothing to precompute,
   just install the numeric driver. */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1232: static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1233: {
1234:   Mat_Product *product = C->product;
1235:   PetscBool    Ahypre;

1237:   PetscFunctionBegin;
1238:   PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
1239:   if (Ahypre) { /* A is a Hypre matrix */
1240:     PetscCall(MatSetType(C, MATHYPRE));
1241:     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1242:     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1243:     PetscFunctionReturn(PETSC_SUCCESS);
1244:   }
1245:   PetscFunctionReturn(PETSC_SUCCESS);
1246: }

/* Symbolic phase for MATPRODUCT_PtAP on MATHYPRE: nothing to precompute,
   just install the numeric driver. */
static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1255: static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
1256: {
1257:   Mat_Product *product = C->product;
1258:   PetscBool    flg;
1259:   PetscInt     type        = 0;
1260:   const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
1261:   PetscInt     ntype       = 4;
1262:   Mat          A           = product->A;
1263:   PetscBool    Ahypre;

1265:   PetscFunctionBegin;
1266:   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
1267:   if (Ahypre) { /* A is a Hypre matrix */
1268:     PetscCall(MatSetType(C, MATHYPRE));
1269:     C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
1270:     C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
1271:     PetscFunctionReturn(PETSC_SUCCESS);
1272:   }

1274:   /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
1275:   /* Get runtime option */
1276:   if (product->api_user) {
1277:     PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
1278:     PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
1279:     PetscOptionsEnd();
1280:   } else {
1281:     PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
1282:     PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
1283:     PetscOptionsEnd();
1284:   }

1286:   if (type == 0 || type == 1 || type == 2) {
1287:     PetscCall(MatSetType(C, MATAIJ));
1288:   } else if (type == 3) {
1289:     PetscCall(MatSetType(C, MATHYPRE));
1290:   } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
1291:   C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
1292:   C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
1293:   PetscFunctionReturn(PETSC_SUCCESS);
1294: }

1296: static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1297: {
1298:   Mat_Product *product = C->product;

1300:   PetscFunctionBegin;
1301:   switch (product->type) {
1302:   case MATPRODUCT_AB:
1303:     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1304:     break;
1305:   case MATPRODUCT_PtAP:
1306:     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1307:     break;
1308:   default:
1309:     break;
1310:   }
1311:   PetscFunctionReturn(PETSC_SUCCESS);
1312: }

/* y = A^T x */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = A x */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A x; y is copied into z first so the kernel can accumulate in place */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^T x; y is copied into z first so the kernel can accumulate in place */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1344: /* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
1345: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
1346: {
1347:   Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
1348:   hypre_ParCSRMatrix *parcsr;
1349:   hypre_ParVector    *hx, *hy;

1351:   PetscFunctionBegin;
1352:   if (trans) {
1353:     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
1354:     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
1355:     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
1356:     PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->b->ij, (void **)&hx));
1357:     PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->x->ij, (void **)&hy));
1358:   } else {
1359:     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
1360:     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
1361:     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
1362:     PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->x->ij, (void **)&hx));
1363:     PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->b->ij, (void **)&hy));
1364:   }
1365:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
1366:   if (trans) {
1367:     PetscCallHYPRE(hypre_ParCSRMatrixMatvecT(a, parcsr, hx, b, hy));
1368:   } else {
1369:     PetscCallHYPRE(hypre_ParCSRMatrixMatvec(a, parcsr, hx, b, hy));
1370:   }
1371:   PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
1372:   PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
1373:   PetscFunctionReturn(PETSC_SUCCESS);
1374: }

/* Frees all memory owned by a MATHYPRE matrix and removes its composed methods */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
  PetscCall(MatHYPRE_DestroyCOOMat(A)); /* must be called before destroying the individual CSR */
  if (hA->ij) {
    /* when the inner object is not owned here, detach it so HYPRE_IJMatrixDestroy() does not free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallHYPRE(HYPRE_IJMatrixDestroy(hA->ij));
  }
  if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));

  PetscCall(MatStashDestroy_Private(&A->stash));
  PetscCall(PetscFree(hA->array));
  /* rows_d is device memory allocated through hypre and must be freed through hypre */
  if (hA->rows_d) PetscCallExternalVoid("hypre_Free", hypre_Free(hA->rows_d, HYPRE_MEMORY_DEVICE));

  /* remove all methods composed on the object at creation/convert time */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Lazily preallocates with default parameters if the user never called
   MatHYPRESetPreallocation() explicitly */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscFunctionBegin;
  if (!A->preallocated) PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1417: //TODO FIX hypre_CSRMatrixMatvecOutOfPlace
1418: #if defined(PETSC_HAVE_HYPRE_DEVICE)
/* Binds (bind == PETSC_TRUE) or unbinds the matrix data to/from the CPU:
   migrates the assembled hypre ParCSR between host and device memory and
   propagates the binding to the COO delegate matrix and the work vectors. */
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;

  PetscFunctionBegin;
  A->boundtocpu = bind;
  /* keep the COO delegate matrix on the same side */
  if (hA->cooMat) {
    PetscBool coobound;
    PetscCall(MatBoundToCPU(hA->cooMat, &coobound));
    if (coobound != bind) PetscCall(MatBindToCPU(hA->cooMat, bind));
  }
  /* migrate only if already assembled and currently in the wrong memory location */
  if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
    PetscCallHYPRE(hypre_ParCSRMatrixMigrate(parcsr, hmem));
  }
  if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
  if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1440: #endif

/* MatAssemblyEnd() implementation: drains the PETSc stash of off-process
   entries, finalizes the HYPRE_IJMatrix assembly, recreates the hypre
   auxiliary translator destroyed by the assembly, and sets up the
   communication package and work vectors */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
  PetscMPIInt  n;
  PetscInt     i, j, rstart, ncols, flg;
  PetscInt    *row, *col;
  PetscScalar *val;

  PetscFunctionBegin;
  PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  /* receive the entries stashed for this rank by other processes and insert them */
  if (!A->nooffprocentries) {
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
      if (!flg) break; /* no more messages */

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));

        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&A->stash));
  }

  PetscCallHYPRE(HYPRE_IJMatrixAssemble(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!A->sortedfull) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    PetscCallHYPRE(hypre_AuxParCSRMatrixDestroy(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscCallHYPRE(HYPRE_IJMatrixInitialize(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      PetscCallHYPRE(hypre_AuxParCSRMatrixInitialize(aux_matrix));
#else
      PetscCallHYPRE(hypre_AuxParCSRMatrixInitialize_v2(aux_matrix, HYPRE_MEMORY_HOST));
#endif
    }
  }
  {
    hypre_ParCSRMatrix *parcsr;

    /* make sure the hypre communication package exists */
    PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallHYPRE(hypre_MatvecCommPkgCreate(parcsr));
  }
  /* lazily create the auxiliary IJ vectors over the column/row layouts */
  if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
  if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

1511: static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1512: {
1513:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1515:   PetscFunctionBegin;
1516:   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");

1518:   if (hA->array_size >= size) {
1519:     *array = hA->array;
1520:   } else {
1521:     PetscCall(PetscFree(hA->array));
1522:     hA->array_size = size;
1523:     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1524:     *array = hA->array;
1525:   }

1527:   hA->array_available = PETSC_FALSE;
1528:   PetscFunctionReturn(PETSC_SUCCESS);
1529: }

1531: static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1532: {
1533:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1535:   PetscFunctionBegin;
1536:   *array              = NULL;
1537:   hA->array_available = PETSC_TRUE;
1538:   PetscFunctionReturn(PETSC_SUCCESS);
1539: }

/* MatSetValues() implementation: inserts/adds an nr x nc logically dense block
   of values. Negative row/column indices are ignored. Off-process rows are
   stashed for INSERT_VALUES (delivered in MatAssemblyEnd_HYPRE()) and passed
   directly to HYPRE_IJMatrixAddToValues for ADD_VALUES. */
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
  PetscScalar   *vals = (PetscScalar *)v;
  HYPRE_Complex *sscr;    /* scratch for values converted to HYPRE_Complex */
  PetscInt      *cscr[2]; /* [0]: compacted column indices, [1]: their original positions in cols[] */
  PetscInt       i, nzc;
  PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
  void          *array = NULL;

  PetscFunctionBegin;
  /* single scratch allocation: 2*nc indices followed by nc*nr values */
  PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
  cscr[0] = (PetscInt *)array;
  cscr[1] = ((PetscInt *)array) + nc;
  sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
  /* compact out negative (ignored) column indices, remembering source positions */
  for (i = 0, nzc = 0; i < nc; i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc]   = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) { /* nothing to insert */
    PetscCall(MatRestoreArray_HYPRE(A, &array));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij,(void**)&parcsr));
    PetscCallHYPRE(hypre_ParCSRMatrixMigrate(parcsr, HYPRE_MEMORY_HOST));
  }
#endif

  if (ins == ADD_VALUES) {
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          if (hA->donotstash) continue;
        }
        /* guard against HYPRE_Int narrowing overflow */
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        PetscCallHYPRE(HYPRE_IJMatrixAddToValues(hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr));
      }
      vals += nc; /* advance to the next row of the dense input block */
    }
  } else { /* INSERT_VALUES */
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
        }
        /* local values */
        else
          PetscCallHYPRE(HYPRE_IJMatrixSetValues(hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr));
      }
      vals += nc;
    }
  }

  PetscCall(MatRestoreArray_HYPRE(A, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1620: static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1621: {
1622:   Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
1623:   HYPRE_Int  *hdnnz, *honnz;
1624:   PetscInt    i, rs, re, cs, ce, bs;
1625:   PetscMPIInt size;

1627:   PetscFunctionBegin;
1628:   PetscCall(PetscLayoutSetUp(A->rmap));
1629:   PetscCall(PetscLayoutSetUp(A->cmap));
1630:   rs = A->rmap->rstart;
1631:   re = A->rmap->rend;
1632:   cs = A->cmap->rstart;
1633:   ce = A->cmap->rend;
1634:   if (!hA->ij) {
1635:     PetscCallHYPRE(HYPRE_IJMatrixCreate(hA->comm, rs, re - 1, cs, ce - 1, &hA->ij));
1636:     PetscCallHYPRE(HYPRE_IJMatrixSetObjectType(hA->ij, HYPRE_PARCSR));
1637:   } else {
1638:     HYPRE_BigInt hrs, hre, hcs, hce;
1639:     PetscCallHYPRE(HYPRE_IJMatrixGetLocalRange(hA->ij, &hrs, &hre, &hcs, &hce));
1640:     PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
1641:     PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
1642:   }
1643:   PetscCall(MatHYPRE_DestroyCOOMat(A));
1644:   PetscCall(MatGetBlockSize(A, &bs));
1645:   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
1646:   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;

1648:   if (!dnnz) {
1649:     PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
1650:     for (i = 0; i < A->rmap->n; i++) hdnnz[i] = (HYPRE_Int)dnz;
1651:   } else {
1652:     hdnnz = (HYPRE_Int *)dnnz;
1653:   }
1654:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1655:   if (size > 1) {
1656:     hypre_AuxParCSRMatrix *aux_matrix;
1657:     if (!onnz) {
1658:       PetscCall(PetscMalloc1(A->rmap->n, &honnz));
1659:       for (i = 0; i < A->rmap->n; i++) honnz[i] = (HYPRE_Int)onz;
1660:     } else honnz = (HYPRE_Int *)onnz;
1661:     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1662:        they assume the user will input the entire row values, properly sorted
1663:        In PETSc, we don't make such an assumption and set this flag to 1,
1664:        unless the option MAT_SORTED_FULL is set to true.
1665:        Also, to avoid possible memory leaks, we destroy and recreate the translator
1666:        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1667:        the IJ matrix for us */
1668:     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1669:     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1670:     hypre_IJMatrixTranslator(hA->ij) = NULL;
1671:     PetscCallHYPRE(HYPRE_IJMatrixSetDiagOffdSizes(hA->ij, hdnnz, honnz));
1672:     aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1673:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
1674:   } else {
1675:     honnz = NULL;
1676:     PetscCallHYPRE(HYPRE_IJMatrixSetRowSizes(hA->ij, hdnnz));
1677:   }

1679:   /* reset assembled flag and call the initialize method */
1680:   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1681: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1682:   PetscCallHYPRE(HYPRE_IJMatrixInitialize(hA->ij));
1683: #else
1684:   PetscCallHYPRE(HYPRE_IJMatrixInitialize_v2(hA->ij, HYPRE_MEMORY_HOST));
1685: #endif
1686:   if (!dnnz) PetscCall(PetscFree(hdnnz));
1687:   if (!onnz && honnz) PetscCall(PetscFree(honnz));
1688:   /* Match AIJ logic */
1689:   A->preallocated = PETSC_TRUE;
1690:   A->assembled    = PETSC_FALSE;
1691:   PetscFunctionReturn(PETSC_SUCCESS);
1692: }

1694: /*@C
1695:   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

1697:   Collective

1699:   Input Parameters:
1700: + A    - the matrix
1701: . dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1702:           (same value is used for all local rows)
1703: . dnnz - array containing the number of nonzeros in the various rows of the
1704:           DIAGONAL portion of the local submatrix (possibly different for each row)
1705:           or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
1706:           The size of this array is equal to the number of local rows, i.e `m`.
1707:           For matrices that will be factored, you must leave room for (and set)
1708:           the diagonal entry even if it is zero.
1709: . onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1710:           submatrix (same value is used for all local rows).
1711: - onnz - array containing the number of nonzeros in the various rows of the
1712:           OFF-DIAGONAL portion of the local submatrix (possibly different for
1713:           each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
1714:           structure. The size of this array is equal to the number
1715:           of local rows, i.e `m`.

1717:   Level: intermediate

1719:   Note:
1720:   If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, `onz` and `onnz` are ignored.

1722: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
1723: @*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscFunctionBegin;
  /* dispatch to the type-specific implementation if one is composed with A
     (silent no-op for matrix types that do not provide it) */
  PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1733: /*@C
1734:   MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`

1736:   Collective

1738:   Input Parameters:
1739: + parcsr   - the pointer to the `hypre_ParCSRMatrix`
1740: . mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
1741: - copymode - PETSc copying options, see  `PetscCopyMode`

1743:   Output Parameter:
1744: . A - the matrix

1746:   Level: intermediate

1748: .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1749: @*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
{
  Mat        T;
  Mat_HYPRE *hA;
  MPI_Comm   comm;
  PetscInt   rstart, rend, cstart, cend, M, N;
  PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  /* decode the requested output type */
  PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
  PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
  PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
  PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
  PetscCall(PetscStrcmp(mtype, MATIS, &isis));
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* create PETSc matrix with MatHYPRE */
  PetscCall(MatCreate(comm, &T));
  /* hypre bounds are inclusive, hence +1; PetscMax guards empty local parts */
  PetscCall(MatSetSizes(T, PetscMax(rend - rstart + 1, 0), PetscMax(cend - cstart + 1, 0), M, N));
  PetscCall(MatSetType(T, MATHYPRE));
  hA = (Mat_HYPRE *)T->data;

  /* create HYPRE_IJMatrix */
  PetscCallHYPRE(HYPRE_IJMatrixCreate(hA->comm, rstart, rend, cstart, cend, &hA->ij));
  PetscCallHYPRE(HYPRE_IJMatrixSetObjectType(hA->ij, HYPRE_PARCSR));

  /* create new ParCSR object if needed */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
    hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;

    /* old hypre: clone without values, then copy the values by hand */
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
#endif
    /* we own the copy from here on, so continue as if PETSC_OWN_POINTER */
    parcsr   = new_parcsr;
    copymode = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated              = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscCallHYPRE(HYPRE_IJMatrixInitialize(hA->ij));
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, NULL, &map));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, &map, NULL));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A = T;
    PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
      PetscCall(MatDestroy(&T));
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
      *A = T;
    }
  } else if (isis) {
    PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    PetscCall(MatDestroy(&T));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

1861: static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1862: {
1863:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1864:   HYPRE_Int  type;

1866:   PetscFunctionBegin;
1867:   PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
1868:   PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hA->ij, &type));
1869:   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1870:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)parcsr));
1871:   PetscFunctionReturn(PETSC_SUCCESS);
1872: }

1874: /*@C
1875:   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

1877:   Not Collective, No Fortran Support

1879:   Input Parameter:
1880: . A - the `MATHYPRE` object

1882:   Output Parameter:
1883: . parcsr - the pointer to the `hypre_ParCSRMatrix`

1885:   Level: intermediate

1887: .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1888: @*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscFunctionBegin;
  /* PetscUseMethod errors if A does not provide the implementation */
  PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1898: static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1899: {
1900:   hypre_ParCSRMatrix *parcsr;
1901: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1902:   hypre_CSRMatrix *ha;
1903: #endif
1904:   HYPRE_Complex hs;

1906:   PetscFunctionBegin;
1907:   PetscCall(PetscHYPREScalarCast(s, &hs));
1908:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1909: #if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
1910:   PetscCallHYPRE(hypre_ParCSRMatrixScale(parcsr, hs));
1911: #else /* diagonal part */
1912:   ha = hypre_ParCSRMatrixDiag(parcsr);
1913:   if (ha) {
1914:     PetscInt       size, i;
1915:     HYPRE_Int     *ii;
1916:     HYPRE_Complex *a;

1918:     size = hypre_CSRMatrixNumRows(ha);
1919:     a    = hypre_CSRMatrixData(ha);
1920:     ii   = hypre_CSRMatrixI(ha);
1921:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1922:   }
1923:   /* off-diagonal part */
1924:   ha = hypre_ParCSRMatrixOffd(parcsr);
1925:   if (ha) {
1926:     PetscInt       size, i;
1927:     HYPRE_Int     *ii;
1928:     HYPRE_Complex *a;

1930:     size = hypre_CSRMatrixNumRows(ha);
1931:     a    = hypre_CSRMatrixData(ha);
1932:     ii   = hypre_CSRMatrixI(ha);
1933:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1934:   }
1935: #endif
1936:   PetscFunctionReturn(PETSC_SUCCESS);
1937: }

1939: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1940: {
1941:   hypre_ParCSRMatrix *parcsr;
1942:   HYPRE_Int          *lrows;
1943:   PetscInt            rst, ren, i;

1945:   PetscFunctionBegin;
1946:   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1947:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1948:   PetscCall(PetscMalloc1(numRows, &lrows));
1949:   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1950:   for (i = 0; i < numRows; i++) {
1951:     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1952:     lrows[i] = (HYPRE_Int)(rows[i] - rst);
1953:   }
1954:   PetscCallHYPRE(hypre_ParCSRMatrixEliminateRowsCols(parcsr, (HYPRE_Int)numRows, lrows));
1955:   PetscCall(PetscFree(lrows));
1956:   PetscFunctionReturn(PETSC_SUCCESS);
1957: }

1959: static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1960: {
1961:   PetscFunctionBegin;
1962:   if (ha) {
1963:     HYPRE_Int     *ii, size;
1964:     HYPRE_Complex *a;

1966:     size = hypre_CSRMatrixNumRows(ha);
1967:     a    = hypre_CSRMatrixData(ha);
1968:     ii   = hypre_CSRMatrixI(ha);

1970:     if (a) PetscCall(PetscArrayzero(a, ii[size]));
1971:   }
1972:   PetscFunctionReturn(PETSC_SUCCESS);
1973: }

1975: static PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1976: {
1977:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1979:   PetscFunctionBegin;
1980:   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
1981:     PetscCallHYPRE(HYPRE_IJMatrixSetConstantValues(hA->ij, 0.0));
1982:   } else {
1983:     hypre_ParCSRMatrix *parcsr;

1985:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1986:     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
1987:     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
1988:   }
1989:   PetscFunctionReturn(PETSC_SUCCESS);
1990: }

/* Zero the listed rows (local indices) of a hypre CSR matrix: entries whose
   local column index equals the local row index receive `diag` (the true
   diagonal for the diagonal block; the caller passes diag = 0 for the
   off-diagonal block), all other entries in the row are zeroed. Dispatches to
   CUDA/HIP/Kokkos kernels when the data lives in device memory. */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
{
  PetscInt       ii;
  HYPRE_Int     *i, *j;
  HYPRE_Complex *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(PETSC_SUCCESS);

  i = hypre_CSRMatrixI(hA);
  j = hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hA)) {
  #if defined(HYPRE_USING_CUDA)
    PetscCall(MatZeroRows_CUDA(N, rows, i, j, a, diag));
  #elif defined(HYPRE_USING_HIP)
    PetscCall(MatZeroRows_HIP(N, rows, i, j, a, diag));
  #elif defined(PETSC_HAVE_KOKKOS)
    PetscCall(MatZeroRows_Kokkos(N, rows, i, j, a, diag));
  #else
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for MatZeroRows on a hypre matrix in this memory location");
  #endif
  } else
#endif
  {
    for (ii = 0; ii < N; ii++) {
      HYPRE_Int jj, ibeg, iend, irow;

      irow = (HYPRE_Int)rows[ii];
      ibeg = i[irow];
      iend = i[irow + 1];
      /* set matching (diagonal) entries to diag, zero the rest of the row */
      for (jj = ibeg; jj < iend; jj++)
        if (j[jj] == irow) a[jj] = diag;
        else a[jj] = 0.0;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

2032: static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2033: {
2034:   hypre_ParCSRMatrix *parcsr;
2035:   PetscInt           *lrows, len, *lrows2;
2036:   HYPRE_Complex       hdiag;

2038:   PetscFunctionBegin;
2039:   PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
2040:   PetscCall(PetscHYPREScalarCast(diag, &hdiag));
2041:   /* retrieve the internal matrix */
2042:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2043:   /* get locally owned rows */
2044:   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));

2046: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2047:   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hypre_ParCSRMatrixDiag(parcsr))) {
2048:     Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2049:     PetscInt   m;
2050:     PetscCall(MatGetLocalSize(A, &m, NULL));
2051:     if (!hA->rows_d) {
2052:       hA->rows_d = hypre_TAlloc(PetscInt, m, HYPRE_MEMORY_DEVICE);
2053:       if (m) PetscCheck(hA->rows_d, PETSC_COMM_SELF, PETSC_ERR_MEM, "HYPRE_TAlloc failed");
2054:     }
2055:     PetscCheck(len <= m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many rows in rows[]");
2056:     PetscCallExternalVoid("hypre_Memcpy", hypre_Memcpy(hA->rows_d, lrows, sizeof(PetscInt) * len, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST));
2057:     lrows2 = hA->rows_d;
2058:   } else
2059: #endif
2060:   {
2061:     lrows2 = lrows;
2062:   }

2064:   /* zero diagonal part */
2065:   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows2, hdiag));
2066:   /* zero off-diagonal part */
2067:   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows2, 0.0));

2069:   PetscCall(PetscFree(lrows));
2070:   PetscFunctionReturn(PETSC_SUCCESS);
2071: }

2073: static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
2074: {
2075:   PetscFunctionBegin;
2076:   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);

2078:   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2079:   PetscFunctionReturn(PETSC_SUCCESS);
2080: }

/* MatGetRow() implementation: returns one row through the hypre ParCSR API.
   On device, the row data hypre hands back lives in device memory, so host
   copies are allocated here and later freed in MatRestoreRow_HYPRE(). */
static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int           hnz;
#ifdef PETSC_HAVE_HYPRE_DEVICE
  PetscInt    *didx; /* device pointers returned by hypre */
  PetscScalar *dv;
#endif

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
#ifdef PETSC_HAVE_HYPRE_DEVICE
  if (hypre_ParCSRMatrixMemoryLocation(parcsr) == HYPRE_MEMORY_DEVICE) {
    PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)&didx, (HYPRE_Complex **)&dv);
    /* copy indices/values from device into freshly allocated host buffers */
    if (idx) {
      PetscCall(PetscMalloc1(hnz, idx));
      hypre_TMemcpy(*idx, didx, PetscInt, hnz, HYPRE_MEMORY_HOST, HYPRE_MEMORY_DEVICE);
    }
    if (v) {
      PetscCall(PetscMalloc1(hnz, v));
      hypre_TMemcpy(*v, dv, PetscScalar, hnz, HYPRE_MEMORY_HOST, HYPRE_MEMORY_DEVICE);
    }
  } else
#endif
    /* call HYPRE API */
    PetscCallHYPRE(HYPRE_ParCSRMatrixGetRow(parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v));

  if (nz) *nz = (PetscInt)hnz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

2114: static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2115: {
2116:   hypre_ParCSRMatrix *parcsr;
2117:   HYPRE_Int           hnz;

2119:   PetscFunctionBegin;
2120:   /* retrieve the internal matrix */
2121:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2122: #ifdef PETSC_HAVE_HYPRE_DEVICE
2123:   if (hypre_ParCSRMatrixMemoryLocation(parcsr) == HYPRE_MEMORY_DEVICE) {
2124:     if (idx) PetscFree(*idx);
2125:     if (v) PetscFree(*v);
2126:   }
2127: #endif
2128:   /* call HYPRE API. It doesn't actually use any of the arguments so it's ok if we've actually
2129:      already free'd idx and v above */
2130:   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2131:   PetscCallHYPRE(HYPRE_ParCSRMatrixRestoreRow(parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v));
2132:   PetscFunctionReturn(PETSC_SUCCESS);
2133: }

/* MatGetValues() implementation: query an m x n block of values through the
   HYPRE IJ interface. Rows with negative indices are skipped here; negative
   column indices are assumed to be ignored by hypre (see comment below).
   When the matrix data lives on the device, the index arrays and the output
   values are staged through device buffers (NOTE(review): this assumes
   HYPRE_IJMatrixGetValues2 reads its array arguments from the matrix's
   memory space — confirm against the hypre documentation). */
static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  Mat_HYPRE    *hA = (Mat_HYPRE *)A->data;
  PetscInt      i;
  HYPRE_Int     hypre_host_n;
  HYPRE_BigInt  hypre_host_idxm;
  HYPRE_BigInt *device_idxm = NULL, *device_idxn = NULL, *hypre_host_idxn;
  HYPRE_Int    *device_n      = NULL;
  PetscScalar  *device_values = NULL;
  PetscBool     hypre_on_host = PETSC_TRUE;

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscHYPREIntCast(n, &hypre_host_n));

  // Setup HYPRE_BigInt host idxn array
  if (sizeof(HYPRE_BigInt) > sizeof(PetscInt)) {
    /* widen the PetscInt column indices into a temporary HYPRE_BigInt array */
    PetscCall(PetscMalloc1(n, &hypre_host_idxn));
    for (PetscInt j = 0; j < n; ++j) hypre_host_idxn[j] = idxn[j];
  } else {
    PetscCheck(sizeof(HYPRE_BigInt) == sizeof(PetscInt), PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing handling of HYPRE_BigInt size less than PetscInt size");
    hypre_host_idxn = (HYPRE_BigInt *)idxn; /* same width: reuse the caller's array directly */
  }

  // Check compatibility of PetscScalar and HYPRE_Complex
  PetscCheck(sizeof(PetscScalar) == sizeof(HYPRE_Complex), PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing handling of incompatible PetscScalar and HYPRE_Complex sizes");

#ifdef PETSC_HAVE_HYPRE_DEVICE
  if (hypre_IJMatrixMemoryLocation(hA->ij) == HYPRE_MEMORY_DEVICE) {
    /* allocate device staging buffers and upload the column indices and count */
    hypre_on_host = PETSC_FALSE;
    device_idxm   = hypre_TAlloc(HYPRE_BigInt, 1, HYPRE_MEMORY_DEVICE);
    device_n      = hypre_TAlloc(HYPRE_Int, 1, HYPRE_MEMORY_DEVICE);
    device_values = hypre_TAlloc(PetscScalar, n, HYPRE_MEMORY_DEVICE);
    device_idxn   = hypre_TAlloc(HYPRE_BigInt, n, HYPRE_MEMORY_DEVICE);
    hypre_TMemcpy(device_idxn, hypre_host_idxn, HYPRE_BigInt, n, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST);
    hypre_TMemcpy(device_n, &hypre_host_n, HYPRE_Int, 1, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST);
  }
#endif

  /* Ignore negative row indices
   * And negative column indices should be automatically ignored in hypre
   * */
  for (i = 0; i < m; i++) {
    if (idxm[i] >= 0) {
      HYPRE_BigInt  *rows, *cols;
      HYPRE_Int     *ncols;
      HYPRE_Complex *values;
      hypre_host_idxm = idxm[i];
      if (!hypre_on_host) hypre_TMemcpy(device_idxm, &hypre_host_idxm, HYPRE_BigInt, 1, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST);
      /* choose host or device pointers; device results are copied back below */
      ncols  = hypre_on_host ? &hypre_host_n : device_n;
      rows   = hypre_on_host ? &hypre_host_idxm : device_idxm;
      cols   = hypre_on_host ? hypre_host_idxn : device_idxn;
      values = hypre_on_host ? (HYPRE_Complex *)(v + i * n) : (HYPRE_Complex *)device_values;
      PetscCallHYPRE(HYPRE_IJMatrixGetValues2(hA->ij, 1, ncols, rows, NULL, cols, values));

      if (!hypre_on_host) hypre_TMemcpy((HYPRE_Complex *)(v + i * n), device_values, HYPRE_Complex, n, HYPRE_MEMORY_HOST, HYPRE_MEMORY_DEVICE);
    }
  }

  if (sizeof(PetscInt) < sizeof(HYPRE_BigInt)) PetscCall(PetscFree(hypre_host_idxn));
#ifdef PETSC_HAVE_HYPRE_DEVICE
  if (hypre_IJMatrixMemoryLocation(hA->ij) == HYPRE_MEMORY_DEVICE) {
    hypre_TFree(device_idxm, HYPRE_MEMORY_DEVICE);
    hypre_TFree(device_idxn, HYPRE_MEMORY_DEVICE);
    hypre_TFree(device_values, HYPRE_MEMORY_DEVICE);
    hypre_TFree(device_n, HYPRE_MEMORY_DEVICE);
  }
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

2207: static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2208: {
2209:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

2211:   PetscFunctionBegin;
2212:   switch (op) {
2213:   case MAT_NO_OFF_PROC_ENTRIES:
2214:     if (flg) PetscCallHYPRE(HYPRE_IJMatrixSetMaxOffProcElmts(hA->ij, 0));
2215:     break;
2216:   case MAT_IGNORE_OFF_PROC_ENTRIES:
2217:     hA->donotstash = flg;
2218:     break;
2219:   default:
2220:     break;
2221:   }
2222:   PetscFunctionReturn(PETSC_SUCCESS);
2223: }

2225: static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
2226: {
2227:   PetscViewerFormat format;

2229:   PetscFunctionBegin;
2230:   PetscCall(PetscViewerGetFormat(view, &format));
2231:   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
2232:   if (format != PETSC_VIEWER_NATIVE) {
2233:     Mat                 B;
2234:     hypre_ParCSRMatrix *parcsr;
2235:     PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;

2237:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2238:     PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
2239:     PetscCall(MatGetOperation(B, MATOP_VIEW, (PetscErrorCodeFn **)&mview));
2240:     PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
2241:     PetscCall((*mview)(B, view));
2242:     PetscCall(MatDestroy(&B));
2243:   } else {
2244:     Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
2245:     PetscMPIInt size;
2246:     PetscBool   isascii;
2247:     const char *filename;

2249:     /* HYPRE uses only text files */
2250:     PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
2251:     PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
2252:     PetscCall(PetscViewerFileGetName(view, &filename));
2253:     PetscCallHYPRE(HYPRE_IJMatrixPrint(hA->ij, filename));
2254:     PetscCallMPI(MPI_Comm_size(hA->comm, &size));
2255:     if (size > 1) {
2256:       PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
2257:     } else {
2258:       PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
2259:     }
2260:   }
2261:   PetscFunctionReturn(PETSC_SUCCESS);
2262: }

2264: static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2265: {
2266:   hypre_ParCSRMatrix *acsr, *bcsr;

2268:   PetscFunctionBegin;
2269:   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2270:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2271:     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2272:     PetscCallHYPRE(hypre_ParCSRMatrixCopy(acsr, bcsr, 1));
2273:     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2274:     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2275:     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2276:   } else {
2277:     PetscCall(MatCopy_Basic(A, B, str));
2278:   }
2279:   PetscFunctionReturn(PETSC_SUCCESS);
2280: }

2282: static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
2283: {
2284:   hypre_ParCSRMatrix *parcsr;
2285:   hypre_CSRMatrix    *dmat;
2286:   HYPRE_Complex      *a;
2287:   PetscBool           cong;

2289:   PetscFunctionBegin;
2290:   PetscCall(MatHasCongruentLayouts(A, &cong));
2291:   PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
2292:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2293:   dmat = hypre_ParCSRMatrixDiag(parcsr);
2294:   if (dmat) {
2295: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2296:     HYPRE_MemoryLocation mem = hypre_CSRMatrixMemoryLocation(dmat);
2297: #else
2298:     HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
2299: #endif

2301:     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecGetArrayWriteAndMemType(d, (PetscScalar **)&a, NULL));
2302:     else PetscCall(VecGetArrayWrite(d, (PetscScalar **)&a));
2303:     hypre_CSRMatrixExtractDiagonal(dmat, a, 0);
2304:     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecRestoreArrayWriteAndMemType(d, (PetscScalar **)&a));
2305:     else PetscCall(VecRestoreArrayWrite(d, (PetscScalar **)&a));
2306:   }
2307:   PetscFunctionReturn(PETSC_SUCCESS);
2308: }

2310: #include <petscblaslapack.h>

2312: static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2313: {
2314:   PetscFunctionBegin;
2315: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2316:   {
2317:     Mat                 B;
2318:     hypre_ParCSRMatrix *x, *y, *z;

2320:     PetscCall(MatHYPREGetParCSR(Y, &y));
2321:     PetscCall(MatHYPREGetParCSR(X, &x));
2322:     PetscCallHYPRE(hypre_ParCSRMatrixAdd(1.0, y, 1.0, x, &z));
2323:     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2324:     PetscCall(MatHeaderMerge(Y, &B));
2325:   }
2326: #else
2327:   if (str == SAME_NONZERO_PATTERN) {
2328:     hypre_ParCSRMatrix *x, *y;
2329:     hypre_CSRMatrix    *xloc, *yloc;
2330:     PetscInt            xnnz, ynnz;
2331:     HYPRE_Complex      *xarr, *yarr;
2332:     PetscBLASInt        one = 1, bnz;

2334:     PetscCall(MatHYPREGetParCSR(Y, &y));
2335:     PetscCall(MatHYPREGetParCSR(X, &x));

2337:     /* diagonal block */
2338:     xloc = hypre_ParCSRMatrixDiag(x);
2339:     yloc = hypre_ParCSRMatrixDiag(y);
2340:     xnnz = 0;
2341:     ynnz = 0;
2342:     xarr = NULL;
2343:     yarr = NULL;
2344:     if (xloc) {
2345:       xarr = hypre_CSRMatrixData(xloc);
2346:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2347:     }
2348:     if (yloc) {
2349:       yarr = hypre_CSRMatrixData(yloc);
2350:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2351:     }
2352:     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2353:     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2354:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));

2356:     /* off-diagonal block */
2357:     xloc = hypre_ParCSRMatrixOffd(x);
2358:     yloc = hypre_ParCSRMatrixOffd(y);
2359:     xnnz = 0;
2360:     ynnz = 0;
2361:     xarr = NULL;
2362:     yarr = NULL;
2363:     if (xloc) {
2364:       xarr = hypre_CSRMatrixData(xloc);
2365:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2366:     }
2367:     if (yloc) {
2368:       yarr = hypre_CSRMatrixData(yloc);
2369:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2370:     }
2371:     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2372:     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2373:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2374:   } else if (str == SUBSET_NONZERO_PATTERN) {
2375:     PetscCall(MatAXPY_Basic(Y, a, X, str));
2376:   } else {
2377:     Mat B;

2379:     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2380:     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2381:     PetscCall(MatHeaderReplace(Y, &B));
2382:   }
2383: #endif
2384:   PetscFunctionReturn(PETSC_SUCCESS);
2385: }

2387: static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
2388: {
2389:   hypre_ParCSRMatrix *parcsr = NULL;
2390:   PetscCopyMode       cpmode;
2391:   Mat_HYPRE          *hA;

2393:   PetscFunctionBegin;
2394:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2395:   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2396:     parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
2397:     cpmode = PETSC_OWN_POINTER;
2398:   } else {
2399:     cpmode = PETSC_COPY_VALUES;
2400:   }
2401:   PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
2402:   hA = (Mat_HYPRE *)A->data;
2403:   if (hA->cooMat) {
2404:     Mat_HYPRE *hB = (Mat_HYPRE *)((*B)->data);
2405:     op            = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
2406:     /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
2407:     PetscCall(MatDuplicate(hA->cooMat, op, &hB->cooMat));
2408:     PetscCall(MatHYPRE_AttachCOOMat(*B));
2409:   }
2410:   PetscFunctionReturn(PETSC_SUCCESS);
2411: }

/* Set up COO (coordinate-format) assembly for a MATHYPRE matrix: an AIJ "agent"
   matrix does the COO bookkeeping, and its sparsity pattern is mirrored into the
   hypre IJMatrix. NOTE(review): ownership of coo_i/coo_j after this call follows
   MatSetPreallocationCOO() semantics — confirm against that routine's contract. */
static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
{
  Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;

  PetscFunctionBegin;
  /* Build an agent matrix cooMat with AIJ format
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(MatHYPRE_CreateCOOMat(mat));
  /* propagate mat's off-process-entry policy to the agent matrix */
  PetscCall(MatSetOption(hmat->cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
  PetscCall(MatSetOption(hmat->cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));

  /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ uses this specific
     name to automatically put the diagonal entries first */
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(MatSetPreallocationCOO(hmat->cooMat, coo_n, coo_i, coo_j));
  /* mark assembled directly: the agent matrix is never assembled through the public path */
  hmat->cooMat->assembled = PETSC_TRUE;

  /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
  PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
  PetscCall(MatHYPRE_CreateFromMat(hmat->cooMat, hmat));      /* Create hmat->ij and preallocate it */
  PetscCall(MatHYPRE_IJMatrixCopyIJ(hmat->cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */

  mat->preallocated = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */

  /* Attach cooMat to mat */
  PetscCall(MatHYPRE_AttachCOOMat(mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Insert/add the values v for the COO entries registered by MatSetPreallocationCOO_HYPRE();
   delegates to the AIJ agent matrix, which shares its data array with the hypre matrix */
static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
{
  Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;

  PetscFunctionBegin;
  /* COO preallocation must have been done first, otherwise there is no agent matrix */
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
  PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2456: static PetscErrorCode MatGetCurrentMemType_HYPRE(Mat A, PetscMemType *m)
2457: {
2458:   PetscBool petsconcpu;

2460:   PetscFunctionBegin;
2461:   PetscCall(MatBoundToCPU(A, &petsconcpu));
2462:   *m = petsconcpu ? PETSC_MEMTYPE_HOST : PETSC_MEMTYPE_DEVICE;
2463:   PetscFunctionReturn(PETSC_SUCCESS);
2464: }

2466: /*MC
2467:    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2468:           based on the hypre IJ interface.

2470:    Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation()`
2473: M*/
/* Type constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
   operation table, and composes the conversion/product/COO support functions */
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE *hB;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  HYPRE_MemoryLocation memory_location;
#endif

  PetscFunctionBegin;
  PetscCall(PetscHYPREInitialize());
  PetscCall(PetscNew(&hB));

  /* by default this object owns (and must free) its hypre data */
  hB->inner_free      = PETSC_TRUE;
  hB->array_available = PETSC_TRUE;

  B->data = (void *)hB;

  /* start from an empty ops table and fill in only what MATHYPRE supports */
  PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
  B->ops->getcurrentmemtype     = MatGetCurrentMemType_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  B->ops->bindtocpu = MatBindToCPU_HYPRE;
  /* Get hypre's default memory location. Users can control this using the corresponding HYPRE_SetMemoryLocation API */
  PetscCallHYPRE(HYPRE_GetMemoryLocation(&memory_location));
  B->boundtocpu = (memory_location == HYPRE_MEMORY_HOST) ? PETSC_TRUE : PETSC_FALSE;
#endif

  /* build cache for off array entries formed */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
  /* register conversion, preallocation, ParCSR-access and COO entry points by name */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device builds: also support products with the GPU AIJ types and default to a device vector type */
  #if defined(HYPRE_USING_HIP)
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  PetscCall(MatSetVecType(B, VECHIP));
  #endif
  #if defined(HYPRE_USING_CUDA)
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  PetscCall(MatSetVecType(B, VECCUDA));
  #endif
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}