/* mhypre.c */

  1: /*
  2:     Creates hypre ijmatrix from PETSc matrix
  3: */

  5: #include <petscpkg_version.h>
  6: #include <petsc/private/petschypre.h>
  7: #include <petscmathypre.h>
  8: #include <petsc/private/matimpl.h>
  9: #include <petsc/private/deviceimpl.h>
 10: #include <../src/mat/impls/hypre/mhypre.h>
 11: #include <../src/mat/impls/aij/mpi/mpiaij.h>
 12: #include <../src/vec/vec/impls/hypre/vhyp.h>
 13: #include <HYPRE.h>
 14: #include <HYPRE_utilities.h>
 15: #include <_hypre_parcsr_ls.h>
 16: #include <_hypre_sstruct_ls.h>
 17: #include <_hypre_utilities.h>

 19: #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
 20:   #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
 21: #endif

 23: #if PETSC_PKG_HYPRE_VERSION_GE(2, 15, 0)
 24:   #define HYPRE_AssumedPartitionCheck() 1
 25: #endif

 27: static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
 28: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
 29: static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
 30: static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
 31: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
 32: static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);

 34: static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
 35: {
 36:   PetscInt        i, n_d, n_o;
 37:   const PetscInt *ia_d, *ia_o;
 38:   PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
 39:   HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;

 41:   PetscFunctionBegin;
 42:   if (A_d) { /* determine number of nonzero entries in local diagonal part */
 43:     PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
 44:     if (done_d) {
 45:       PetscCall(PetscMalloc1(n_d, &nnz_d));
 46:       for (i = 0; i < n_d; i++) nnz_d[i] = (HYPRE_Int)(ia_d[i + 1] - ia_d[i]);
 47:     }
 48:     PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
 49:   }
 50:   if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
 51:     PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
 52:     if (done_o) {
 53:       PetscCall(PetscMalloc1(n_o, &nnz_o));
 54:       for (i = 0; i < n_o; i++) nnz_o[i] = (HYPRE_Int)(ia_o[i + 1] - ia_o[i]);
 55:     }
 56:     PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
 57:   }
 58:   if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
 59:     if (!done_o) { /* only diagonal part */
 60:       PetscCall(PetscCalloc1(n_d, &nnz_o));
 61:     }
 62: #if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
 63:     { /* If we don't do this, the columns of the matrix will be all zeros! */
 64:       hypre_AuxParCSRMatrix *aux_matrix;
 65:       aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
 66:       hypre_AuxParCSRMatrixDestroy(aux_matrix);
 67:       hypre_IJMatrixTranslator(ij) = NULL;
 68:       PetscCallHYPRE(HYPRE_IJMatrixSetDiagOffdSizes(ij, nnz_d, nnz_o));
 69:       /* it seems they partially fixed it in 2.19.0 */
 70:   #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
 71:       aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
 72:       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
 73:   #endif
 74:     }
 75: #else
 76:     PetscCallHYPRE(HYPRE_IJMatrixSetDiagOffdSizes(ij, nnz_d, nnz_o));
 77: #endif
 78:     PetscCall(PetscFree(nnz_d));
 79:     PetscCall(PetscFree(nnz_o));
 80:   }
 81:   PetscFunctionReturn(PETSC_SUCCESS);
 82: }

 84: static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
 85: {
 86:   HYPRE_Int rstart, rend, cstart, cend;

 88:   PetscFunctionBegin;
 89:   PetscCall(PetscLayoutSetUp(A->rmap));
 90:   PetscCall(PetscLayoutSetUp(A->cmap));
 91:   rstart = (HYPRE_Int)A->rmap->rstart;
 92:   rend   = (HYPRE_Int)A->rmap->rend;
 93:   cstart = (HYPRE_Int)A->cmap->rstart;
 94:   cend   = (HYPRE_Int)A->cmap->rend;
 95:   PetscCall(PetscHYPREInitialize());
 96:   if (hA->ij) {
 97:     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
 98:     PetscCallHYPRE(HYPRE_IJMatrixDestroy(hA->ij));
 99:   }
100:   PetscCallHYPRE(HYPRE_IJMatrixCreate(hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij));
101:   PetscCallHYPRE(HYPRE_IJMatrixSetObjectType(hA->ij, HYPRE_PARCSR));
102:   {
103:     PetscBool       same;
104:     Mat             A_d, A_o;
105:     const PetscInt *colmap;
106:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
107:     if (same) {
108:       PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
109:       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
110:       PetscFunctionReturn(PETSC_SUCCESS);
111:     }
112:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
113:     if (same) {
114:       PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
115:       PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
116:       PetscFunctionReturn(PETSC_SUCCESS);
117:     }
118:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
119:     if (same) {
120:       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
121:       PetscFunctionReturn(PETSC_SUCCESS);
122:     }
123:     PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
124:     if (same) {
125:       PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
126:       PetscFunctionReturn(PETSC_SUCCESS);
127:     }
128:   }
129:   PetscFunctionReturn(PETSC_SUCCESS);
130: }

132: static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
133: {
134:   PetscBool flg;

136:   PetscFunctionBegin;
137: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
138:   PetscCallHYPRE(HYPRE_IJMatrixInitialize(ij));
139: #else
140:   PetscCallHYPRE(HYPRE_IJMatrixInitialize_v2(ij, HYPRE_MEMORY_HOST));
141: #endif
142:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
143:   if (flg) {
144:     PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
145:     PetscFunctionReturn(PETSC_SUCCESS);
146:   }
147:   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
148:   if (flg) {
149:     PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
150:     PetscFunctionReturn(PETSC_SUCCESS);
151:   }
152:   PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
153:   PetscFunctionReturn(PETSC_SUCCESS);
154: }

156: static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
157: {
158:   Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
159:   HYPRE_Int              type;
160:   hypre_ParCSRMatrix    *par_matrix;
161:   hypre_AuxParCSRMatrix *aux_matrix;
162:   hypre_CSRMatrix       *hdiag;
163:   PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

165:   PetscFunctionBegin;
166:   PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(ij, &type));
167:   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
168:   PetscCallHYPRE(HYPRE_IJMatrixGetObject(ij, (void **)&par_matrix));
169:   hdiag = hypre_ParCSRMatrixDiag(par_matrix);
170:   /*
171:        this is the Hack part where we monkey directly with the hypre datastructures
172:   */
173:   if (sameint) {
174:     PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
175:     PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
176:   } else {
177:     PetscInt i;

179:     for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
180:     for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
181:   }

183:   aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
184:   hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
185:   PetscFunctionReturn(PETSC_SUCCESS);
186: }

/*
  Fills the row pointers and column indices of an already allocated hypre
  ParCSR matrix directly from the diagonal (pA->A) and off-diagonal (pA->B)
  SeqAIJ blocks of an MPIAIJ matrix, bypassing HYPRE_IJMatrixSetValues.
*/
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(ij, (void **)&par_matrix));
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  /* copy diagonal-block row pointers; convert element-wise when PetscInt and
     HYPRE_Int differ in size */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  /* hypre >= 2.16 keeps local column indices in the diagonal block */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = (HYPRE_Int)pjj[i];
#else
  /* older hypre expects global column indices: shift by the column ownership start */
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)poffd->i[i];
  }

  /* NOTE(review): writing global column indices through a PetscInt* view of
     hypre's index array (j or big_j) assumes compatible integer widths in the
     active branch -- confirm for mixed 32/64-bit int configurations */
  jj = (PetscInt *)hoffd->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  PetscCallHYPRE(hypre_CSRMatrixBigInitialize(hoffd));
  jj = (PetscInt *)hoffd->big_j;
#endif
  pjj = poffd->j;
  /* translate off-diagonal local column indices to global ones via garray */
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];

  /* the CSR is fully assembled: tell hypre no auxiliary matrix is required */
  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Converts a MATHYPRE matrix to MATIS: builds row/column local-to-global maps
  from the ParCSR ownership information, then merges the diagonal and
  off-diagonal hypre CSR blocks row by row into a single local SeqAIJ matrix
  whose columns are numbered [0, dc) for diagonal and [dc, dc+oc) for
  off-diagonal entries.
*/
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)A->data;
  Mat                    lA;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;
  MatType                lmattype   = NULL;
  PetscBool              freeparcsr = PETSC_FALSE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(mhA->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(mhA->ij, (void **)&hA));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(mhA->ij)) {
    /* Support by copying back on the host and copy to GPU
       Kind of inefficient, but this is the best we can do now */
  #if defined(HYPRE_USING_HIP)
    lmattype = MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    lmattype = MATSEQAIJCUSPARSE;
  #endif
    hA         = hypre_ParCSRMatrixClone_v2(hA, 1, HYPRE_MEMORY_HOST);
    freeparcsr = PETSC_TRUE;
  }
#endif
  /* global sizes, local ownership starts, and the two local CSR blocks */
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd); /* nnz = total merged nonzeros */
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    /* columns: diagonal block maps to [stc, stc+dc), off-diagonal via col_map_offd */
    PetscCall(PetscMalloc1(dc + oc, &aux));
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    PetscInt  nr;
    PetscBool done;
    /* reuse: write into the existing local matrix after validating its shape */
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArrayWrite(lA, &data));
  }
  /* merge local matrices: walk diag and offd row pointers in lockstep, copying
     each row's entries (offd column indices shifted by dc) and sorting them */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    PetscScalar *aold = (PetscScalar *)aa;
    PetscInt    *jold = jj, nc = jd + jo;
    for (; jd < *hdi; jd++) {
      *jj++ = *hdj++;
      *aa++ = *hdd++;
    }
    for (; jo < *hoi; jo++) {
      *jj++ = *hoj++ + dc;
      *aa++ = *hod++;
    }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
  }
  /* pad any trailing empty rows so iptr has dr+1 valid entries */
  for (; cum < dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ *a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
    /* hack SeqAIJ: hand ownership of the CSR arrays to the matrix */
    a          = (Mat_SeqAIJ *)lA->data;
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    if (lmattype) PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
    PetscCall(MatISSetLocalMat(*B, lA));
    PetscCall(MatDestroy(&lA));
  } else {
    PetscCall(MatSeqAIJRestoreArrayWrite(lA, &data));
  }
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
  if (freeparcsr) PetscCallHYPRE(hypre_ParCSRMatrixDestroy(hA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

373: static PetscErrorCode MatHYPRE_DestroyCOOMat(Mat mat)
374: {
375:   Mat_HYPRE *hA = (Mat_HYPRE *)mat->data;

377:   PetscFunctionBegin;
378:   if (hA->cooMat) { /* If cooMat is present we need to destroy the column indices */
379:     PetscCall(MatDestroy(&hA->cooMat));
380:     if (hA->cooMatAttached) {
381:       hypre_CSRMatrix     *csr;
382:       hypre_ParCSRMatrix  *parcsr;
383:       HYPRE_MemoryLocation mem;

385:       PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
386:       csr = hypre_ParCSRMatrixDiag(parcsr);
387:       if (csr) {
388:         mem = hypre_CSRMatrixMemoryLocation(csr);
389:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
390:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
391:       }
392:       csr = hypre_ParCSRMatrixOffd(parcsr);
393:       if (csr) {
394:         mem = hypre_CSRMatrixMemoryLocation(csr);
395:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
396:         PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
397:       }
398:     }
399:   }
400:   hA->cooMatAttached = PETSC_FALSE;
401:   PetscFunctionReturn(PETSC_SUCCESS);
402: }

static PetscErrorCode MatHYPRE_CreateCOOMat(Mat mat)
{
  MPI_Comm    comm;
  PetscMPIInt size;
  PetscLayout rmap, cmap;
  Mat_HYPRE  *hmat    = (Mat_HYPRE *)mat->data;
  MatType     matType = MATAIJ; /* default type of cooMat */

  PetscFunctionBegin;
  /* Build an agent matrix cooMat with AIJ format
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  /* NOTE(review): size is computed but not used in the code below -- possibly
     vestigial, or the original selected seq/mpi preallocation based on it; confirm */
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  PetscCall(MatGetLayouts(mat, &rmap, &cmap));

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
  #if defined(HYPRE_USING_HIP)
    matType = MATAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    matType = MATAIJCUSPARSE;
  #elif defined(HYPRE_USING_SYCL) && defined(PETSC_HAVE_KOKKOS_KERNELS)
    matType = MATAIJKOKKOS;
  #else
    SETERRQ(comm, PETSC_ERR_SUP, "No HYPRE device available. Suggest re-installing with Kokkos Kernels");
  #endif
  }
#endif

  /* Do COO preallocation through cooMat; destroy any previous delegate first */
  PetscCall(MatHYPRE_DestroyCOOMat(mat));
  PetscCall(MatCreate(comm, &hmat->cooMat));
  PetscCall(MatSetType(hmat->cooMat, matType));
  /* share mat's layouts so cooMat has the identical parallel distribution */
  PetscCall(MatSetLayouts(hmat->cooMat, rmap, cmap));

  /* allocate local matrices if needed */
  PetscCall(MatMPIAIJSetPreallocation(hmat->cooMat, 0, NULL, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

447: /* Attach cooMat data array to hypre matrix.
448:    When AIJCUPMSPARSE will support raw device pointers and not THRUSTARRAY
449:    we should swap the arrays: i.e., attach hypre matrix array to cooMat
450:    This is because hypre should be in charge of handling the memory,
451:    cooMat is only a way to reuse PETSc COO code.
452:    attaching the memory will then be done at MatSetValuesCOO time and it will dynamically
453:    support hypre matrix migrating to host.
454: */
static PetscErrorCode MatHYPRE_AttachCOOMat(Mat mat)
{
  Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
  hypre_CSRMatrix     *diag, *offd;
  hypre_ParCSRMatrix  *parCSR;
  HYPRE_MemoryLocation hmem = HYPRE_MEMORY_HOST;
  PetscMemType         pmem;
  Mat                  A, B;
  PetscScalar         *a;
  PetscMPIInt          size;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  /* idempotent: nothing to do if the data arrays are already aliased */
  if (hmat->cooMatAttached) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(hmat->cooMat->preallocated, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix is not preallocated");
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* Alias cooMat's data array to IJMatrix's */
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hmat->ij, (void **)&parCSR));
  diag = hypre_ParCSRMatrixDiag(parCSR);
  offd = hypre_ParCSRMatrixOffd(parCSR);

  /* uniprocess: cooMat itself is the diagonal block and there is no off-diagonal */
  A = (size == 1) ? hmat->cooMat : ((Mat_MPIAIJ *)hmat->cooMat->data)->A;
  B = (size == 1) ? NULL : ((Mat_MPIAIJ *)hmat->cooMat->data)->B;

  PetscCall(PetscObjectSetName((PetscObject)A, "_internal_COO_mat_for_hypre"));
  hmem = hypre_CSRMatrixMemoryLocation(diag);
  PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &a, &pmem));
  PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
  /* free hypre's own data array, then point the hypre CSR at cooMat's array */
  PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hmem));
  hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)a;
  hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */

  if (B) {
    /* same aliasing for the off-diagonal block in the parallel case */
    hmem = hypre_CSRMatrixMemoryLocation(offd);
    PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &a, &pmem));
    PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
    PetscCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hmem));
    hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)a;
    hypre_CSRMatrixOwnsData(offd) = 0;
  }
  hmat->cooMatAttached = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

503: // Build COO's coordinate list i[], j[] based on CSR's i[], j[] arrays and the number of local rows 'n'
504: static PetscErrorCode CSRtoCOO_Private(PetscInt n, const PetscInt ii[], const PetscInt jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
505: {
506:   PetscInt *cooi, *cooj;

508:   PetscFunctionBegin;
509:   *ncoo = ii[n];
510:   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
511:   for (PetscInt i = 0; i < n; i++) {
512:     for (PetscInt j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
513:   }
514:   PetscCall(PetscArraycpy(cooj, jj, *ncoo));
515:   *coo_i = cooi;
516:   *coo_j = cooj;
517:   PetscFunctionReturn(PETSC_SUCCESS);
518: }

520: // Similar to CSRtoCOO_Private, but the CSR's i[], j[] are of type HYPRE_Int
521: static PetscErrorCode CSRtoCOO_HYPRE_Int_Private(PetscInt n, const HYPRE_Int ii[], const HYPRE_Int jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
522: {
523:   PetscInt *cooi, *cooj;

525:   PetscFunctionBegin;
526:   *ncoo = ii[n];
527:   PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
528:   for (PetscInt i = 0; i < n; i++) {
529:     for (HYPRE_Int j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
530:   }
531:   for (PetscCount i = 0; i < *ncoo; i++) cooj[i] = jj[i];
532:   *coo_i = cooi;
533:   *coo_j = cooj;
534:   PetscFunctionReturn(PETSC_SUCCESS);
535: }

537: // Build a COO data structure for the seqaij matrix, as if the nonzeros are laid out in the same order as in the CSR
538: static PetscErrorCode MatSeqAIJGetCOO_Private(Mat A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
539: {
540:   PetscInt        n;
541:   const PetscInt *ii, *jj;
542:   PetscBool       done;

544:   PetscFunctionBegin;
545:   PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
546:   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatGetRowIJ");
547:   PetscCall(CSRtoCOO_Private(n, ii, jj, ncoo, coo_i, coo_j));
548:   PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
549:   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatRestoreRowIJ");
550:   PetscFunctionReturn(PETSC_SUCCESS);
551: }

553: // Build a COO data structure for the hypreCSRMatrix, as if the nonzeros are laid out in the same order as in the hypreCSRMatrix
// Build a COO data structure for the hypreCSRMatrix, as if the nonzeros are laid out in the same order as in the hypreCSRMatrix.
// If the hypre data lives on the device, the i/j arrays are first staged to host copies.
static PetscErrorCode hypreCSRMatrixGetCOO_Private(hypre_CSRMatrix *A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt             n = hypre_CSRMatrixNumRows(A);
  HYPRE_Int           *ii, *jj;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
  /* NOTE: the #if/#else below splices brace structure across preprocessor
     branches so that exactly one of the two block openings is compiled */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(A);
  if (mem != HYPRE_MEMORY_HOST) {
    /* device-resident: copy i/j to temporary host buffers */
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(A);
    PetscCall(PetscMalloc2(n + 1, &ii, nnz, &jj));
    hypre_TMemcpy(ii, hypre_CSRMatrixI(A), HYPRE_Int, n + 1, HYPRE_MEMORY_HOST, mem);
    hypre_TMemcpy(jj, hypre_CSRMatrixJ(A), HYPRE_Int, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
#else
  {
#endif
    /* host-resident: use hypre's arrays directly */
    ii = hypre_CSRMatrixI(A);
    jj = hypre_CSRMatrixJ(A);
  }
  PetscCall(CSRtoCOO_HYPRE_Int_Private(n, ii, jj, ncoo, coo_i, coo_j));
  /* free the staging buffers only when they were allocated above */
  if (mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree2(ii, jj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

580: static PetscErrorCode MatSetValuesCOOFromCSRMatrix_Private(Mat A, hypre_CSRMatrix *H)
581: {
582:   PetscBool            iscpu = PETSC_TRUE;
583:   PetscScalar         *a;
584:   HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

586:   PetscFunctionBegin;
587: #if defined(PETSC_HAVE_HYPRE_DEVICE)
588:   mem = hypre_CSRMatrixMemoryLocation(H);
589:   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &iscpu));
590: #endif
591:   if (iscpu && mem != HYPRE_MEMORY_HOST) {
592:     PetscCount nnz = hypre_CSRMatrixNumNonzeros(H);
593:     PetscCall(PetscMalloc1(nnz, &a));
594:     hypre_TMemcpy(a, hypre_CSRMatrixData(H), PetscScalar, nnz, HYPRE_MEMORY_HOST, mem);
595:   } else {
596:     a = (PetscScalar *)hypre_CSRMatrixData(H);
597:   }
598:   PetscCall(MatSetValuesCOO(A, a, INSERT_VALUES));
599:   if (iscpu && mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree(a));
600:   PetscFunctionReturn(PETSC_SUCCESS);
601: }

/*
  Converts an AIJ (or BAIJ/SBAIJ, via an intermediate AIJ conversion) matrix
  to MATHYPRE. On first conversion it builds the hypre IJ matrix, copies the
  sparsity pattern, and sets up a COO delegate matrix whose data array is
  aliased to hypre's; on reuse it only pushes new values through the delegate.
*/
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  MPI_Comm     comm = PetscObjectComm((PetscObject)A);
  Mat          M = NULL, dH = NULL, oH = NULL, dA = NULL, oA = NULL;
  PetscBool    ismpiaij, issbaij, isbaij, boundtocpu = PETSC_TRUE;
  Mat_HYPRE   *hA;
  PetscMemType memtype = PETSC_MEMTYPE_HOST;

  PetscFunctionBegin;
  if (PetscDefined(HAVE_HYPRE_DEVICE)) {
    /* keep hypre's memory location consistent with where A's data currently lives */
    PetscCall(MatGetCurrentMemType(A, &memtype));
    PetscCall(PetscHYPREInitialize());
    boundtocpu = PetscMemTypeHost(memtype) ? PETSC_TRUE : PETSC_FALSE;
    PetscCallHYPRE(HYPRE_SetMemoryLocation(boundtocpu ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE));
  }

  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPIBAIJ, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
  if (isbaij || issbaij) { /* handle BAIJ and SBAIJ by converting to AIJ first */
    PetscBool ismpi;
    MatType   newtype;

    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
    newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
    if (reuse == MAT_REUSE_MATRIX) {
      /* round-trip *B through AIJ so the values can be refreshed from A */
      PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
      PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else {
      PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
    }
#if defined(PETSC_HAVE_DEVICE)
    (*B)->boundtocpu = boundtocpu;
#endif
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* dA/oA: diagonal and off-diagonal SeqAIJ blocks of the source */
  dA = A;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));

  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, MATHYPRE));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));

    /* build the hypre IJ matrix and copy A's sparsity pattern into it */
    hA = (Mat_HYPRE *)M->data;
    PetscCall(MatHYPRE_CreateFromMat(A, hA));
    PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij));

    PetscCall(MatHYPRE_CreateCOOMat(M));

    /* dH/oH: diagonal and off-diagonal blocks of the COO delegate */
    dH = hA->cooMat;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

    /* COO-preallocate the delegate blocks with the same pattern as A's blocks */
    PetscCall(PetscObjectSetName((PetscObject)dH, "_internal_COO_mat_for_hypre"));
    PetscCall(MatSeqAIJGetCOO_Private(dA, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dH, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (oH) {
      /* match oH's column layout to oA's before preallocating */
      PetscCall(PetscLayoutDestroy(&oH->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oH), oA->cmap->n, oA->cmap->n, 1, &oH->cmap));
      PetscCall(MatSeqAIJGetCOO_Private(oA, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oH, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));
    }
    hA->cooMat->assembled = PETSC_TRUE;

    M->preallocated = PETSC_TRUE;
    PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));

    /* alias the delegate's data arrays to hypre's */
    PetscCall(MatHYPRE_AttachCOOMat(M));
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  hA = (Mat_HYPRE *)M->data;
  PetscCheck(hA->cooMat, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");

  dH = hA->cooMat;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

  /* push A's current values through the delegate (and hence into hypre) */
  PetscScalar *a;
  PetscCall(MatSeqAIJGetCSRAndMemType(dA, NULL, NULL, &a, NULL));
  PetscCall(MatSetValuesCOO(dH, a, INSERT_VALUES));
  if (oH) {
    PetscCall(MatSeqAIJGetCSRAndMemType(oA, NULL, NULL, &a, NULL));
    PetscCall(MatSetValuesCOO(oH, a, INSERT_VALUES));
  }

  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
#if defined(PETSC_HAVE_DEVICE)
  (*B)->boundtocpu = boundtocpu;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Convert a MATHYPRE matrix A to a PETSc AIJ matrix by wiring the hypre ParCSR
   diagonal/off-diagonal CSR blocks into the AIJ matrix through the COO assembly API.
   - MAT_REUSE_MATRIX: *B must already be a (Seq/MPI)AIJ matrix from a previous conversion
   - MAT_INPLACE_MATRIX: A's header is replaced with the converted matrix
   No communication is needed; each rank converts its local blocks. */
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat                 M, dA = NULL, oA = NULL; /* result and its diagonal/off-diagonal SeqAIJ blocks */
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dH, *oH; /* hypre diagonal/off-diagonal CSR blocks */
  MPI_Comm            comm;
  PetscBool           ismpiaij, isseqaij;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ base types are supported");
  }
  PetscCall(MatHYPREGetParCSR(A, &parcsr));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* if the hypre data lives on the device, promote the generic AIJ request
     to the matching device-capable AIJ subtype */
  if (HYPRE_MEMORY_DEVICE == hypre_ParCSRMatrixMemoryLocation(parcsr)) {
    PetscBool isaij;

    PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
    if (isaij) {
      PetscMPIInt size;

      PetscCallMPI(MPI_Comm_size(comm, &size));
  #if defined(HYPRE_USING_HIP)
      mtype = size > 1 ? MATMPIAIJHIPSPARSE : MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
      mtype = size > 1 ? MATMPIAIJCUSPARSE : MATSEQAIJCUSPARSE;
  #else
      mtype = size > 1 ? MATMPIAIJ : MATSEQAIJ;
  #endif
    }
  }
#endif
  dH = hypre_ParCSRMatrixDiag(parcsr);
  oH = hypre_ParCSRMatrixOffd(parcsr);
  if (reuse != MAT_REUSE_MATRIX) { /* build the AIJ skeleton and its COO preallocation */
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, mtype));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatMPIAIJSetPreallocation(M, 0, NULL, 0, NULL));

    dA = M;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));

    /* preallocate the diagonal block from the hypre CSR sparsity pattern */
    PetscCall(hypreCSRMatrixGetCOO_Private(dH, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dA, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (ismpiaij) {
      HYPRE_Int nc = hypre_CSRMatrixNumCols(oH);

      /* the off-diagonal block uses compressed column numbering of width nc */
      PetscCall(PetscLayoutDestroy(&oA->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oA), nc, nc, 1, &oA->cmap));
      PetscCall(hypreCSRMatrixGetCOO_Private(oH, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oA, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));

      /* garray: map compressed off-diagonal columns back to global columns,
         converting from hypre's col_map_offd (HYPRE_BigInt) to PetscInt */
      Mat_MPIAIJ   *aij    = (Mat_MPIAIJ *)M->data;
      HYPRE_BigInt *harray = hypre_ParCSRMatrixColMapOffd(parcsr);
      PetscInt     *garray;

      PetscCall(PetscFree(aij->garray));
      PetscCall(PetscMalloc1(nc, &garray));
      for (HYPRE_Int i = 0; i < nc; i++) garray[i] = (PetscInt)harray[i];
      aij->garray = garray;
      PetscCall(MatSetUpMultiply_MPIAIJ(M));
    }
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  /* numeric phase: copy values from the hypre CSR blocks via COO insertion */
  dA = M;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
  PetscCall(MatSetValuesCOOFromCSRMatrix_Private(dA, dH));
  if (oA) PetscCall(MatSetValuesCOOFromCSRMatrix_Private(oA, oH));
  M->assembled = PETSC_TRUE;
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Wrap a (Seq/MPI)AIJ matrix A as a temporary hypre ParCSR matrix *hA without
   copying the numerical values: the hypre CSR blocks borrow PETSc's i/j/a arrays
   (index arrays are duplicated only when sizeof(PetscInt) != sizeof(HYPRE_Int)).
   The wrapper must be released with MatAIJRestoreParCSR_Private(), which undoes
   the pointer aliasing before destroying the ParCSR. */
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag, *hoffd;
  Mat_SeqAIJ         *diag, *offd;
  PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ismpiaij, isseqaij;
  /* when PetscInt and HYPRE_Int have the same size, PETSc index arrays can be aliased directly */
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
  PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
  PetscBool           iscuda, iship;
#if defined(PETSC_HAVE_DEVICE) && defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool boundtocpu = A->boundtocpu;
#else
  PetscBool boundtocpu = PETSC_TRUE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
  PetscCall(PetscHYPREInitialize());
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;

    diag = (Mat_SeqAIJ *)a->A->data;
    offd = (Mat_SeqAIJ *)a->B->data;
    if (!boundtocpu && (iscuda || iship)) {
      /* device path: get HYPRE_Int-typed device CSR indices directly from cu/hipSPARSE */
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
    } else {
      /* host path: alias the PETSc CSR index arrays (cast only if sizes match) */
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      poi        = offd->i;
      poj        = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
        hoi = (HYPRE_Int *)poi;
        hoj = (HYPRE_Int *)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    /* sequential case: no off-diagonal block */
    diag = (Mat_SeqAIJ *)A->data;
    offd = NULL;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    /* assumed partition: hypre only needs this rank's [start,end) slice of the ranges */
    PetscMPIInt myid;

    PetscCallMPI(MPI_Comm_rank(comm, &myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, (HYPRE_Int)noffd, (HYPRE_Int)dnnz, (HYPRE_Int)onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* the starts arrays belong to PETSc's layouts; hypre must not free them */
  hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
  hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers and copy-convert PetscInt -> HYPRE_Int */
    PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
    for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)pdi[i];
    for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)pdj[i];
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a; /* values are borrowed, never copied */
  hypre_CSRMatrixNumNonzeros(hdiag) = (HYPRE_Int)diag->nz;
  hypre_CSRMatrixSetDataOwner(hdiag, 0); /* hypre must not free PETSc's arrays */

  /* set off-diagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers and copy-convert PetscInt -> HYPRE_Int */
      PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
      for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)poi[i];
      for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)poj[i];
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = (HYPRE_Int)offd->nz;
    hypre_CSRMatrixSetDataOwner(hoffd, 0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCallHYPRE(hypre_ParCSRMatrixInitialize_v2(tA, !boundtocpu ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST));
#else
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  PetscCallHYPRE(hypre_ParCSRMatrixInitialize(tA));
  #else
  PetscCallHYPRE(hypre_ParCSRMatrixInitialize_v2(tA, HYPRE_MEMORY_HOST));
  #endif
#endif

  /* MatrixSetRownnz comes after MatrixInitialize, so the first uses the right memory location */
  hypre_CSRMatrixSetRownnz(hdiag);
  if (offd) hypre_CSRMatrixSetRownnz(hoffd);

  /* alias PETSc's garray as hypre's col_map_offd, freeing the one Initialize allocated */
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallHYPRE(hypre_MatvecCommPkgCreate(tA));
  *hA = tA;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release a temporary ParCSR wrapper created by MatAIJGetParCSR_Private():
   free the converted index arrays (only allocated when PetscInt != HYPRE_Int),
   then NULL all borrowed pointers so hypre_ParCSRMatrixDestroy() does not free
   memory owned by PETSc. Sets *hA to NULL on return. */
static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscBool        iscuda, iship;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
  /* device paths always aliased HYPRE_Int indices, so nothing was malloc'ed (mirror of Get) */
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
  if (iscuda) sameint = PETSC_TRUE;
#elif defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
  if (iship) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL; /* was PETSc's garray, not hypre's to free */
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls RAP from BoomerAMG:
   computes *hRAP = hR^T * hA * hP
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  /* remember ownership so we can restore it after hypre steals the starts */
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallHYPRE(hypre_BoomerAMGBuildCoarseOperator(hR, hA, hP, hRAP));
  PetscCallHYPRE(hypre_ParCSRMatrixSetNumNonzeros(*hRAP));
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

1034: static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
1035: {
1036:   Mat                 B;
1037:   hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
1038:   Mat_Product        *product = C->product;

1040:   PetscFunctionBegin;
1041:   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1042:   PetscCall(MatAIJGetParCSR_Private(P, &hP));
1043:   PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
1044:   PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));

1046:   PetscCall(MatHeaderMerge(C, &B));
1047:   C->product = product;

1049:   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1050:   PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
1051:   PetscFunctionReturn(PETSC_SUCCESS);
1052: }

/* Symbolic phase of C = P^T * A * P using hypre: no symbolic work is done here,
   we only set C's type and install the numeric callbacks (hypre does the full
   product, sparsity pattern included, in the numeric phase). */
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Numeric phase of C = P^T * A * P where A is AIJ and P is MATHYPRE.
   A is wrapped as a borrowed ParCSR; P's ParCSR is taken from its IJMatrix.
   The result is converted to C's type and merged into C. */
static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
  HYPRE_Int           type;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  hP = (Mat_HYPRE *)P->data;
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hP->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hP->ij, (void **)&Pparcsr));

  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));

  /* create temporary matrix and merge to C */
  PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Numeric phase of C = P^T * A * P where both A and P are MATHYPRE.
   Both ParCSR objects are extracted from the IJMatrix wrappers; only the
   HYPRE_PARCSR object type is supported. */
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
  Mat_HYPRE          *hA, *hP;
  PetscBool           ishypre;
  HYPRE_Int           type;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hP = (Mat_HYPRE *)P->data;
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hA->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hP->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&Aparcsr));
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hP->ij, (void **)&Pparcsr));
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
  PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls hypre_ParMatmul to compute *hAB = hA * hB
   hypre_ParMatMul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  /* device-capable variant */
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1136: static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
1137: {
1138:   Mat                 D;
1139:   hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
1140:   Mat_Product        *product = C->product;

1142:   PetscFunctionBegin;
1143:   PetscCall(MatAIJGetParCSR_Private(A, &hA));
1144:   PetscCall(MatAIJGetParCSR_Private(B, &hB));
1145:   PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
1146:   PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));

1148:   PetscCall(MatHeaderMerge(C, &D));
1149:   C->product = product;

1151:   PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
1152:   PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
1153:   PetscFunctionReturn(PETSC_SUCCESS);
1154: }

/* Symbolic phase of C = A * B using hypre: no symbolic work is done here,
   we only set C's type and install the numeric callbacks (hypre computes
   pattern and values together in the numeric phase). */
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Numeric phase of C = A * B where both A and B are MATHYPRE.
   Uses MatHeaderReplace (not MatHeaderMerge) because the merge path mishandles
   the communicator; the product context and the numeric callbacks are restored
   on C afterwards since MatHeaderReplace resets them. */
static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
  Mat_HYPRE          *hA, *hB;
  PetscBool           ishypre;
  HYPRE_Int           type;
  Mat_Product        *product;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hB = (Mat_HYPRE *)B->data;
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hA->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hB->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&Aparcsr));
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hB->ij, (void **)&Bparcsr));
  PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
  PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  PetscCall(MatHeaderReplace(C, &D));
  C->product             = product;
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Numeric phase of D = A^T * B * C for AIJ operands, computed with hypre's RAP
   kernel (with A playing the role of R and C the role of P). */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
{
  Mat                 E;
  hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;

  PetscFunctionBegin;
  /* wrap all three operands as borrowed ParCSR matrices */
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatAIJGetParCSR_Private(C, &hC));
  PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
  PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
  PetscCall(MatHeaderMerge(D, &E));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase of D = A^T * B * C using hypre: only sets D's type; the full
   product (pattern and values) is computed in the numeric phase. */
PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase for MATPRODUCT_AB with a MATHYPRE A: only installs the
   generic numeric driver; all work happens at numeric time. */
static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1232: static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
1233: {
1234:   Mat_Product *product = C->product;
1235:   PetscBool    Ahypre;

1237:   PetscFunctionBegin;
1238:   PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
1239:   if (Ahypre) { /* A is a Hypre matrix */
1240:     PetscCall(MatSetType(C, MATHYPRE));
1241:     C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
1242:     C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
1243:     PetscFunctionReturn(PETSC_SUCCESS);
1244:   }
1245:   PetscFunctionReturn(PETSC_SUCCESS);
1246: }

/* Symbolic phase for MATPRODUCT_PtAP with hypre: only installs the generic
   numeric driver; all work happens at numeric time. */
static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Configure C for MATPRODUCT_PtAP when hypre matrices are involved.
   If A is MATHYPRE the result is MATHYPRE; otherwise A is AIJ and P is hypre,
   and the output type is chosen via a runtime option (aij/seqaij/mpiaij/hypre,
   default aij). */
static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    flg;
  PetscInt     type        = 0; /* index into outTypes[]; 0 = "aij" default */
  const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
  PetscInt     ntype       = 4;
  Mat          A           = product->A;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) { /* option name depends on which API requested the product */
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  } else {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  }

  if (type == 0 || type == 1 || type == 2) PetscCall(MatSetType(C, MATAIJ)); /* aij/seqaij/mpiaij */
  else {
    PetscCheck(type == 3, PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
    PetscCall(MatSetType(C, MATHYPRE));
  }
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1296: static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1297: {
1298:   Mat_Product *product = C->product;

1300:   PetscFunctionBegin;
1301:   switch (product->type) {
1302:   case MATPRODUCT_AB:
1303:     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1304:     break;
1305:   case MATPRODUCT_PtAP:
1306:     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1307:     break;
1308:   default:
1309:     break;
1310:   }
1311:   PetscFunctionReturn(PETSC_SUCCESS);
1312: }

/* y = A^T * x, forwarded to the shared kernel with a = 1, b = 0, trans = true */
static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = A * x, forwarded to the shared kernel with a = 1, b = 0 */
static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A * x; y is first copied into z when they differ, then the kernel
   accumulates with b = 1 */
static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* z = y + A^T * x; y is first copied into z when they differ, then the kernel
   accumulates with b = 1, trans = true */
static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans.
   The PETSc vectors are temporarily pushed into the matrix's cached hypre
   IJVector wrappers (hA->x for the column space, hA->b for the row space);
   in the transpose case the roles swap. A write-only push is used when b == 0
   so the previous contents of y need not be valid/transferred. */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx, *hy;

  PetscFunctionBegin;
  if (trans) {
    /* A^T: input lives in the row space (hA->b), output in the column space (hA->x) */
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
    PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->b->ij, (void **)&hx));
    PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->x->ij, (void **)&hy));
  } else {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
    PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->x->ij, (void **)&hx));
    PetscCallHYPRE(HYPRE_IJVectorGetObject(hA->b->ij, (void **)&hy));
  }
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
  if (trans) {
    PetscCallHYPRE(hypre_ParCSRMatrixMatvecT(a, parcsr, hx, b, hy));
  } else {
    PetscCallHYPRE(hypre_ParCSRMatrixMatvec(a, parcsr, hx, b, hy));
  }
  /* release both wrappers regardless of trans; push/pop must stay balanced */
  PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy a MATHYPRE matrix: release the cached IJVector wrappers, the COO
   delegate matrix, the IJMatrix, the cached communicator, the stash, device
   scratch, and all composed function callbacks. Order matters: the COO
   delegate must go before the IJMatrix it may reference. */
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
  PetscCall(MatHYPRE_DestroyCOOMat(A)); /* must be called before destroying the individual CSR */
  if (hA->ij) {
    /* if the inner hypre object is not ours, detach it so IJMatrixDestroy won't free it */
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallHYPRE(HYPRE_IJMatrixDestroy(hA->ij));
  }
  if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));

  PetscCall(MatStashDestroy_Private(&A->stash));
  PetscCall(PetscFree(hA->array));
  if (hA->rows_d) PetscCallExternalVoid("hypre_Free", hypre_Free(hA->rows_d, HYPRE_MEMORY_DEVICE));

  /* remove all composed functions installed at creation/conversion time */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Default setup: fall back to default preallocation if the user never called
   MatHYPRESetPreallocation() */
static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscFunctionBegin;
  if (!A->preallocated) PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

//TODO FIX hypre_CSRMatrixMatvecOutOfPlace
#if defined(PETSC_HAVE_HYPRE_DEVICE)
/* Bind/unbind the matrix to the CPU: propagate the flag to the COO delegate,
   migrate the assembled hypre ParCSR to the requested memory location if it
   currently lives elsewhere, and rebind the cached IJVector wrappers. */
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;

  PetscFunctionBegin;
  A->boundtocpu = bind;
  if (hA->cooMat) {
    PetscBool coobound;
    /* only rebind the delegate when its state actually differs */
    PetscCall(MatBoundToCPU(hA->cooMat, &coobound));
    if (coobound != bind) PetscCall(MatBindToCPU(hA->cooMat, bind));
  }
  /* migrate only assembled matrices whose data is on the wrong side */
  if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
    PetscCallHYPRE(hypre_ParCSRMatrixMigrate(parcsr, hmem));
  }
  if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
  if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

/* Finish assembly: drain the PETSc stash of off-process entries into hypre,
   call HYPRE_IJMatrixAssemble(), and recreate the auxiliary translator matrix
   that hypre's assembly destroys, so that subsequent MatSetValues() calls
   still work.  MAT_FLUSH_ASSEMBLY is not supported. */
static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
  PetscMPIInt  n;
  PetscInt     i, j, rstart, ncols, flg;
  PetscInt    *row, *col;
  PetscScalar *val;

  PetscFunctionBegin;
  PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    /* receive and insert all stashed (off-process) entries, one message at a time */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));

        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&A->stash));
  }

  PetscCallHYPRE(HYPRE_IJMatrixAssemble(hA->ij));
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!A->sortedfull) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    PetscCallHYPRE(hypre_AuxParCSRMatrixDestroy(aux_matrix));
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscCallHYPRE(HYPRE_IJMatrixInitialize(hA->ij));
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      PetscCallHYPRE(hypre_AuxParCSRMatrixInitialize(aux_matrix));
#else
      PetscCallHYPRE(hypre_AuxParCSRMatrixInitialize_v2(aux_matrix, HYPRE_MEMORY_HOST));
#endif
    }
  }
  {
    hypre_ParCSRMatrix *parcsr;

    /* make sure the communication package needed by MatMult() exists */
    PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)&parcsr));
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallHYPRE(hypre_MatvecCommPkgCreate(parcsr));
  }
  /* cache work vectors compatible with the matrix layouts */
  if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
  if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

1511: static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1512: {
1513:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1515:   PetscFunctionBegin;
1516:   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");

1518:   if (hA->array_size >= size) {
1519:     *array = hA->array;
1520:   } else {
1521:     PetscCall(PetscFree(hA->array));
1522:     hA->array_size = size;
1523:     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1524:     *array = hA->array;
1525:   }

1527:   hA->array_available = PETSC_FALSE;
1528:   PetscFunctionReturn(PETSC_SUCCESS);
1529: }

/* Return the scratch buffer obtained from MatGetArray_HYPRE(); the memory
   stays cached in the Mat_HYPRE structure for reuse, only the in-use flag
   and the caller's pointer are reset. */
static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  *array              = NULL;
  hA->array_available = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Insert (INSERT_VALUES) or add (ADD_VALUES) a logically dense nr-by-nc block
   of values into the HYPRE_IJMatrix.  Negative row/column indices are skipped.
   For INSERT_VALUES, off-process rows go through the PETSc stash and are
   flushed in MatAssemblyEnd_HYPRE(); for ADD_VALUES they are handed to hypre
   directly via HYPRE_IJMatrixAddToValues(). */
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
  PetscScalar   *vals = (PetscScalar *)v;
  HYPRE_Complex *sscr;
  PetscInt      *cscr[2];
  PetscInt       i, nzc;
  PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
  void          *array = NULL;

  PetscFunctionBegin;
  /* scratch layout: [compacted columns | original column positions | cast values] */
  PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
  cscr[0] = (PetscInt *)array;
  cscr[1] = ((PetscInt *)array) + nc;
  sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
  /* drop negative column indices, remembering where each kept column came from */
  for (i = 0, nzc = 0; i < nc; i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc]   = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    /* nothing to insert: every column index was negative */
    PetscCall(MatRestoreArray_HYPRE(A, &array));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij,(void**)&parcsr));
    PetscCallHYPRE(hypre_ParCSRMatrixMigrate(parcsr, HYPRE_MEMORY_HOST));
  }
#endif

  if (ins == ADD_VALUES) {
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          if (hA->donotstash) continue;
        }
        /* guard against HYPRE_Int narrower than PetscInt */
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (PetscInt j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        PetscCallHYPRE(HYPRE_IJMatrixAddToValues(hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr));
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        if (!nzc) continue;
        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (PetscInt j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) {
          PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
          if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
        }
        /* local values */
        else
          PetscCallHYPRE(HYPRE_IJMatrixSetValues(hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr));
      }
      vals += nc;
    }
  }

  PetscCall(MatRestoreArray_HYPRE(A, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1618: static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1619: {
1620:   Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
1621:   HYPRE_Int  *hdnnz, *honnz;
1622:   PetscInt    i, rs, re, cs, ce, bs;
1623:   PetscMPIInt size;

1625:   PetscFunctionBegin;
1626:   PetscCall(PetscLayoutSetUp(A->rmap));
1627:   PetscCall(PetscLayoutSetUp(A->cmap));
1628:   rs = A->rmap->rstart;
1629:   re = A->rmap->rend;
1630:   cs = A->cmap->rstart;
1631:   ce = A->cmap->rend;
1632:   if (!hA->ij) {
1633:     PetscCallHYPRE(HYPRE_IJMatrixCreate(hA->comm, rs, re - 1, cs, ce - 1, &hA->ij));
1634:     PetscCallHYPRE(HYPRE_IJMatrixSetObjectType(hA->ij, HYPRE_PARCSR));
1635:   } else {
1636:     HYPRE_BigInt hrs, hre, hcs, hce;
1637:     PetscCallHYPRE(HYPRE_IJMatrixGetLocalRange(hA->ij, &hrs, &hre, &hcs, &hce));
1638:     PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
1639:     PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
1640:   }
1641:   PetscCall(MatHYPRE_DestroyCOOMat(A));
1642:   PetscCall(MatGetBlockSize(A, &bs));
1643:   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
1644:   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;

1646:   if (!dnnz) {
1647:     PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
1648:     for (i = 0; i < A->rmap->n; i++) hdnnz[i] = (HYPRE_Int)dnz;
1649:   } else {
1650:     hdnnz = (HYPRE_Int *)dnnz;
1651:   }
1652:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1653:   if (size > 1) {
1654:     hypre_AuxParCSRMatrix *aux_matrix;
1655:     if (!onnz) {
1656:       PetscCall(PetscMalloc1(A->rmap->n, &honnz));
1657:       for (i = 0; i < A->rmap->n; i++) honnz[i] = (HYPRE_Int)onz;
1658:     } else honnz = (HYPRE_Int *)onnz;
1659:     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
1660:        they assume the user will input the entire row values, properly sorted
1661:        In PETSc, we don't make such an assumption and set this flag to 1,
1662:        unless the option MAT_SORTED_FULL is set to true.
1663:        Also, to avoid possible memory leaks, we destroy and recreate the translator
1664:        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1665:        the IJ matrix for us */
1666:     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1667:     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1668:     hypre_IJMatrixTranslator(hA->ij) = NULL;
1669:     PetscCallHYPRE(HYPRE_IJMatrixSetDiagOffdSizes(hA->ij, hdnnz, honnz));
1670:     aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1671:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
1672:   } else {
1673:     honnz = NULL;
1674:     PetscCallHYPRE(HYPRE_IJMatrixSetRowSizes(hA->ij, hdnnz));
1675:   }

1677:   /* reset assembled flag and call the initialize method */
1678:   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1679: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1680:   PetscCallHYPRE(HYPRE_IJMatrixInitialize(hA->ij));
1681: #else
1682:   PetscCallHYPRE(HYPRE_IJMatrixInitialize_v2(hA->ij, HYPRE_MEMORY_HOST));
1683: #endif
1684:   if (!dnnz) PetscCall(PetscFree(hdnnz));
1685:   if (!onnz && honnz) PetscCall(PetscFree(honnz));
1686:   /* Match AIJ logic */
1687:   A->preallocated = PETSC_TRUE;
1688:   A->assembled    = PETSC_FALSE;
1689:   PetscFunctionReturn(PETSC_SUCCESS);
1690: }

/*@C
  MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

  Collective

  Input Parameters:
+ A    - the matrix
. dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
. dnnz - array containing the number of nonzeros in the various rows of the
          DIAGONAL portion of the local submatrix (possibly different for each row)
          or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
          The size of this array is equal to the number of local rows, i.e `m`.
          For matrices that will be factored, you must leave room for (and set)
          the diagonal entry even if it is zero.
. onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
- onnz - array containing the number of nonzeros in the various rows of the
          OFF-DIAGONAL portion of the local submatrix (possibly different for
          each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
          structure. The size of this array is equal to the number
          of local rows, i.e `m`.

  Level: intermediate

  Note:
  If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, `onz` and `onnz` are ignored.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscFunctionBegin;
  /* dispatch to the type-specific implementation (no-op for non-MATHYPRE types) */
  PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`

  Collective

  Input Parameters:
+ parcsr   - the pointer to the `hypre_ParCSRMatrix`
. mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
- copymode - PETSc copying options, see  `PetscCopyMode`

  Output Parameter:
. A - the matrix

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
@*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
{
  Mat        T;
  Mat_HYPRE *hA;
  MPI_Comm   comm;
  PetscInt   rstart, rend, cstart, cend, M, N;
  PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  /* classify the requested output type */
  PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
  PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
  PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
  PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
  PetscCall(PetscStrcmp(mtype, MATIS, &isis));
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* create PETSc matrix with MatHYPRE */
  PetscCall(MatCreate(comm, &T));
  /* hypre ranges are inclusive; local sizes are clamped at zero for empty ranks */
  PetscCall(MatSetSizes(T, PetscMax(rend - rstart + 1, 0), PetscMax(cend - cstart + 1, 0), M, N));
  PetscCall(MatSetType(T, MATHYPRE));
  hA = (Mat_HYPRE *)T->data;

  /* create HYPRE_IJMatrix */
  PetscCallHYPRE(HYPRE_IJMatrixCreate(hA->comm, rstart, rend, cstart, cend, &hA->ij));
  PetscCallHYPRE(HYPRE_IJMatrixSetObjectType(hA->ij, HYPRE_PARCSR));

  /* create new ParCSR object if needed */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
    hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;

    /* old hypre: clone only the structure, then copy the numerical values by hand */
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
#endif
    /* we own the clone: proceed as if the caller had transferred ownership */
    parcsr   = new_parcsr;
    copymode = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated              = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscCallHYPRE(HYPRE_IJMatrixInitialize(hA->ij));
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, NULL, &map));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, &map, NULL));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
#endif
    /* prevent from freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A = T;
    PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent from freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
      PetscCall(MatDestroy(&T));
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
      *A = T;
    }
  } else if (isis) {
    PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    PetscCall(MatDestroy(&T));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return the hypre_ParCSRMatrix wrapped by the HYPRE_IJMatrix; errors if the
   IJ matrix does not exist or is not of PARCSR object type. */
static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
  HYPRE_Int  type;

  PetscFunctionBegin;
  PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
  PetscCallHYPRE(HYPRE_IJMatrixGetObjectType(hA->ij, &type));
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscCallHYPRE(HYPRE_IJMatrixGetObject(hA->ij, (void **)parcsr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

  Not Collective, No Fortran Support

  Input Parameter:
. A - the `MATHYPRE` object

  Output Parameter:
. parcsr - the pointer to the `hypre_ParCSRMatrix`

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
@*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscFunctionBegin;
  /* dispatch to the type-specific implementation; errors if A is not MATHYPRE */
  PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

1896: static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1897: {
1898:   hypre_ParCSRMatrix *parcsr;
1899: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1900:   hypre_CSRMatrix *ha;
1901: #endif
1902:   HYPRE_Complex hs;

1904:   PetscFunctionBegin;
1905:   PetscCall(PetscHYPREScalarCast(s, &hs));
1906:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1907: #if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
1908:   PetscCallHYPRE(hypre_ParCSRMatrixScale(parcsr, hs));
1909: #else /* diagonal part */
1910:   ha = hypre_ParCSRMatrixDiag(parcsr);
1911:   if (ha) {
1912:     PetscInt       size, i;
1913:     HYPRE_Int     *ii;
1914:     HYPRE_Complex *a;

1916:     size = hypre_CSRMatrixNumRows(ha);
1917:     a    = hypre_CSRMatrixData(ha);
1918:     ii   = hypre_CSRMatrixI(ha);
1919:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1920:   }
1921:   /* off-diagonal part */
1922:   ha = hypre_ParCSRMatrixOffd(parcsr);
1923:   if (ha) {
1924:     PetscInt       size, i;
1925:     HYPRE_Int     *ii;
1926:     HYPRE_Complex *a;

1928:     size = hypre_CSRMatrixNumRows(ha);
1929:     a    = hypre_CSRMatrixData(ha);
1930:     ii   = hypre_CSRMatrixI(ha);
1931:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1932:   }
1933: #endif
1934:   PetscFunctionReturn(PETSC_SUCCESS);
1935: }

1937: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1938: {
1939:   hypre_ParCSRMatrix *parcsr;
1940:   HYPRE_Int          *lrows;
1941:   PetscInt            rst, ren, i;

1943:   PetscFunctionBegin;
1944:   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1945:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1946:   PetscCall(PetscMalloc1(numRows, &lrows));
1947:   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1948:   for (i = 0; i < numRows; i++) {
1949:     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1950:     lrows[i] = (HYPRE_Int)(rows[i] - rst);
1951:   }
1952:   PetscCallHYPRE(hypre_ParCSRMatrixEliminateRowsCols(parcsr, (HYPRE_Int)numRows, lrows));
1953:   PetscCall(PetscFree(lrows));
1954:   PetscFunctionReturn(PETSC_SUCCESS);
1955: }

1957: static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1958: {
1959:   PetscFunctionBegin;
1960:   if (ha) {
1961:     HYPRE_Int     *ii, size;
1962:     HYPRE_Complex *a;

1964:     size = hypre_CSRMatrixNumRows(ha);
1965:     a    = hypre_CSRMatrixData(ha);
1966:     ii   = hypre_CSRMatrixI(ha);

1968:     if (a) PetscCall(PetscArrayzero(a, ii[size]));
1969:   }
1970:   PetscFunctionReturn(PETSC_SUCCESS);
1971: }

1973: static PetscErrorCode MatZeroEntries_HYPRE(Mat A)
1974: {
1975:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1977:   PetscFunctionBegin;
1978:   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
1979:     PetscCallHYPRE(HYPRE_IJMatrixSetConstantValues(hA->ij, 0.0));
1980:   } else {
1981:     hypre_ParCSRMatrix *parcsr;

1983:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1984:     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
1985:     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
1986:   }
1987:   PetscFunctionReturn(PETSC_SUCCESS);
1988: }

/* Zero the listed rows of one CSR block (rows[] holds local row indices of
   this block); entries whose column index equals the row index are set to
   diag instead of zero.  On device memory the work is dispatched to a
   CUDA/HIP/Kokkos kernel. */
static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
{
  HYPRE_Int     *i, *j;
  HYPRE_Complex *a;

  PetscFunctionBegin;
  if (!hA) PetscFunctionReturn(PETSC_SUCCESS);

  i = hypre_CSRMatrixI(hA);
  j = hypre_CSRMatrixJ(hA);
  a = hypre_CSRMatrixData(hA);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hA)) {
  #if defined(HYPRE_USING_CUDA)
    PetscCall(MatZeroRows_CUDA(N, rows, i, j, a, diag));
  #elif defined(HYPRE_USING_HIP)
    PetscCall(MatZeroRows_HIP(N, rows, i, j, a, diag));
  #elif defined(PETSC_HAVE_KOKKOS)
    PetscCall(MatZeroRows_Kokkos(N, rows, i, j, a, diag));
  #else
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for MatZeroRows on a hypre matrix in this memory location");
  #endif
  } else
#endif
  {
    /* host path: walk each requested row and overwrite its values in place */
    for (PetscInt ii = 0; ii < N; ii++) {
      HYPRE_Int jj, ibeg, iend, irow;

      irow = (HYPRE_Int)rows[ii];
      ibeg = i[irow];
      iend = i[irow + 1];
      for (jj = ibeg; jj < iend; jj++)
        if (j[jj] == irow) a[jj] = diag;
        else a[jj] = 0.0;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Zero the listed (global) rows of the matrix, writing diag on the diagonal
   of the local block; x/b fix-up is not supported.  On device memory the
   local row indices are staged in a cached device buffer (hA->rows_d). */
static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  PetscInt           *lrows, len, *lrows2;
  HYPRE_Complex       hdiag;

  PetscFunctionBegin;
  PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size");
  PetscCall(PetscHYPREScalarCast(diag, &hdiag));
  /* retrieve the internal matrix */
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  /* get locally owned rows */
  PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hypre_ParCSRMatrixDiag(parcsr))) {
    Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
    PetscInt   m;
    PetscCall(MatGetLocalSize(A, &m, NULL));
    /* lazily allocate a device buffer sized for the whole local row range */
    if (!hA->rows_d) {
      hA->rows_d = hypre_TAlloc(PetscInt, m, HYPRE_MEMORY_DEVICE);
      if (m) PetscCheck(hA->rows_d, PETSC_COMM_SELF, PETSC_ERR_MEM, "HYPRE_TAlloc failed");
    }
    PetscCheck(len <= m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many rows in rows[]");
    PetscCallExternalVoid("hypre_Memcpy", hypre_Memcpy(hA->rows_d, lrows, sizeof(PetscInt) * len, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST));
    lrows2 = hA->rows_d;
  } else
#endif
  {
    lrows2 = lrows;
  }

  /* zero diagonal part */
  PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows2, hdiag));
  /* zero off-diagonal part */
  PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows2, 0.0));

  PetscCall(PetscFree(lrows));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2070: static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
2071: {
2072:   PetscFunctionBegin;
2073:   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);

2075:   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2076:   PetscFunctionReturn(PETSC_SUCCESS);
2077: }

/* MatGetRow implementation: for device-resident matrices the indices/values
   returned by hypre are copied into freshly allocated host arrays (freed in
   MatRestoreRow_HYPRE); otherwise hypre's internal pointers are returned
   directly. */
static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int           hnz;
#ifdef PETSC_HAVE_HYPRE_DEVICE
  PetscInt    *didx;
  PetscScalar *dv;
#endif

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
#ifdef PETSC_HAVE_HYPRE_DEVICE
  if (hypre_ParCSRMatrixMemoryLocation(parcsr) == HYPRE_MEMORY_DEVICE) {
    /* hypre returns device pointers; copy to host-allocated arrays */
    PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)&didx, (HYPRE_Complex **)&dv);
    if (idx) {
      PetscCall(PetscMalloc1(hnz, idx));
      hypre_TMemcpy(*idx, didx, PetscInt, hnz, HYPRE_MEMORY_HOST, HYPRE_MEMORY_DEVICE);
    }
    if (v) {
      PetscCall(PetscMalloc1(hnz, v));
      hypre_TMemcpy(*v, dv, PetscScalar, hnz, HYPRE_MEMORY_HOST, HYPRE_MEMORY_DEVICE);
    }
  } else
#endif
    /* call HYPRE API */
    PetscCallHYPRE(HYPRE_ParCSRMatrixGetRow(parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v));

  if (nz) *nz = (PetscInt)hnz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release a row obtained with MatGetRow_HYPRE(): frees the host copies made
   for device-resident matrices, then lets hypre release its row state. */
static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int           hnz;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
#ifdef PETSC_HAVE_HYPRE_DEVICE
  if (hypre_ParCSRMatrixMemoryLocation(parcsr) == HYPRE_MEMORY_DEVICE) {
    if (idx) PetscCall(PetscFree(*idx));
    if (v) PetscCall(PetscFree(*v));
  }
#endif
  /* call HYPRE API. It doesn't actually use any of the arguments so it's ok if we've actually
     already free'd idx and v above */
  hnz = nz ? (HYPRE_Int)(*nz) : 0;
  PetscCallHYPRE(HYPRE_ParCSRMatrixRestoreRow(parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fetch an m-by-n block of values into v (row-major, one row per idxm entry).
   Negative row indices are skipped; per hypre's API, negative column indices
   are expected to be ignored by hypre itself.  For device-resident matrices
   the index and value arrays are staged through device buffers. */
static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  Mat_HYPRE    *hA = (Mat_HYPRE *)A->data;
  HYPRE_Int     hypre_host_n;
  HYPRE_BigInt  hypre_host_idxm;
  HYPRE_BigInt *device_idxm = NULL, *device_idxn = NULL, *hypre_host_idxn;
  HYPRE_Int    *device_n      = NULL;
  PetscScalar  *device_values = NULL;
  PetscBool     hypre_on_host = PETSC_TRUE;

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscHYPREIntCast(n, &hypre_host_n));

  // Setup HYPRE_BigInt host idxn array
  if (sizeof(HYPRE_BigInt) > sizeof(PetscInt)) {
    /* widen the column indices into a temporary array */
    PetscCall(PetscMalloc1(n, &hypre_host_idxn));
    for (PetscInt j = 0; j < n; ++j) hypre_host_idxn[j] = idxn[j];
  } else {
    PetscCheck(sizeof(HYPRE_BigInt) == sizeof(PetscInt), PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing handling of HYPRE_BigInt size less than PetscInt size");
    hypre_host_idxn = (HYPRE_BigInt *)idxn;
  }

  // Check compatibility of PetscScalar and HYPRE_Complex
  PetscCheck(sizeof(PetscScalar) == sizeof(HYPRE_Complex), PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing handling of incompatible PetscScalar and HYPRE_Complex sizes");

#ifdef PETSC_HAVE_HYPRE_DEVICE
  if (hypre_IJMatrixMemoryLocation(hA->ij) == HYPRE_MEMORY_DEVICE) {
    /* allocate device staging buffers and upload the column indices/count once */
    hypre_on_host = PETSC_FALSE;
    device_idxm   = hypre_TAlloc(HYPRE_BigInt, 1, HYPRE_MEMORY_DEVICE);
    device_n      = hypre_TAlloc(HYPRE_Int, 1, HYPRE_MEMORY_DEVICE);
    device_values = hypre_TAlloc(PetscScalar, n, HYPRE_MEMORY_DEVICE);
    device_idxn   = hypre_TAlloc(HYPRE_BigInt, n, HYPRE_MEMORY_DEVICE);
    hypre_TMemcpy(device_idxn, hypre_host_idxn, HYPRE_BigInt, n, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST);
    hypre_TMemcpy(device_n, &hypre_host_n, HYPRE_Int, 1, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST);
  }
#endif

  /* Ignore negative row indices
   * And negative column indices should be automatically ignored in hypre
   * */
  for (PetscInt i = 0; i < m; i++) {
    if (idxm[i] >= 0) {
      HYPRE_BigInt  *rows, *cols;
      HYPRE_Int     *ncols;
      HYPRE_Complex *values;
      hypre_host_idxm = idxm[i];
      if (!hypre_on_host) hypre_TMemcpy(device_idxm, &hypre_host_idxm, HYPRE_BigInt, 1, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST);
      /* pick host or device argument set for this row */
      ncols  = hypre_on_host ? &hypre_host_n : device_n;
      rows   = hypre_on_host ? &hypre_host_idxm : device_idxm;
      cols   = hypre_on_host ? hypre_host_idxn : device_idxn;
      values = hypre_on_host ? (HYPRE_Complex *)(v + i * n) : (HYPRE_Complex *)device_values;
      PetscCallHYPRE(HYPRE_IJMatrixGetValues2(hA->ij, 1, ncols, rows, NULL, cols, values));

      if (!hypre_on_host) hypre_TMemcpy((HYPRE_Complex *)(v + i * n), device_values, HYPRE_Complex, n, HYPRE_MEMORY_HOST, HYPRE_MEMORY_DEVICE);
    }
  }

  if (sizeof(PetscInt) < sizeof(HYPRE_BigInt)) PetscCall(PetscFree(hypre_host_idxn));
#ifdef PETSC_HAVE_HYPRE_DEVICE
  if (hypre_IJMatrixMemoryLocation(hA->ij) == HYPRE_MEMORY_DEVICE) {
    hypre_TFree(device_idxm, HYPRE_MEMORY_DEVICE);
    hypre_TFree(device_idxn, HYPRE_MEMORY_DEVICE);
    hypre_TFree(device_values, HYPRE_MEMORY_DEVICE);
    hypre_TFree(device_n, HYPRE_MEMORY_DEVICE);
  }
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

2203: static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2204: {
2205:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

2207:   PetscFunctionBegin;
2208:   switch (op) {
2209:   case MAT_NO_OFF_PROC_ENTRIES:
2210:     if (flg) PetscCallHYPRE(HYPRE_IJMatrixSetMaxOffProcElmts(hA->ij, 0));
2211:     break;
2212:   case MAT_IGNORE_OFF_PROC_ENTRIES:
2213:     hA->donotstash = flg;
2214:     break;
2215:   default:
2216:     break;
2217:   }
2218:   PetscFunctionReturn(PETSC_SUCCESS);
2219: }

/*
  MatView for MATHYPRE.

  Non-native formats: the hypre data is wrapped (by pointer, no copy) in an AIJ
  matrix and AIJ's view operation is reused.
  PETSC_VIEWER_NATIVE: HYPRE_IJMatrixPrint() writes one text file per MPI rank,
  named <filename>.<rank>; requires an ASCII viewer with a file name set.
*/
static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(view, &format));
  /* info formats print nothing for this matrix type */
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
  if (format != PETSC_VIEWER_NATIVE) {
    Mat                 B;
    hypre_ParCSRMatrix *parcsr;
    PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;

    /* PETSC_USE_POINTER: B borrows the parcsr data, destroyed below without freeing it */
    PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
    PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
    PetscCall(MatGetOperation(B, MATOP_VIEW, (PetscErrorCodeFn **)&mview));
    PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
    PetscCall((*mview)(B, view));
    PetscCall(MatDestroy(&B));
  } else {
    Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char *filename;

    /* HYPRE uses only text files */
    PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
    PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
    PetscCall(PetscViewerFileGetName(view, &filename));
    PetscCallHYPRE(HYPRE_IJMatrixPrint(hA->ij, filename));
    PetscCallMPI(MPI_Comm_size(hA->comm, &size));
    /* hypre writes one file per rank; report the resulting file name(s) through the viewer */
    if (size > 1) {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
    } else {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

2260: static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2261: {
2262:   hypre_ParCSRMatrix *acsr, *bcsr;

2264:   PetscFunctionBegin;
2265:   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2266:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2267:     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2268:     PetscCallHYPRE(hypre_ParCSRMatrixCopy(acsr, bcsr, 1));
2269:     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2270:     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2271:     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2272:   } else {
2273:     PetscCall(MatCopy_Basic(A, B, str));
2274:   }
2275:   PetscFunctionReturn(PETSC_SUCCESS);
2276: }

/*
  MatGetDiagonal for MATHYPRE: extract the diagonal of A into the vector d.

  Requires congruent row/column layouts (square, same local distribution).
  When hypre keeps the diagonal block on device memory, the vector array is
  obtained with the memtype-aware accessor so hypre writes directly into it.
*/
static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dmat;
  HYPRE_Complex      *a;
  PetscBool           cong;

  PetscFunctionBegin;
  PetscCall(MatHasCongruentLayouts(A, &cong));
  PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  dmat = hypre_ParCSRMatrixDiag(parcsr);
  if (dmat) {
#if defined(PETSC_HAVE_HYPRE_DEVICE)
    HYPRE_MemoryLocation mem = hypre_CSRMatrixMemoryLocation(dmat);
#else
    HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
#endif

    /* match the vec array's memory space to where hypre's CSR data lives */
    if (mem != HYPRE_MEMORY_HOST) PetscCall(VecGetArrayWriteAndMemType(d, (PetscScalar **)&a, NULL));
    else PetscCall(VecGetArrayWrite(d, (PetscScalar **)&a));
    hypre_CSRMatrixExtractDiagonal(dmat, a, 0); /* last arg 0: plain diagonal values per hypre's ExtractDiagonal convention */
    if (mem != HYPRE_MEMORY_HOST) PetscCall(VecRestoreArrayWriteAndMemType(d, (PetscScalar **)&a));
    else PetscCall(VecRestoreArrayWrite(d, (PetscScalar **)&a));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

2306: #include <petscblaslapack.h>

2308: static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2309: {
2310:   PetscFunctionBegin;
2311: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2312:   {
2313:     Mat                 B;
2314:     hypre_ParCSRMatrix *x, *y, *z;

2316:     PetscCall(MatHYPREGetParCSR(Y, &y));
2317:     PetscCall(MatHYPREGetParCSR(X, &x));
2318:     PetscCallHYPRE(hypre_ParCSRMatrixAdd(1.0, y, 1.0, x, &z));
2319:     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2320:     PetscCall(MatHeaderMerge(Y, &B));
2321:   }
2322: #else
2323:   if (str == SAME_NONZERO_PATTERN) {
2324:     hypre_ParCSRMatrix *x, *y;
2325:     hypre_CSRMatrix    *xloc, *yloc;
2326:     PetscInt            xnnz, ynnz;
2327:     HYPRE_Complex      *xarr, *yarr;
2328:     PetscBLASInt        one = 1, bnz;

2330:     PetscCall(MatHYPREGetParCSR(Y, &y));
2331:     PetscCall(MatHYPREGetParCSR(X, &x));

2333:     /* diagonal block */
2334:     xloc = hypre_ParCSRMatrixDiag(x);
2335:     yloc = hypre_ParCSRMatrixDiag(y);
2336:     xnnz = 0;
2337:     ynnz = 0;
2338:     xarr = NULL;
2339:     yarr = NULL;
2340:     if (xloc) {
2341:       xarr = hypre_CSRMatrixData(xloc);
2342:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2343:     }
2344:     if (yloc) {
2345:       yarr = hypre_CSRMatrixData(yloc);
2346:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2347:     }
2348:     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2349:     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2350:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));

2352:     /* off-diagonal block */
2353:     xloc = hypre_ParCSRMatrixOffd(x);
2354:     yloc = hypre_ParCSRMatrixOffd(y);
2355:     xnnz = 0;
2356:     ynnz = 0;
2357:     xarr = NULL;
2358:     yarr = NULL;
2359:     if (xloc) {
2360:       xarr = hypre_CSRMatrixData(xloc);
2361:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2362:     }
2363:     if (yloc) {
2364:       yarr = hypre_CSRMatrixData(yloc);
2365:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2366:     }
2367:     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2368:     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2369:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2370:   } else if (str == SUBSET_NONZERO_PATTERN) {
2371:     PetscCall(MatAXPY_Basic(Y, a, X, str));
2372:   } else {
2373:     Mat B;

2375:     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2376:     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2377:     PetscCall(MatHeaderReplace(Y, &B));
2378:   }
2379: #endif
2380:   PetscFunctionReturn(PETSC_SUCCESS);
2381: }

/*
  MatDuplicate for MATHYPRE.

  MAT_DO_NOT_COPY_VALUES and MAT_SHARE_NONZERO_PATTERN both produce a structural
  clone of the parcsr data without values (they are treated identically here);
  any other option copies values via MatCreateFromParCSR() with PETSC_COPY_VALUES.
  If A has a COO delegate matrix, it is duplicated and re-attached so *B supports
  MatSetValuesCOO() as well.
*/
static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
{
  hypre_ParCSRMatrix *parcsr = NULL;
  PetscCopyMode       cpmode;
  Mat_HYPRE          *hA;

  PetscFunctionBegin;
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
    /* second argument 0: clone the structure only, leave values out */
    parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
    cpmode = PETSC_OWN_POINTER;
  } else {
    cpmode = PETSC_COPY_VALUES;
  }
  PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
  hA = (Mat_HYPRE *)A->data;
  if (hA->cooMat) {
    Mat_HYPRE *hB = (Mat_HYPRE *)((*B)->data);
    op            = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
    /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
    PetscCall(MatDuplicate(hA->cooMat, op, &hB->cooMat));
    PetscCall(MatHYPRE_AttachCOOMat(*B));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  MatSetPreallocationCOO for MATHYPRE.

  COO assembly is delegated to an internal AIJ matrix (hmat->cooMat) that has the
  same sparsity pattern as mat and shares its data array with the hypre IJ matrix.
  The call order below matters: the delegate must be preallocated before its
  (i,j) pattern can be copied into hmat->ij, and assembly must complete before
  the delegate is attached.
*/
static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
{
  Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;

  PetscFunctionBegin;
  /* Build an agent matrix cooMat with AIJ format
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
   */
  PetscCall(MatHYPRE_CreateCOOMat(mat));
  /* propagate mat's off-process entry handling to the delegate */
  PetscCall(MatSetOption(hmat->cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
  PetscCall(MatSetOption(hmat->cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));

  /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ uses this specific
     name to automatically put the diagonal entries first */
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(MatSetPreallocationCOO(hmat->cooMat, coo_n, coo_i, coo_j));
  hmat->cooMat->assembled = PETSC_TRUE;

  /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
  PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
  PetscCall(MatHYPRE_CreateFromMat(hmat->cooMat, hmat));      /* Create hmat->ij and preallocate it */
  PetscCall(MatHYPRE_IJMatrixCopyIJ(hmat->cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */

  mat->preallocated = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */

  /* Attach cooMat to mat */
  PetscCall(MatHYPRE_AttachCOOMat(mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

2441: static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
2442: {
2443:   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;

2445:   PetscFunctionBegin;
2446:   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
2447:   PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
2448:   PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
2449:   PetscFunctionReturn(PETSC_SUCCESS);
2450: }

2452: static PetscErrorCode MatGetCurrentMemType_HYPRE(Mat A, PetscMemType *m)
2453: {
2454:   PetscBool petsconcpu;

2456:   PetscFunctionBegin;
2457:   PetscCall(MatBoundToCPU(A, &petsconcpu));
2458:   *m = petsconcpu ? PETSC_MEMTYPE_HOST : PETSC_MEMTYPE_DEVICE;
2459:   PetscFunctionReturn(PETSC_SUCCESS);
2460: }

2462: /*MC
2463:    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2464:           based on the hypre IJ interface.

2466:    Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation()`
2469: M*/
/*
  Constructor for MATHYPRE: allocates the Mat_HYPRE context, installs the
  operation table, registers type-conversion/product/COO callbacks, and (on
  device builds) inherits hypre's default memory location and vector type.
*/
PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE *hB;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  HYPRE_MemoryLocation memory_location;
#endif

  PetscFunctionBegin;
  /* make sure hypre is initialized before any HYPRE_* call */
  PetscCall(PetscHYPREInitialize());
  PetscCall(PetscNew(&hB));

  /* NOTE(review): ownership flags; names suggest hypre-side data created by this
     object is freed together with it — confirm against MatDestroy_HYPRE */
  hB->inner_free      = PETSC_TRUE;
  hB->array_available = PETSC_TRUE;

  B->data = (void *)hB;

  /* wire up the MATHYPRE implementations of the Mat operations */
  PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
  B->ops->getcurrentmemtype     = MatGetCurrentMemType_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  B->ops->bindtocpu = MatBindToCPU_HYPRE;
  /* Get hypre's default memory location. Users can control this using the corresponding HYPRE_SetMemoryLocation API */
  PetscCallHYPRE(HYPRE_GetMemoryLocation(&memory_location));
  B->boundtocpu = (memory_location == HYPRE_MEMORY_HOST) ? PETSC_TRUE : PETSC_FALSE;
#endif

  /* build cache for off array entries formed */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

  /* duplicate the communicator for hypre's internal use */
  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
  /* register conversion, product, preallocation, and COO callbacks under the
     conventional composed-function names so generic Mat front ends find them */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  #if defined(HYPRE_USING_HIP)
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  PetscCall(MatSetVecType(B, VECHIP));
  #endif
  #if defined(HYPRE_USING_CUDA)
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  PetscCall(MatSetVecType(B, VECCUDA));
  #endif
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}