Actual source code: mhypre.c

/*
    Creates a hypre IJMatrix from a PETSc matrix
*/

#include <petscpkg_version.h>
#include <petsc/private/petschypre.h>
#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/deviceimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>
#include <_hypre_sstruct_ls.h>

#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
#endif
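
/* Compatibility note (inferred from the macro above): hypre releases older than
   2.18 only provide hypre_ParCSRMatrixCompleteClone(), so the newer two-argument
   hypre_ParCSRMatrixClone(A, B) is mapped onto it, dropping the second argument */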

static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);

static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscInt        i, n_d, n_o;
  const PetscInt *ia_d, *ia_o;
  PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
  HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
    if (done_d) {
      PetscCall(PetscMalloc1(n_d, &nnz_d));
      for (i = 0; i < n_d; i++) nnz_d[i] = ia_d[i + 1] - ia_d[i];
    }
    PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
    if (done_o) {
      PetscCall(PetscMalloc1(n_o, &nnz_o));
      for (i = 0; i < n_o; i++) nnz_o[i] = ia_o[i + 1] - ia_o[i];
    }
    PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
  }
  if (done_d) {    /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part */
      PetscCall(PetscCalloc1(n_d, &nnz_o));
    }
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
    { /* If we don't do this, the columns of the matrix will be all zeros! */
      hypre_AuxParCSRMatrix *aux_matrix;
      aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixDestroy(aux_matrix);
      hypre_IJMatrixTranslator(ij) = NULL;
      PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
      /* it seems they partially fixed it in 2.19.0 */
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  #endif
    }
#else
    PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
#endif
    PetscCall(PetscFree(nnz_d));
    PetscCall(PetscFree(nnz_o));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
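
/* Worked example for the preallocation above (illustrative, not from the
   original source): for a diagonal block with n_d = 3 local rows and CSR row
   pointers ia_d = {0, 2, 5, 6}, the loop computes nnz_d = {2, 3, 1}, i.e.,
   ia_d[i + 1] - ia_d[i] nonzeros in row i, which is what
   HYPRE_IJMatrixSetDiagOffdSizes receives */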

static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscInt rstart, rend, cstart, cend;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscHYPREInitialize();
  if (hA->ij) {
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
  }
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
  {
    PetscBool       same;
    Mat             A_d, A_o;
    const PetscInt *colmap;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
    if (same) {
      PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
    if (same) {
      PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
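
/* Note that HYPRE_IJMatrixCreate above takes inclusive index ranges, hence the
   rend - 1 and cend - 1. Illustrative example: a 10x10 matrix split evenly over
   two ranks gives rank 0 the call HYPRE_IJMatrixCreate(comm, 0, 4, 0, 4, &ij)
   and rank 1 the call HYPRE_IJMatrixCreate(comm, 5, 9, 5, 9, &ij) */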

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscBool flg;

  PetscFunctionBegin;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  PetscCallExternal(HYPRE_IJMatrixInitialize, ij);
#else
  PetscCallExternal(HYPRE_IJMatrixInitialize_v2, ij, HYPRE_MEMORY_HOST);
#endif
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
  HYPRE_Int              type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       This is the hack part, where we monkey directly with the hypre data structures
  */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
    PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
  } else {
    PetscInt i;

    for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  for (i = 0; i < pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)poffd->i[i];
  }

  jj = (PetscInt *)hoffd->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  PetscCallExternal(hypre_CSRMatrixBigInitialize, hoffd);
  jj = (PetscInt *)hoffd->big_j;
#endif
  pjj = poffd->j;
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];

  aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
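
/* Illustrative example for the off-diagonal copy above (not from the original
   source): with garray = {3, 7, 12}, a local off-diagonal column index
   pjj[i] = 1 is mapped to the global column jj[i] = garray[1] = 7 */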

static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)A->data;
  Mat                    lA;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;
  MatType                lmattype   = NULL;
  PetscBool              freeparcsr = PETSC_FALSE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, mhA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, mhA->ij, (void **)&hA);
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(mhA->ij)) {
    /* Supported by copying back to the host and then copying to the GPU;
       kind of inefficient, but this is the best we can do for now */
  #if defined(HYPRE_USING_HIP)
    lmattype = MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    lmattype = MATSEQAIJCUSPARSE;
  #endif
    hA         = hypre_ParCSRMatrixClone_v2(hA, 1, HYPRE_MEMORY_HOST);
    freeparcsr = PETSC_TRUE;
  }
#endif
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscCall(PetscMalloc1(dc + oc, &aux));
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    PetscInt  nr;
    PetscBool done;
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArrayWrite(lA, &data));
  }
  /* merge local matrices */
  ii  = iptr;
  jj  = jptr;
  aa  = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */
  *ii = *(hdi++) + *(hoi++);
  for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
    PetscScalar *aold = (PetscScalar *)aa;
    PetscInt    *jold = jj, nc = jd + jo;
    for (; jd < *hdi; jd++) {
      *jj++ = *hdj++;
      *aa++ = *hdd++;
    }
    for (; jo < *hoi; jo++) {
      *jj++ = *hoj++ + dc;
      *aa++ = *hod++;
    }
    *(++ii) = *(hdi++) + *(hoi++);
    PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold));
  }
  for (; cum < dr; cum++) *(++ii) = nnz;
  if (reuse != MAT_REUSE_MATRIX) {
    Mat_SeqAIJ *a;

    PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA));
    /* hack SeqAIJ */
    a          = (Mat_SeqAIJ *)lA->data;
    a->free_a  = PETSC_TRUE;
    a->free_ij = PETSC_TRUE;
    if (lmattype) PetscCall(MatConvert(lA, lmattype, MAT_INPLACE_MATRIX, &lA));
    PetscCall(MatISSetLocalMat(*B, lA));
    PetscCall(MatDestroy(&lA));
  } else {
    PetscCall(MatSeqAIJRestoreArrayWrite(lA, &data));
  }
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B));
  if (freeparcsr) PetscCallExternal(hypre_ParCSRMatrixDestroy, hA);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_DestroyCOOMat(Mat mat)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)mat->data;

  PetscFunctionBegin;
  if (hA->cooMat) { /* If cooMat is present we need to destroy the column indices */
    PetscCall(MatDestroy(&hA->cooMat));
    if (hA->cooMatAttached) {
      hypre_CSRMatrix     *csr;
      hypre_ParCSRMatrix  *parcsr;
      HYPRE_MemoryLocation mem;

      PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
      csr = hypre_ParCSRMatrixDiag(parcsr);
      if (csr) {
        mem = hypre_CSRMatrixMemoryLocation(csr);
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
      }
      csr = hypre_ParCSRMatrixOffd(parcsr);
      if (csr) {
        mem = hypre_CSRMatrixMemoryLocation(csr);
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixJ(csr), mem));
        PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixBigJ(csr), mem));
      }
    }
  }
  hA->cooMatAttached = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_CreateCOOMat(Mat mat)
{
  MPI_Comm    comm;
  PetscMPIInt size;
  PetscLayout rmap, cmap;
  Mat_HYPRE  *hmat    = (Mat_HYPRE *)mat->data;
  MatType     matType = MATAIJ; /* default type of cooMat */

  PetscFunctionBegin;
  /* Build an agent matrix cooMat in AIJ format. It has the same sparsity pattern
     as mat and shares the data array with mat; we use cooMat to do the COO work.
   */
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  PetscCall(MatGetLayouts(mat, &rmap, &cmap));

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
  #if defined(HYPRE_USING_HIP)
    matType = MATAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
    matType = MATAIJCUSPARSE;
  #else
    SETERRQ(comm, PETSC_ERR_SUP, "Do not know the HYPRE device");
  #endif
  }
#endif

  /* Do COO preallocation through cooMat */
  PetscCall(MatHYPRE_DestroyCOOMat(mat));
  PetscCall(MatCreate(comm, &hmat->cooMat));
  PetscCall(MatSetType(hmat->cooMat, matType));
  PetscCall(MatSetLayouts(hmat->cooMat, rmap, cmap));

  /* allocate local matrices if needed */
  PetscCall(MatMPIAIJSetPreallocation(hmat->cooMat, 0, NULL, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Attach cooMat's data array to the hypre matrix.
   When AIJCUPMSPARSE supports raw device pointers instead of THRUSTARRAY,
   we should swap the arrays, i.e., attach the hypre matrix array to cooMat.
   This is because hypre should be in charge of handling the memory;
   cooMat is only a way to reuse the PETSc COO code.
   Attaching the memory will then be done at MatSetValuesCOO time, and it will
   dynamically support the hypre matrix migrating to host.
*/
static PetscErrorCode MatHYPRE_AttachCOOMat(Mat mat)
{
  Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
  hypre_CSRMatrix     *diag, *offd;
  hypre_ParCSRMatrix  *parCSR;
  HYPRE_MemoryLocation hmem = HYPRE_MEMORY_HOST;
  PetscMemType         pmem;
  Mat                  A, B;
  PetscScalar         *a;
  PetscMPIInt          size;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  if (hmat->cooMatAttached) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(hmat->cooMat->preallocated, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix is not preallocated");
  PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  /* Alias cooMat's data array to IJMatrix's */
  PetscCallExternal(HYPRE_IJMatrixGetObject, hmat->ij, (void **)&parCSR);
  diag = hypre_ParCSRMatrixDiag(parCSR);
  offd = hypre_ParCSRMatrixOffd(parCSR);

  A = (size == 1) ? hmat->cooMat : ((Mat_MPIAIJ *)hmat->cooMat->data)->A;
  B = (size == 1) ? NULL : ((Mat_MPIAIJ *)hmat->cooMat->data)->B;

  PetscCall(PetscObjectSetName((PetscObject)A, "_internal_COO_mat_for_hypre"));
  hmem = hypre_CSRMatrixMemoryLocation(diag);
  PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &a, &pmem));
  PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
  PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hmem));
  hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)a;
  hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */

  if (B) {
    hmem = hypre_CSRMatrixMemoryLocation(offd);
    PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &a, &pmem));
    PetscAssert((PetscMemTypeHost(pmem) && hmem == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(pmem) && hmem == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hmem));
    hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)a;
    hypre_CSRMatrixOwnsData(offd) = 0;
  }
  hmat->cooMatAttached = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
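
/* After MatHYPRE_AttachCOOMat() the value arrays of hypre's diag (and offd)
   blocks alias the arrays owned by cooMat: hypre's original data arrays have
   been freed, the PETSc arrays put in their place, and OwnsData set to 0 so
   hypre will not free them. MatSetValuesCOO() on cooMat therefore updates the
   hypre matrix in place */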

// Build COO's coordinate list i[], j[] based on CSR's i[], j[] arrays and the number of local rows 'n'
static PetscErrorCode CSRtoCOO_Private(PetscInt n, const PetscInt ii[], const PetscInt jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt *cooi, *cooj;

  PetscFunctionBegin;
  *ncoo = ii[n];
  PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
  for (PetscInt i = 0; i < n; i++) {
    for (PetscInt j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
  }
  PetscCall(PetscArraycpy(cooj, jj, *ncoo));
  *coo_i = cooi;
  *coo_j = cooj;
  PetscFunctionReturn(PETSC_SUCCESS);
}
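
/* Illustrative example (not from the original source): with n = 2,
   ii = {0, 2, 3} and jj = {0, 1, 1}, the routine returns ncoo = 3,
   coo_i = {0, 0, 1} and coo_j = {0, 1, 1}, i.e., one (i, j) pair per stored
   nonzero, in CSR order */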

// Similar to CSRtoCOO_Private, but the CSR's i[], j[] are of type HYPRE_Int
static PetscErrorCode CSRtoCOO_HYPRE_Int_Private(PetscInt n, const HYPRE_Int ii[], const HYPRE_Int jj[], PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt *cooi, *cooj;

  PetscFunctionBegin;
  *ncoo = ii[n];
  PetscCall(PetscMalloc2(*ncoo, &cooi, *ncoo, &cooj));
  for (PetscInt i = 0; i < n; i++) {
    for (HYPRE_Int j = ii[i]; j < ii[i + 1]; j++) cooi[j] = i;
  }
  for (PetscCount i = 0; i < *ncoo; i++) cooj[i] = jj[i];
  *coo_i = cooi;
  *coo_j = cooj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build a COO data structure for the seqaij matrix, as if the nonzeros are laid out in the same order as in the CSR
static PetscErrorCode MatSeqAIJGetCOO_Private(Mat A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt        n;
  const PetscInt *ii, *jj;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatGetRowIJ");
  PetscCall(CSRtoCOO_Private(n, ii, jj, ncoo, coo_i, coo_j));
  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ii, &jj, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Failure for MatRestoreRowIJ");
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Build a COO data structure for the hypreCSRMatrix, as if the nonzeros are laid out in the same order as in the hypreCSRMatrix
static PetscErrorCode hypreCSRMatrixGetCOO_Private(hypre_CSRMatrix *A, PetscCount *ncoo, PetscInt **coo_i, PetscInt **coo_j)
{
  PetscInt             n = hypre_CSRMatrixNumRows(A);
  HYPRE_Int           *ii, *jj;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(A);
  if (mem != HYPRE_MEMORY_HOST) {
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(A);
    PetscCall(PetscMalloc2(n + 1, &ii, nnz, &jj));
    hypre_TMemcpy(ii, hypre_CSRMatrixI(A), HYPRE_Int, n + 1, HYPRE_MEMORY_HOST, mem);
    hypre_TMemcpy(jj, hypre_CSRMatrixJ(A), HYPRE_Int, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
#else
  {
#endif
    ii = hypre_CSRMatrixI(A);
    jj = hypre_CSRMatrixJ(A);
  }
  PetscCall(CSRtoCOO_HYPRE_Int_Private(n, ii, jj, ncoo, coo_i, coo_j));
  if (mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree2(ii, jj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetValuesCOOFromCSRMatrix_Private(Mat A, hypre_CSRMatrix *H)
{
  PetscBool            iscpu = PETSC_TRUE;
  PetscScalar         *a;
  HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;

  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  mem = hypre_CSRMatrixMemoryLocation(H);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &iscpu));
#endif
  if (iscpu && mem != HYPRE_MEMORY_HOST) {
    PetscCount nnz = hypre_CSRMatrixNumNonzeros(H);
    PetscCall(PetscMalloc1(nnz, &a));
    hypre_TMemcpy(a, hypre_CSRMatrixData(H), PetscScalar, nnz, HYPRE_MEMORY_HOST, mem);
  } else {
    a = (PetscScalar *)hypre_CSRMatrixData(H);
  }
  PetscCall(MatSetValuesCOO(A, a, INSERT_VALUES));
  if (iscpu && mem != HYPRE_MEMORY_HOST) PetscCall(PetscFree(a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  MPI_Comm   comm = PetscObjectComm((PetscObject)A);
  Mat        M = NULL, dH = NULL, oH = NULL, dA = NULL, oA = NULL;
  PetscBool  ismpiaij, issbaij, isbaij;
  Mat_HYPRE *hA;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPIBAIJ, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
  if (isbaij || issbaij) { /* handle BAIJ and SBAIJ */
    PetscBool ismpi;
    MatType   newtype;

    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
    newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
    if (reuse == MAT_REUSE_MATRIX) {
      PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
      PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else {
      PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }

#if defined(PETSC_HAVE_HYPRE_DEVICE)
  {
    PetscBool isaij;
    // Hypre defaults to the GPU when configured with GPU support. We make it default to the memory location associated with the PETSc matrix,
    // i.e., when A is a host matrix, Hypre will be on the host; otherwise, when A is of type aijcusparse, aijhipsparse, aijkokkos, etc.,
    // Hypre will be on the device.
    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isaij, MATSEQAIJ, MATMPIAIJ, ""));
    PetscHYPREInitialize();
    PetscCallExternal(HYPRE_SetMemoryLocation, isaij ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE);
  }
#endif

  dA = A;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));

  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, MATHYPRE));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));

    hA = (Mat_HYPRE *)M->data;
    PetscCall(MatHYPRE_CreateFromMat(A, hA));
    PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij));

    PetscCall(MatHYPRE_CreateCOOMat(M));

    dH = hA->cooMat;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

    PetscCall(PetscObjectSetName((PetscObject)dH, "_internal_COO_mat_for_hypre"));
    PetscCall(MatSeqAIJGetCOO_Private(dA, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dH, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (oH) {
      PetscCall(PetscLayoutDestroy(&oH->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oH), oA->cmap->n, oA->cmap->n, 1, &oH->cmap));
      PetscCall(MatSeqAIJGetCOO_Private(oA, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oH, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));
    }
    hA->cooMat->assembled = PETSC_TRUE;

    M->preallocated = PETSC_TRUE;
    PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));

    PetscCall(MatHYPRE_AttachCOOMat(M));
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  hA = (Mat_HYPRE *)M->data;
  PetscCheck(hA->cooMat, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");

  dH = hA->cooMat;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)hA->cooMat, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(hA->cooMat, &dH, &oH, NULL));

  PetscScalar *a;
  PetscCall(MatSeqAIJGetCSRAndMemType(dA, NULL, NULL, &a, NULL));
  PetscCall(MatSetValuesCOO(dH, a, INSERT_VALUES));
  if (oH) {
    PetscCall(MatSeqAIJGetCSRAndMemType(oA, NULL, NULL, &a, NULL));
    PetscCall(MatSetValuesCOO(oH, a, INSERT_VALUES));
  }

  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}
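
/* Typical usage of the conversion above from user code (illustrative sketch):

     Mat A, B;  // A is an assembled AIJ matrix
     PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &B));
     // ... change the values of A, keeping the same sparsity pattern ...
     PetscCall(MatConvert(A, MATHYPRE, MAT_REUSE_MATRIX, &B));

   With MAT_REUSE_MATRIX the creation/preallocation block is skipped and only
   the MatSetValuesCOO() calls at the end of the function are executed */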

static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat                 M, dA = NULL, oA = NULL;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *dH, *oH;
  MPI_Comm            comm;
  PetscBool           ismpiaij, isseqaij;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ base types are supported");
  }
  PetscCall(MatHYPREGetParCSR(A, &parcsr));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_DEVICE == hypre_ParCSRMatrixMemoryLocation(parcsr)) {
    PetscBool isaij;

    PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
    if (isaij) {
      PetscMPIInt size;

      PetscCallMPI(MPI_Comm_size(comm, &size));
  #if defined(HYPRE_USING_HIP)
      mtype = size > 1 ? MATMPIAIJHIPSPARSE : MATSEQAIJHIPSPARSE;
  #elif defined(HYPRE_USING_CUDA)
      mtype = size > 1 ? MATMPIAIJCUSPARSE : MATSEQAIJCUSPARSE;
  #else
      mtype = size > 1 ? MATMPIAIJ : MATSEQAIJ;
  #endif
    }
  }
#endif
  dH = hypre_ParCSRMatrixDiag(parcsr);
  oH = hypre_ParCSRMatrixOffd(parcsr);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCount coo_n;
    PetscInt  *coo_i, *coo_j;

    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, mtype));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatMPIAIJSetPreallocation(M, 0, NULL, 0, NULL));

    dA = M;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
    if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));

    PetscCall(hypreCSRMatrixGetCOO_Private(dH, &coo_n, &coo_i, &coo_j));
    PetscCall(MatSetPreallocationCOO(dA, coo_n, coo_i, coo_j));
    PetscCall(PetscFree2(coo_i, coo_j));
    if (ismpiaij) {
      HYPRE_Int nc = hypre_CSRMatrixNumCols(oH);

      PetscCall(PetscLayoutDestroy(&oA->cmap));
      PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)oA), nc, nc, 1, &oA->cmap));
      PetscCall(hypreCSRMatrixGetCOO_Private(oH, &coo_n, &coo_i, &coo_j));
      PetscCall(MatSetPreallocationCOO(oA, coo_n, coo_i, coo_j));
      PetscCall(PetscFree2(coo_i, coo_j));

      /* garray */
      Mat_MPIAIJ   *aij    = (Mat_MPIAIJ *)M->data;
      HYPRE_BigInt *harray = hypre_ParCSRMatrixColMapOffd(parcsr);
      PetscInt     *garray;

      PetscCall(PetscFree(aij->garray));
      PetscCall(PetscMalloc1(nc, &garray));
      for (HYPRE_Int i = 0; i < nc; i++) garray[i] = (PetscInt)harray[i];
      aij->garray = garray;
      PetscCall(MatSetUpMultiply_MPIAIJ(M));
    }
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  dA = M;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)M, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(M, &dA, &oA, NULL));
  PetscCall(MatSetValuesCOOFromCSRMatrix_Private(dA, dH));
  if (oA) PetscCall(MatSetValuesCOOFromCSRMatrix_Private(oA, oH));
  M->assembled = PETSC_TRUE;
  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}
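
/* Note on reuse for the reverse conversion above: with MAT_REUSE_MATRIX the
   target *B must already have a MATSEQAIJ or MATMPIAIJ base type (checked at
   the top of the function), and only the COO value insertion is redone; the
   sparsity pattern and garray are built once, on the MAT_INITIAL_MATRIX /
   MAT_INPLACE_MATRIX path */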

static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag, *hoffd;
  Mat_SeqAIJ         *diag, *offd;
  PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ismpiaij, isseqaij;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
  PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
  PetscBool           iscuda, iship;
#if defined(PETSC_HAVE_DEVICE) && defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool boundtocpu = A->boundtocpu;
#else
  PetscBool boundtocpu = PETSC_TRUE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
  PetscHYPREInitialize();
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;

    diag = (Mat_SeqAIJ *)a->A->data;
    offd = (Mat_SeqAIJ *)a->B->data;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      poi        = offd->i;
      poj        = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
        hoi = (HYPRE_Int *)poi;
        hoj = (HYPRE_Int *)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag = (Mat_SeqAIJ *)A->data;
    offd = NULL;
    if (!boundtocpu && (iscuda || iship)) {
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
      if (iscuda) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
#if defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
      if (iship) {
        sameint = PETSC_TRUE;
        PetscCall(MatSeqAIJHIPSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      }
#endif
    } else {
      boundtocpu = PETSC_TRUE;
      pdi        = diag->i;
      pdj        = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    PetscCallMPI(MPI_Comm_rank(comm, &myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, noffd, dnnz, onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
  hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
#endif

  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
    for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)pdi[i];
    for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)pdj[i];
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag, 0);

  /* set off-diagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
      for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)poi[i];
      for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)poj[i];
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd, 0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, !boundtocpu ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
#else
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  PetscCallExternal(hypre_ParCSRMatrixInitialize, tA);
  #else
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, HYPRE_MEMORY_HOST);
  #endif
#endif
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallExternal(hypre_MatvecCommPkgCreate, tA);
  *hA = tA;
  PetscFunctionReturn(PETSC_SUCCESS);
}
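
/* The ParCSR built above borrows the (i, j, a) arrays and garray of the PETSc
   matrix whenever the integer sizes match, so it stays valid only while A is
   unchanged. Every call must be paired with MatAIJRestoreParCSR_Private(),
   which frees the temporaries allocated when sizeof(PetscInt) !=
   sizeof(HYPRE_Int) and NULLs the borrowed pointers before destroying the
   ParCSR */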

static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscBool        iscuda, iship;

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iship, MATSEQAIJHIPSPARSE, MATMPIAIJHIPSPARSE, ""));
#if defined(HYPRE_USING_CUDA) && defined(PETSC_HAVE_CUDA)
  if (iscuda) sameint = PETSC_TRUE;
#elif defined(HYPRE_USING_HIP) && defined(PETSC_HAVE_HIP)
  if (iship) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts.
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by a version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallExternal(hypre_BoomerAMGBuildCoarseOperator, hR, hA, hP, hRAP);
  PetscCallExternal(hypre_ParCSRMatrixSetNumNonzeros, *hRAP);
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(P, &hP));
  PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
  PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));

  PetscCall(MatHeaderMerge(C, &B));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
  HYPRE_Int           type;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);

  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));

  /* create temporary matrix and merge to C */
  PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
  Mat_HYPRE          *hA, *hP;
  PetscBool           ishypre;
  HYPRE_Int           type;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
  PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls hypre_ParMatmul;
   hypre_ParMatmul uses hypre_ParMatrixCreate with the communicator of hA, and
   hypre_ParMatrixCreate does not duplicate the communicator.
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by a version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
  PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));

  PetscCall(MatHeaderMerge(C, &D));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
  Mat_HYPRE          *hA, *hB;
  PetscBool           ishypre;
  HYPRE_Int           type;
  Mat_Product        *product;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hB = (Mat_HYPRE *)B->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hB->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hB->ij, (void **)&Bparcsr);
  PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
  PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

  /* need to use MatHeaderReplace because MatHeaderMerge messes up the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  PetscCall(MatHeaderReplace(C, &D));
  C->product             = product;
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
{
  Mat                 E;
  hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatAIJGetParCSR_Private(C, &hC));
  PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
  PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
  PetscCall(MatHeaderMerge(D, &E));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    flg;
  PetscInt     type        = 0;
  const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
  PetscInt     ntype       = 4;
  Mat          A           = product->A;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  } else {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  }

  if (type == 0 || type == 1 || type == 2) {
    PetscCall(MatSetType(C, MATAIJ));
  } else if (type == 3) {
    PetscCall(MatSetType(C, MATHYPRE));
  } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

1284: static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
1285: {
1286:   Mat_Product *product = C->product;

1288:   PetscFunctionBegin;
1289:   switch (product->type) {
1290:   case MATPRODUCT_AB:
1291:     PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
1292:     break;
1293:   case MATPRODUCT_PtAP:
1294:     PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
1295:     break;
1296:   default:
1297:     break;
1298:   }
1299:   PetscFunctionReturn(PETSC_SUCCESS);
1300: }
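
/*
   Example (not part of the original source): a minimal sketch of how the product hooks above
   are reached from user code. A and B are assumed to be assembled MATHYPRE matrices; in the
   PtAP line, A2 is an AIJ matrix and P a MATHYPRE projector, matching the comment in
   MatProductSetFromOptions_HYPRE_PtAP(). Error checking omitted.

     Mat C, CtAP;

     // dispatches to MatProductSetFromOptions_HYPRE_AB()
     PetscCall(MatMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C));

     // dispatches to MatProductSetFromOptions_HYPRE_PtAP(); the result type can be selected
     // at runtime with -matptap_hypre_outtype {aij,seqaij,mpiaij,hypre}
     PetscCall(MatPtAP(A2, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CtAP));
*/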

1302: static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
1303: {
1304:   PetscFunctionBegin;
1305:   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
1306:   PetscFunctionReturn(PETSC_SUCCESS);
1307: }

1309: static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
1310: {
1311:   PetscFunctionBegin;
1312:   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
1313:   PetscFunctionReturn(PETSC_SUCCESS);
1314: }

1316: static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1317: {
1318:   PetscFunctionBegin;
1319:   if (y != z) PetscCall(VecCopy(y, z));
1320:   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
1321:   PetscFunctionReturn(PETSC_SUCCESS);
1322: }

1324: static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
1325: {
1326:   PetscFunctionBegin;
1327:   if (y != z) PetscCall(VecCopy(y, z));
1328:   PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
1329:   PetscFunctionReturn(PETSC_SUCCESS);
1330: }

1332: /* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
1333: static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
1334: {
1335:   Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
1336:   hypre_ParCSRMatrix *parcsr;
1337:   hypre_ParVector    *hx, *hy;

1339:   PetscFunctionBegin;
1340:   if (trans) {
1341:     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
1342:     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
1343:     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
1344:     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hx);
1345:     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hy);
1346:   } else {
1347:     PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
1348:     if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
1349:     else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
1350:     PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hx);
1351:     PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hy);
1352:   }
1353:   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1354:   if (trans) {
1355:     PetscCallExternal(hypre_ParCSRMatrixMatvecT, a, parcsr, hx, b, hy);
1356:   } else {
1357:     PetscCallExternal(hypre_ParCSRMatrixMatvec, a, parcsr, hx, b, hy);
1358:   }
1359:   PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
1360:   PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
1361:   PetscFunctionReturn(PETSC_SUCCESS);
1362: }
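
/*
   Example (not part of the original source): the kernel above implements y = a*A*x + b*y
   (or the transposed variant), and is reached through the standard Mat operations. A sketch,
   assuming an assembled MATHYPRE matrix A; error checking omitted.

     Vec x, y, z;

     PetscCall(MatCreateVecs(A, &x, &y));
     PetscCall(VecDuplicate(y, &z));
     PetscCall(MatMult(A, x, y));          // y = A*x,     i.e. a = 1, b = 0
     PetscCall(MatMultAdd(A, x, y, z));    // z = A*x + y, i.e. a = 1, b = 1
     PetscCall(MatMultTranspose(A, y, x)); // x = A^T*y
*/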

1364: static PetscErrorCode MatDestroy_HYPRE(Mat A)
1365: {
1366:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1368:   PetscFunctionBegin;
1369:   PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
1370:   PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
1371:   PetscCall(MatHYPRE_DestroyCOOMat(A)); /* must be called before destroying the individual CSR */
1372:   if (hA->ij) {
1373:     if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
1374:     PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
1375:   }
1376:   if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));

1378:   PetscCall(MatStashDestroy_Private(&A->stash));
1379:   PetscCall(PetscFree(hA->array));
1380:   if (hA->rows_d) PetscStackCallExternalVoid("hypre_Free", hypre_Free(hA->rows_d, HYPRE_MEMORY_DEVICE));

1382:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
1383:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
1384:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
1385:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
1386:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", NULL));
1387:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", NULL));
1388:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_hypre_C", NULL));
1389:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", NULL));
1390:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
1391:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
1392:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
1393:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
1394:   PetscCall(PetscFree(A->data));
1395:   PetscFunctionReturn(PETSC_SUCCESS);
1396: }

1398: static PetscErrorCode MatSetUp_HYPRE(Mat A)
1399: {
1400:   PetscFunctionBegin;
1401:   if (!A->preallocated) PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
1402:   PetscFunctionReturn(PETSC_SUCCESS);
1403: }

1405: //TODO FIX hypre_CSRMatrixMatvecOutOfPlace
1406: #if defined(PETSC_HAVE_HYPRE_DEVICE)
1407: static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
1408: {
1409:   Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
1410:   HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;

1412:   PetscFunctionBegin;
1413:   A->boundtocpu = bind;
1414:   if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
1415:     hypre_ParCSRMatrix *parcsr;
1416:     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1417:     PetscCallExternal(hypre_ParCSRMatrixMigrate, parcsr, hmem);
1418:   }
1419:   if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
1420:   if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
1421:   PetscFunctionReturn(PETSC_SUCCESS);
1422: }
1423: #endif

1425: static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
1426: {
1427:   Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
1428:   PetscMPIInt  n;
1429:   PetscInt     i, j, rstart, ncols, flg;
1430:   PetscInt    *row, *col;
1431:   PetscScalar *val;

1433:   PetscFunctionBegin;
1434:   PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

1436:   if (!A->nooffprocentries) {
1437:     while (1) {
1438:       PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
1439:       if (!flg) break;

1441:       for (i = 0; i < n;) {
1442:         /* Now identify the consecutive vals belonging to the same row */
1443:         for (j = i, rstart = row[j]; j < n; j++) {
1444:           if (row[j] != rstart) break;
1445:         }
1446:         if (j < n) ncols = j - i;
1447:         else ncols = n - i;
1448:         /* Now assemble all these values with a single function call */
1449:         PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));

1451:         i = j;
1452:       }
1453:     }
1454:     PetscCall(MatStashScatterEnd_Private(&A->stash));
1455:   }

1457:   PetscCallExternal(HYPRE_IJMatrixAssemble, hA->ij);
1458:   /* The assembly routine destroys the aux_matrix; we recreate it here by calling HYPRE_IJMatrixInitialize */
1459:   /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
1460:   if (!A->sortedfull) {
1461:     hypre_AuxParCSRMatrix *aux_matrix;

1463:     /* call destroy just to make sure we do not leak anything */
1464:     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1465:     PetscCallExternal(hypre_AuxParCSRMatrixDestroy, aux_matrix);
1466:     hypre_IJMatrixTranslator(hA->ij) = NULL;

1468:     /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
1469:     PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1470:     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1471:     if (aux_matrix) {
1472:       hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
1473: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1474:       PetscCallExternal(hypre_AuxParCSRMatrixInitialize, aux_matrix);
1475: #else
1476:       PetscCallExternal(hypre_AuxParCSRMatrixInitialize_v2, aux_matrix, HYPRE_MEMORY_HOST);
1477: #endif
1478:     }
1479:   }
1480:   {
1481:     hypre_ParCSRMatrix *parcsr;

1483:     PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
1484:     if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallExternal(hypre_MatvecCommPkgCreate, parcsr);
1485:   }
1486:   if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
1487:   if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
1488: #if defined(PETSC_HAVE_HYPRE_DEVICE)
1489:   PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
1490: #endif
1491:   PetscFunctionReturn(PETSC_SUCCESS);
1492: }

1494: static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
1495: {
1496:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1498:   PetscFunctionBegin;
1499:   PetscCheck(hA->array_available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");

1501:   if (hA->array_size >= size) {
1502:     *array = hA->array;
1503:   } else {
1504:     PetscCall(PetscFree(hA->array));
1505:     hA->array_size = size;
1506:     PetscCall(PetscMalloc(hA->array_size, &hA->array));
1507:     *array = hA->array;
1508:   }

1510:   hA->array_available = PETSC_FALSE;
1511:   PetscFunctionReturn(PETSC_SUCCESS);
1512: }

1514: static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
1515: {
1516:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

1518:   PetscFunctionBegin;
1519:   *array              = NULL;
1520:   hA->array_available = PETSC_TRUE;
1521:   PetscFunctionReturn(PETSC_SUCCESS);
1522: }

1524: static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
1525: {
1526:   Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
1527:   PetscScalar   *vals = (PetscScalar *)v;
1528:   HYPRE_Complex *sscr;
1529:   PetscInt      *cscr[2];
1530:   PetscInt       i, nzc;
1531:   PetscInt       rst = A->rmap->rstart, ren = A->rmap->rend;
1532:   void          *array = NULL;

1534:   PetscFunctionBegin;
1535:   PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
1536:   cscr[0] = (PetscInt *)array;
1537:   cscr[1] = ((PetscInt *)array) + nc;
1538:   sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
1539:   for (i = 0, nzc = 0; i < nc; i++) {
1540:     if (cols[i] >= 0) {
1541:       cscr[0][nzc]   = cols[i];
1542:       cscr[1][nzc++] = i;
1543:     }
1544:   }
1545:   if (!nzc) {
1546:     PetscCall(MatRestoreArray_HYPRE(A, &array));
1547:     PetscFunctionReturn(PETSC_SUCCESS);
1548:   }

1550: #if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
1551:   if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
1552:     hypre_ParCSRMatrix *parcsr;

1554:     PetscCallExternal(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
1555:     PetscCallExternal(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
1556:   }
1557: #endif

1559:   if (ins == ADD_VALUES) {
1560:     for (i = 0; i < nr; i++) {
1561:       if (rows[i] >= 0) {
1562:         PetscInt  j;
1563:         HYPRE_Int hnc = (HYPRE_Int)nzc;

1565:         if (!nzc) continue;
1566:         /* nonlocal values */
1567:         if (rows[i] < rst || rows[i] >= ren) {
1568:           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1569:           if (hA->donotstash) continue;
1570:         }
1571:         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1572:         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1573:         PetscCallExternal(HYPRE_IJMatrixAddToValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1574:       }
1575:       vals += nc;
1576:     }
1577:   } else { /* INSERT_VALUES */
1578:     for (i = 0; i < nr; i++) {
1579:       if (rows[i] >= 0) {
1580:         PetscInt  j;
1581:         HYPRE_Int hnc = (HYPRE_Int)nzc;

1583:         if (!nzc) continue;
1584:         PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
1585:         for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
1586:         /* nonlocal values */
1587:         if (rows[i] < rst || rows[i] >= ren) {
1588:           PetscCheck(!A->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", rows[i]);
1589:           if (!hA->donotstash) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
1590:         }
1591:         /* local values */
1592:         else
1593:           PetscCallExternal(HYPRE_IJMatrixSetValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
1594:       }
1595:       vals += nc;
1596:     }
1597:   }

1599:   PetscCall(MatRestoreArray_HYPRE(A, &array));
1600:   PetscFunctionReturn(PETSC_SUCCESS);
1601: }
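
/*
   Example (not part of the original source): a minimal sketch of filling a MATHYPRE matrix
   through the MatSetValues()/MatAssembly{Begin,End}() path implemented above, here for a
   1D Laplacian on 100 global rows; error checking omitted.

     Mat      A;
     PetscInt i, rstart, rend;

     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
     PetscCall(MatSetType(A, MATHYPRE));
     PetscCall(MatHYPRESetPreallocation(A, 3, NULL, 1, NULL));
     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     for (i = rstart; i < rend; i++) {
       PetscInt    cols[3] = {i - 1, i, i + 1};
       PetscScalar vals[3] = {-1.0, 2.0, -1.0};
       PetscInt    ofs = 0, ncols = 3;

       if (i == 0) { ofs = 1; ncols = 2; } // first row has no left neighbor
       if (i == 99) ncols--;               // last row has no right neighbor
       PetscCall(MatSetValues(A, 1, &i, ncols, cols + ofs, vals + ofs, INSERT_VALUES));
     }
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/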

1603: static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1604: {
1605:   Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
1606:   HYPRE_Int  *hdnnz, *honnz;
1607:   PetscInt    i, rs, re, cs, ce, bs;
1608:   PetscMPIInt size;

1610:   PetscFunctionBegin;
1611:   PetscCall(PetscLayoutSetUp(A->rmap));
1612:   PetscCall(PetscLayoutSetUp(A->cmap));
1613:   rs = A->rmap->rstart;
1614:   re = A->rmap->rend;
1615:   cs = A->cmap->rstart;
1616:   ce = A->cmap->rend;
1617:   if (!hA->ij) {
1618:     PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rs, re - 1, cs, ce - 1, &hA->ij);
1619:     PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
1620:   } else {
1621:     HYPRE_BigInt hrs, hre, hcs, hce;
1622:     PetscCallExternal(HYPRE_IJMatrixGetLocalRange, hA->ij, &hrs, &hre, &hcs, &hce);
1623:     PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
1624:     PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
1625:   }
1626:   PetscCall(MatHYPRE_DestroyCOOMat(A));
1627:   PetscCall(MatGetBlockSize(A, &bs));
1628:   if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
1629:   if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;

1631:   if (!dnnz) {
1632:     PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
1633:     for (i = 0; i < A->rmap->n; i++) hdnnz[i] = dnz;
1634:   } else {
1635:     hdnnz = (HYPRE_Int *)dnnz;
1636:   }
1637:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1638:   if (size > 1) {
1639:     hypre_AuxParCSRMatrix *aux_matrix;
1640:     if (!onnz) {
1641:       PetscCall(PetscMalloc1(A->rmap->n, &honnz));
1642:       for (i = 0; i < A->rmap->n; i++) honnz[i] = onz;
1643:     } else honnz = (HYPRE_Int *)onnz;
1644:     /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since hypre
1645:        seems to assume the user will input entire rows of values, properly sorted.
1646:        In PETSc, we don't make such an assumption and set this flag to 1,
1647:        unless the option MAT_SORTED_FULL is set to true.
1648:        Also, to avoid possible memory leaks, we destroy and recreate the translator.
1649:        This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
1650:        the IJ matrix for us */
1651:     aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1652:     hypre_AuxParCSRMatrixDestroy(aux_matrix);
1653:     hypre_IJMatrixTranslator(hA->ij) = NULL;
1654:     PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, hA->ij, hdnnz, honnz);
1655:     aux_matrix                               = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
1656:     hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !A->sortedfull;
1657:   } else {
1658:     honnz = NULL;
1659:     PetscCallExternal(HYPRE_IJMatrixSetRowSizes, hA->ij, hdnnz);
1660:   }

1662:   /* reset assembled flag and call the initialize method */
1663:   hypre_IJMatrixAssembleFlag(hA->ij) = 0;
1664: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1665:   PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
1666: #else
1667:   PetscCallExternal(HYPRE_IJMatrixInitialize_v2, hA->ij, HYPRE_MEMORY_HOST);
1668: #endif
1669:   if (!dnnz) PetscCall(PetscFree(hdnnz));
1670:   if (!onnz && honnz) PetscCall(PetscFree(honnz));
1671:   /* Match AIJ logic */
1672:   A->preallocated = PETSC_TRUE;
1673:   A->assembled    = PETSC_FALSE;
1674:   PetscFunctionReturn(PETSC_SUCCESS);
1675: }

1677: /*@C
1678:   MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

1680:   Collective

1682:   Input Parameters:
1683: + A    - the matrix
1684: . dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1685:           (same value is used for all local rows)
1686: . dnnz - array containing the number of nonzeros in the various rows of the
1687:           DIAGONAL portion of the local submatrix (possibly different for each row)
1688:           or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
1689:           The size of this array is equal to the number of local rows, i.e., `m`.
1690:           For matrices that will be factored, you must leave room for (and set)
1691:           the diagonal entry even if it is zero.
1692: . onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1693:           submatrix (same value is used for all local rows).
1694: - onnz - array containing the number of nonzeros in the various rows of the
1695:           OFF-DIAGONAL portion of the local submatrix (possibly different for
1696:           each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
1697:           structure. The size of this array is equal to the number
1698:           of local rows, i.e., `m`.

1700:   Level: intermediate

1702:   Note:
1703:   If `dnnz` (respectively `onnz`) is given then `dnz` (respectively `onz`) is ignored; for sequential matrices, `onz` and `onnz` are ignored.
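
  Example Usage:
.vb
  // a minimal sketch (not from the original source); error checking omitted
  Mat A;

  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, N, N);
  MatSetType(A, MATHYPRE);
  // at most 5 nonzeros per row in the diagonal block and 2 in the off-diagonal block
  MatHYPRESetPreallocation(A, 5, NULL, 2, NULL);
.ve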

1705: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
1706: @*/
1707: PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
1708: {
1709:   PetscFunctionBegin;
1712:   PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
1713:   PetscFunctionReturn(PETSC_SUCCESS);
1714: }

1716: /*@C
1717:   MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`

1719:   Collective

1721:   Input Parameters:
1722: + parcsr   - the pointer to the `hypre_ParCSRMatrix`
1723: . mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
1724: - copymode - PETSc copying options, see  `PetscCopyMode`

1726:   Output Parameter:
1727: . A - the matrix

1729:   Level: intermediate
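
  Example Usage:
.vb
  // a minimal sketch (not from the original source): wrap an existing, assembled
  // hypre_ParCSRMatrix without copying it; error checking omitted
  hypre_ParCSRMatrix *parcsr;
  Mat                 A;

  MatCreateFromParCSR(parcsr, MATHYPRE, PETSC_USE_POINTER, &A);
  // ... use A through the usual Mat interface ...
  MatDestroy(&A); // with PETSC_USE_POINTER the hypre matrix is not freed
.ve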

1731: .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1732: @*/
1733: PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
1734: {
1735:   Mat        T;
1736:   Mat_HYPRE *hA;
1737:   MPI_Comm   comm;
1738:   PetscInt   rstart, rend, cstart, cend, M, N;
1739:   PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;

1741:   PetscFunctionBegin;
1742:   comm = hypre_ParCSRMatrixComm(parcsr);
1743:   PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
1744:   PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
1745:   PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
1746:   PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
1747:   PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
1748:   PetscCall(PetscStrcmp(mtype, MATIS, &isis));
1749:   isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
1750:   /* TODO */
1751:   PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
1752:   /* access ParCSRMatrix */
1753:   rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
1754:   rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
1755:   cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
1756:   cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
1757:   M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
1758:   N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

1760:   /* create PETSc matrix with MatHYPRE */
1761:   PetscCall(MatCreate(comm, &T));
1762:   PetscCall(MatSetSizes(T, PetscMax(rend - rstart + 1, 0), PetscMax(cend - cstart + 1, 0), M, N));
1763:   PetscCall(MatSetType(T, MATHYPRE));
1764:   hA = (Mat_HYPRE *)T->data;

1766:   /* create HYPRE_IJMatrix */
1767:   PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend, cstart, cend, &hA->ij);
1768:   PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);

1770:   /* create new ParCSR object if needed */
1771:   if (ishyp && copymode == PETSC_COPY_VALUES) {
1772:     hypre_ParCSRMatrix *new_parcsr;
1773: #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
1774:     hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;

1776:     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
1777:     hdiag      = hypre_ParCSRMatrixDiag(parcsr);
1778:     hoffd      = hypre_ParCSRMatrixOffd(parcsr);
1779:     ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
1780:     noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
1781:     PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
1782:     PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
1783: #else
1784:     new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
1785: #endif
1786:     parcsr   = new_parcsr;
1787:     copymode = PETSC_OWN_POINTER;
1788:   }

1790:   /* set ParCSR object */
1791:   hypre_IJMatrixObject(hA->ij) = parcsr;
1792:   T->preallocated              = PETSC_TRUE;

1794:   /* set assembled flag */
1795:   hypre_IJMatrixAssembleFlag(hA->ij) = 1;
1796: #if 0
1797:   PetscCallExternal(HYPRE_IJMatrixInitialize,hA->ij);
1798: #endif
1799:   if (ishyp) {
1800:     PetscMPIInt myid = 0;

1802:     /* make sure we always have row_starts and col_starts available */
1803:     if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
1804: #if defined(hypre_ParCSRMatrixOwnsRowStarts)
1805:     if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
1806:       PetscLayout map;

1808:       PetscCall(MatGetLayouts(T, NULL, &map));
1809:       PetscCall(PetscLayoutSetUp(map));
1810:       hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1811:     }
1812:     if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
1813:       PetscLayout map;

1815:       PetscCall(MatGetLayouts(T, &map, NULL));
1816:       PetscCall(PetscLayoutSetUp(map));
1817:       hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
1818:     }
1819: #endif
1820:     /* prevent the pointer from being freed */
1821:     if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
1822:     *A = T;
1823:     PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
1824:     PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
1825:     PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
1826:   } else if (isaij) {
1827:     if (copymode != PETSC_OWN_POINTER) {
1828:       /* prevent the pointer from being freed */
1829:       hA->inner_free = PETSC_FALSE;
1830:       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
1831:       PetscCall(MatDestroy(&T));
1832:     } else { /* AIJ return type with PETSC_OWN_POINTER */
1833:       PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
1834:       *A = T;
1835:     }
1836:   } else if (isis) {
1837:     PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
1838:     if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
1839:     PetscCall(MatDestroy(&T));
1840:   }
1841:   PetscFunctionReturn(PETSC_SUCCESS);
1842: }

1844: static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
1845: {
1846:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
1847:   HYPRE_Int  type;

1849:   PetscFunctionBegin;
1850:   PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
1851:   PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
1852:   PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
1853:   PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)parcsr);
1854:   PetscFunctionReturn(PETSC_SUCCESS);
1855: }

1857: /*@C
1858:   MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

1860:   Not Collective, No Fortran Support

1862:   Input Parameter:
1863: . A - the `MATHYPRE` object

1865:   Output Parameter:
1866: . parcsr - the pointer to the `hypre_ParCSRMatrix`

1868:   Level: intermediate
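
  Example Usage:
.vb
  // a minimal sketch (not from the original source); error checking omitted
  hypre_ParCSRMatrix *parcsr;

  MatHYPREGetParCSR(A, &parcsr);
  // hypre routines may now be called on parcsr; the matrix remains owned by A
.ve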

1870: .seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
1871: @*/
1872: PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
1873: {
1874:   PetscFunctionBegin;
1877:   PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
1878:   PetscFunctionReturn(PETSC_SUCCESS);
1879: }

1881: static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
1882: {
1883:   hypre_ParCSRMatrix *parcsr;
1884:   hypre_CSRMatrix    *ha;
1885:   PetscInt            rst;

1887:   PetscFunctionBegin;
1888:   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented with non-square diagonal blocks");
1889:   PetscCall(MatGetOwnershipRange(A, &rst, NULL));
1890:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1891:   if (missing) *missing = PETSC_FALSE;
1892:   if (dd) *dd = -1;
1893:   ha = hypre_ParCSRMatrixDiag(parcsr);
1894:   if (ha) {
1895:     PetscInt   size, i;
1896:     HYPRE_Int *ii, *jj;

1898:     size = hypre_CSRMatrixNumRows(ha);
1899:     ii   = hypre_CSRMatrixI(ha);
1900:     jj   = hypre_CSRMatrixJ(ha);
1901:     for (i = 0; i < size; i++) {
1902:       PetscInt  j;
1903:       PetscBool found = PETSC_FALSE;

1905:       for (j = ii[i]; j < ii[i + 1] && !found; j++) found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

1907:       if (!found) {
1908:         PetscCall(PetscInfo(A, "Matrix is missing local diagonal entry %" PetscInt_FMT "\n", i));
1909:         if (missing) *missing = PETSC_TRUE;
1910:         if (dd) *dd = i + rst;
1911:         PetscFunctionReturn(PETSC_SUCCESS);
1912:       }
1913:     }
1914:     if (!size) {
1915:       PetscCall(PetscInfo(A, "Matrix has no diagonal entries, therefore it is missing the diagonal\n"));
1916:       if (missing) *missing = PETSC_TRUE;
1917:       if (dd) *dd = rst;
1918:     }
1919:   } else {
1920:     PetscCall(PetscInfo(A, "Matrix has no diagonal entries, therefore it is missing the diagonal\n"));
1921:     if (missing) *missing = PETSC_TRUE;
1922:     if (dd) *dd = rst;
1923:   }
1924:   PetscFunctionReturn(PETSC_SUCCESS);
1925: }

1927: static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
1928: {
1929:   hypre_ParCSRMatrix *parcsr;
1930: #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
1931:   hypre_CSRMatrix *ha;
1932: #endif
1933:   HYPRE_Complex hs;

1935:   PetscFunctionBegin;
1936:   PetscCall(PetscHYPREScalarCast(s, &hs));
1937:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1938: #if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
1939:   PetscCallExternal(hypre_ParCSRMatrixScale, parcsr, hs);
1940: #else /* diagonal part */
1941:   ha = hypre_ParCSRMatrixDiag(parcsr);
1942:   if (ha) {
1943:     PetscInt       size, i;
1944:     HYPRE_Int     *ii;
1945:     HYPRE_Complex *a;

1947:     size = hypre_CSRMatrixNumRows(ha);
1948:     a    = hypre_CSRMatrixData(ha);
1949:     ii   = hypre_CSRMatrixI(ha);
1950:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1951:   }
1952:   /* off-diagonal part */
1953:   ha = hypre_ParCSRMatrixOffd(parcsr);
1954:   if (ha) {
1955:     PetscInt       size, i;
1956:     HYPRE_Int     *ii;
1957:     HYPRE_Complex *a;

1959:     size = hypre_CSRMatrixNumRows(ha);
1960:     a    = hypre_CSRMatrixData(ha);
1961:     ii   = hypre_CSRMatrixI(ha);
1962:     for (i = 0; i < ii[size]; i++) a[i] *= hs;
1963:   }
1964: #endif
1965:   PetscFunctionReturn(PETSC_SUCCESS);
1966: }

1968: static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1969: {
1970:   hypre_ParCSRMatrix *parcsr;
1971:   HYPRE_Int          *lrows;
1972:   PetscInt            rst, ren, i;

1974:   PetscFunctionBegin;
1975:   PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
1976:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
1977:   PetscCall(PetscMalloc1(numRows, &lrows));
1978:   PetscCall(MatGetOwnershipRange(A, &rst, &ren));
1979:   for (i = 0; i < numRows; i++) {
1980:     PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
1981:     lrows[i] = rows[i] - rst;
1982:   }
1983:   PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows);
1984:   PetscCall(PetscFree(lrows));
1985:   PetscFunctionReturn(PETSC_SUCCESS);
1986: }

1988: static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
1989: {
1990:   PetscFunctionBegin;
1991:   if (ha) {
1992:     HYPRE_Int     *ii, size;
1993:     HYPRE_Complex *a;

1995:     size = hypre_CSRMatrixNumRows(ha);
1996:     a    = hypre_CSRMatrixData(ha);
1997:     ii   = hypre_CSRMatrixI(ha);

1999:     if (a) PetscCall(PetscArrayzero(a, ii[size]));
2000:   }
2001:   PetscFunctionReturn(PETSC_SUCCESS);
2002: }

2004: static PetscErrorCode MatZeroEntries_HYPRE(Mat A)
2005: {
2006:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

2008:   PetscFunctionBegin;
2009:   if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
2010:     PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0);
2011:   } else {
2012:     hypre_ParCSRMatrix *parcsr;

2014:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2015:     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
2016:     PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
2017:   }
2018:   PetscFunctionReturn(PETSC_SUCCESS);
2019: }

2021: static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag)
2022: {
2023:   PetscInt       ii;
2024:   HYPRE_Int     *i, *j;
2025:   HYPRE_Complex *a;

2027:   PetscFunctionBegin;
2028:   if (!hA) PetscFunctionReturn(PETSC_SUCCESS);

2030:   i = hypre_CSRMatrixI(hA);
2031:   j = hypre_CSRMatrixJ(hA);
2032:   a = hypre_CSRMatrixData(hA);
2033: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2034:   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hA)) {
2035:   #if defined(HYPRE_USING_CUDA)
2036:     MatZeroRows_CUDA(N, rows, i, j, a, diag);
2037:   #elif defined(HYPRE_USING_HIP)
2038:     MatZeroRows_HIP(N, rows, i, j, a, diag);
2039:   #elif defined(PETSC_HAVE_KOKKOS)
2040:     MatZeroRows_Kokkos(N, rows, i, j, a, diag);
2041:   #else
2042:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for MatZeroRows on a hypre matrix in this memory location");
2043:   #endif
2044:   } else
2045: #endif
2046:   {
2047:     for (ii = 0; ii < N; ii++) {
2048:       HYPRE_Int jj, ibeg, iend, irow;

2050:       irow = rows[ii];
2051:       ibeg = i[irow];
2052:       iend = i[irow + 1];
2053:       for (jj = ibeg; jj < iend; jj++)
2054:         if (j[jj] == irow) a[jj] = diag;
2055:         else a[jj] = 0.0;
2056:     }
2057:   }
2058:   PetscFunctionReturn(PETSC_SUCCESS);
2059: }

2061: static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
2062: {
2063:   hypre_ParCSRMatrix *parcsr;
2064:   PetscInt           *lrows, len, *lrows2;
2065:   HYPRE_Complex       hdiag;

2067:   PetscFunctionBegin;
2068:   PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Modifying the solution and the right-hand side is not supported");
2069:   PetscCall(PetscHYPREScalarCast(diag, &hdiag));
2070:   /* retrieve the internal matrix */
2071:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2072:   /* get locally owned rows */
2073:   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));

2075: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2076:   if (HYPRE_MEMORY_DEVICE == hypre_CSRMatrixMemoryLocation(hypre_ParCSRMatrixDiag(parcsr))) {
2077:     Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2078:     PetscInt   m;
2079:     PetscCall(MatGetLocalSize(A, &m, NULL));
2080:     if (!hA->rows_d) {
2081:       hA->rows_d = hypre_TAlloc(PetscInt, m, HYPRE_MEMORY_DEVICE);
2082:       if (m) PetscCheck(hA->rows_d, PETSC_COMM_SELF, PETSC_ERR_MEM, "HYPRE_TAlloc failed");
2083:     }
2084:     PetscCheck(len <= m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Too many rows in rows[]");
2085:     PetscStackCallExternalVoid("hypre_Memcpy", hypre_Memcpy(hA->rows_d, lrows, sizeof(PetscInt) * len, HYPRE_MEMORY_DEVICE, HYPRE_MEMORY_HOST));
2086:     lrows2 = hA->rows_d;
2087:   } else
2088: #endif
2089:   {
2090:     lrows2 = lrows;
2091:   }

2093:   /* zero diagonal part */
2094:   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows2, hdiag));
2095:   /* zero off-diagonal part */
2096:   PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows2, 0.0));

2098:   PetscCall(PetscFree(lrows));
2099:   PetscFunctionReturn(PETSC_SUCCESS);
2100: }
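
/*
   Example (not part of the original source): a minimal sketch of zeroing rows of an assembled
   MATHYPRE matrix A while placing 1.0 on the diagonal; as checked above, the x and b arguments
   must be NULL for this implementation. Error checking omitted.

     PetscInt rows[2] = {0, 5}; // global row indices

     PetscCall(MatZeroRows(A, 2, rows, 1.0, NULL, NULL));
*/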

2102: static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode)
2103: {
2104:   PetscFunctionBegin;
2105:   if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);

2107:   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
2108:   PetscFunctionReturn(PETSC_SUCCESS);
2109: }

2111: static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2112: {
2113:   hypre_ParCSRMatrix *parcsr;
2114:   HYPRE_Int           hnz;

2116:   PetscFunctionBegin;
2117:   /* retrieve the internal matrix */
2118:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2119:   /* call HYPRE API */
2120:   PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2121:   if (nz) *nz = (PetscInt)hnz;
2122:   PetscFunctionReturn(PETSC_SUCCESS);
2123: }

2125: static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
2126: {
2127:   hypre_ParCSRMatrix *parcsr;
2128:   HYPRE_Int           hnz;

2130:   PetscFunctionBegin;
2131:   /* retrieve the internal matrix */
2132:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2133:   /* call HYPRE API */
2134:   hnz = nz ? (HYPRE_Int)(*nz) : 0;
2135:   PetscCallExternal(HYPRE_ParCSRMatrixRestoreRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
2136:   PetscFunctionReturn(PETSC_SUCCESS);
2137: }
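
/*
   Example (not part of the original source): a minimal sketch of inspecting one locally owned
   row of a MATHYPRE matrix A through the MatGetRow()/MatRestoreRow() pair implemented above;
   error checking omitted.

     PetscInt           ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;

     PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
     for (PetscInt j = 0; j < ncols; j++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "(%" PetscInt_FMT ", %g)\n", cols[j], (double)PetscRealPart(vals[j])));
     PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
*/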

2139: static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2140: {
2141:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
2142:   PetscInt   i;

2144:   PetscFunctionBegin;
2145:   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2146:   /* Negative row indices are ignored here; negative column indices are automatically ignored by hypre */
2149:   for (i = 0; i < m; i++) {
2150:     if (idxm[i] >= 0) {
2151:       HYPRE_Int hn = (HYPRE_Int)n;
2152:       PetscCallExternal(HYPRE_IJMatrixGetValues, hA->ij, 1, &hn, (HYPRE_BigInt *)&idxm[i], (HYPRE_BigInt *)idxn, (HYPRE_Complex *)(v + i * n));
2153:     }
2154:   }
2155:   PetscFunctionReturn(PETSC_SUCCESS);
2156: }

2158: static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
2159: {
2160:   Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

2162:   PetscFunctionBegin;
2163:   switch (op) {
2164:   case MAT_NO_OFF_PROC_ENTRIES:
2165:     if (flg) PetscCallExternal(HYPRE_IJMatrixSetMaxOffProcElmts, hA->ij, 0);
2166:     break;
2167:   case MAT_IGNORE_OFF_PROC_ENTRIES:
2168:     hA->donotstash = flg;
2169:     break;
2170:   default:
2171:     break;
2172:   }
2173:   PetscFunctionReturn(PETSC_SUCCESS);
2174: }

2176: static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
2177: {
2178:   PetscViewerFormat format;

2180:   PetscFunctionBegin;
2181:   PetscCall(PetscViewerGetFormat(view, &format));
2182:   if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
2183:   if (format != PETSC_VIEWER_NATIVE) {
2184:     Mat                 B;
2185:     hypre_ParCSRMatrix *parcsr;
2186:     PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;

2188:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2189:     PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
2190:     PetscCall(MatGetOperation(B, MATOP_VIEW, (void (**)(void))&mview));
2191:     PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
2192:     PetscCall((*mview)(B, view));
2193:     PetscCall(MatDestroy(&B));
2194:   } else {
2195:     Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
2196:     PetscMPIInt size;
2197:     PetscBool   isascii;
2198:     const char *filename;

2200:     /* HYPRE uses only text files */
2201:     PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
2202:     PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
2203:     PetscCall(PetscViewerFileGetName(view, &filename));
2204:     PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename);
2205:     PetscCallMPI(MPI_Comm_size(hA->comm, &size));
2206:     if (size > 1) {
2207:       PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
2208:     } else {
2209:       PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
2210:     }
2211:   }
2212:   PetscFunctionReturn(PETSC_SUCCESS);
2213: }

2215: static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str)
2216: {
2217:   hypre_ParCSRMatrix *acsr, *bcsr;

2219:   PetscFunctionBegin;
2220:   if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) {
2221:     PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr));
2222:     PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr));
2223:     PetscCallExternal(hypre_ParCSRMatrixCopy, acsr, bcsr, 1);
2224:     PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */
2225:     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2226:     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2227:   } else {
2228:     PetscCall(MatCopy_Basic(A, B, str));
2229:   }
2230:   PetscFunctionReturn(PETSC_SUCCESS);
2231: }

2233: static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d)
2234: {
2235:   hypre_ParCSRMatrix *parcsr;
2236:   hypre_CSRMatrix    *dmat;
2237:   HYPRE_Complex      *a;
2238:   PetscBool           cong;

2240:   PetscFunctionBegin;
2241:   PetscCall(MatHasCongruentLayouts(A, &cong));
2242:   PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns");
2243:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2244:   dmat = hypre_ParCSRMatrixDiag(parcsr);
2245:   if (dmat) {
2246: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2247:     HYPRE_MemoryLocation mem = hypre_CSRMatrixMemoryLocation(dmat);
2248: #else
2249:     HYPRE_MemoryLocation mem = HYPRE_MEMORY_HOST;
2250: #endif

2252:     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecGetArrayWriteAndMemType(d, (PetscScalar **)&a, NULL));
2253:     else PetscCall(VecGetArrayWrite(d, (PetscScalar **)&a));
2254:     hypre_CSRMatrixExtractDiagonal(dmat, a, 0);
2255:     if (mem != HYPRE_MEMORY_HOST) PetscCall(VecRestoreArrayWriteAndMemType(d, (PetscScalar **)&a));
2256:     else PetscCall(VecRestoreArrayWrite(d, (PetscScalar **)&a));
2257:   }
2258:   PetscFunctionReturn(PETSC_SUCCESS);
2259: }

2261: #include <petscblaslapack.h>

2263: static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
2264: {
2265:   PetscFunctionBegin;
2266: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2267:   {
2268:     Mat                 B;
2269:     hypre_ParCSRMatrix *x, *y, *z;

2271:     PetscCall(MatHYPREGetParCSR(Y, &y));
2272:     PetscCall(MatHYPREGetParCSR(X, &x));
2273:     PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, 1.0, x, &z);
2274:     PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
2275:     PetscCall(MatHeaderMerge(Y, &B));
2276:   }
2277: #else
2278:   if (str == SAME_NONZERO_PATTERN) {
2279:     hypre_ParCSRMatrix *x, *y;
2280:     hypre_CSRMatrix    *xloc, *yloc;
2281:     PetscInt            xnnz, ynnz;
2282:     HYPRE_Complex      *xarr, *yarr;
2283:     PetscBLASInt        one = 1, bnz;

2285:     PetscCall(MatHYPREGetParCSR(Y, &y));
2286:     PetscCall(MatHYPREGetParCSR(X, &x));

2288:     /* diagonal block */
2289:     xloc = hypre_ParCSRMatrixDiag(x);
2290:     yloc = hypre_ParCSRMatrixDiag(y);
2291:     xnnz = 0;
2292:     ynnz = 0;
2293:     xarr = NULL;
2294:     yarr = NULL;
2295:     if (xloc) {
2296:       xarr = hypre_CSRMatrixData(xloc);
2297:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2298:     }
2299:     if (yloc) {
2300:       yarr = hypre_CSRMatrixData(yloc);
2301:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2302:     }
2303:     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2304:     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2305:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));

2307:     /* off-diagonal block */
2308:     xloc = hypre_ParCSRMatrixOffd(x);
2309:     yloc = hypre_ParCSRMatrixOffd(y);
2310:     xnnz = 0;
2311:     ynnz = 0;
2312:     xarr = NULL;
2313:     yarr = NULL;
2314:     if (xloc) {
2315:       xarr = hypre_CSRMatrixData(xloc);
2316:       xnnz = hypre_CSRMatrixNumNonzeros(xloc);
2317:     }
2318:     if (yloc) {
2319:       yarr = hypre_CSRMatrixData(yloc);
2320:       ynnz = hypre_CSRMatrixNumNonzeros(yloc);
2321:     }
2322:     PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
2323:     PetscCall(PetscBLASIntCast(xnnz, &bnz));
2324:     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
2325:   } else if (str == SUBSET_NONZERO_PATTERN) {
2326:     PetscCall(MatAXPY_Basic(Y, a, X, str));
2327:   } else {
2328:     Mat B;

2330:     PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
2331:     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
2332:     PetscCall(MatHeaderReplace(Y, &B));
2333:   }
2334: #endif
2335:   PetscFunctionReturn(PETSC_SUCCESS);
2336: }
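
/*
   Example (not part of the original source): Y = Y + 2*X for two assembled MATHYPRE matrices;
   with SAME_NONZERO_PATTERN the host build takes the fast BLAS axpy path above. A sketch,
   error checking omitted.

     PetscCall(MatAXPY(Y, 2.0, X, SAME_NONZERO_PATTERN));
*/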

2338: static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
2339: {
2340:   hypre_ParCSRMatrix *parcsr = NULL;
2341:   PetscCopyMode       cpmode;
2342:   Mat_HYPRE          *hA;

2344:   PetscFunctionBegin;
2345:   PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
2346:   if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
2347:     parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
2348:     cpmode = PETSC_OWN_POINTER;
2349:   } else {
2350:     cpmode = PETSC_COPY_VALUES;
2351:   }
2352:   PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
2353:   hA = (Mat_HYPRE *)A->data;
2354:   if (hA->cooMat) {
2355:     Mat_HYPRE *hB = (Mat_HYPRE *)((*B)->data);
2356:     op            = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
2357:     /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
2358:     PetscCall(MatDuplicate(hA->cooMat, op, &hB->cooMat));
2359:     PetscCall(MatHYPRE_AttachCOOMat(*B));
2360:   }
2361:   PetscFunctionReturn(PETSC_SUCCESS);
2362: }

2364: static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
2365: {
2366:   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;

2368:   PetscFunctionBegin;
2369:   /* Build a delegate matrix cooMat in AIJ format.
2370:      It has the same sparsity pattern as mat and shares mat's data array; we use cooMat to do the COO work.
2371:    */
2372:   PetscCall(MatHYPRE_CreateCOOMat(mat));
2373:   PetscCall(MatSetOption(hmat->cooMat, MAT_IGNORE_OFF_PROC_ENTRIES, hmat->donotstash));
2374:   PetscCall(MatSetOption(hmat->cooMat, MAT_NO_OFF_PROC_ENTRIES, mat->nooffprocentries));

2376:   /* MatSetPreallocationCOO_SeqAIJ and MatSetPreallocationCOO_MPIAIJ use this specific
2377:      name to automatically put the diagonal entries first */
2378:   PetscCall(PetscObjectSetName((PetscObject)hmat->cooMat, "_internal_COO_mat_for_hypre"));
2379:   PetscCall(MatSetPreallocationCOO(hmat->cooMat, coo_n, coo_i, coo_j));
2380:   hmat->cooMat->assembled = PETSC_TRUE;

2382:   /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
2383:   PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
2384:   PetscCall(MatHYPRE_CreateFromMat(hmat->cooMat, hmat));      /* Create hmat->ij and preallocate it */
2385:   PetscCall(MatHYPRE_IJMatrixCopyIJ(hmat->cooMat, hmat->ij)); /* Copy A's (i,j) to hmat->ij */

2387:   mat->preallocated = PETSC_TRUE;
2388:   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
2389:   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */

2391:   /* Attach cooMat to mat */
2392:   PetscCall(MatHYPRE_AttachCOOMat(mat));
2393:   PetscFunctionReturn(PETSC_SUCCESS);
2394: }

2396: static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
2397: {
2398:   Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;

2400:   PetscFunctionBegin;
2401:   PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
2402:   PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));
2403:   PetscCall(MatViewFromOptions(hmat->cooMat, (PetscObject)mat, "-cooMat_view"));
2404:   PetscFunctionReturn(PETSC_SUCCESS);
2405: }
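
/*
   Example (not part of the original source): a minimal sketch of COO assembly for a MATHYPRE
   matrix A using the two callbacks above; the coordinates are set once, while the values may
   be reset repeatedly with MatSetValuesCOO(). Error checking omitted.

     PetscInt    coo_i[3] = {0, 0, 1};
     PetscInt    coo_j[3] = {0, 1, 1};
     PetscScalar v[3]     = {2.0, -1.0, 2.0};

     PetscCall(MatSetPreallocationCOO(A, 3, coo_i, coo_j));
     PetscCall(MatSetValuesCOO(A, v, INSERT_VALUES));
*/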

2407: /*MC
2408:    MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
2409:           based on the hypre IJ interface.

2411:    Level: intermediate
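
   Example Usage:
.vb
   // a minimal sketch (not from the original source); error checking omitted
   Mat A;

   MatCreate(PETSC_COMM_WORLD, &A);
   MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, N, N);
   MatSetType(A, MATHYPRE); // or -mat_type hypre together with MatSetFromOptions()
.ve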

2413: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation()`
2414: M*/

2416: PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
2417: {
2418:   Mat_HYPRE *hB;
2419: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2420:   HYPRE_MemoryLocation memory_location;
2421: #endif

2423:   PetscFunctionBegin;
2424:   PetscHYPREInitialize();
2425:   PetscCall(PetscNew(&hB));

2427:   hB->inner_free      = PETSC_TRUE;
2428:   hB->array_available = PETSC_TRUE;

2430:   B->data = (void *)hB;

2432:   PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
2433:   B->ops->mult                  = MatMult_HYPRE;
2434:   B->ops->multtranspose         = MatMultTranspose_HYPRE;
2435:   B->ops->multadd               = MatMultAdd_HYPRE;
2436:   B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
2437:   B->ops->setup                 = MatSetUp_HYPRE;
2438:   B->ops->destroy               = MatDestroy_HYPRE;
2439:   B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
2440:   B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
2441:   B->ops->setvalues             = MatSetValues_HYPRE;
2442:   B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
2443:   B->ops->scale                 = MatScale_HYPRE;
2444:   B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
2445:   B->ops->zeroentries           = MatZeroEntries_HYPRE;
2446:   B->ops->zerorows              = MatZeroRows_HYPRE;
2447:   B->ops->getrow                = MatGetRow_HYPRE;
2448:   B->ops->restorerow            = MatRestoreRow_HYPRE;
2449:   B->ops->getvalues             = MatGetValues_HYPRE;
2450:   B->ops->setoption             = MatSetOption_HYPRE;
2451:   B->ops->duplicate             = MatDuplicate_HYPRE;
2452:   B->ops->copy                  = MatCopy_HYPRE;
2453:   B->ops->view                  = MatView_HYPRE;
2454:   B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
2455:   B->ops->axpy                  = MatAXPY_HYPRE;
2456:   B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
2457: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2458:   B->ops->bindtocpu = MatBindToCPU_HYPRE;
2459:   /* Get hypre's default memory location. Users can control this using the corresponding HYPRE_SetMemoryLocation API */
2460:   PetscCallExternal(HYPRE_GetMemoryLocation, &memory_location);
2461:   B->boundtocpu = (memory_location == HYPRE_MEMORY_HOST) ? PETSC_TRUE : PETSC_FALSE;
2462: #endif

2464:   /* build cache for off array entries formed */
2465:   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

2467:   PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
2468:   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
2469:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
2470:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
2471:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
2472:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
2473:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
2474:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
2475:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
2476:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
2477: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2478:   #if defined(HYPRE_USING_HIP)
2479:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2480:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijhipsparse_hypre_C", MatProductSetFromOptions_HYPRE));
2481:   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
2482:   PetscCall(MatSetVecType(B, VECHIP));
2483:   #endif
2484:   #if defined(HYPRE_USING_CUDA)
2485:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2486:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaijcusparse_hypre_C", MatProductSetFromOptions_HYPRE));
2487:   PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
2488:   PetscCall(MatSetVecType(B, VECCUDA));
2489:   #endif
2490: #endif
2491:   PetscFunctionReturn(PETSC_SUCCESS);
2492: }