Actual source code: matrix.c

  1: /*
  2:    This is where the abstract matrix operations are defined
  3:    Portions of this code are under:
  4:    Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
  5: */

  7: #include <petsc/private/matimpl.h>
  8: #include <petsc/private/isimpl.h>
  9: #include <petsc/private/vecimpl.h>

 11: /* Logging support */
 12: PetscClassId MAT_CLASSID;
 13: PetscClassId MAT_COLORING_CLASSID;
 14: PetscClassId MAT_FDCOLORING_CLASSID;
 15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;

 17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
 18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
 19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
 20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
 21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
 22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
 23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
 24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
 25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
 26: PetscLogEvent MAT_TransposeColoringCreate;
 27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
 28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
 29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
 30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
 31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
 32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
 33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
 34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
 35: PetscLogEvent MAT_GetMultiProcBlock;
 36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
 37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
 38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
 39: PetscLogEvent MAT_SetValuesBatch;
 40: PetscLogEvent MAT_ViennaCLCopyToGPU;
 41: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
 42: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
 43: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
 44: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
 45: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
 46: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;

 48: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};

 50: /*@
 51:   MatSetRandom - Sets all components of a matrix to random numbers.

 53:   Logically Collective

 55:   Input Parameters:
 56: + x    - the matrix
 57: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which
 58:           case one is created internally.

 60:   Example:
 61: .vb
 62:      PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
 63:      MatSetRandom(x,rctx);
 64:      PetscRandomDestroy(&rctx);
 65: .ve

 67:   Level: intermediate

 69:   Notes:
 70:   For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;

 72:   for sparse matrices that already have nonzero locations, it fills those locations with random numbers.

 74:   It generates an error if used on unassembled sparse matrices that have not been preallocated.

 76: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
 77: @*/
 78: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
 79: {
 80:   PetscRandom randObj = NULL;

 82:   PetscFunctionBegin;
 86:   MatCheckPreallocated(x, 1);

 88:   if (!rctx) {
 89:     MPI_Comm comm;
 90:     PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
 91:     PetscCall(PetscRandomCreate(comm, &randObj));
 92:     PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
 93:     PetscCall(PetscRandomSetFromOptions(randObj));
 94:     rctx = randObj;
 95:   }
 96:   PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
 97:   PetscUseTypeMethod(x, setrandom, rctx);
 98:   PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));

100:   PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
101:   PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
102:   PetscCall(PetscRandomDestroy(&randObj));
103:   PetscFunctionReturn(PETSC_SUCCESS);
104: }

106: /*@
107:   MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in

109:   Logically Collective

111:   Input Parameter:
112: . mat - the factored matrix

114:   Output Parameters:
115: + pivot - the pivot value computed
116: - row   - the row in which the zero pivot occurred. This value must be interpreted carefully because of row reorderings and which processes
117:          share the matrix

119:   Level: advanced

121:   Notes:
122:   This routine does not work for factorizations done with external packages.

124:   This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`

126:   This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
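
  Example Usage:
  A minimal sketch of the intended calling pattern; `F` stands for a matrix obtained with `MatGetFactor()` and numerically factored (error checking with `PetscCall()` omitted for brevity):
.vb
     MatFactorError err;

     MatFactorGetError(F, &err);
     if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
       PetscReal pivot;
       PetscInt  row;

       MatFactorGetErrorZeroPivot(F, &pivot, &row);
       PetscPrintf(PETSC_COMM_SELF, "zero pivot %g in (permuted) row %" PetscInt_FMT "\n", (double)pivot, row);
     }
.ve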

128: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
129: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
130: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
131: @*/
132: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
133: {
134:   PetscFunctionBegin;
136:   PetscAssertPointer(pivot, 2);
137:   PetscAssertPointer(row, 3);
138:   *pivot = mat->factorerror_zeropivot_value;
139:   *row   = mat->factorerror_zeropivot_row;
140:   PetscFunctionReturn(PETSC_SUCCESS);
141: }

143: /*@
144:   MatFactorGetError - gets the error code from a factorization

146:   Logically Collective

148:   Input Parameter:
149: . mat - the factored matrix

151:   Output Parameter:
152: . err - the error code

154:   Level: advanced

156:   Note:
157:   This can also be called on non-factored matrices that come from, for example, matrices used in SOR.

159: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
160:           `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
161: @*/
162: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
163: {
164:   PetscFunctionBegin;
166:   PetscAssertPointer(err, 2);
167:   *err = mat->factorerrortype;
168:   PetscFunctionReturn(PETSC_SUCCESS);
169: }

171: /*@
172:   MatFactorClearError - clears the error code in a factorization

174:   Logically Collective

176:   Input Parameter:
177: . mat - the factored matrix

179:   Level: developer

181:   Note:
182:   This can also be called on non-factored matrices that come from, for example, matrices used in SOR.

184: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
185:           `MatGetErrorCode()`, `MatFactorError`
186: @*/
187: PetscErrorCode MatFactorClearError(Mat mat)
188: {
189:   PetscFunctionBegin;
191:   mat->factorerrortype             = MAT_FACTOR_NOERROR;
192:   mat->factorerror_zeropivot_value = 0.0;
193:   mat->factorerror_zeropivot_row   = 0;
194:   PetscFunctionReturn(PETSC_SUCCESS);
195: }

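/* Default implementation used when a matrix type does not provide findnonzerorows/findnonzerocols:
   it applies the matrix (or its transpose) to a random vector and keeps the local row (or column)
   indices whose entries in the result exceed tol in magnitude; *nonzero is set to NULL when no row
   (or column) is found to be zero. */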
197: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
198: {
199:   Vec                r, l;
200:   const PetscScalar *al;
201:   PetscInt           i, nz, gnz, N, n, st;

203:   PetscFunctionBegin;
204:   PetscCall(MatCreateVecs(mat, &r, &l));
205:   if (!cols) { /* nonzero rows */
206:     PetscCall(MatGetOwnershipRange(mat, &st, NULL));
207:     PetscCall(MatGetSize(mat, &N, NULL));
208:     PetscCall(MatGetLocalSize(mat, &n, NULL));
209:     PetscCall(VecSet(l, 0.0));
210:     PetscCall(VecSetRandom(r, NULL));
211:     PetscCall(MatMult(mat, r, l));
212:     PetscCall(VecGetArrayRead(l, &al));
213:   } else { /* nonzero columns */
214:     PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
215:     PetscCall(MatGetSize(mat, NULL, &N));
216:     PetscCall(MatGetLocalSize(mat, NULL, &n));
217:     PetscCall(VecSet(r, 0.0));
218:     PetscCall(VecSetRandom(l, NULL));
219:     PetscCall(MatMultTranspose(mat, l, r));
220:     PetscCall(VecGetArrayRead(r, &al));
221:   }
222:   if (tol <= 0.0) {
223:     for (i = 0, nz = 0; i < n; i++)
224:       if (al[i] != 0.0) nz++;
225:   } else {
226:     for (i = 0, nz = 0; i < n; i++)
227:       if (PetscAbsScalar(al[i]) > tol) nz++;
228:   }
229:   PetscCall(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
230:   if (gnz != N) {
231:     PetscInt *nzr;
232:     PetscCall(PetscMalloc1(nz, &nzr));
233:     if (nz) {
234:       if (tol < 0) {
235:         for (i = 0, nz = 0; i < n; i++)
236:           if (al[i] != 0.0) nzr[nz++] = i + st;
237:       } else {
238:         for (i = 0, nz = 0; i < n; i++)
239:           if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
240:       }
241:     }
242:     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
243:   } else *nonzero = NULL;
244:   if (!cols) { /* nonzero rows */
245:     PetscCall(VecRestoreArrayRead(l, &al));
246:   } else {
247:     PetscCall(VecRestoreArrayRead(r, &al));
248:   }
249:   PetscCall(VecDestroy(&l));
250:   PetscCall(VecDestroy(&r));
251:   PetscFunctionReturn(PETSC_SUCCESS);
252: }

254: /*@
255:   MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix

257:   Input Parameter:
258: . mat - the matrix

260:   Output Parameter:
261: . keptrows - the rows that are not completely zero

263:   Level: intermediate

265:   Note:
266:   `keptrows` is set to `NULL` if all rows are nonzero.
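
  Example Usage:
  A minimal sketch of handling the `NULL` convention (`A` is an assumed, assembled matrix; error checking omitted):
.vb
     IS keptrows;

     MatFindNonzeroRows(A, &keptrows);
     if (keptrows) { /* at least one row of A is completely zero */
       ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD);
       ISDestroy(&keptrows);
     }
.ve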

268: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
269:  @*/
270: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
271: {
272:   PetscFunctionBegin;
275:   PetscAssertPointer(keptrows, 2);
276:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
277:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
278:   if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
279:   else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
280:   PetscFunctionReturn(PETSC_SUCCESS);
281: }

283: /*@
284:   MatFindZeroRows - Locate all rows that are completely zero in the matrix

286:   Input Parameter:
287: . mat - the matrix

289:   Output Parameter:
290: . zerorows - the rows that are completely zero

292:   Level: intermediate

294:   Note:
295:   `zerorows` is set to `NULL` if no rows are zero.

297: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
298:  @*/
299: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
300: {
301:   IS       keptrows;
302:   PetscInt m, n;

304:   PetscFunctionBegin;
307:   PetscAssertPointer(zerorows, 2);
308:   PetscCall(MatFindNonzeroRows(mat, &keptrows));
309:   /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
310:      In keeping with this convention, we set zerorows to NULL if there are no zero
311:      rows. */
312:   if (keptrows == NULL) {
313:     *zerorows = NULL;
314:   } else {
315:     PetscCall(MatGetOwnershipRange(mat, &m, &n));
316:     PetscCall(ISComplement(keptrows, m, n, zerorows));
317:     PetscCall(ISDestroy(&keptrows));
318:   }
319:   PetscFunctionReturn(PETSC_SUCCESS);
320: }

322: /*@
323:   MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling

325:   Not Collective

327:   Input Parameter:
328: . A - the matrix

330:   Output Parameter:
331: . a - the diagonal part (which is a SEQUENTIAL matrix)

333:   Level: advanced

335:   Notes:
336:   See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.

338:   Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
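
  Example Usage:
  A minimal sketch; because the block is borrowed (its reference count is not incremented), it is not destroyed here:
.vb
     Mat Ad;

     MatGetDiagonalBlock(A, &Ad);
     MatView(Ad, PETSC_VIEWER_STDOUT_SELF); /* Ad is a sequential matrix */
.ve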

340: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
341: @*/
342: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
343: {
344:   PetscFunctionBegin;
347:   PetscAssertPointer(a, 2);
348:   PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
349:   if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
350:   else {
351:     PetscMPIInt size;

353:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
354:     PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
355:     *a = A;
356:   }
357:   PetscFunctionReturn(PETSC_SUCCESS);
358: }

360: /*@
361:   MatGetTrace - Gets the trace of a matrix, that is, the sum of its diagonal entries.

363:   Collective

365:   Input Parameter:
366: . mat - the matrix

368:   Output Parameter:
369: . trace - the sum of the diagonal entries

371:   Level: advanced

373: .seealso: [](ch_matrices), `Mat`
374: @*/
375: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
376: {
377:   Vec diag;

379:   PetscFunctionBegin;
381:   PetscAssertPointer(trace, 2);
382:   PetscCall(MatCreateVecs(mat, &diag, NULL));
383:   PetscCall(MatGetDiagonal(mat, diag));
384:   PetscCall(VecSum(diag, trace));
385:   PetscCall(VecDestroy(&diag));
386:   PetscFunctionReturn(PETSC_SUCCESS);
387: }

389: /*@
390:   MatRealPart - Zeros out the imaginary part of the matrix

392:   Logically Collective

394:   Input Parameter:
395: . mat - the matrix

397:   Level: advanced

399: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
400: @*/
401: PetscErrorCode MatRealPart(Mat mat)
402: {
403:   PetscFunctionBegin;
406:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
407:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
408:   MatCheckPreallocated(mat, 1);
409:   PetscUseTypeMethod(mat, realpart);
410:   PetscFunctionReturn(PETSC_SUCCESS);
411: }

413: /*@C
414:   MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix

416:   Collective

418:   Input Parameter:
419: . mat - the matrix

421:   Output Parameters:
422: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
423: - ghosts  - the global indices of the ghost points

425:   Level: advanced

427:   Note:
428:   `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
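
  Example Usage:
  A minimal sketch of creating a ghosted vector from the returned indices; it assumes the vector is laid out like the matrix columns, as for the input vector of `MatMult()`:
.vb
     PetscInt        nghosts, nlocal;
     const PetscInt *ghosts;
     Vec             v;

     MatGetGhosts(mat, &nghosts, &ghosts);
     MatGetLocalSize(mat, NULL, &nlocal);
     VecCreateGhost(PetscObjectComm((PetscObject)mat), nlocal, PETSC_DETERMINE, nghosts, ghosts, &v);
.ve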

430: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
431: @*/
432: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
433: {
434:   PetscFunctionBegin;
437:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
438:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
439:   if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
440:   else {
441:     if (nghosts) *nghosts = 0;
442:     if (ghosts) *ghosts = NULL;
443:   }
444:   PetscFunctionReturn(PETSC_SUCCESS);
445: }

447: /*@
448:   MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part

450:   Logically Collective

452:   Input Parameter:
453: . mat - the matrix

455:   Level: advanced

457: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
458: @*/
459: PetscErrorCode MatImaginaryPart(Mat mat)
460: {
461:   PetscFunctionBegin;
464:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
465:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
466:   MatCheckPreallocated(mat, 1);
467:   PetscUseTypeMethod(mat, imaginarypart);
468:   PetscFunctionReturn(PETSC_SUCCESS);
469: }

471: /*@
472:   MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure

474:   Not Collective

476:   Input Parameter:
477: . mat - the matrix

479:   Output Parameters:
480: + missing - is any diagonal entry missing
481: - dd      - first diagonal entry that is missing (optional) on this process

483:   Level: advanced

485:   Note:
486:   This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value

488: .seealso: [](ch_matrices), `Mat`
489: @*/
490: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
491: {
492:   PetscFunctionBegin;
495:   PetscAssertPointer(missing, 2);
496:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
497:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
498:   PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
499:   PetscFunctionReturn(PETSC_SUCCESS);
500: }

502: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
503: /*@C
504:   MatGetRow - Gets a row of a matrix.  You MUST call `MatRestoreRow()`
505:   for each row that you get to ensure that your application does
506:   not bleed memory.

508:   Not Collective

510:   Input Parameters:
511: + mat - the matrix
512: - row - the row to get

514:   Output Parameters:
515: + ncols - if not `NULL`, the number of nonzeros in `row`
516: . cols  - if not `NULL`, the column numbers
517: - vals  - if not `NULL`, the numerical values

519:   Level: advanced

521:   Notes:
522:   This routine is provided for people who need to have direct access
523:   to the structure of a matrix.  We hope that we provide enough
524:   high-level matrix routines that few users will need it.

526:   `MatGetRow()` always returns 0-based column indices, regardless of
527:   whether the internal representation is 0-based (default) or 1-based.

529:   For better efficiency, set `cols` and/or `vals` to `NULL` if you do
530:   not wish to extract these quantities.

532:   The user can only examine the values extracted with `MatGetRow()`;
533:   the values CANNOT be altered.  To change the matrix entries, one
534:   must use `MatSetValues()`.

536:   You can only have one call to `MatGetRow()` outstanding for a particular
537:   matrix at a time, per processor. `MatGetRow()` can only obtain rows
538:   associated with the given processor, it cannot get rows from the
539:   other processors; for that we suggest using `MatCreateSubMatrices()`, then
540:   `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
541:   is in the global number of rows.

543:   Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.

545:   Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
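
  Example Usage:
  A minimal sketch of looping over the locally owned rows; every `MatGetRow()` is paired with a `MatRestoreRow()`:
.vb
     PetscInt           rstart, rend, ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;

     MatGetOwnershipRange(mat, &rstart, &rend);
     for (PetscInt row = rstart; row < rend; row++) {
       MatGetRow(mat, row, &ncols, &cols, &vals);
       /* examine cols[] and vals[] here; the values must not be modified */
       MatRestoreRow(mat, row, &ncols, &cols, &vals);
     }
.ve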

547:   Fortran Note:
548:   The calling sequence is
549: .vb
550:    MatGetRow(matrix,row,ncols,cols,values,ierr)
551:          Mat     matrix (input)
552:          integer row    (input)
553:          integer ncols  (output)
554:          integer cols(maxcols) (output)
555:          double precision (or double complex) values(maxcols) (output)
556: .ve
557:   where maxcols >= maximum nonzeros in any row of the matrix.

559: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
560: @*/
561: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
562: {
563:   PetscInt incols;

565:   PetscFunctionBegin;
568:   PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
569:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
570:   MatCheckPreallocated(mat, 1);
571:   PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
572:   PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
573:   PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
574:   if (ncols) *ncols = incols;
575:   PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
576:   PetscFunctionReturn(PETSC_SUCCESS);
577: }

579: /*@
580:   MatConjugate - replaces the matrix values with their complex conjugates

582:   Logically Collective

584:   Input Parameter:
585: . mat - the matrix

587:   Level: advanced

589: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
590: @*/
591: PetscErrorCode MatConjugate(Mat mat)
592: {
593:   PetscFunctionBegin;
595:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
596:   if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
597:     PetscUseTypeMethod(mat, conjugate);
598:     PetscCall(PetscObjectStateIncrease((PetscObject)mat));
599:   }
600:   PetscFunctionReturn(PETSC_SUCCESS);
601: }

603: /*@C
604:   MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.

606:   Not Collective

608:   Input Parameters:
609: + mat   - the matrix
610: . row   - the row to get
611: . ncols - the number of nonzeros
612: . cols  - the columns of the nonzeros
613: - vals  - if nonzero, the column values

615:   Level: advanced

617:   Notes:
618:   This routine should be called after you have finished examining the entries.

620:   This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
621:   use of the arrays after they have been restored. If you pass `NULL`, it will
622:   not zero the pointers.  Use of `cols` or `vals` after `MatRestoreRow()` is invalid.

624:   Fortran Notes:
625:   The calling sequence is
626: .vb
627:    MatRestoreRow(matrix,row,ncols,cols,values,ierr)
628:       Mat     matrix (input)
629:       integer row    (input)
630:       integer ncols  (output)
631:       integer cols(maxcols) (output)
632:       double precision (or double complex) values(maxcols) (output)
633: .ve
634:   Where maxcols >= maximum nonzeros in any row of the matrix.

636:   In Fortran `MatRestoreRow()` MUST be called after `MatGetRow()`
637:   before another call to `MatGetRow()` can be made.

639: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
640: @*/
641: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
642: {
643:   PetscFunctionBegin;
645:   if (ncols) PetscAssertPointer(ncols, 3);
646:   PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
647:   if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
648:   PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
649:   if (ncols) *ncols = 0;
650:   if (cols) *cols = NULL;
651:   if (vals) *vals = NULL;
652:   PetscFunctionReturn(PETSC_SUCCESS);
653: }

655: /*@
656:   MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
657:   You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.

659:   Not Collective

661:   Input Parameter:
662: . mat - the matrix

664:   Level: advanced

666:   Note:
667:   The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for matrices in `MATSBAIJ` format.
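
  Example Usage:
  A minimal sketch for a `MATSBAIJ` matrix (`row`, `ncols`, `cols`, and `vals` declared as for `MatGetRow()`); only the upper triangular part of each row is returned:
.vb
     MatGetRowUpperTriangular(mat);
     MatGetRow(mat, row, &ncols, &cols, &vals);
     /* examine the (upper triangular) row here */
     MatRestoreRow(mat, row, &ncols, &cols, &vals);
     MatRestoreRowUpperTriangular(mat);
.ve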

669: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
670: @*/
671: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
672: {
673:   PetscFunctionBegin;
676:   PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
677:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
678:   MatCheckPreallocated(mat, 1);
679:   if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
680:   PetscUseTypeMethod(mat, getrowuppertriangular);
681:   PetscFunctionReturn(PETSC_SUCCESS);
682: }

684: /*@
685:   MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.

687:   Not Collective

689:   Input Parameter:
690: . mat - the matrix

692:   Level: advanced

694:   Note:
695:   This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.

697: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
698: @*/
699: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
700: {
701:   PetscFunctionBegin;
704:   PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
705:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
706:   MatCheckPreallocated(mat, 1);
707:   if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
708:   PetscUseTypeMethod(mat, restorerowuppertriangular);
709:   PetscFunctionReturn(PETSC_SUCCESS);
710: }

712: /*@
713:   MatSetOptionsPrefix - Sets the prefix used for searching for all
714:   `Mat` options in the database.

716:   Logically Collective

718:   Input Parameters:
719: + A      - the matrix
720: - prefix - the prefix to prepend to all option names

722:   Level: advanced

724:   Notes:
725:   A hyphen (-) must NOT be given at the beginning of the prefix name.
726:   The first character of all runtime options is AUTOMATICALLY the hyphen.

728:   This is NOT used for options for the factorization of the matrix. Normally the
729:   prefix is automatically passed in from the `PC` calling the factorization. To set
730:   it directly, use `MatSetOptionsPrefixFactor()`.
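
  Example Usage:
  A minimal sketch; the prefix "sys1_" is illustrative. After it is set, runtime options such as -sys1_mat_view apply to this matrix:
.vb
     MatSetOptionsPrefix(A, "sys1_");
     MatSetFromOptions(A);
.ve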

732: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
733: @*/
734: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
735: {
736:   PetscFunctionBegin;
738:   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
739:   PetscFunctionReturn(PETSC_SUCCESS);
740: }

742: /*@
743:   MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for
744:   matrices created with `MatGetFactor()`

746:   Logically Collective

748:   Input Parameters:
749: + A      - the matrix
750: - prefix - the prefix to prepend to all option names for the factored matrix

752:   Level: developer

754:   Notes:
755:   A hyphen (-) must NOT be given at the beginning of the prefix name.
756:   The first character of all runtime options is AUTOMATICALLY the hyphen.

758:   Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
759:   it directly when not using `KSP`/`PC`, use this routine.

761: .seealso: [](ch_matrices), `Mat`,   [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
762: @*/
763: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
764: {
765:   PetscFunctionBegin;
767:   if (prefix) {
768:     PetscAssertPointer(prefix, 2);
769:     PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
770:     if (prefix != A->factorprefix) {
771:       PetscCall(PetscFree(A->factorprefix));
772:       PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
773:     }
774:   } else PetscCall(PetscFree(A->factorprefix));
775:   PetscFunctionReturn(PETSC_SUCCESS);
776: }

778: /*@
779:   MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for
780:   matrices created with `MatGetFactor()`

782:   Logically Collective

784:   Input Parameters:
785: + A      - the matrix
786: - prefix - the prefix to prepend to all option names for the factored matrix

788:   Level: developer

790:   Notes:
791:   A hyphen (-) must NOT be given at the beginning of the prefix name.
792:   The first character of all runtime options is AUTOMATICALLY the hyphen.

794:   Normally the prefix is automatically passed in from the `PC` calling the factorization. To append to
795:   it directly when not using `KSP`/`PC`, use this routine.

797: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
798:           `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
799:           `MatSetOptionsPrefix()`
800: @*/
801: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
802: {
803:   size_t len1, len2, new_len;

805:   PetscFunctionBegin;
807:   if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
808:   if (!A->factorprefix) {
809:     PetscCall(MatSetOptionsPrefixFactor(A, prefix));
810:     PetscFunctionReturn(PETSC_SUCCESS);
811:   }
812:   PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");

814:   PetscCall(PetscStrlen(A->factorprefix, &len1));
815:   PetscCall(PetscStrlen(prefix, &len2));
816:   new_len = len1 + len2 + 1;
817:   PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
818:   PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
819:   PetscFunctionReturn(PETSC_SUCCESS);
820: }

822: /*@
823:   MatAppendOptionsPrefix - Appends to the prefix used for searching for all
824:   matrix options in the database.

826:   Logically Collective

828:   Input Parameters:
829: + A      - the matrix
830: - prefix - the prefix to prepend to all option names

832:   Level: advanced

834:   Note:
835:   A hyphen (-) must NOT be given at the beginning of the prefix name.
836:   The first character of all runtime options is AUTOMATICALLY the hyphen.

838: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
839: @*/
840: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
841: {
842:   PetscFunctionBegin;
844:   PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
845:   PetscFunctionReturn(PETSC_SUCCESS);
846: }

848: /*@
849:   MatGetOptionsPrefix - Gets the prefix used for searching for all
850:   matrix options in the database.

852:   Not Collective

854:   Input Parameter:
855: . A - the matrix

857:   Output Parameter:
858: . prefix - pointer to the prefix string used

860:   Level: advanced

862:   Fortran Note:
863:   The user should pass in a string `prefix` of
864:   sufficient length to hold the prefix.

866: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
867: @*/
868: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
869: {
870:   PetscFunctionBegin;
872:   PetscAssertPointer(prefix, 2);
873:   PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
874:   PetscFunctionReturn(PETSC_SUCCESS);
875: }

877: /*@C
878:   MatGetState - Gets the state of a `Mat`.

880:   Not Collective

882:   Input Parameter:
883: . A - the matrix

885:   Output Parameter:
886: . state - the object state

888:   Level: advanced

890:   Note:
891:   Object state is an integer which gets increased every time
892:   the object is changed. By saving and later querying the object state
893:   one can determine whether information about the object is still current.
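
  Example Usage:
  A minimal sketch of detecting whether a matrix was changed between two points in the code:
.vb
     PetscObjectState s1, s2;

     MatGetState(A, &s1);
     /* ... code that may modify A ... */
     MatGetState(A, &s2);
     if (s1 != s2) { /* A was changed */ }
.ve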

895: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`
896: @*/
897: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
898: {
899:   PetscFunctionBegin;
901:   PetscAssertPointer(state, 2);
902:   PetscCall(PetscObjectStateGet((PetscObject)A, state));
903:   PetscFunctionReturn(PETSC_SUCCESS);
904: }

906: /*@
907:   MatResetPreallocation - Resets the matrix to use the original nonzero pattern provided by the user.

909:   Collective

911:   Input Parameter:
912: . A - the matrix

914:   Level: beginner

916:   Notes:
917:   The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.

919:   Users can call this routine to regain access to the originally preallocated memory and nonzero pattern.

921:   Currently only supported for `MATAIJ` matrices.
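
  Example Usage:
  A minimal sketch of refilling a matrix after an assembly shrank its storage; the creation and preallocation of `A` and the index/value variables are assumed:
.vb
     /* first assembly, using the user-provided preallocation */
     MatSetValues(A, 1, &row, 1, &col, &val, INSERT_VALUES);
     MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

     /* make the originally preallocated pattern available again */
     MatResetPreallocation(A);
     MatSetValues(A, 1, &row, 1, &col, &val, INSERT_VALUES);
     MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
.ve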

923: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
924: @*/
925: PetscErrorCode MatResetPreallocation(Mat A)
926: {
927:   PetscFunctionBegin;
930:   PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
931:   if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
932:   PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
933:   PetscFunctionReturn(PETSC_SUCCESS);
934: }

936: /*@
937:   MatSetUp - Sets up the internal matrix data structures for later use.

939:   Collective

941:   Input Parameter:
942: . A - the matrix

944:   Level: intermediate

946:   Notes:
947:   If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
948:   setting values in the matrix.

950:   This routine is called internally by other matrix functions when needed, so it rarely needs to be called directly by users.
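
  Example Usage:
  A minimal sketch of the usual creation sequence when no explicit preallocation is supplied; `m` and `n` are assumed to be the global sizes:
.vb
     Mat A;

     MatCreate(PETSC_COMM_WORLD, &A);
     MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m, n);
     MatSetFromOptions(A);
     MatSetUp(A);
     /* MatSetValues(), MatAssemblyBegin()/MatAssemblyEnd(), ... */
.ve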

952: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
953: @*/
954: PetscErrorCode MatSetUp(Mat A)
955: {
956:   PetscFunctionBegin;
958:   if (!((PetscObject)A)->type_name) {
959:     PetscMPIInt size;

961:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
962:     PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
963:   }
964:   if (!A->preallocated) PetscTryTypeMethod(A, setup);
965:   PetscCall(PetscLayoutSetUp(A->rmap));
966:   PetscCall(PetscLayoutSetUp(A->cmap));
967:   A->preallocated = PETSC_TRUE;
968:   PetscFunctionReturn(PETSC_SUCCESS);
969: }

971: #if defined(PETSC_HAVE_SAWS)
972: #include <petscviewersaws.h>
973: #endif

975: /*
976:    If threadsafety is on, extraneous matrices may be printed

978:    This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
979: */
980: #if !defined(PETSC_HAVE_THREADSAFETY)
981: static PetscInt insidematview = 0;
982: #endif

984: /*@C
985:   MatViewFromOptions - View properties of the matrix based on options set in the options database

987:   Collective

989:   Input Parameters:
990: + A    - the matrix
991: . obj  - optional additional object that provides the options prefix to use
992: - name - command line option

994:   Options Database Key:
995: . -mat_view [viewertype]:... - the viewer and its options

997:   Level: intermediate

999:   Note:
1000: .vb
1001:     If no value is provided ascii:stdout is used
1002:        ascii[:[filename][:[format][:append]]]    defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1003:                                                   for example ascii::ascii_info prints just the information about the object not all details
1004:                                                   unless :append is given filename opens in write mode, overwriting what was already there
1005:        binary[:[filename][:[format][:append]]]   defaults to the file binaryoutput
1006:        draw[:drawtype[:filename]]                for example, draw:tikz, draw:tikz:figure.tex  or draw:x
1007:        socket[:port]                             defaults to the standard output port
1008:        saws[:communicatorname]                    publishes object to the Scientific Application Webserver (SAWs)
1009: .ve

1011: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1012: @*/
1013: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1014: {
1015:   PetscFunctionBegin;
1017: #if !defined(PETSC_HAVE_THREADSAFETY)
1018:   if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1019: #endif
1020:   PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1021:   PetscFunctionReturn(PETSC_SUCCESS);
1022: }

1024: /*@C
1025:   MatView - displays information about a matrix in a variety of ways

1027:   Collective on viewer

1029:   Input Parameters:
1030: + mat    - the matrix
1031: - viewer - visualization context

1033:   Options Database Keys:
1034: + -mat_view ::ascii_info           - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1035: . -mat_view ::ascii_info_detail    - Prints more detailed info
1036: . -mat_view                        - Prints matrix in ASCII format
1037: . -mat_view ::ascii_matlab         - Prints matrix in MATLAB format
1038: . -mat_view draw                   - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1039: . -display <name>                  - Sets display name (default is host)
1040: . -draw_pause <sec>                - Sets number of seconds to pause after display
1041: . -mat_view socket                 - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1042: . -viewer_socket_machine <machine> - -
1043: . -viewer_socket_port <port>       - -
1044: . -mat_view binary                 - save matrix to file in binary format
1045: - -viewer_binary_filename <name>   - -

1047:   Level: beginner

1049:   Notes:
1050:   The available visualization contexts include
1051: +    `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1052: .    `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1053: .    `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1054: -     `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure

1056:   The user can open alternative visualization contexts with
1057: +    `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1058: .    `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1059:   specified file; corresponding input uses `MatLoad()`
1060: .    `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1061:   an X window display
1062: -    `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1063:   Currently only the `MATSEQDENSE` and `MATAIJ`
1064:   matrix types support the Socket viewer.

1066:   The user can call `PetscViewerPushFormat()` to specify the output
1067:   format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1068:   `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`).  Available formats include
1069: +    `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1070: .    `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1071: .    `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1072: .    `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1073:   format common among all matrix types
1074: .    `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1075:   format (which is in many cases the same as the default)
1076: .    `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1077:   size and structure (not the matrix entries)
1078: -    `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1079:   the matrix structure

1081:   The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1082:   for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.

1084:   In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).

1086:   See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1087:   viewer is used.

1089:   See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1090:   viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.

1092:   One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1093:   and then use the following mouse functions.
1094: .vb
1095:   left mouse: zoom in
1096:   middle mouse: zoom out
1097:   right mouse: continue with the simulation
1098: .ve
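
  Example Usage:
  A minimal sketch of writing a matrix in MATLAB format to an ASCII file; the file name "A.m" is illustrative:
.vb
     PetscViewer viewer;

     PetscViewerASCIIOpen(PETSC_COMM_WORLD, "A.m", &viewer);
     PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
     MatView(mat, viewer);
     PetscViewerPopFormat(viewer);
     PetscViewerDestroy(&viewer);
.ve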

1100: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1101:           `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1102: @*/
1103: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1104: {
1105:   PetscInt          rows, cols, rbs, cbs;
1106:   PetscBool         isascii, isstring, issaws;
1107:   PetscViewerFormat format;
1108:   PetscMPIInt       size;

1110:   PetscFunctionBegin;
1113:   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));

1116:   PetscCall(PetscViewerGetFormat(viewer, &format));
1117:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1118:   if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);

1120: #if !defined(PETSC_HAVE_THREADSAFETY)
1121:   insidematview++;
1122: #endif
1123:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1124:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1125:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1126:   PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");

1128:   PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1129:   if (isascii) {
1130:     if (!mat->preallocated) {
1131:       PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1132: #if !defined(PETSC_HAVE_THREADSAFETY)
1133:       insidematview--;
1134: #endif
1135:       PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1136:       PetscFunctionReturn(PETSC_SUCCESS);
1137:     }
1138:     if (!mat->assembled) {
1139:       PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1140: #if !defined(PETSC_HAVE_THREADSAFETY)
1141:       insidematview--;
1142: #endif
1143:       PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1144:       PetscFunctionReturn(PETSC_SUCCESS);
1145:     }
1146:     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1147:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1148:       MatNullSpace nullsp, transnullsp;

1150:       PetscCall(PetscViewerASCIIPushTab(viewer));
1151:       PetscCall(MatGetSize(mat, &rows, &cols));
1152:       PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1153:       if (rbs != 1 || cbs != 1) {
1154:         if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1155:         else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1156:       } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1157:       if (mat->factortype) {
1158:         MatSolverType solver;
1159:         PetscCall(MatFactorGetSolverType(mat, &solver));
1160:         PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1161:       }
1162:       if (mat->ops->getinfo) {
1163:         MatInfo info;
1164:         PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1165:         PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1166:         if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1167:       }
1168:       PetscCall(MatGetNullSpace(mat, &nullsp));
1169:       PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1170:       if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, "  has attached null space\n"));
1171:       if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, "  has attached transposed null space\n"));
1172:       PetscCall(MatGetNearNullSpace(mat, &nullsp));
1173:       if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, "  has attached near null space\n"));
1174:       PetscCall(PetscViewerASCIIPushTab(viewer));
1175:       PetscCall(MatProductView(mat, viewer));
1176:       PetscCall(PetscViewerASCIIPopTab(viewer));
1177:       if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1178:         IS tmp;

1180:         PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1181:         PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1182:         PetscCall(PetscViewerASCIIPushTab(viewer));
1183:         PetscCall(ISView(tmp, viewer));
1184:         PetscCall(PetscViewerASCIIPopTab(viewer));
1185:         PetscCall(ISDestroy(&tmp));
1186:       }
1187:     }
1188:   } else if (issaws) {
1189: #if defined(PETSC_HAVE_SAWS)
1190:     PetscMPIInt rank;

1192:     PetscCall(PetscObjectName((PetscObject)mat));
1193:     PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1194:     if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1195: #endif
1196:   } else if (isstring) {
1197:     const char *type;
1198:     PetscCall(MatGetType(mat, &type));
1199:     PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1200:     PetscTryTypeMethod(mat, view, viewer);
1201:   }
1202:   if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1203:     PetscCall(PetscViewerASCIIPushTab(viewer));
1204:     PetscUseTypeMethod(mat, viewnative, viewer);
1205:     PetscCall(PetscViewerASCIIPopTab(viewer));
1206:   } else if (mat->ops->view) {
1207:     PetscCall(PetscViewerASCIIPushTab(viewer));
1208:     PetscUseTypeMethod(mat, view, viewer);
1209:     PetscCall(PetscViewerASCIIPopTab(viewer));
1210:   }
1211:   if (isascii) {
1212:     PetscCall(PetscViewerGetFormat(viewer, &format));
1213:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1214:   }
1215:   PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1216: #if !defined(PETSC_HAVE_THREADSAFETY)
1217:   insidematview--;
1218: #endif
1219:   PetscFunctionReturn(PETSC_SUCCESS);
1220: }

1222: #if defined(PETSC_USE_DEBUG)
1223: #include <../src/sys/totalview/tv_data_display.h>
1224: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1225: {
1226:   TV_add_row("Local rows", "int", &mat->rmap->n);
1227:   TV_add_row("Local columns", "int", &mat->cmap->n);
1228:   TV_add_row("Global rows", "int", &mat->rmap->N);
1229:   TV_add_row("Global columns", "int", &mat->cmap->N);
1230:   TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1231:   return TV_format_OK;
1232: }
1233: #endif

1235: /*@C
1236:   MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1237:   with `MatView()`.  The matrix format is determined from the options database.
1238:   Generates a parallel MPI matrix if the communicator has more than one
1239:   processor.  The default matrix type is `MATAIJ`.

1241:   Collective

1243:   Input Parameters:
1244: + mat    - the newly loaded matrix; it must have been created with `MatCreate()`
1245:             or some related function before a call to `MatLoad()`
1246: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer

1248:   Options Database Key:
1249: . -matload_block_size <bs> - set block size

1251:   Level: beginner

1253:   Notes:
1254:   If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1255:   `Mat` before calling this routine if you wish to set it from the options database.

1257:   `MatLoad()` automatically loads into the options database any options
1258:   given in the file filename.info where filename is the name of the file
1259:   that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1260:   file will be ignored if you use the -viewer_binary_skip_info option.

1262:   If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1263:   sets the default matrix type AIJ and sets the local and global sizes.
1264:   If the type and/or size is already set, it is used unchanged.

1266:   In parallel, each processor can load a subset of rows (or the
1267:   entire matrix).  This routine is especially useful when a large
1268:   matrix is stored on disk and only part of it is desired on each
1269:   processor.  For example, a parallel solver may access only some of
1270:   the rows from each processor.  The algorithm used here reads
1271:   relatively small blocks of data rather than reading the entire
1272:   matrix and then subsetting it.

1274:   Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1275:   Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1276:   or the sequence like
1277: .vb
1278:     `PetscViewer` v;
1279:     `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1280:     `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1281:     `PetscViewerSetFromOptions`(v);
1282:     `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1283:     `PetscViewerFileSetName`(v,"datafile");
1284: .ve
1285:   The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1286: $ -viewer_type {binary, hdf5}

1288:   See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1289:   and src/mat/tutorials/ex10.c with the second approach.
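
  Example Usage:
  A minimal sketch of loading a matrix from a PETSc binary file; the file name "matrix.dat" is illustrative:
.vb
     Mat         A;
     PetscViewer viewer;

     PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &viewer);
     MatCreate(PETSC_COMM_WORLD, &A);
     MatSetFromOptions(A);
     MatLoad(A, viewer);
     PetscViewerDestroy(&viewer);
.ve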

1291:   In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1292:   is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1293:   Multiple objects, both matrices and vectors, can be stored within the same file.
1294:   Their `PetscObject` name is ignored; they are loaded in the order of their storage.

1296:   Most users should not need to know the details of the binary storage
1297:   format, since `MatLoad()` and `MatView()` completely hide these details.
1298:   But for anyone who is interested, the standard binary matrix storage
1299:   format is

1301: .vb
1302:     PetscInt    MAT_FILE_CLASSID
1303:     PetscInt    number of rows
1304:     PetscInt    number of columns
1305:     PetscInt    total number of nonzeros
1306:     PetscInt    *number nonzeros in each row
1307:     PetscInt    *column indices of all nonzeros (starting index is zero)
1308:     PetscScalar *values of all nonzeros
1309: .ve
1310:   If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1311:   stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1312:   case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.

1314:   PETSc automatically does the byte swapping for
1315:   machines that store the bytes reversed. Thus if you write your own binary
1316:   read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1317:   and `PetscBinaryWrite()` to see how this may be done.

1319:   In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1320:   Each processor's chunk is loaded independently by its owning MPI process.
1321:   Multiple objects, both matrices and vectors, can be stored within the same file.
1322:   They are looked up by their PetscObject name.

1324:   As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1325:   by default the same structure and naming of the AIJ arrays and column count
1326:   within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1327: $    save example.mat A b -v7.3
1328:   can be directly read by this routine (see Reference 1 for details).

1330:   Depending on your MATLAB version, this format might be the default;
1331:   otherwise you can set it as the default in Preferences.

1333:   Unless the -nocompression flag is used to save the file in MATLAB,
1334:   PETSc must be configured with the ZLIB package.

1336:   See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c

1338:   This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`

1340:   Corresponding `MatView()` is not yet implemented.

1342:   The loaded matrix is actually a transpose of the original one in MATLAB,
1343:   unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1344:   With this format, the matrix is automatically transposed by PETSc,
1345:   unless the matrix is marked as SPD or symmetric
1346:   (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).

1348:   See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>

1350: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1351:  @*/
1352: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1353: {
1354:   PetscBool flg;

1356:   PetscFunctionBegin;

1360:   if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));

1362:   flg = PETSC_FALSE;
1363:   PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1364:   if (flg) {
1365:     PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1366:     PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1367:   }
1368:   flg = PETSC_FALSE;
1369:   PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1370:   if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));

1372:   PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1373:   PetscUseTypeMethod(mat, load, viewer);
1374:   PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1375:   PetscFunctionReturn(PETSC_SUCCESS);
1376: }

1378: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1379: {
1380:   Mat_Redundant *redund = *redundant;

1382:   PetscFunctionBegin;
1383:   if (redund) {
1384:     if (redund->matseq) { /* via MatCreateSubMatrices()  */
1385:       PetscCall(ISDestroy(&redund->isrow));
1386:       PetscCall(ISDestroy(&redund->iscol));
1387:       PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1388:     } else {
1389:       PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1390:       PetscCall(PetscFree(redund->sbuf_j));
1391:       PetscCall(PetscFree(redund->sbuf_a));
1392:       for (PetscInt i = 0; i < redund->nrecvs; i++) {
1393:         PetscCall(PetscFree(redund->rbuf_j[i]));
1394:         PetscCall(PetscFree(redund->rbuf_a[i]));
1395:       }
1396:       PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1397:     }

1399:     if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1400:     PetscCall(PetscFree(redund));
1401:   }
1402:   PetscFunctionReturn(PETSC_SUCCESS);
1403: }

1405: /*@C
1406:   MatDestroy - Frees space taken by a matrix.

1408:   Collective

1410:   Input Parameter:
1411: . A - the matrix

1413:   Level: beginner

1415:   Developer Note:
1416:   Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1417:   `MatDestroySubMatrices()`. Thus any changes made here must also be made in those routines.
1418:   `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1419:   if changes are needed here.

1421: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1422: @*/
1423: PetscErrorCode MatDestroy(Mat *A)
1424: {
1425:   PetscFunctionBegin;
1426:   if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1428:   if (--((PetscObject)*A)->refct > 0) {
1429:     *A = NULL;
1430:     PetscFunctionReturn(PETSC_SUCCESS);
1431:   }

1433:   /* if memory was published with SAWs then destroy it */
1434:   PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1435:   PetscTryTypeMethod(*A, destroy);

1437:   PetscCall(PetscFree((*A)->factorprefix));
1438:   PetscCall(PetscFree((*A)->defaultvectype));
1439:   PetscCall(PetscFree((*A)->defaultrandtype));
1440:   PetscCall(PetscFree((*A)->bsizes));
1441:   PetscCall(PetscFree((*A)->solvertype));
1442:   for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1443:   if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1444:   PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1445:   PetscCall(MatProductClear(*A));
1446:   PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1447:   PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1448:   PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1449:   PetscCall(MatDestroy(&(*A)->schur));
1450:   PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1451:   PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1452:   PetscCall(PetscHeaderDestroy(A));
1453:   PetscFunctionReturn(PETSC_SUCCESS);
1454: }

1456: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1457: /*@C
1458:   MatSetValues - Inserts or adds a block of values into a matrix.
1459:   These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1460:   MUST be called after all calls to `MatSetValues()` have been completed.

1462:   Not Collective

1464:   Input Parameters:
1465: + mat  - the matrix
1466: . v    - a logically two-dimensional array of values
1467: . m    - the number of rows
1468: . idxm - the global indices of the rows
1469: . n    - the number of columns
1470: . idxn - the global indices of the columns
1471: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values

1473:   Level: beginner

1475:   Notes:
1476:   By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.

1478:   Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1479:   options cannot be mixed without intervening calls to the assembly
1480:   routines.

1482:   `MatSetValues()` uses 0-based row and column numbers in Fortran
1483:   as well as in C.

1485:   Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1486:   simply ignored. This allows easily inserting element stiffness matrices
1487:   with homogeneous Dirichlet boundary conditions that you don't want represented
1488:   in the matrix.
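
  For example, the following minimal sketch (assuming `mat` has already been created, sized, and preallocated) inserts a row-oriented 2x2 block into global rows 0,1 and global columns 0,2:
.vb
  PetscInt    rows[2] = {0, 1}, cols[2] = {0, 2};
  PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0}; /* vals[i*2 + j] is placed at entry (rows[i], cols[j]) */

  PetscCall(MatSetValues(mat, 2, rows, 2, cols, vals, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve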

1490:   Efficiency Alert:
1491:   The routine `MatSetValuesBlocked()` may offer much better efficiency
1492:   for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).

1494:   Developer Note:
1495:   This is labeled with C so it does not automatically generate Fortran stubs and interfaces
1496:   because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.

1498: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1499:           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1500: @*/
1501: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1502: {
1503:   PetscFunctionBeginHot;
1506:   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1507:   PetscAssertPointer(idxm, 3);
1508:   PetscAssertPointer(idxn, 5);
1509:   MatCheckPreallocated(mat, 1);

1511:   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1512:   else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");

1514:   if (PetscDefined(USE_DEBUG)) {
1515:     PetscInt i, j;

1517:     PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1518:     if (v) {
1519:       for (i = 0; i < m; i++) {
1520:         for (j = 0; j < n; j++) {
1521:           if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1522: #if defined(PETSC_USE_COMPLEX)
1523:             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1524: #else
1525:             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1526: #endif
1527:         }
1528:       }
1529:     }
1530:     for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1531:     for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1532:   }

1534:   if (mat->assembled) {
1535:     mat->was_assembled = PETSC_TRUE;
1536:     mat->assembled     = PETSC_FALSE;
1537:   }
1538:   PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1539:   PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1540:   PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1541:   PetscFunctionReturn(PETSC_SUCCESS);
1542: }

1544: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1545: /*@
1546:   MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1547:   These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1548:   MUST be called after all calls to `MatSetValues()` have been completed.

1550:   Not Collective

1552:   Input Parameters:
1553: + mat  - the matrix
1554: . v    - a logically two-dimensional array of values
1555: . ism  - the rows to provide
1556: . isn  - the columns to provide
1557: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values

1559:   Level: beginner

1561:   Notes:
1562:   By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.

1564:   Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1565:   options cannot be mixed without intervening calls to the assembly
1566:   routines.

1568:   `MatSetValues()` uses 0-based row and column numbers in Fortran
1569:   as well as in C.

1571:   Negative indices may be passed in `ism` and `isn`; these rows and columns are
1572:   simply ignored. This allows easily inserting element stiffness matrices
1573:   with homogeneous Dirichlet boundary conditions that you don't want represented
1574:   in the matrix.
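
  A minimal sketch (assuming `mat` has been created and preallocated; the particular indices and values are only illustrative):
.vb
  IS          ism, isn;
  PetscInt    rows[2] = {0, 1}, cols[2] = {0, 2};
  PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
  PetscCall(MatSetValuesIS(mat, ism, isn, vals, INSERT_VALUES));
  PetscCall(ISDestroy(&ism));
  PetscCall(ISDestroy(&isn));
.ve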

1576:   Efficiency Alert:
1577:   The routine `MatSetValuesBlocked()` may offer much better efficiency
1578:   for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).

1580:   This is currently not optimized for any particular `ISType`

1582:   Developer Note:
1583:   This is labeled with C so it does not automatically generate Fortran stubs and interfaces
1584:   because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.

1586: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1587:           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1588: @*/
1589: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1590: {
1591:   PetscInt        m, n;
1592:   const PetscInt *rows, *cols;

1594:   PetscFunctionBeginHot;
1596:   PetscCall(ISGetIndices(ism, &rows));
1597:   PetscCall(ISGetIndices(isn, &cols));
1598:   PetscCall(ISGetLocalSize(ism, &m));
1599:   PetscCall(ISGetLocalSize(isn, &n));
1600:   PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1601:   PetscCall(ISRestoreIndices(ism, &rows));
1602:   PetscCall(ISRestoreIndices(isn, &cols));
1603:   PetscFunctionReturn(PETSC_SUCCESS);
1604: }

1606: /*@
1607:   MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1608:   values into a matrix

1610:   Not Collective

1612:   Input Parameters:
1613: + mat - the matrix
1614: . row - the (block) row to set
1615: - v   - a logically two-dimensional array of values

1617:   Level: intermediate

1619:   Notes:
1620:   The values, `v`, are column-oriented (for the block version) and sorted

1622:   All the nonzero values in `row` must be provided

1624:   The matrix must have previously had its column indices set, likely by having been assembled.

1626:   `row` must belong to this MPI process

1628: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1629:           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1630: @*/
1631: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1632: {
1633:   PetscInt globalrow;

1635:   PetscFunctionBegin;
1638:   PetscAssertPointer(v, 3);
1639:   PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1640:   PetscCall(MatSetValuesRow(mat, globalrow, v));
1641:   PetscFunctionReturn(PETSC_SUCCESS);
1642: }

1644: /*@
1645:   MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1646:   values into a matrix

1648:   Not Collective

1650:   Input Parameters:
1651: + mat - the matrix
1652: . row - the (block) row to set
1653: - v   - a logically two-dimensional (column-major) array of values for block matrices with block size larger than one, otherwise a one-dimensional array of values

1655:   Level: advanced

1657:   Notes:
1658:   The values, `v`, are column-oriented for the block version.

1660:   All the nonzeros in `row` must be provided

1662:   THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. It is rare that this routine is used; usually `MatSetValues()` is used instead.

1664:   `row` must belong to this process

1666: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1667:           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1668: @*/
1669: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1670: {
1671:   PetscFunctionBeginHot;
1674:   MatCheckPreallocated(mat, 1);
1675:   PetscAssertPointer(v, 3);
1676:   PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1677:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1678:   mat->insertmode = INSERT_VALUES;

1680:   if (mat->assembled) {
1681:     mat->was_assembled = PETSC_TRUE;
1682:     mat->assembled     = PETSC_FALSE;
1683:   }
1684:   PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1685:   PetscUseTypeMethod(mat, setvaluesrow, row, v);
1686:   PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1687:   PetscFunctionReturn(PETSC_SUCCESS);
1688: }

1690: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1691: /*@
1692:   MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1693:   using structured grid indexing

1695:   Not Collective

1697:   Input Parameters:
1698: + mat  - the matrix
1699: . m    - number of rows being entered
1700: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1701: . n    - number of columns being entered
1702: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1703: . v    - a logically two-dimensional array of values
1704: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values

1706:   Level: beginner

1708:   Notes:
1709:   By default the values, `v`, are row-oriented.  See `MatSetOption()` for other options.

1711:   Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1712:   options cannot be mixed without intervening calls to the assembly
1713:   routines.

1715:   The grid coordinates are across the entire grid, not just the local portion

1717:   `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1718:   as well as in C.

1720:   For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine

1722:   In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1723:   or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.

1725:   The columns and rows in the stencil passed in MUST be contained within the
1726:   ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1727:   if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1728:   local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5), the
1729:   first i index you can use in your column and row indices in `MatSetValuesStencil()` is 5.

1731:   For periodic boundary conditions use negative indices for values to the left of index 0 (these are
1732:   obtained by wrapping values from the right edge). For values to the right of the last entry, use that entry's index plus one,
1733:   plus two, etc., to obtain values that are wrapped from the left edge. This does not work for anything but the
1734:   `DM_BOUNDARY_PERIODIC` boundary type.

1736:   For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1737:   a single value per point) you can skip filling those indices.

1739:   Inspired by the structured grid interface to the HYPRE package
1740:   (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
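
  A minimal C sketch (assuming `mat` comes from `DMCreateMatrix()` on a 2d `DMDA` with one degree of freedom, and that `i` and `j` are loop indices over the local grid points whose left neighbor lies inside the ghost region); the unused k and c fields are skipped as described above:
.vb
  MatStencil  row, col[2];
  PetscScalar v[2] = {2.0, -1.0};

  row.i    = i;     row.j    = j;
  col[0].i = i;     col[0].j = j;
  col[1].i = i - 1; col[1].j = j;
  PetscCall(MatSetValuesStencil(mat, 1, &row, 2, col, v, INSERT_VALUES));
.ve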

1742:   Efficiency Alert:
1743:   The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1744:   for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).

1746:   Fortran Note:
1747:   `idxm` and `idxn` should be declared as
1748: $     MatStencil idxm(4,m),idxn(4,n)
1749:   and the values inserted using
1750: .vb
1751:     idxm(MatStencil_i,1) = i
1752:     idxm(MatStencil_j,1) = j
1753:     idxm(MatStencil_k,1) = k
1754:     idxm(MatStencil_c,1) = c
1755:     etc
1756: .ve

1758: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1759:           `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1760: @*/
1761: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1762: {
1763:   PetscInt  buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1764:   PetscInt  j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1765:   PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

1767:   PetscFunctionBegin;
1768:   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1771:   PetscAssertPointer(idxm, 3);
1772:   PetscAssertPointer(idxn, 5);

1774:   if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1775:     jdxm = buf;
1776:     jdxn = buf + m;
1777:   } else {
1778:     PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1779:     jdxm = bufm;
1780:     jdxn = bufn;
1781:   }
1782:   for (i = 0; i < m; i++) {
1783:     for (j = 0; j < 3 - sdim; j++) dxm++;
1784:     tmp = *dxm++ - starts[0];
1785:     for (j = 0; j < dim - 1; j++) {
1786:       if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1787:       else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1788:     }
1789:     if (mat->stencil.noc) dxm++;
1790:     jdxm[i] = tmp;
1791:   }
1792:   for (i = 0; i < n; i++) {
1793:     for (j = 0; j < 3 - sdim; j++) dxn++;
1794:     tmp = *dxn++ - starts[0];
1795:     for (j = 0; j < dim - 1; j++) {
1796:       if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1797:       else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1798:     }
1799:     if (mat->stencil.noc) dxn++;
1800:     jdxn[i] = tmp;
1801:   }
1802:   PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1803:   PetscCall(PetscFree2(bufm, bufn));
1804:   PetscFunctionReturn(PETSC_SUCCESS);
1805: }

1807: /*@
1808:   MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1809:   using structured grid indexing

1811:   Not Collective

1813:   Input Parameters:
1814: + mat  - the matrix
1815: . m    - number of rows being entered
1816: . idxm - grid coordinates for matrix rows being entered
1817: . n    - number of columns being entered
1818: . idxn - grid coordinates for matrix columns being entered
1819: . v    - a logically two-dimensional array of values
1820: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values

1822:   Level: beginner

1824:   Notes:
1825:   By default the values, `v`, are row-oriented and unsorted.
1826:   See `MatSetOption()` for other options.

1828:   Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1829:   options cannot be mixed without intervening calls to the assembly
1830:   routines.

1832:   The grid coordinates are across the entire grid, not just the local portion

1834:   `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1835:   as well as in C.

1837:   For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine

1839:   In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1840:   or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.

1842:   The columns and rows in the stencil passed in MUST be contained within the
1843:   ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1844:   if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1845:   local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1846:   first i index you can use in your column and row indices in `MatSetStencil()` is 5.

1848:   Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1849:   simply ignored. This allows easily inserting element stiffness matrices
1850:   with homogeneous Dirichlet boundary conditions that you don't want represented
1851:   in the matrix.

1853:   Inspired by the structured grid interface to the HYPRE package
1854:   (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)

1856:   Fortran Note:
1857:   `idxm` and `idxn` should be declared as
1858: $     MatStencil idxm(4,m),idxn(4,n)
1859:   and the values inserted using
1860: .vb
1861:     idxm(MatStencil_i,1) = i
1862:     idxm(MatStencil_j,1) = j
1863:     idxm(MatStencil_k,1) = k
1864:    etc
1865: .ve

1867: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1868:           `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1869:           `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1870: @*/
1871: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1872: {
1873:   PetscInt  buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1874:   PetscInt  j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1875:   PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

1877:   PetscFunctionBegin;
1878:   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1881:   PetscAssertPointer(idxm, 3);
1882:   PetscAssertPointer(idxn, 5);
1883:   PetscAssertPointer(v, 6);

1885:   if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1886:     jdxm = buf;
1887:     jdxn = buf + m;
1888:   } else {
1889:     PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1890:     jdxm = bufm;
1891:     jdxn = bufn;
1892:   }
1893:   for (i = 0; i < m; i++) {
1894:     for (j = 0; j < 3 - sdim; j++) dxm++;
1895:     tmp = *dxm++ - starts[0];
1896:     for (j = 0; j < sdim - 1; j++) {
1897:       if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1898:       else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1899:     }
1900:     dxm++;
1901:     jdxm[i] = tmp;
1902:   }
1903:   for (i = 0; i < n; i++) {
1904:     for (j = 0; j < 3 - sdim; j++) dxn++;
1905:     tmp = *dxn++ - starts[0];
1906:     for (j = 0; j < sdim - 1; j++) {
1907:       if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1908:       else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1909:     }
1910:     dxn++;
1911:     jdxn[i] = tmp;
1912:   }
1913:   PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1914:   PetscCall(PetscFree2(bufm, bufn));
1915:   PetscFunctionReturn(PETSC_SUCCESS);
1916: }

1918: /*@
1919:   MatSetStencil - Sets the grid information for setting values into a matrix via
1920:   `MatSetValuesStencil()`

1922:   Not Collective

1924:   Input Parameters:
1925: + mat    - the matrix
1926: . dim    - dimension of the grid (1, 2, or 3)
1927: . dims   - number of grid points in x, y, and z direction, including ghost points on your processor
1928: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1929: - dof    - number of degrees of freedom per node

1931:   Level: beginner

1933:   Notes:
1934:   Inspired by the structured grid interface to the HYPRE package
1935:   (www.llnl.gov/CASC/hyper)

1937:   For matrices generated with `DMCreateMatrix()` this routine is automatically called and so is not needed by the
1938:   user.

1940: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1941:           `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1942: @*/
1943: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1944: {
1945:   PetscFunctionBegin;
1947:   PetscAssertPointer(dims, 3);
1948:   PetscAssertPointer(starts, 4);

1950:   mat->stencil.dim = dim + (dof > 1);
1951:   for (PetscInt i = 0; i < dim; i++) {
1952:     mat->stencil.dims[i]   = dims[dim - i - 1]; /* copy the values in backwards */
1953:     mat->stencil.starts[i] = starts[dim - i - 1];
1954:   }
1955:   mat->stencil.dims[dim]   = dof;
1956:   mat->stencil.starts[dim] = 0;
1957:   mat->stencil.noc         = (PetscBool)(dof == 1);
1958:   PetscFunctionReturn(PETSC_SUCCESS);
1959: }

1961: /*@C
1962:   MatSetValuesBlocked - Inserts or adds a block of values into a matrix.

1964:   Not Collective

1966:   Input Parameters:
1967: + mat  - the matrix
1968: . v    - a logically two-dimensional array of values
1969: . m    - the number of block rows
1970: . idxm - the global block indices
1971: . n    - the number of block columns
1972: . idxn - the global block indices
1973: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values

1975:   Level: intermediate

1977:   Notes:
1978:   If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1979:   MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.

1981:   The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1982:   NOT the total number of rows/columns; for example, if the block size is 2 and
1983:   you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
1984:   The values in `idxm` would be 1 and 2; that is, the first index of each block divided by
1985:   the block size.

1987:   You must call `MatSetBlockSize()` when constructing this matrix (before
1988:   preallocating it).

1990:   By default the values, `v`, are row-oriented, so the layout of
1991:   `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.

1993:   Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1994:   options cannot be mixed without intervening calls to the assembly
1995:   routines.

1997:   `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
1998:   as well as in C.

2000:   Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
2001:   simply ignored. This allows easily inserting element stiffness matrices
2002:   with homogeneous Dirichlet boundary conditions that you don't want represented
2003:   in the matrix.

2005:   Each time an entry is set within a sparse matrix via `MatSetValues()`,
2006:   internal searching must be done to determine where to place the
2007:   data in the matrix storage space.  By instead inserting blocks of
2008:   entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2009:   reduced.

2011:   Example:
2012: .vb
2013:    Suppose m=n=2 and the block size (bs) is 2. The array is

2015:    1  2  | 3  4
2016:    5  6  | 7  8
2017:    - - - | - - -
2018:    9  10 | 11 12
2019:    13 14 | 15 16

2021:    v[] should be passed in like
2022:    v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]

2024:   If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2025:    v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2026: .ve
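
  A call matching that example might look as follows (a sketch, assuming `mat` has block size 2, is preallocated, and the values go into block rows and block columns 1 and 2):
.vb
  PetscInt    idxm[2] = {1, 2}, idxn[2] = {1, 2};
  PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

  PetscCall(MatSetValuesBlocked(mat, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve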

2028: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2029: @*/
2030: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2031: {
2032:   PetscFunctionBeginHot;
2035:   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2036:   PetscAssertPointer(idxm, 3);
2037:   PetscAssertPointer(idxn, 5);
2038:   MatCheckPreallocated(mat, 1);
2039:   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2040:   else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2041:   if (PetscDefined(USE_DEBUG)) {
2042:     PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2043:     PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2044:   }
2045:   if (PetscDefined(USE_DEBUG)) {
2046:     PetscInt rbs, cbs, M, N, i;
2047:     PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2048:     PetscCall(MatGetSize(mat, &M, &N));
2049:     for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2050:     for (i = 0; i < n; i++)
2051:       PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2052:   }
2053:   if (mat->assembled) {
2054:     mat->was_assembled = PETSC_TRUE;
2055:     mat->assembled     = PETSC_FALSE;
2056:   }
2057:   PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2058:   if (mat->ops->setvaluesblocked) {
2059:     PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2060:   } else {
2061:     PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2062:     PetscInt i, j, bs, cbs;

2064:     PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2065:     if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2066:       iidxm = buf;
2067:       iidxn = buf + m * bs;
2068:     } else {
2069:       PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2070:       iidxm = bufr;
2071:       iidxn = bufc;
2072:     }
2073:     for (i = 0; i < m; i++) {
2074:       for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2075:     }
2076:     if (m != n || bs != cbs || idxm != idxn) {
2077:       for (i = 0; i < n; i++) {
2078:         for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2079:       }
2080:     } else iidxn = iidxm;
2081:     PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2082:     PetscCall(PetscFree2(bufr, bufc));
2083:   }
2084:   PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2085:   PetscFunctionReturn(PETSC_SUCCESS);
2086: }

2088: /*@C
2089:   MatGetValues - Gets a block of local values from a matrix.

2091:   Not Collective; can only return values that are owned by the given process

2093:   Input Parameters:
2094: + mat  - the matrix
2095: . v    - a logically two-dimensional array for storing the values
2096: . m    - the number of rows
2097: . idxm - the global indices of the rows
2098: . n    - the number of columns
2099: - idxn - the global indices of the columns

2101:   Level: advanced

2103:   Notes:
2104:   The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2105:   The values, `v`, are then returned in a row-oriented format,
2106:   analogous to that used by default in `MatSetValues()`.

2108:   `MatGetValues()` uses 0-based row and column numbers in
2109:   Fortran as well as in C.

2111:   `MatGetValues()` requires that the matrix has been assembled
2112:   with `MatAssemblyBegin()`/`MatAssemblyEnd()`.  Thus, calls to
2113:   `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2114:   without intermediate matrix assembly.

2116:   Negative row or column indices will be ignored and those locations in `v` will be
2117:   left unchanged.

2119:   For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2120:   That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2121:   from `MatGetOwnershipRange`(mat,&rstart,&rend).
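
  A minimal sketch that reads a 1x2 block of locally owned entries (assuming `mat` is assembled and the row and columns below are owned by the calling process):
.vb
  PetscInt    row[1] = {0}, cols[2] = {0, 1};
  PetscScalar vals[2];

  PetscCall(MatGetValues(mat, 1, row, 2, cols, vals)); /* vals[j] = mat(row[0], cols[j]) */
.ve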

2123: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2124: @*/
2125: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2126: {
2127:   PetscFunctionBegin;
2130:   if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2131:   PetscAssertPointer(idxm, 3);
2132:   PetscAssertPointer(idxn, 5);
2133:   PetscAssertPointer(v, 6);
2134:   PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2135:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2136:   MatCheckPreallocated(mat, 1);

2138:   PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2139:   PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2140:   PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2141:   PetscFunctionReturn(PETSC_SUCCESS);
2142: }

2144: /*@C
2145:   MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2146:   defined previously by `MatSetLocalToGlobalMapping()`

2148:   Not Collective

2150:   Input Parameters:
2151: + mat  - the matrix
2152: . nrow - number of rows
2153: . irow - the row local indices
2154: . ncol - number of columns
2155: - icol - the column local indices

2157:   Output Parameter:
2158: . y - a logically two-dimensional array of values

2160:   Level: advanced

2162:   Notes:
2163:   If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.

2165:   This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2166:   are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2167:   determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2168:   with `MatSetLocalToGlobalMapping()`.

2170:   Developer Note:
2171:   This is labelled with C so it does not automatically generate Fortran stubs and interfaces
2172:   because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.

2174: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2175:           `MatSetValuesLocal()`, `MatGetValues()`
2176: @*/
2177: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2178: {
2179:   PetscFunctionBeginHot;
2182:   MatCheckPreallocated(mat, 1);
2183:   if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2184:   PetscAssertPointer(irow, 3);
2185:   PetscAssertPointer(icol, 5);
2186:   if (PetscDefined(USE_DEBUG)) {
2187:     PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2188:     PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2189:   }
2190:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2191:   PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2192:   if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2193:   else {
2194:     PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2195:     if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2196:       irowm = buf;
2197:       icolm = buf + nrow;
2198:     } else {
2199:       PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2200:       irowm = bufr;
2201:       icolm = bufc;
2202:     }
2203:     PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2204:     PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2205:     PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2206:     PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2207:     PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2208:     PetscCall(PetscFree2(bufr, bufc));
2209:   }
2210:   PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2211:   PetscFunctionReturn(PETSC_SUCCESS);
2212: }

2214: /*@
2215:   MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2216:   the same size. Currently, this can only be called once and creates the given matrix.

2218:   Not Collective

2220:   Input Parameters:
2221: + mat  - the matrix
2222: . nb   - the number of blocks
2223: . bs   - the number of rows (and columns) in each block
2224: . rows - a concatenation of the rows for each block
2225: - v    - a concatenation of logically two-dimensional arrays of values

2227:   Level: advanced

2229:   Notes:
2230:   `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values

2232:   In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
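
  A minimal sketch adding two 2x2 blocks (illustrative only; the rows listed must be valid global rows of `mat`):
.vb
  PetscInt    rows[4] = {0, 1, 2, 3};  /* rows of block 0, then rows of block 1 */
  PetscScalar v[8]    = {1, 2,
                         3, 4,         /* block 0, row-oriented */
                         5, 6,
                         7, 8};        /* block 1, row-oriented */

  PetscCall(MatSetValuesBatch(mat, 2, 2, rows, v));
.ve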

2234: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2235:           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2236: @*/
2237: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2238: {
2239:   PetscFunctionBegin;
2242:   PetscAssertPointer(rows, 4);
2243:   PetscAssertPointer(v, 5);
2244:   PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");

2246:   PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2247:   if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2248:   else {
2249:     for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2250:   }
2251:   PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2252:   PetscFunctionReturn(PETSC_SUCCESS);
2253: }

2255: /*@
2256:   MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2257:   the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2258:   using a local (per-processor) numbering.

2260:   Not Collective

2262:   Input Parameters:
2263: + x        - the matrix
2264: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2265: - cmapping - column mapping

2267:   Level: intermediate

2269:   Note:
2270:   If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
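
  A minimal sketch (assuming `x` has had its sizes set, and that `nlocal` and `ltog[]`, the number of local indices and their global equivalents, are defined by the application):
.vb
  ISLocalToGlobalMapping map;

  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD, 1, nlocal, ltog, PETSC_COPY_VALUES, &map));
  PetscCall(MatSetLocalToGlobalMapping(x, map, map));
  PetscCall(ISLocalToGlobalMappingDestroy(&map));
.ve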

2272: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2273: @*/
2274: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2275: {
2276:   PetscFunctionBegin;
2281:   if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2282:   else {
2283:     PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2284:     PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2285:   }
2286:   PetscFunctionReturn(PETSC_SUCCESS);
2287: }

2289: /*@
2290:   MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`

2292:   Not Collective

2294:   Input Parameter:
2295: . A - the matrix

2297:   Output Parameters:
2298: + rmapping - row mapping
2299: - cmapping - column mapping

2301:   Level: advanced

2303: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2304: @*/
2305: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2306: {
2307:   PetscFunctionBegin;
2310:   if (rmapping) {
2311:     PetscAssertPointer(rmapping, 2);
2312:     *rmapping = A->rmap->mapping;
2313:   }
2314:   if (cmapping) {
2315:     PetscAssertPointer(cmapping, 3);
2316:     *cmapping = A->cmap->mapping;
2317:   }
2318:   PetscFunctionReturn(PETSC_SUCCESS);
2319: }

2321: /*@
2322:   MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix

2324:   Logically Collective

2326:   Input Parameters:
2327: + A    - the matrix
2328: . rmap - row layout
2329: - cmap - column layout

2331:   Level: advanced

2333:   Note:
2334:   The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.

2336: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2337: @*/
2338: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2339: {
2340:   PetscFunctionBegin;
2342:   PetscCall(PetscLayoutReference(rmap, &A->rmap));
2343:   PetscCall(PetscLayoutReference(cmap, &A->cmap));
2344:   PetscFunctionReturn(PETSC_SUCCESS);
2345: }

2347: /*@
2348:   MatGetLayouts - Gets the `PetscLayout` objects for rows and columns

2350:   Not Collective

2352:   Input Parameter:
2353: . A - the matrix

2355:   Output Parameters:
2356: + rmap - row layout
2357: - cmap - column layout

2359:   Level: advanced

2361: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2362: @*/
2363: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2364: {
2365:   PetscFunctionBegin;
2368:   if (rmap) {
2369:     PetscAssertPointer(rmap, 2);
2370:     *rmap = A->rmap;
2371:   }
2372:   if (cmap) {
2373:     PetscAssertPointer(cmap, 3);
2374:     *cmap = A->cmap;
2375:   }
2376:   PetscFunctionReturn(PETSC_SUCCESS);
2377: }

2379: /*@C
2380:   MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2381:   using a local numbering of the rows and columns.

2383:   Not Collective

2385:   Input Parameters:
2386: + mat  - the matrix
2387: . nrow - number of rows
2388: . irow - the row local indices
2389: . ncol - number of columns
2390: . icol - the column local indices
2391: . y    - a logically two-dimensional array of values
2392: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values

2394:   Level: intermediate

2396:   Notes:
2397:   If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine

2399:   Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2400:   options cannot be mixed without intervening calls to the assembly
2401:   routines.

2403:   These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2404:   MUST be called after all calls to `MatSetValuesLocal()` have been completed.
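
  A minimal sketch (assuming the local-to-global mapping has been set and `mat` is preallocated; the indices and values are illustrative):
.vb
  PetscInt    lrow[1] = {0}, lcol[2] = {0, 1}; /* local indices */
  PetscScalar vals[2] = {4.0, -1.0};

  PetscCall(MatSetValuesLocal(mat, 1, lrow, 2, lcol, vals, ADD_VALUES));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve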

2406:   Developer Note:
2407:   This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2408:   because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.

2410: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2411:           `MatGetValuesLocal()`
2412: @*/
2413: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2414: {
2415:   PetscFunctionBeginHot;
2418:   MatCheckPreallocated(mat, 1);
2419:   if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2420:   PetscAssertPointer(irow, 3);
2421:   PetscAssertPointer(icol, 5);
2422:   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2423:   else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2424:   if (PetscDefined(USE_DEBUG)) {
2425:     PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2426:     PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2427:   }

2429:   if (mat->assembled) {
2430:     mat->was_assembled = PETSC_TRUE;
2431:     mat->assembled     = PETSC_FALSE;
2432:   }
2433:   PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2434:   if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2435:   else {
2436:     PetscInt        buf[8192], *bufr = NULL, *bufc = NULL;
2437:     const PetscInt *irowm, *icolm;

2439:     if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2440:       bufr  = buf;
2441:       bufc  = buf + nrow;
2442:       irowm = bufr;
2443:       icolm = bufc;
2444:     } else {
2445:       PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2446:       irowm = bufr;
2447:       icolm = bufc;
2448:     }
2449:     if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2450:     else irowm = irow;
2451:     if (mat->cmap->mapping) {
2452:       if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2453:         PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2454:       } else icolm = irowm;
2455:     } else icolm = icol;
2456:     PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2457:     if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2458:   }
2459:   PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2460:   PetscFunctionReturn(PETSC_SUCCESS);
2461: }

2463: /*@C
2464:   MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2465:   using a local ordering of the nodes a block at a time.

2467:   Not Collective

2469:   Input Parameters:
2470: + mat  - the matrix
2471: . nrow - number of rows
2472: . irow - the row local indices
2473: . ncol - number of columns
2474: . icol - the column local indices
2475: . y    - a logically two-dimensional array of values
2476: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values

2478:   Level: intermediate

2480:   Notes:
2481:   If you create the matrix yourself (that is, not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2482:   before using this routine.

2484:   Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2485:   options cannot be mixed without intervening calls to the assembly
2486:   routines.

2488:   These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2489:   MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.

2491:   Developer Note:
2492:   This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2493:   because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.

2495: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2496:           `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2497: @*/
2498: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2499: {
2500:   PetscFunctionBeginHot;
2503:   MatCheckPreallocated(mat, 1);
2504:   if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2505:   PetscAssertPointer(irow, 3);
2506:   PetscAssertPointer(icol, 5);
2507:   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2508:   else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2509:   if (PetscDefined(USE_DEBUG)) {
2510:     PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2511:     PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2512:   }

2514:   if (mat->assembled) {
2515:     mat->was_assembled = PETSC_TRUE;
2516:     mat->assembled     = PETSC_FALSE;
2517:   }
2518:   if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2519:     PetscInt irbs, rbs;
2520:     PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2521:     PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2522:     PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2523:   }
2524:   if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2525:     PetscInt icbs, cbs;
2526:     PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2527:     PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2528:     PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2529:   }
2530:   PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2531:   if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2532:   else {
2533:     PetscInt        buf[8192], *bufr = NULL, *bufc = NULL;
2534:     const PetscInt *irowm, *icolm;

2536:     if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2537:       bufr  = buf;
2538:       bufc  = buf + nrow;
2539:       irowm = bufr;
2540:       icolm = bufc;
2541:     } else {
2542:       PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2543:       irowm = bufr;
2544:       icolm = bufc;
2545:     }
2546:     if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2547:     else irowm = irow;
2548:     if (mat->cmap->mapping) {
2549:       if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2550:         PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2551:       } else icolm = irowm;
2552:     } else icolm = icol;
2553:     PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2554:     if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2555:   }
2556:   PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2557:   PetscFunctionReturn(PETSC_SUCCESS);
2558: }

2560: /*@
2561:   MatMultDiagonalBlock - Computes the matrix-vector product $y = Dx$, where `D` is defined by the inode or block structure of the diagonal

2563:   Collective

2565:   Input Parameters:
2566: + mat - the matrix
2567: - x   - the vector to be multiplied

2569:   Output Parameter:
2570: . y - the result

2572:   Level: developer

2574:   Note:
2575:   The vectors `x` and `y` cannot be the same.  I.e., one cannot
2576:   call `MatMultDiagonalBlock`(A,y,y).

2578: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2579: @*/
2580: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2581: {
2582:   PetscFunctionBegin;

2588:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2589:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2590:   PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2591:   MatCheckPreallocated(mat, 1);

2593:   PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2594:   PetscCall(PetscObjectStateIncrease((PetscObject)y));
2595:   PetscFunctionReturn(PETSC_SUCCESS);
2596: }

2598: /*@
2599:   MatMult - Computes the matrix-vector product, $y = Ax$.

2601:   Neighbor-wise Collective

2603:   Input Parameters:
2604: + mat - the matrix
2605: - x   - the vector to be multiplied

2607:   Output Parameter:
2608: . y - the result

2610:   Level: beginner

2612:   Note:
2613:   The vectors `x` and `y` cannot be the same.  I.e., one cannot
2614:   call `MatMult`(A,y,y).
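
  A minimal sketch (assuming `mat` is assembled; `MatCreateVecs()` provides vectors with compatible layouts):
.vb
  Vec x, y;

  PetscCall(MatCreateVecs(mat, &x, &y)); /* x is a right vector, y a left vector */
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(mat, x, y));         /* y = mat*x */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
.ve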

2616: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2617: @*/
2618: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2619: {
2620:   PetscFunctionBegin;
2624:   VecCheckAssembled(x);
2626:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2627:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2628:   PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2629:   PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2630:   PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2631:   PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2632:   PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2633:   PetscCall(VecSetErrorIfLocked(y, 3));
2634:   if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2635:   MatCheckPreallocated(mat, 1);

2637:   PetscCall(VecLockReadPush(x));
2638:   PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2639:   PetscUseTypeMethod(mat, mult, x, y);
2640:   PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2641:   if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2642:   PetscCall(VecLockReadPop(x));
2643:   PetscFunctionReturn(PETSC_SUCCESS);
2644: }

2646: /*@
2647:   MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.

2649:   Neighbor-wise Collective

2651:   Input Parameters:
2652: + mat - the matrix
2653: - x   - the vector to be multiplied

2655:   Output Parameter:
2656: . y - the result

2658:   Level: beginner

2660:   Notes:
2661:   The vectors `x` and `y` cannot be the same.  I.e., one cannot
2662:   call `MatMultTranspose`(A,y,y).

2664:   For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2665:   use `MatMultHermitianTranspose()` for that.

2667: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2668: @*/
2669: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2670: {
2671:   PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;

2673:   PetscFunctionBegin;
2677:   VecCheckAssembled(x);

2680:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2681:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2682:   PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2683:   PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2684:   PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2685:   PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2686:   PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2687:   if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2688:   MatCheckPreallocated(mat, 1);

2690:   if (!mat->ops->multtranspose) {
2691:     if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2692:     PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2693:   } else op = mat->ops->multtranspose;
2694:   PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2695:   PetscCall(VecLockReadPush(x));
2696:   PetscCall((*op)(mat, x, y));
2697:   PetscCall(VecLockReadPop(x));
2698:   PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2699:   PetscCall(PetscObjectStateIncrease((PetscObject)y));
2700:   if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2701:   PetscFunctionReturn(PETSC_SUCCESS);
2702: }

2704: /*@
2705:   MatMultHermitianTranspose - Computes the matrix Hermitian transpose times a vector, $y = A^H * x$.

2707:   Neighbor-wise Collective

2709:   Input Parameters:
2710: + mat - the matrix
2711: - x   - the vector to be multiplied

2713:   Output Parameter:
2714: . y - the result

2716:   Level: beginner

2718:   Notes:
2719:   The vectors `x` and `y` cannot be the same.  I.e., one cannot
2720:   call `MatMultHermitianTranspose`(A,y,y).

2722:   Also called the conjugate transpose, complex conjugate transpose, or adjoint.

2724:   For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.

2726: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2727: @*/
2728: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2729: {
2730:   PetscFunctionBegin;

2736:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2737:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2738:   PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2739:   PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2740:   PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2741:   PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2742:   PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2743:   MatCheckPreallocated(mat, 1);

2745:   PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2746: #if defined(PETSC_USE_COMPLEX)
2747:   if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2748:     PetscCall(VecLockReadPush(x));
2749:     if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2750:     else PetscUseTypeMethod(mat, mult, x, y);
2751:     PetscCall(VecLockReadPop(x));
2752:   } else {
2753:     Vec w;
2754:     PetscCall(VecDuplicate(x, &w));
2755:     PetscCall(VecCopy(x, w));
2756:     PetscCall(VecConjugate(w));
2757:     PetscCall(MatMultTranspose(mat, w, y));
2758:     PetscCall(VecDestroy(&w));
2759:     PetscCall(VecConjugate(y));
2760:   }
2761:   PetscCall(PetscObjectStateIncrease((PetscObject)y));
2762: #else
2763:   PetscCall(MatMultTranspose(mat, x, y));
2764: #endif
2765:   PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2766:   PetscFunctionReturn(PETSC_SUCCESS);
2767: }

2769: /*@
2770:   MatMultAdd - Computes $v3 = v2 + A * v1$.

2772:   Neighbor-wise Collective

2774:   Input Parameters:
2775: + mat - the matrix
2776: . v1  - the vector to be multiplied by `mat`
2777: - v2  - the vector to be added to the result

2779:   Output Parameter:
2780: . v3 - the result

2782:   Level: beginner

2784:   Note:
2785:   The vectors `v1` and `v3` cannot be the same.  I.e., one cannot
2786:   call `MatMultAdd`(A,v1,v2,v1).
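
  Example Usage:
  A minimal sketch; it assumes `A`, `v1`, and `v2` already exist with layouts compatible with `A` (recall that
  `v1` and `v3` must be different vectors).
.vb
      Vec v3;

      VecDuplicate(v2, &v3);
      MatMultAdd(A, v1, v2, v3); /* v3 = v2 + A v1 */
      VecDestroy(&v3);
.ve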

2788: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2789: @*/
2790: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2791: {
2792:   PetscFunctionBegin;

2799:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2800:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2801:   PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2802:   /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2803:      PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2804:   PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2805:   PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2806:   PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2807:   MatCheckPreallocated(mat, 1);

2809:   PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2810:   PetscCall(VecLockReadPush(v1));
2811:   PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2812:   PetscCall(VecLockReadPop(v1));
2813:   PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2814:   PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2815:   PetscFunctionReturn(PETSC_SUCCESS);
2816: }

2818: /*@
2819:   MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.

2821:   Neighbor-wise Collective

2823:   Input Parameters:
2824: + mat - the matrix
2825: . v1  - the vector to be multiplied by the transpose of the matrix
2826: - v2  - the vector to be added to the result

2828:   Output Parameter:
2829: . v3 - the result

2831:   Level: beginner

2833:   Note:
2834:   The vectors `v1` and `v3` cannot be the same.  I.e., one cannot
2835:   call `MatMultTransposeAdd`(A,v1,v2,v1).

2837: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2838: @*/
2839: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2840: {
2841:   PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;

2843:   PetscFunctionBegin;

2850:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2851:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2852:   PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2853:   PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2854:   PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2855:   PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2856:   PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2857:   MatCheckPreallocated(mat, 1);

2859:   PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2860:   PetscCall(VecLockReadPush(v1));
2861:   PetscCall((*op)(mat, v1, v2, v3));
2862:   PetscCall(VecLockReadPop(v1));
2863:   PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2864:   PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2865:   PetscFunctionReturn(PETSC_SUCCESS);
2866: }

2868: /*@
2869:   MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.

2871:   Neighbor-wise Collective

2873:   Input Parameters:
2874: + mat - the matrix
2875: . v1  - the vector to be multiplied by the Hermitian transpose
2876: - v2  - the vector to be added to the result

2878:   Output Parameter:
2879: . v3 - the result

2881:   Level: beginner

2883:   Note:
2884:   The vectors `v1` and `v3` cannot be the same.  I.e., one cannot
2885:   call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).

2887: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2888: @*/
2889: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2890: {
2891:   PetscFunctionBegin;

2898:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2899:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2900:   PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2901:   PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2902:   PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2903:   PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2904:   MatCheckPreallocated(mat, 1);

2906:   PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2907:   PetscCall(VecLockReadPush(v1));
2908:   if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2909:   else {
2910:     Vec w, z;
2911:     PetscCall(VecDuplicate(v1, &w));
2912:     PetscCall(VecCopy(v1, w));
2913:     PetscCall(VecConjugate(w));
2914:     PetscCall(VecDuplicate(v3, &z));
2915:     PetscCall(MatMultTranspose(mat, w, z));
2916:     PetscCall(VecDestroy(&w));
2917:     PetscCall(VecConjugate(z));
2918:     if (v2 != v3) {
2919:       PetscCall(VecWAXPY(v3, 1.0, v2, z));
2920:     } else {
2921:       PetscCall(VecAXPY(v3, 1.0, z));
2922:     }
2923:     PetscCall(VecDestroy(&z));
2924:   }
2925:   PetscCall(VecLockReadPop(v1));
2926:   PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2927:   PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2928:   PetscFunctionReturn(PETSC_SUCCESS);
2929: }

2931: /*@
2932:   MatGetFactorType - gets the type of factorization that a matrix represents

2934:   Not Collective

2936:   Input Parameter:
2937: . mat - the matrix

2939:   Output Parameter:
2940: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`

2942:   Level: intermediate
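
  Example Usage:
  A minimal sketch that checks whether a matrix (here called `F`, an illustrative name) is a factored matrix.
.vb
      MatFactorType type;

      MatGetFactorType(F, &type);
      if (type == MAT_FACTOR_NONE) {
        /* F is an ordinary (unfactored) matrix */
      }
.ve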

2944: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2945:           `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2946: @*/
2947: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2948: {
2949:   PetscFunctionBegin;
2952:   PetscAssertPointer(t, 2);
2953:   *t = mat->factortype;
2954:   PetscFunctionReturn(PETSC_SUCCESS);
2955: }

2957: /*@
2958:   MatSetFactorType - sets the type of factorization that a matrix represents

2960:   Logically Collective

2962:   Input Parameters:
2963: + mat - the matrix
2964: - t   - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`

2966:   Level: intermediate

2968: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2969:           `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2970: @*/
2971: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2972: {
2973:   PetscFunctionBegin;
2976:   mat->factortype = t;
2977:   PetscFunctionReturn(PETSC_SUCCESS);
2978: }

2980: /*@C
2981:   MatGetInfo - Returns information about matrix storage (number of
2982:   nonzeros, memory, etc.).

2984:   Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag

2986:   Input Parameters:
2987: + mat  - the matrix
2988: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)

2990:   Output Parameter:
2991: . info - matrix information context

2993:   Options Database Key:
2994: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`

2996:   Notes:
2997:   The `MatInfo` context contains a variety of matrix data, including
2998:   number of nonzeros allocated and used, number of mallocs during
2999:   matrix assembly, etc.  Additional information for factored matrices
3000:   is provided (such as the fill ratio, number of mallocs during
3001:   factorization, etc.).

3003:   Example:
3004:   See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3005:   data within the MatInfo context.  For example,
3006: .vb
3007:       MatInfo info;
3008:       Mat     A;
3009:       double  mal, nz_a, nz_u;

3011:       MatGetInfo(A, MAT_LOCAL, &info);
3012:       mal  = info.mallocs;
3013:       nz_a = info.nz_allocated;
3014: .ve

3016:   Fortran users should declare info as a double precision
3017:   array of dimension `MAT_INFO_SIZE`, and then extract the parameters
3018:   of interest.  See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h for
3019:   a complete list of parameter names.
3020: .vb
3021:       double  precision info(MAT_INFO_SIZE)
3022:       double  precision mal, nz_a
3023:       Mat     A
3024:       integer ierr

3026:       call MatGetInfo(A, MAT_LOCAL, info, ierr)
3027:       mal = info(MAT_INFO_MALLOCS)
3028:       nz_a = info(MAT_INFO_NZ_ALLOCATED)
3029: .ve

3031:   Level: intermediate

3033:   Developer Note:
3034:   The Fortran interface is not autogenerated as the
3035:   interface definition cannot be generated correctly [due to `MatInfo` argument]

3037: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3038: @*/
3039: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3040: {
3041:   PetscFunctionBegin;
3044:   PetscAssertPointer(info, 3);
3045:   MatCheckPreallocated(mat, 1);
3046:   PetscUseTypeMethod(mat, getinfo, flag, info);
3047:   PetscFunctionReturn(PETSC_SUCCESS);
3048: }

3050: /*
3051:    This is used by external packages where it is not easy to get the info from the actual
3052:    matrix factorization.
3053: */
3054: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3055: {
3056:   PetscFunctionBegin;
3057:   PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3058:   PetscFunctionReturn(PETSC_SUCCESS);
3059: }

3061: /*@C
3062:   MatLUFactor - Performs an in-place LU factorization of a matrix.

3064:   Collective

3066:   Input Parameters:
3067: + mat  - the matrix
3068: . row  - row permutation
3069: . col  - column permutation
3070: - info - options for factorization, includes
3071: .vb
3072:           fill - expected fill as ratio of original fill.
3073:           dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3074:                    Run with the option -info to determine an optimal value to use
3075: .ve

3077:   Level: developer

3079:   Notes:
3080:   Most users should employ the `KSP` interface for linear solvers
3081:   instead of working directly with matrix algebra routines such as this.
3082:   See, e.g., `KSPCreate()`.

3084:   This changes the state of the matrix to a factored matrix; it cannot be used
3085:   for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.

3087:   This is really in-place only for dense matrices; the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3088:   when not using `KSP`.
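
  Example Usage:
  A minimal sketch of an in-place factorization followed by a solve; it assumes `A` is a square, assembled (typically
  dense) matrix and `b`, `x` are compatible vectors. The natural ordering and the `NULL` options are illustrative choices.
.vb
      IS rowperm, colperm;

      MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm);
      MatLUFactor(A, rowperm, colperm, NULL); /* A now holds its own LU factors */
      MatSolve(A, b, x);                      /* solve A x = b with those factors */
      ISDestroy(&rowperm);
      ISDestroy(&colperm);
.ve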

3090:   Developer Note:
3091:   The Fortran interface is not autogenerated as the
3092:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3094: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3095:           `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3096: @*/
3097: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3098: {
3099:   MatFactorInfo tinfo;

3101:   PetscFunctionBegin;
3105:   if (info) PetscAssertPointer(info, 4);
3107:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3108:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3109:   MatCheckPreallocated(mat, 1);
3110:   if (!info) {
3111:     PetscCall(MatFactorInfoInitialize(&tinfo));
3112:     info = &tinfo;
3113:   }

3115:   PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3116:   PetscUseTypeMethod(mat, lufactor, row, col, info);
3117:   PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3118:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3119:   PetscFunctionReturn(PETSC_SUCCESS);
3120: }

3122: /*@C
3123:   MatILUFactor - Performs an in-place ILU factorization of a matrix.

3125:   Collective

3127:   Input Parameters:
3128: + mat  - the matrix
3129: . row  - row permutation
3130: . col  - column permutation
3131: - info - structure containing
3132: .vb
3133:       levels - number of levels of fill.
3134:       expected fill - as ratio of original fill.
3135:       1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3136:                 missing diagonal entries)
3137: .ve

3139:   Level: developer

3141:   Notes:
3142:   Most users should employ the `KSP` interface for linear solvers
3143:   instead of working directly with matrix algebra routines such as this.
3144:   See, e.g., `KSPCreate()`.

3146:   This is likely truly in-place only when the level of fill is zero; otherwise it allocates
3147:   new space to store the factored matrix and frees the previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3148:   when not using `KSP`.

3150:   Developer Note:
3151:   The Fortran interface is not autogenerated as the
3152:   interface definition cannot be generated correctly [due to MatFactorInfo]

3154: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3155: @*/
3156: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3157: {
3158:   PetscFunctionBegin;
3162:   PetscAssertPointer(info, 4);
3164:   PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3165:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3166:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3167:   MatCheckPreallocated(mat, 1);

3169:   PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3170:   PetscUseTypeMethod(mat, ilufactor, row, col, info);
3171:   PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3172:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3173:   PetscFunctionReturn(PETSC_SUCCESS);
3174: }

3176: /*@C
3177:   MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3178:   Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.

3180:   Collective

3182:   Input Parameters:
3183: + fact - the factor matrix obtained with `MatGetFactor()`
3184: . mat  - the matrix
3185: . row  - the row permutation
3186: . col  - the column permutation
3187: - info - options for factorization, includes
3188: .vb
3189:           fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3190:           dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3191: .ve

3193:   Level: developer

3195:   Notes:
3196:   See [Matrix Factorization](sec_matfactor) for additional information about factorizations

3198:   Most users should employ the simplified `KSP` interface for linear solvers
3199:   instead of working directly with matrix algebra routines such as this.
3200:   See, e.g., `KSPCreate()`.
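
  Example Usage:
  A minimal sketch of the complete factor-and-solve sequence; it assumes `A` is a square, assembled matrix and `b`, `x`
  are compatible vectors. The solver package and ordering are illustrative choices, and passing `NULL` for the options
  uses the default `MatFactorInfo` values.
.vb
      Mat F;
      IS  rowperm, colperm;

      MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm);
      MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F);
      MatLUFactorSymbolic(F, A, rowperm, colperm, NULL);
      MatLUFactorNumeric(F, A, NULL);
      MatSolve(F, b, x);
      ISDestroy(&rowperm);
      ISDestroy(&colperm);
      MatDestroy(&F);
.ve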

3202:   Developer Note:
3203:   The Fortran interface is not autogenerated as the
3204:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3206: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3207: @*/
3208: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3209: {
3210:   MatFactorInfo tinfo;

3212:   PetscFunctionBegin;
3217:   if (info) PetscAssertPointer(info, 5);
3220:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3221:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3222:   MatCheckPreallocated(mat, 2);
3223:   if (!info) {
3224:     PetscCall(MatFactorInfoInitialize(&tinfo));
3225:     info = &tinfo;
3226:   }

3228:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3229:   PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3230:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3231:   PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3232:   PetscFunctionReturn(PETSC_SUCCESS);
3233: }

3235: /*@C
3236:   MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3237:   Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.

3239:   Collective

3241:   Input Parameters:
3242: + fact - the factor matrix obtained with `MatGetFactor()`
3243: . mat  - the matrix
3244: - info - options for factorization

3246:   Level: developer

3248:   Notes:
3249:   See `MatLUFactor()` for in-place factorization.  See
3250:   `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.

3252:   Most users should employ the `KSP` interface for linear solvers
3253:   instead of working directly with matrix algebra routines such as this.
3254:   See, e.g., `KSPCreate()`.

3256:   Developer Note:
3257:   The Fortran interface is not autogenerated as the
3258:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3260: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3261: @*/
3262: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3263: {
3264:   MatFactorInfo tinfo;

3266:   PetscFunctionBegin;
3271:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3272:   PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3273:              mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);

3275:   MatCheckPreallocated(mat, 2);
3276:   if (!info) {
3277:     PetscCall(MatFactorInfoInitialize(&tinfo));
3278:     info = &tinfo;
3279:   }

3281:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3282:   else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3283:   PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3284:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3285:   else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3286:   PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3287:   PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3288:   PetscFunctionReturn(PETSC_SUCCESS);
3289: }

3291: /*@C
3292:   MatCholeskyFactor - Performs in-place Cholesky factorization of a
3293:   symmetric matrix.

3295:   Collective

3297:   Input Parameters:
3298: + mat  - the matrix
3299: . perm - row and column permutations
3300: - info - expected fill as ratio of original fill

3302:   Level: developer

3304:   Notes:
3305:   See `MatLUFactor()` for the nonsymmetric case.  See also `MatGetFactor()`,
3306:   `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.

3308:   Most users should employ the `KSP` interface for linear solvers
3309:   instead of working directly with matrix algebra routines such as this.
3310:   See, e.g., `KSPCreate()`.

3312:   Developer Note:
3313:   The Fortran interface is not autogenerated as the
3314:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3316: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3317:           `MatGetOrdering()`
3318: @*/
3319: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3320: {
3321:   MatFactorInfo tinfo;

3323:   PetscFunctionBegin;
3326:   if (info) PetscAssertPointer(info, 3);
3328:   PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3329:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3330:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3331:   MatCheckPreallocated(mat, 1);
3332:   if (!info) {
3333:     PetscCall(MatFactorInfoInitialize(&tinfo));
3334:     info = &tinfo;
3335:   }

3337:   PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3338:   PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3339:   PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3340:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3341:   PetscFunctionReturn(PETSC_SUCCESS);
3342: }

3344: /*@C
3345:   MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3346:   of a symmetric matrix.

3348:   Collective

3350:   Input Parameters:
3351: + fact - the factor matrix obtained with `MatGetFactor()`
3352: . mat  - the matrix
3353: . perm - row and column permutations
3354: - info - options for factorization, includes
3355: .vb
3356:           fill - expected fill as ratio of original fill.
3357:           dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3358:                    Run with the option -info to determine an optimal value to use
3359: .ve

3361:   Level: developer

3363:   Notes:
3364:   See `MatLUFactorSymbolic()` for the nonsymmetric case.  See also
3365:   `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.

3367:   Most users should employ the `KSP` interface for linear solvers
3368:   instead of working directly with matrix algebra routines such as this.
3369:   See, e.g., `KSPCreate()`.
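
  Example Usage:
  A minimal sketch of the symbolic/numeric Cholesky sequence; it assumes `A` is a symmetric, assembled matrix and `b`,
  `x` are compatible vectors. The solver package and ordering are illustrative choices, and `NULL` options use the
  default `MatFactorInfo` values.
.vb
      Mat F;
      IS  rowperm, colperm;

      MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm);
      MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F);
      MatCholeskyFactorSymbolic(F, A, rowperm, NULL);
      MatCholeskyFactorNumeric(F, A, NULL);
      MatSolve(F, b, x);
      ISDestroy(&rowperm);
      ISDestroy(&colperm);
      MatDestroy(&F);
.ve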

3371:   Developer Note:
3372:   The Fortran interface is not autogenerated as the
3373:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3375: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3376:           `MatGetOrdering()`
3377: @*/
3378: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3379: {
3380:   MatFactorInfo tinfo;

3382:   PetscFunctionBegin;
3386:   if (info) PetscAssertPointer(info, 4);
3389:   PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3390:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3391:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3392:   MatCheckPreallocated(mat, 2);
3393:   if (!info) {
3394:     PetscCall(MatFactorInfoInitialize(&tinfo));
3395:     info = &tinfo;
3396:   }

3398:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3399:   PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3400:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3401:   PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3402:   PetscFunctionReturn(PETSC_SUCCESS);
3403: }

3405: /*@C
3406:   MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3407:   of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3408:   `MatCholeskyFactorSymbolic()`.

3410:   Collective

3412:   Input Parameters:
3413: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3414: . mat  - the initial matrix that is to be factored
3415: - info - options for factorization

3417:   Level: developer

3419:   Note:
3420:   Most users should employ the `KSP` interface for linear solvers
3421:   instead of working directly with matrix algebra routines such as this.
3422:   See, e.g., `KSPCreate()`.

3424:   Developer Note:
3425:   The Fortran interface is not autogenerated as the
3426:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3428: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3429: @*/
3430: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3431: {
3432:   MatFactorInfo tinfo;

3434:   PetscFunctionBegin;
3439:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3440:   PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3441:              mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3442:   MatCheckPreallocated(mat, 2);
3443:   if (!info) {
3444:     PetscCall(MatFactorInfoInitialize(&tinfo));
3445:     info = &tinfo;
3446:   }

3448:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3449:   else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3450:   PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3451:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3452:   else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3453:   PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3454:   PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3455:   PetscFunctionReturn(PETSC_SUCCESS);
3456: }

3458: /*@
3459:   MatQRFactor - Performs an in-place QR factorization of a matrix.

3461:   Collective

3463:   Input Parameters:
3464: + mat  - the matrix
3465: . col  - column permutation
3466: - info - options for factorization, includes
3467: .vb
3468:           fill - expected fill as ratio of original fill.
3469:           dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3470:                    Run with the option -info to determine an optimal value to use
3471: .ve

3473:   Level: developer

3475:   Notes:
3476:   Most users should employ the `KSP` interface for linear solvers
3477:   instead of working directly with matrix algebra routines such as this.
3478:   See, e.g., `KSPCreate()`.

3480:   This changes the state of the matrix to a factored matrix; it cannot be used
3481:   for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.

3483:   Developer Note:
3484:   The Fortran interface is not autogenerated as the
3485:   interface definition cannot be generated correctly [due to MatFactorInfo]

3487: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3488:           `MatSetUnfactored()`
3489: @*/
3490: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3491: {
3492:   PetscFunctionBegin;
3495:   if (info) PetscAssertPointer(info, 3);
3497:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3498:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3499:   MatCheckPreallocated(mat, 1);
3500:   PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3501:   PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3502:   PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3503:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3504:   PetscFunctionReturn(PETSC_SUCCESS);
3505: }

3507: /*@
3508:   MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3509:   Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.

3511:   Collective

3513:   Input Parameters:
3514: + fact - the factor matrix obtained with `MatGetFactor()`
3515: . mat  - the matrix
3516: . col  - column permutation
3517: - info - options for factorization, includes
3518: .vb
3519:           fill - expected fill as ratio of original fill.
3520:           dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3521:                    Run with the option -info to determine an optimal value to use
3522: .ve

3524:   Level: developer

3526:   Note:
3527:   Most users should employ the `KSP` interface for linear solvers
3528:   instead of working directly with matrix algebra routines such as this.
3529:   See, e.g., `KSPCreate()`.
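
  Example Usage:
  A minimal sketch of the QR sequence for a least-squares solve; it assumes `A` is a dense, assembled matrix, that the
  chosen solver package provides QR (the built-in PETSc dense factorization is assumed here), and that passing `NULL`
  for the column permutation and the options is acceptable.
.vb
      Mat F;

      MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F);
      MatQRFactorSymbolic(F, A, NULL, NULL);
      MatQRFactorNumeric(F, A, NULL);
      MatSolve(F, b, x); /* least-squares solution when A has more rows than columns */
      MatDestroy(&F);
.ve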

3531:   Developer Note:
3532:   The Fortran interface is not autogenerated as the
3533:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3535: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3536: @*/
3537: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3538: {
3539:   MatFactorInfo tinfo;

3541:   PetscFunctionBegin;
3545:   if (info) PetscAssertPointer(info, 4);
3548:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3549:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3550:   MatCheckPreallocated(mat, 2);
3551:   if (!info) {
3552:     PetscCall(MatFactorInfoInitialize(&tinfo));
3553:     info = &tinfo;
3554:   }

3556:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3557:   PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3558:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3559:   PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3560:   PetscFunctionReturn(PETSC_SUCCESS);
3561: }

3563: /*@
3564:   MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3565:   Call this routine after first calling `MatGetFactor()` and `MatQRFactorSymbolic()`.

3567:   Collective

3569:   Input Parameters:
3570: + fact - the factor matrix obtained with `MatGetFactor()`
3571: . mat  - the matrix
3572: - info - options for factorization

3574:   Level: developer

3576:   Notes:
3577:   See `MatQRFactor()` for in-place factorization.

3579:   Most users should employ the `KSP` interface for linear solvers
3580:   instead of working directly with matrix algebra routines such as this.
3581:   See, e.g., `KSPCreate()`.

3583:   Developer Note:
3584:   The Fortran interface is not autogenerated as the
3585:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

3587: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3588: @*/
3589: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3590: {
3591:   MatFactorInfo tinfo;

3593:   PetscFunctionBegin;
3598:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3599:   PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3600:              mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);

3602:   MatCheckPreallocated(mat, 2);
3603:   if (!info) {
3604:     PetscCall(MatFactorInfoInitialize(&tinfo));
3605:     info = &tinfo;
3606:   }

3608:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3609:   else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3610:   PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3611:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3612:   else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3613:   PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3614:   PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3615:   PetscFunctionReturn(PETSC_SUCCESS);
3616: }

3618: /*@
3619:   MatSolve - Solves $A x = b$, given a factored matrix.

3621:   Neighbor-wise Collective

3623:   Input Parameters:
3624: + mat - the factored matrix
3625: - b   - the right-hand-side vector

3627:   Output Parameter:
3628: . x - the result vector

3630:   Level: developer

3632:   Notes:
3633:   The vectors `b` and `x` cannot be the same.  I.e., one cannot
3634:   call `MatSolve`(A,x,x).

3636:   Most users should employ the `KSP` interface for linear solvers
3637:   instead of working directly with matrix algebra routines such as this.
3638:   See, e.g., `KSPCreate()`.
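
  Example Usage:
  A minimal sketch; `F` is assumed to be a factored matrix obtained with `MatGetFactor()` followed by, e.g.,
  `MatLUFactorSymbolic()` and `MatLUFactorNumeric()`, and `b`, `x` are compatible vectors.
.vb
      MatSolve(F, b, x); /* x = A^{-1} b using the stored factors */
.ve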

3640: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3641: @*/
3642: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3643: {
3644:   PetscFunctionBegin;
3649:   PetscCheckSameComm(mat, 1, b, 2);
3650:   PetscCheckSameComm(mat, 1, x, 3);
3651:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3652:   PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3653:   PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3654:   PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3655:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3656:   MatCheckPreallocated(mat, 1);

3658:   PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3659:   if (mat->factorerrortype) {
3660:     PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3661:     PetscCall(VecSetInf(x));
3662:   } else PetscUseTypeMethod(mat, solve, b, x);
3663:   PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3664:   PetscCall(PetscObjectStateIncrease((PetscObject)x));
3665:   PetscFunctionReturn(PETSC_SUCCESS);
3666: }

3668: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3669: {
3670:   Vec      b, x;
3671:   PetscInt N, i;
3672:   PetscErrorCode (*f)(Mat, Vec, Vec);
3673:   PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;

3675:   PetscFunctionBegin;
3676:   if (A->factorerrortype) {
3677:     PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3678:     PetscCall(MatSetInf(X));
3679:     PetscFunctionReturn(PETSC_SUCCESS);
3680:   }
3681:   f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3682:   PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3683:   PetscCall(MatBoundToCPU(A, &Abound));
3684:   if (!Abound) {
3685:     PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3686:     PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3687:   }
3688: #if PetscDefined(HAVE_CUDA)
3689:   if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3690:   if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3691: #elif PetscDefined(HAVE_HIP)
3692:   if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3693:   if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3694: #endif
3695:   PetscCall(MatGetSize(B, NULL, &N));
3696:   for (i = 0; i < N; i++) {
3697:     PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3698:     PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3699:     PetscCall((*f)(A, b, x));
3700:     PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3701:     PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3702:   }
3703:   if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3704:   if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3705:   PetscFunctionReturn(PETSC_SUCCESS);
3706: }

3708: /*@
3709:   MatMatSolve - Solves $A X = B$, given a factored matrix.

3711:   Neighbor-wise Collective

3713:   Input Parameters:
3714: + A - the factored matrix
3715: - B - the right-hand-side `MATDENSE` matrix (or a sparse `MATAIJ` matrix when using MUMPS)

3717:   Output Parameter:
3718: . X - the result matrix (dense matrix)

3720:   Level: developer

3722:   Note:
3723:   If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3724:   otherwise, `B` and `X` cannot be the same.
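
  Example Usage:
  A minimal sketch; `F` is assumed to be a square factored matrix and `nrhs` the number of right-hand sides.
.vb
      Mat      B, X;
      PetscInt m;

      MatGetLocalSize(F, &m, NULL);
      MatCreateDense(PetscObjectComm((PetscObject)F), m, PETSC_DECIDE, PETSC_DECIDE, nrhs, NULL, &B);
      MatCreateDense(PetscObjectComm((PetscObject)F), m, PETSC_DECIDE, PETSC_DECIDE, nrhs, NULL, &X);
      /* ... fill B, then MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY) and MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY) ... */
      MatMatSolve(F, B, X); /* column j of X solves A x = (column j of B) */
      MatDestroy(&B);
      MatDestroy(&X);
.ve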

3726: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3727: @*/
3728: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3729: {
3730:   PetscFunctionBegin;
3735:   PetscCheckSameComm(A, 1, B, 2);
3736:   PetscCheckSameComm(A, 1, X, 3);
3737:   PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3738:   PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3739:   PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3740:   if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3741:   MatCheckPreallocated(A, 1);

3743:   PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3744:   if (!A->ops->matsolve) {
3745:     PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3746:     PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3747:   } else PetscUseTypeMethod(A, matsolve, B, X);
3748:   PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3749:   PetscCall(PetscObjectStateIncrease((PetscObject)X));
3750:   PetscFunctionReturn(PETSC_SUCCESS);
3751: }

3753: /*@
3754:   MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.

3756:   Neighbor-wise Collective

3758:   Input Parameters:
3759: + A - the factored matrix
3760: - B - the right-hand-side matrix  (`MATDENSE` matrix)

3762:   Output Parameter:
3763: . X - the result matrix (dense matrix)

3765:   Level: developer

3767:   Note:
3768:   The matrices `B` and `X` cannot be the same.  I.e., one cannot
3769:   call `MatMatSolveTranspose`(A,X,X).

3771: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3772: @*/
3773: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3774: {
3775:   PetscFunctionBegin;
3780:   PetscCheckSameComm(A, 1, B, 2);
3781:   PetscCheckSameComm(A, 1, X, 3);
3782:   PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3783:   PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3784:   PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3785:   PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3786:   PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix");
3787:   if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3788:   MatCheckPreallocated(A, 1);

3790:   PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3791:   if (!A->ops->matsolvetranspose) {
3792:     PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3793:     PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3794:   } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3795:   PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3796:   PetscCall(PetscObjectStateIncrease((PetscObject)X));
3797:   PetscFunctionReturn(PETSC_SUCCESS);
3798: }

3800: /*@
3801:   MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.

3803:   Neighbor-wise Collective

3805:   Input Parameters:
3806: + A  - the factored matrix
3807: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`

3809:   Output Parameter:
3810: . X - the result matrix (dense matrix)

3812:   Level: developer

3814:   Note:
3815:   MUMPS supports the right-hand-side matrix only in centralized sparse compressed-column format on the host process. Since compressed-row storage of `Bt` is
3816:   equivalent to compressed-column storage of its transpose, the user must create `Bt` in sparse compressed-row format on the host process and call `MatMatTransposeSolve()` to obtain the effect of MUMPS' `MatMatSolve()`.

3818: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3819: @*/
3820: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3821: {
3822:   PetscFunctionBegin;
3827:   PetscCheckSameComm(A, 1, Bt, 2);
3828:   PetscCheckSameComm(A, 1, X, 3);

3830:   PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3831:   PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3832:   PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3833:   PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows");
3834:   if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3835:   PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3836:   MatCheckPreallocated(A, 1);

3838:   PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3839:   PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3840:   PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3841:   PetscCall(PetscObjectStateIncrease((PetscObject)X));
3842:   PetscFunctionReturn(PETSC_SUCCESS);
3843: }

3845: /*@
3846:   MatForwardSolve - Solves $L x = b$, given a factored matrix $A = LU$, or
3847:   $U^T*D^{1/2} x = b$, given a factored symmetric matrix $A = U^T*D*U$.

3849:   Neighbor-wise Collective

3851:   Input Parameters:
3852: + mat - the factored matrix
3853: - b   - the right-hand-side vector

3855:   Output Parameter:
3856: . x - the result vector

3858:   Level: developer

3860:   Notes:
3861:   `MatSolve()` should be used for most applications, as it performs
3862:   a forward solve followed by a backward solve.

3864:   The vectors `b` and `x` cannot be the same,  i.e., one cannot
3865:   call `MatForwardSolve`(A,x,x).

3867:   For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3868:   the diagonal blocks are not yet implemented as $D = D^{1/2} * D^{1/2}$.
3869:   `MatForwardSolve()` solves $U^T*D y = b$, and
3870:   `MatBackwardSolve()` solves $U x = y$.
3871:   Thus they do not provide a symmetric preconditioner.
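
  Example Usage:
  A minimal sketch; `F` is assumed to hold a factorization obtained with `MatGetFactor()` and the corresponding
  symbolic/numeric routines, and `b`, `y`, `x` are compatible vectors.
.vb
      MatForwardSolve(F, b, y);  /* y solves L y = b */
      MatBackwardSolve(F, y, x); /* x solves U x = y, giving the same result as MatSolve(F, b, x) */
.ve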

3873: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3874: @*/
3875: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3876: {
3877:   PetscFunctionBegin;
3882:   PetscCheckSameComm(mat, 1, b, 2);
3883:   PetscCheckSameComm(mat, 1, x, 3);
3884:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3885:   PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3886:   PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3887:   PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3888:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3889:   MatCheckPreallocated(mat, 1);

3891:   PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3892:   PetscUseTypeMethod(mat, forwardsolve, b, x);
3893:   PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3894:   PetscCall(PetscObjectStateIncrease((PetscObject)x));
3895:   PetscFunctionReturn(PETSC_SUCCESS);
3896: }

3898: /*@
3899:   MatBackwardSolve - Solves $U x = b$, given a factored matrix $A = LU$, or
3900:   $D^{1/2} U x = b$, given a factored symmetric matrix $A = U^T*D*U$.

3902:   Neighbor-wise Collective

3904:   Input Parameters:
3905: + mat - the factored matrix
3906: - b   - the right-hand-side vector

3908:   Output Parameter:
3909: . x - the result vector

3911:   Level: developer

3913:   Notes:
3914:   `MatSolve()` should be used for most applications, as it performs
3915:   a forward solve followed by a backward solve.

3917:   The vectors `b` and `x` cannot be the same.  I.e., one cannot
3918:   call `MatBackwardSolve`(A,x,x).

3920:   For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3921:   the diagonal blocks are not yet implemented as $D = D^{1/2} * D^{1/2}$.
3922:   `MatForwardSolve()` solves $U^T*D y = b$, and
3923:   `MatBackwardSolve()` solves $U x = y$.
3924:   Thus they do not provide a symmetric preconditioner.

3926: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3927: @*/
3928: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3929: {
3930:   PetscFunctionBegin;
3935:   PetscCheckSameComm(mat, 1, b, 2);
3936:   PetscCheckSameComm(mat, 1, x, 3);
3937:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3938:   PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3939:   PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3940:   PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3941:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3942:   MatCheckPreallocated(mat, 1);

3944:   PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3945:   PetscUseTypeMethod(mat, backwardsolve, b, x);
3946:   PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3947:   PetscCall(PetscObjectStateIncrease((PetscObject)x));
3948:   PetscFunctionReturn(PETSC_SUCCESS);
3949: }

3951: /*@
3952:   MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.

3954:   Neighbor-wise Collective

3956:   Input Parameters:
3957: + mat - the factored matrix
3958: . b   - the right-hand-side vector
3959: - y   - the vector to be added to

3961:   Output Parameter:
3962: . x - the result vector

3964:   Level: developer

3966:   Note:
3967:   The vectors `b` and `x` cannot be the same.  I.e., one cannot
3968:   call `MatSolveAdd`(A,x,y,x).

3970: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3971: @*/
3972: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3973: {
3974:   PetscScalar one = 1.0;
3975:   Vec         tmp;

3977:   PetscFunctionBegin;
3983:   PetscCheckSameComm(mat, 1, b, 2);
3984:   PetscCheckSameComm(mat, 1, y, 3);
3985:   PetscCheckSameComm(mat, 1, x, 4);
3986:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3987:   PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3988:   PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3989:   PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
3990:   PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3991:   PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
3992:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3993:   MatCheckPreallocated(mat, 1);

3995:   PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
3996:   if (mat->factorerrortype) {
3997:     PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3998:     PetscCall(VecSetInf(x));
3999:   } else if (mat->ops->solveadd) {
4000:     PetscUseTypeMethod(mat, solveadd, b, y, x);
4001:   } else {
4002:     /* do the solve then the add manually */
4003:     if (x != y) {
4004:       PetscCall(MatSolve(mat, b, x));
4005:       PetscCall(VecAXPY(x, one, y));
4006:     } else {
4007:       PetscCall(VecDuplicate(x, &tmp));
4008:       PetscCall(VecCopy(x, tmp));
4009:       PetscCall(MatSolve(mat, b, x));
4010:       PetscCall(VecAXPY(x, one, tmp));
4011:       PetscCall(VecDestroy(&tmp));
4012:     }
4013:   }
4014:   PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4015:   PetscCall(PetscObjectStateIncrease((PetscObject)x));
4016:   PetscFunctionReturn(PETSC_SUCCESS);
4017: }
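/*
   Illustrative sketch (not part of matrix.c): computing x = y + A^{-1} b with an explicitly
   created LU factor and MatSolveAdd(). Assumes A is an assembled square MATAIJ matrix and
   b, y, x are conforming vectors created elsewhere; most applications would use KSP instead.
*/
static PetscErrorCode ExampleSolveAdd_Sketch(Mat A, Vec b, Vec y, Vec x)
{
  Mat           F;
  IS            rowperm, colperm;
  MatFactorInfo info;

  PetscFunctionBegin;
  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatSolveAdd(F, b, y, x)); /* x = y + A^{-1} b */
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(MatDestroy(&F));
  PetscFunctionReturn(PETSC_SUCCESS);
}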

4019: /*@
4020:   MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.

4022:   Neighbor-wise Collective

4024:   Input Parameters:
4025: + mat - the factored matrix
4026: - b   - the right-hand-side vector

4028:   Output Parameter:
4029: . x - the result vector

4031:   Level: developer

4033:   Notes:
4034:   The vectors `b` and `x` cannot be the same.  I.e., one cannot
4035:   call `MatSolveTranspose`(A,x,x).

4037:   Most users should employ the `KSP` interface for linear solvers
4038:   instead of working directly with matrix algebra routines such as this.
4039:   See, e.g., `KSPCreate()`.

4041: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4042: @*/
4043: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4044: {
4045:   PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;

4047:   PetscFunctionBegin;
4052:   PetscCheckSameComm(mat, 1, b, 2);
4053:   PetscCheckSameComm(mat, 1, x, 3);
4054:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4055:   PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4056:   PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4057:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4058:   MatCheckPreallocated(mat, 1);
4059:   PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4060:   if (mat->factorerrortype) {
4061:     PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4062:     PetscCall(VecSetInf(x));
4063:   } else {
4064:     PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4065:     PetscCall((*f)(mat, b, x));
4066:   }
4067:   PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4068:   PetscCall(PetscObjectStateIncrease((PetscObject)x));
4069:   PetscFunctionReturn(PETSC_SUCCESS);
4070: }

4072: /*@
4073:   MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4074:   factored matrix.

4076:   Neighbor-wise Collective

4078:   Input Parameters:
4079: + mat - the factored matrix
4080: . b   - the right-hand-side vector
4081: - y   - the vector to be added to

4083:   Output Parameter:
4084: . x - the result vector

4086:   Level: developer

4088:   Note:
4089:   The vectors `b` and `x` cannot be the same.  I.e., one cannot
4090:   call `MatSolveTransposeAdd`(A,x,y,x).

4092: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4093: @*/
4094: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4095: {
4096:   PetscScalar one = 1.0;
4097:   Vec         tmp;
4098:   PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;

4100:   PetscFunctionBegin;
4106:   PetscCheckSameComm(mat, 1, b, 2);
4107:   PetscCheckSameComm(mat, 1, y, 3);
4108:   PetscCheckSameComm(mat, 1, x, 4);
4109:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4110:   PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4111:   PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4112:   PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4113:   PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4114:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4115:   MatCheckPreallocated(mat, 1);

4117:   PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4118:   if (mat->factorerrortype) {
4119:     PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4120:     PetscCall(VecSetInf(x));
4121:   } else if (f) {
4122:     PetscCall((*f)(mat, b, y, x));
4123:   } else {
4124:     /* do the solve then the add manually */
4125:     if (x != y) {
4126:       PetscCall(MatSolveTranspose(mat, b, x));
4127:       PetscCall(VecAXPY(x, one, y));
4128:     } else {
4129:       PetscCall(VecDuplicate(x, &tmp));
4130:       PetscCall(VecCopy(x, tmp));
4131:       PetscCall(MatSolveTranspose(mat, b, x));
4132:       PetscCall(VecAXPY(x, one, tmp));
4133:       PetscCall(VecDestroy(&tmp));
4134:     }
4135:   }
4136:   PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4137:   PetscCall(PetscObjectStateIncrease((PetscObject)x));
4138:   PetscFunctionReturn(PETSC_SUCCESS);
4139: }

4141: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4142: /*@
4143:   MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.

4145:   Neighbor-wise Collective

4147:   Input Parameters:
4148: + mat   - the matrix
4149: . b     - the right-hand side
4150: . omega - the relaxation factor
4151: . flag  - flag indicating the type of SOR (see below)
4152: . shift - diagonal shift
4153: . its   - the number of iterations
4154: - lits  - the number of local iterations

4156:   Output Parameter:
4157: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)

4159:   SOR Flags:
4160: +     `SOR_FORWARD_SWEEP` - forward SOR
4161: .     `SOR_BACKWARD_SWEEP` - backward SOR
4162: .     `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4163: .     `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4164: .     `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4165: .     `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4166: .     `SOR_EISENSTAT` - SOR with Eisenstat trick
4167: .     `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4168:   upper/lower triangular part of matrix to
4169:   vector (with omega)
4170: -     `SOR_ZERO_INITIAL_GUESS` - zero initial guess

4172:   Level: developer

4174:   Notes:
4175:   `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4176:   `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4177:   on each processor.

4179:   Application programmers will not generally use `MatSOR()` directly,
4180:   but instead will employ the `KSP`/`PC` interface.

4182:   For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing; otherwise it does a pointwise smoothing

4184:   Most users should employ the `KSP` interface for linear solvers
4185:   instead of working directly with matrix algebra routines such as this.
4186:   See, e.g., `KSPCreate()`.

4188:   Vectors `x` and `b` CANNOT be the same

4190:   The flags are implemented as bitwise inclusive or operations.
4191:   For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4192:   to specify a zero initial guess for SSOR.

4194:   Developer Note:
4195:   We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes

4197: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4198: @*/
4199: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4200: {
4201:   PetscFunctionBegin;
4206:   PetscCheckSameComm(mat, 1, b, 2);
4207:   PetscCheckSameComm(mat, 1, x, 8);
4208:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4209:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4210:   PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4211:   PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4212:   PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4213:   PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4214:   PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4215:   PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");

4217:   MatCheckPreallocated(mat, 1);
4218:   PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4219:   PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4220:   PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4221:   PetscCall(PetscObjectStateIncrease((PetscObject)x));
4222:   PetscFunctionReturn(PETSC_SUCCESS);
4223: }
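/*
   Illustrative sketch (not part of matrix.c): one SSOR sweep with a zero initial guess,
   combining the flags with a bitwise inclusive or as described above. Assumes A is an
   assembled matrix and b, x are conforming, distinct vectors created elsewhere.
*/
static PetscErrorCode ExampleSSORSweep_Sketch(Mat A, Vec b, Vec x)
{
  PetscFunctionBegin;
  /* omega = 1.5, no diagonal shift, one global and one local iteration */
  PetscCall(MatSOR(A, b, 1.5, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}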

4225: /*
4226:       Default matrix copy routine.
4227: */
4228: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4229: {
4230:   PetscInt           i, rstart = 0, rend = 0, nz;
4231:   const PetscInt    *cwork;
4232:   const PetscScalar *vwork;

4234:   PetscFunctionBegin;
4235:   if (B->assembled) PetscCall(MatZeroEntries(B));
4236:   if (str == SAME_NONZERO_PATTERN) {
4237:     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4238:     for (i = rstart; i < rend; i++) {
4239:       PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4240:       PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4241:       PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4242:     }
4243:   } else {
4244:     PetscCall(MatAYPX(B, 0.0, A, str));
4245:   }
4246:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4247:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4248:   PetscFunctionReturn(PETSC_SUCCESS);
4249: }

4251: /*@
4252:   MatCopy - Copies a matrix to another matrix.

4254:   Collective

4256:   Input Parameters:
4257: + A   - the matrix
4258: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`

4260:   Output Parameter:
4261: . B - where the copy is put

4263:   Level: intermediate

4265:   Notes:
4266:   If you use `SAME_NONZERO_PATTERN` then the two matrices must have the same nonzero pattern or the routine will crash.

4268:   `MatCopy()` copies the matrix entries of a matrix to another existing
4269:   matrix (after first zeroing the second matrix).  A related routine is
4270:   `MatConvert()`, which first creates a new matrix and then copies the data.

4272: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4273: @*/
4274: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4275: {
4276:   PetscInt i;

4278:   PetscFunctionBegin;
4283:   PetscCheckSameComm(A, 1, B, 2);
4284:   MatCheckPreallocated(B, 2);
4285:   PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4286:   PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4287:   PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4288:              A->cmap->N, B->cmap->N);
4289:   MatCheckPreallocated(A, 1);
4290:   if (A == B) PetscFunctionReturn(PETSC_SUCCESS);

4292:   PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4293:   if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4294:   else PetscCall(MatCopy_Basic(A, B, str));

4296:   B->stencil.dim = A->stencil.dim;
4297:   B->stencil.noc = A->stencil.noc;
4298:   for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4299:     B->stencil.dims[i]   = A->stencil.dims[i];
4300:     B->stencil.starts[i] = A->stencil.starts[i];
4301:   }

4303:   PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4304:   PetscCall(PetscObjectStateIncrease((PetscObject)B));
4305:   PetscFunctionReturn(PETSC_SUCCESS);
4306: }
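/*
   Illustrative sketch (not part of matrix.c): duplicating the nonzero structure of A and then
   copying its values with the fast SAME_NONZERO_PATTERN path. Assumes A is an assembled,
   non-factored matrix created elsewhere.
*/
static PetscErrorCode ExampleCopy_Sketch(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, B)); /* same pattern, values zeroed */
  PetscCall(MatCopy(A, *B, SAME_NONZERO_PATTERN));       /* safe because the patterns match */
  PetscFunctionReturn(PETSC_SUCCESS);
}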

4308: /*@
4309:   MatConvert - Converts a matrix to another matrix, either of the same
4310:   or different type.

4312:   Collective

4314:   Input Parameters:
4315: + mat     - the matrix
4316: . newtype - new matrix type.  Use `MATSAME` to create a new matrix of the
4317:             same type as the original matrix.
4318: - reuse   - denotes if the destination matrix is to be created or reused.
4319:             Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use
4320:             `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).

4322:   Output Parameter:
4323: . M - pointer to place new matrix

4325:   Level: intermediate

4327:   Notes:
4328:   `MatConvert()` first creates a new matrix and then copies the data from
4329:   the first matrix.  A related routine is `MatCopy()`, which copies the matrix
4330:   entries of one matrix to another already existing matrix context.

4332:   Cannot be used to convert a sequential matrix to parallel or a parallel matrix to sequential,
4333:   since the MPI communicator of the generated matrix is always the same as the communicator
4334:   of the input matrix.

4336: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4337: @*/
4338: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4339: {
4340:   PetscBool  sametype, issame, flg;
4341:   PetscBool3 issymmetric, ishermitian;
4342:   char       convname[256], mtype[256];
4343:   Mat        B;

4345:   PetscFunctionBegin;
4348:   PetscAssertPointer(M, 4);
4349:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4350:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4351:   MatCheckPreallocated(mat, 1);

4353:   PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4354:   if (flg) newtype = mtype;

4356:   PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4357:   PetscCall(PetscStrcmp(newtype, "same", &issame));
4358:   PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4359:   if (reuse == MAT_REUSE_MATRIX) {
4361:     PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4362:   }

4364:   if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4365:     PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4366:     PetscFunctionReturn(PETSC_SUCCESS);
4367:   }

4369:   /* Cache Mat options because some converters use MatHeaderReplace  */
4370:   issymmetric = mat->symmetric;
4371:   ishermitian = mat->hermitian;

4373:   if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4374:     PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4375:     PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4376:   } else {
4377:     PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4378:     const char *prefix[3]                                 = {"seq", "mpi", ""};
4379:     PetscInt    i;
4380:     /*
4381:        Order of precedence:
4382:        0) See if newtype is a superclass of the current matrix.
4383:        1) See if a specialized converter is known to the current matrix.
4384:        2) See if a specialized converter is known to the desired matrix class.
4385:        3) See if a good general converter is registered for the desired class
4386:           (as of 6/27/03 only MATMPIADJ falls into this category).
4387:        4) See if a good general converter is known for the current matrix.
4388:        5) Use a really basic converter.
4389:     */

4391:     /* 0) See if newtype is a superclass of the current matrix.
4392:           e.g., mat is mpiaij and newtype is aij */
4393:     for (i = 0; i < 2; i++) {
4394:       PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4395:       PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4396:       PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4397:       PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4398:       if (flg) {
4399:         if (reuse == MAT_INPLACE_MATRIX) {
4400:           PetscCall(PetscInfo(mat, "Early return\n"));
4401:           PetscFunctionReturn(PETSC_SUCCESS);
4402:         } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4403:           PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4404:           PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4405:           PetscFunctionReturn(PETSC_SUCCESS);
4406:         } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4407:           PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4408:           PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4409:           PetscFunctionReturn(PETSC_SUCCESS);
4410:         }
4411:       }
4412:     }
4413:     /* 1) See if a specialized converter is known to the current matrix and the desired class */
4414:     for (i = 0; i < 3; i++) {
4415:       PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4416:       PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4417:       PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4418:       PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4419:       PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4420:       PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4421:       PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4422:       PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4423:       if (conv) goto foundconv;
4424:     }

4426:     /* 2)  See if a specialized converter is known to the desired matrix class. */
4427:     PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4428:     PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4429:     PetscCall(MatSetType(B, newtype));
4430:     for (i = 0; i < 3; i++) {
4431:       PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4432:       PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4433:       PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4434:       PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4435:       PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4436:       PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4437:       PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4438:       PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4439:       if (conv) {
4440:         PetscCall(MatDestroy(&B));
4441:         goto foundconv;
4442:       }
4443:     }

4445:     /* 3) See if a good general converter is registered for the desired class */
4446:     conv = B->ops->convertfrom;
4447:     PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4448:     PetscCall(MatDestroy(&B));
4449:     if (conv) goto foundconv;

4451:     /* 4) See if a good general converter is known for the current matrix */
4452:     if (mat->ops->convert) conv = mat->ops->convert;
4453:     PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4454:     if (conv) goto foundconv;

4456:     /* 5) Use a really basic converter. */
4457:     PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4458:     conv = MatConvert_Basic;

4460:   foundconv:
4461:     PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4462:     PetscCall((*conv)(mat, newtype, reuse, M));
4463:     if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4464:       /* the block sizes must be same if the mappings are copied over */
4465:       (*M)->rmap->bs = mat->rmap->bs;
4466:       (*M)->cmap->bs = mat->cmap->bs;
4467:       PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4468:       PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4469:       (*M)->rmap->mapping = mat->rmap->mapping;
4470:       (*M)->cmap->mapping = mat->cmap->mapping;
4471:     }
4472:     (*M)->stencil.dim = mat->stencil.dim;
4473:     (*M)->stencil.noc = mat->stencil.noc;
4474:     for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4475:       (*M)->stencil.dims[i]   = mat->stencil.dims[i];
4476:       (*M)->stencil.starts[i] = mat->stencil.starts[i];
4477:     }
4478:     PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4479:   }
4480:   PetscCall(PetscObjectStateIncrease((PetscObject)*M));

4482:   /* Copy Mat options */
4483:   if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4484:   else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4485:   if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4486:   else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4487:   PetscFunctionReturn(PETSC_SUCCESS);
4488: }
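/*
   Illustrative sketch (not part of matrix.c): the two common MatConvert() usages, creating a new
   matrix in a different format and converting a matrix in place. Assumes A is an assembled,
   non-factored matrix created elsewhere; the in-place conversion is a no-op if A already has
   type MATAIJ.
*/
static PetscErrorCode ExampleConvert_Sketch(Mat A)
{
  Mat Adense;

  PetscFunctionBegin;
  PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense)); /* new dense copy of A */
  PetscCall(MatDestroy(&Adense));
  PetscCall(MatConvert(A, MATAIJ, MAT_INPLACE_MATRIX, &A)); /* A itself now has type MATAIJ */
  PetscFunctionReturn(PETSC_SUCCESS);
}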

4490: /*@C
4491:   MatFactorGetSolverType - Returns name of the package providing the factorization routines

4493:   Not Collective

4495:   Input Parameter:
4496: . mat - the matrix, must be a factored matrix

4498:   Output Parameter:
4499: . type - the string name of the package (do not free this string)

4501:   Level: intermediate

4503:   Fortran Note:
4504:   Pass in an empty string that is long enough and the package name will be copied into it.

4506: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4507: @*/
4508: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4509: {
4510:   PetscErrorCode (*conv)(Mat, MatSolverType *);

4512:   PetscFunctionBegin;
4515:   PetscAssertPointer(type, 2);
4516:   PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4517:   PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4518:   if (conv) PetscCall((*conv)(mat, type));
4519:   else *type = MATSOLVERPETSC;
4520:   PetscFunctionReturn(PETSC_SUCCESS);
4521: }
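/*
   Illustrative sketch (not part of matrix.c): reporting which solver package produced a factor.
   Assumes F is a factored matrix obtained from MatGetFactor().
*/
static PetscErrorCode ExampleReportSolverType_Sketch(Mat F)
{
  MatSolverType stype;

  PetscFunctionBegin;
  PetscCall(MatFactorGetSolverType(F, &stype));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)F), "Factorization provided by %s\n", stype));
  PetscFunctionReturn(PETSC_SUCCESS);
}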

4523: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4524: struct _MatSolverTypeForSpecifcType {
4525:   MatType mtype;
4526:   /* no entry for MAT_FACTOR_NONE */
4527:   PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4528:   MatSolverTypeForSpecifcType next;
4529: };

4531: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4532: struct _MatSolverTypeHolder {
4533:   char                       *name;
4534:   MatSolverTypeForSpecifcType handlers;
4535:   MatSolverTypeHolder         next;
4536: };

4538: static MatSolverTypeHolder MatSolverTypeHolders = NULL;

4540: /*@C
4541:   MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type

4543:   Logically Collective, No Fortran Support

4545:   Input Parameters:
4546: + package      - name of the package, for example petsc or superlu
4547: . mtype        - the matrix type that works with this package
4548: . ftype        - the type of factorization supported by the package
4549: - createfactor - routine that will create the factored matrix ready to be used

4551:   Level: developer

4553: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4554:   `MatGetFactor()`
4555: @*/
4556: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4557: {
4558:   MatSolverTypeHolder         next = MatSolverTypeHolders, prev = NULL;
4559:   PetscBool                   flg;
4560:   MatSolverTypeForSpecifcType inext, iprev = NULL;

4562:   PetscFunctionBegin;
4563:   PetscCall(MatInitializePackage());
4564:   if (!next) {
4565:     PetscCall(PetscNew(&MatSolverTypeHolders));
4566:     PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4567:     PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4568:     PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4569:     MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4570:     PetscFunctionReturn(PETSC_SUCCESS);
4571:   }
4572:   while (next) {
4573:     PetscCall(PetscStrcasecmp(package, next->name, &flg));
4574:     if (flg) {
4575:       PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4576:       inext = next->handlers;
4577:       while (inext) {
4578:         PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4579:         if (flg) {
4580:           inext->createfactor[(int)ftype - 1] = createfactor;
4581:           PetscFunctionReturn(PETSC_SUCCESS);
4582:         }
4583:         iprev = inext;
4584:         inext = inext->next;
4585:       }
4586:       PetscCall(PetscNew(&iprev->next));
4587:       PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4588:       iprev->next->createfactor[(int)ftype - 1] = createfactor;
4589:       PetscFunctionReturn(PETSC_SUCCESS);
4590:     }
4591:     prev = next;
4592:     next = next->next;
4593:   }
4594:   PetscCall(PetscNew(&prev->next));
4595:   PetscCall(PetscStrallocpy(package, &prev->next->name));
4596:   PetscCall(PetscNew(&prev->next->handlers));
4597:   PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4598:   prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4599:   PetscFunctionReturn(PETSC_SUCCESS);
4600: }
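/*
   Illustrative sketch (not part of matrix.c): how an external package might register an LU
   factorization for MATSEQAIJ. The package name "mypkg" and the creation routine
   MatGetFactor_SeqAIJ_MyPkg are hypothetical; a real routine would also set the factor matrix
   type and install its MatLUFactorSymbolic/Numeric implementations on *F.
*/
static PetscErrorCode MatGetFactor_SeqAIJ_MyPkg(Mat A, MatFactorType ftype, Mat *F)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), F));
  PetscCall(MatSetSizes(*F, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  /* ... set the factor matrix type and hook up the factorization operations ... */
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MyPkgRegister_Sketch(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister("mypkg", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MyPkg));
  PetscFunctionReturn(PETSC_SUCCESS);
}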

4602: /*@C
4603:   MatSolverTypeGet - Gets the function that creates the factor matrix if it exists

4605:   Input Parameters:
4606: + type  - name of the package, for example petsc or superlu; if this is `NULL` then the first result that satisfies the other criteria is returned
4607: . ftype - the type of factorization supported by the type
4608: - mtype - the matrix type that works with this type

4610:   Output Parameters:
4611: + foundtype    - `PETSC_TRUE` if the type was registered
4612: . foundmtype   - `PETSC_TRUE` if the type supports the requested mtype
4613: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found

4615:   Calling sequence of `createfactor`:
4616: + A     - the matrix providing the factor matrix
4617: . mtype - the `MatType` of the factor requested
4618: - B     - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`

4620:   Level: developer

4622:   Note:
4623:   When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4624:   Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4625:   For example if one configuration had --download-mumps while a different one had --download-superlu_dist.

4627: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4628:           `MatInitializePackage()`
4629: @*/
4630: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType mtype, Mat *B))
4631: {
4632:   MatSolverTypeHolder         next = MatSolverTypeHolders;
4633:   PetscBool                   flg;
4634:   MatSolverTypeForSpecifcType inext;

4636:   PetscFunctionBegin;
4637:   if (foundtype) *foundtype = PETSC_FALSE;
4638:   if (foundmtype) *foundmtype = PETSC_FALSE;
4639:   if (createfactor) *createfactor = NULL;

4641:   if (type) {
4642:     while (next) {
4643:       PetscCall(PetscStrcasecmp(type, next->name, &flg));
4644:       if (flg) {
4645:         if (foundtype) *foundtype = PETSC_TRUE;
4646:         inext = next->handlers;
4647:         while (inext) {
4648:           PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4649:           if (flg) {
4650:             if (foundmtype) *foundmtype = PETSC_TRUE;
4651:             if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4652:             PetscFunctionReturn(PETSC_SUCCESS);
4653:           }
4654:           inext = inext->next;
4655:         }
4656:       }
4657:       next = next->next;
4658:     }
4659:   } else {
4660:     while (next) {
4661:       inext = next->handlers;
4662:       while (inext) {
4663:         PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4664:         if (flg && inext->createfactor[(int)ftype - 1]) {
4665:           if (foundtype) *foundtype = PETSC_TRUE;
4666:           if (foundmtype) *foundmtype = PETSC_TRUE;
4667:           if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4668:           PetscFunctionReturn(PETSC_SUCCESS);
4669:         }
4670:         inext = inext->next;
4671:       }
4672:       next = next->next;
4673:     }
4674:     /* try with base classes inext->mtype */
4675:     next = MatSolverTypeHolders;
4676:     while (next) {
4677:       inext = next->handlers;
4678:       while (inext) {
4679:         PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4680:         if (flg && inext->createfactor[(int)ftype - 1]) {
4681:           if (foundtype) *foundtype = PETSC_TRUE;
4682:           if (foundmtype) *foundmtype = PETSC_TRUE;
4683:           if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4684:           PetscFunctionReturn(PETSC_SUCCESS);
4685:         }
4686:         inext = inext->next;
4687:       }
4688:       next = next->next;
4689:     }
4690:   }
4691:   PetscFunctionReturn(PETSC_SUCCESS);
4692: }

4694: PetscErrorCode MatSolverTypeDestroy(void)
4695: {
4696:   MatSolverTypeHolder         next = MatSolverTypeHolders, prev;
4697:   MatSolverTypeForSpecifcType inext, iprev;

4699:   PetscFunctionBegin;
4700:   while (next) {
4701:     PetscCall(PetscFree(next->name));
4702:     inext = next->handlers;
4703:     while (inext) {
4704:       PetscCall(PetscFree(inext->mtype));
4705:       iprev = inext;
4706:       inext = inext->next;
4707:       PetscCall(PetscFree(iprev));
4708:     }
4709:     prev = next;
4710:     next = next->next;
4711:     PetscCall(PetscFree(prev));
4712:   }
4713:   MatSolverTypeHolders = NULL;
4714:   PetscFunctionReturn(PETSC_SUCCESS);
4715: }

4717: /*@
4718:   MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`

4720:   Logically Collective

4722:   Input Parameter:
4723: . mat - the matrix

4725:   Output Parameter:
4726: . flg - `PETSC_TRUE` if uses the ordering

4728:   Level: developer

4730:   Note:
4731:   Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4732:   packages do not; thus we want to skip generating the ordering when it is not needed or used.

4734: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4735: @*/
4736: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4737: {
4738:   PetscFunctionBegin;
4739:   *flg = mat->canuseordering;
4740:   PetscFunctionReturn(PETSC_SUCCESS);
4741: }

4743: /*@
4744:   MatFactorGetPreferredOrdering - Gets the preferred ordering for a particular matrix factor object

4746:   Logically Collective

4748:   Input Parameters:
4749: + mat   - the matrix obtained with `MatGetFactor()`
4750: - ftype - the factorization type to be used

4752:   Output Parameter:
4753: . otype - the preferred ordering type

4755:   Level: developer

4757: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4758: @*/
4759: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4760: {
4761:   PetscFunctionBegin;
4762:   *otype = mat->preferredordering[ftype];
4763:   PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4764:   PetscFunctionReturn(PETSC_SUCCESS);
4765: }

4767: /*@
4768:   MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()

4770:   Collective

4772:   Input Parameters:
4773: + mat   - the matrix
4774: . type  - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available); if this is `NULL` then the first result that satisfies
4775:           the other criteria is returned
4776: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`

4778:   Output Parameter:
4779: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.

4781:   Options Database Keys:
4782: + -pc_factor_mat_solver_type <type>             - choose the type at run time. When using `KSP` solvers
4783: - -mat_factor_bind_factorization <host, device> - Where to do the matrix factorization. The default is device, which might consume more device memory;
4784:                                                   one can choose host to save device memory. Currently only supported with `MATSEQAIJCUSPARSE` matrices.

4786:   Level: intermediate

4788:   Notes:
4789:   The returned matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4790:   types registered with `MatSolverTypeRegister()` cannot be fully tested except at runtime.

4792:   Users usually access the factorization solvers via `KSP`

4794:   Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4795:   such as pastix, superlu, mumps etc. PETSc must have been ./configure'd to use the external solver, using the option --download-package or --with-package-dir

4797:   When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4798:   Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4799:   For example if one configuration had --download-mumps while a different one had --download-superlu_dist.

4801:   Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption
4802:   where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can
4803:   call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.

4805:   Developer Note:
4806:   This should actually be called `MatCreateFactor()` since it creates a new factor object

4808: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4809:           `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4810:           `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4811: @*/
4812: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4813: {
4814:   PetscBool foundtype, foundmtype;
4815:   PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);

4817:   PetscFunctionBegin;

4821:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4822:   MatCheckPreallocated(mat, 1);

4824:   PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4825:   if (!foundtype) {
4826:     if (type) {
4827:       SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4828:               ((PetscObject)mat)->type_name, type);
4829:     } else {
4830:       SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4831:     }
4832:   }
4833:   PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4834:   PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);

4836:   PetscCall((*conv)(mat, ftype, f));
4837:   if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4838:   PetscFunctionReturn(PETSC_SUCCESS);
4839: }

4841: /*@
4842:   MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type

4844:   Not Collective

4846:   Input Parameters:
4847: + mat   - the matrix
4848: . type  - name of solver type, for example, superlu, petsc (to use PETSc's default)
4849: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`

4851:   Output Parameter:
4852: . flg - `PETSC_TRUE` if the factorization is available

4854:   Level: intermediate

4856:   Notes:
4857:   Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4858:   such as pastix, superlu, mumps etc.

4860:   PETSc must have been ./configure'd to use the external solver, using the option --download-package

4862:   Developer Note:
4863:   This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object

4865: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4866:           `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4867: @*/
4868: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4869: {
4870:   PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);

4872:   PetscFunctionBegin;
4874:   PetscAssertPointer(flg, 4);

4876:   *flg = PETSC_FALSE;
4877:   if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);

4879:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4880:   MatCheckPreallocated(mat, 1);

4882:   PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4883:   *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4884:   PetscFunctionReturn(PETSC_SUCCESS);
4885: }
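/*
   Illustrative sketch (not part of matrix.c): checking for an external solver before creating a
   factor, falling back to the built-in PETSc factorization. MATSOLVERMUMPS is only usable if
   PETSc was configured with MUMPS; otherwise the PETSc solver is used. Assumes A is an assembled,
   non-factored matrix created elsewhere.
*/
static PetscErrorCode ExampleGetLUFactor_Sketch(Mat A, Mat *F)
{
  PetscBool avail;

  PetscFunctionBegin;
  PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &avail));
  if (avail) PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, F));
  else PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, F));
  PetscFunctionReturn(PETSC_SUCCESS);
}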

4887: /*@
4888:   MatDuplicate - Duplicates a matrix including the non-zero structure.

4890:   Collective

4892:   Input Parameters:
4893: + mat - the matrix
4894: - op  - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4895:         See the manual page for `MatDuplicateOption()` for an explanation of these options.

4897:   Output Parameter:
4898: . M - pointer to place new matrix

4900:   Level: intermediate

4902:   Notes:
4903:   You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.

4905:   If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.

4907:   May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.

4909:   When the original `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4910:   is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
4911:   Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.

4913: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4914: @*/
4915: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4916: {
4917:   Mat         B;
4918:   VecType     vtype;
4919:   PetscInt    i;
4920:   PetscObject dm, container_h, container_d;
4921:   void (*viewf)(void);

4923:   PetscFunctionBegin;
4926:   PetscAssertPointer(M, 3);
4927:   PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4928:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4929:   MatCheckPreallocated(mat, 1);

4931:   *M = NULL;
4932:   PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4933:   PetscUseTypeMethod(mat, duplicate, op, M);
4934:   PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4935:   B = *M;

4937:   PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4938:   if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4939:   PetscCall(MatGetVecType(mat, &vtype));
4940:   PetscCall(MatSetVecType(B, vtype));

4942:   B->stencil.dim = mat->stencil.dim;
4943:   B->stencil.noc = mat->stencil.noc;
4944:   for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4945:     B->stencil.dims[i]   = mat->stencil.dims[i];
4946:     B->stencil.starts[i] = mat->stencil.starts[i];
4947:   }

4949:   B->nooffproczerorows = mat->nooffproczerorows;
4950:   B->nooffprocentries  = mat->nooffprocentries;

4952:   PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4953:   if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4954:   PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4955:   if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4956:   PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4957:   if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4958:   PetscCall(PetscObjectStateIncrease((PetscObject)B));
4959:   PetscFunctionReturn(PETSC_SUCCESS);
4960: }

4962: /*@
4963:   MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`

4965:   Logically Collective

4967:   Input Parameter:
4968: . mat - the matrix

4970:   Output Parameter:
4971: . v - the diagonal of the matrix

4973:   Level: intermediate

4975:   Note:
4976:   If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
4977:   of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
4978:   is larger than `ndiag`, the values of the remaining entries are unspecified.

4980:   Currently only correct in parallel for square matrices.

4982: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4983: @*/
4984: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
4985: {
4986:   PetscFunctionBegin;
4990:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4991:   MatCheckPreallocated(mat, 1);
4992:   if (PetscDefined(USE_DEBUG)) {
4993:     PetscInt nv, row, col, ndiag;

4995:     PetscCall(VecGetLocalSize(v, &nv));
4996:     PetscCall(MatGetLocalSize(mat, &row, &col));
4997:     ndiag = PetscMin(row, col);
4998:     PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
4999:   }

5001:   PetscUseTypeMethod(mat, getdiagonal, v);
5002:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
5003:   PetscFunctionReturn(PETSC_SUCCESS);
5004: }
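/*
   Illustrative sketch (not part of matrix.c): pointwise Jacobi scaling x_i <- x_i / A_ii using
   MatGetDiagonal(). Assumes A is an assembled square matrix with no zero diagonal entries and
   x is a conforming vector created elsewhere.
*/
static PetscErrorCode ExampleJacobiScale_Sketch(Mat A, Vec x)
{
  Vec diag;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(A, NULL, &diag));
  PetscCall(MatGetDiagonal(A, diag));
  PetscCall(VecPointwiseDivide(x, x, diag)); /* x = x ./ diag */
  PetscCall(VecDestroy(&diag));
  PetscFunctionReturn(PETSC_SUCCESS);
}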

5006: /*@
5007:   MatGetRowMin - Gets the minimum value (of the real part) of each
5008:   row of the matrix

5010:   Logically Collective

5012:   Input Parameter:
5013: . mat - the matrix

5015:   Output Parameters:
5016: + v   - the vector for storing the maximums
5017: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)

5019:   Level: intermediate

5021:   Note:
5022:   The results of this call are the same as if one converted the matrix to dense format
5023:   and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).

5025:   This code is only implemented for a couple of matrix formats.

5027: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5028:           `MatGetRowMax()`
5029: @*/
5030: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5031: {
5032:   PetscFunctionBegin;
5036:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");

5038:   if (!mat->cmap->N) {
5039:     PetscCall(VecSet(v, PETSC_MAX_REAL));
5040:     if (idx) {
5041:       PetscInt i, m = mat->rmap->n;
5042:       for (i = 0; i < m; i++) idx[i] = -1;
5043:     }
5044:   } else {
5045:     MatCheckPreallocated(mat, 1);
5046:   }
5047:   PetscUseTypeMethod(mat, getrowmin, v, idx);
5048:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
5049:   PetscFunctionReturn(PETSC_SUCCESS);
5050: }

5052: /*@
5053:   MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5054:   row of the matrix

5056:   Logically Collective

5058:   Input Parameter:
5059: . mat - the matrix

5061:   Output Parameters:
5062: + v   - the vector for storing the minimums
5063: - idx - the indices of the column found for each row (or `NULL` if not needed)

5065:   Level: intermediate

5067:   Notes:
5068:   If a row is completely empty or has only 0.0 values, then the `idx` value for that
5069:   row is 0 (the first column).

5071:   This code is only implemented for a couple of matrix formats.

5073: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5074: @*/
5075: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5076: {
5077:   PetscFunctionBegin;
5081:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5082:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");

5084:   if (!mat->cmap->N) {
5085:     PetscCall(VecSet(v, 0.0));
5086:     if (idx) {
5087:       PetscInt i, m = mat->rmap->n;
5088:       for (i = 0; i < m; i++) idx[i] = -1;
5089:     }
5090:   } else {
5091:     MatCheckPreallocated(mat, 1);
5092:     if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5093:     PetscUseTypeMethod(mat, getrowminabs, v, idx);
5094:   }
5095:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
5096:   PetscFunctionReturn(PETSC_SUCCESS);
5097: }

5099: /*@
5100:   MatGetRowMax - Gets the maximum value (of the real part) of each
5101:   row of the matrix

5103:   Logically Collective

5105:   Input Parameter:
5106: . mat - the matrix

5108:   Output Parameters:
5109: + v   - the vector for storing the maximums
5110: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)

5112:   Level: intermediate

5114:   Notes:
5115:   The results of this call are the same as if one converted the matrix to dense format
5116:   and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).

5118:   This code is only implemented for a couple of matrix formats.

5120: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5121: @*/
5122: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5123: {
5124:   PetscFunctionBegin;
5128:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");

5130:   if (!mat->cmap->N) {
5131:     PetscCall(VecSet(v, PETSC_MIN_REAL));
5132:     if (idx) {
5133:       PetscInt i, m = mat->rmap->n;
5134:       for (i = 0; i < m; i++) idx[i] = -1;
5135:     }
5136:   } else {
5137:     MatCheckPreallocated(mat, 1);
5138:     PetscUseTypeMethod(mat, getrowmax, v, idx);
5139:   }
5140:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
5141:   PetscFunctionReturn(PETSC_SUCCESS);
5142: }

5144: /*@
5145:   MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5146:   row of the matrix

5148:   Logically Collective

5150:   Input Parameter:
5151: . mat - the matrix

5153:   Output Parameters:
5154: + v   - the vector for storing the maximums
5155: - idx - the indices of the column found for each row (or `NULL` if not needed)

5157:   Level: intermediate

5159:   Notes:
5160:   If a row is completely empty or has only 0.0 values, then the `idx` value for that
5161:   row is 0 (the first column).

5163:   This code is only implemented for a couple of matrix formats.

5165: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5166: @*/
5167: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5168: {
5169:   PetscFunctionBegin;
5173:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");

5175:   if (!mat->cmap->N) {
5176:     PetscCall(VecSet(v, 0.0));
5177:     if (idx) {
5178:       PetscInt i, m = mat->rmap->n;
5179:       for (i = 0; i < m; i++) idx[i] = -1;
5180:     }
5181:   } else {
5182:     MatCheckPreallocated(mat, 1);
5183:     if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5184:     PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5185:   }
5186:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
5187:   PetscFunctionReturn(PETSC_SUCCESS);
5188: }

5190: /*@
5191:   MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix

5193:   Logically Collective

5195:   Input Parameter:
5196: . mat - the matrix

5198:   Output Parameter:
5199: . v - the vector for storing the sum

5201:   Level: intermediate

5203:   This code is only implemented for a couple of matrix formats.

5205: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5206: @*/
5207: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5208: {
5209:   PetscFunctionBegin;
5213:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");

5215:   if (!mat->cmap->N) {
5216:     PetscCall(VecSet(v, 0.0));
5217:   } else {
5218:     MatCheckPreallocated(mat, 1);
5219:     PetscUseTypeMethod(mat, getrowsumabs, v);
5220:   }
5221:   PetscCall(PetscObjectStateIncrease((PetscObject)v));
5222:   PetscFunctionReturn(PETSC_SUCCESS);
5223: }

5225: /*@
5226:   MatGetRowSum - Gets the sum of each row of the matrix

5228:   Logically or Neighborhood Collective

5230:   Input Parameter:
5231: . mat - the matrix

5233:   Output Parameter:
5234: . v - the vector for storing the sum of rows

5236:   Level: intermediate

5238:   Note:
5239:   This code is slow since it is not currently specialized for different formats

5241: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5242: @*/
5243: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5244: {
5245:   Vec ones;

5247:   PetscFunctionBegin;
5251:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5252:   MatCheckPreallocated(mat, 1);
5253:   PetscCall(MatCreateVecs(mat, &ones, NULL));
5254:   PetscCall(VecSet(ones, 1.));
5255:   PetscCall(MatMult(mat, ones, v));
5256:   PetscCall(VecDestroy(&ones));
5257:   PetscFunctionReturn(PETSC_SUCCESS);
5258: }

5260: /*@
5261:   MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5262:   when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)

5264:   Collective

5266:   Input Parameter:
5267: . mat - the matrix to provide the transpose

5269:   Output Parameter:
5270: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results

5272:   Level: advanced

5274:   Note:
5275:   Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5276:   routine allows bypassing that call.

5278: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5279: @*/
5280: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5281: {
5282:   PetscContainer  rB = NULL;
5283:   MatParentState *rb = NULL;

5285:   PetscFunctionBegin;
5286:   PetscCall(PetscNew(&rb));
5287:   rb->id    = ((PetscObject)mat)->id;
5288:   rb->state = 0;
5289:   PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5290:   PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)B), &rB));
5291:   PetscCall(PetscContainerSetPointer(rB, rb));
5292:   PetscCall(PetscContainerSetUserDestroy(rB, PetscContainerUserDestroyDefault));
5293:   PetscCall(PetscObjectCompose((PetscObject)B, "MatTransposeParent", (PetscObject)rB));
5294:   PetscCall(PetscObjectDereference((PetscObject)rB));
5295:   PetscFunctionReturn(PETSC_SUCCESS);
5296: }

5298: /*@
5299:   MatTranspose - Computes an in-place or out-of-place transpose of a matrix.

5301:   Collective

5303:   Input Parameters:
5304: + mat   - the matrix to transpose
5305: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`

5307:   Output Parameter:
5308: . B - the transpose

5310:   Level: intermediate

5312:   Notes:
5313:   If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`

5315:   `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5316:   transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.

5318:   If the nonzero structure of `mat` changed from the previous call to this function with the same matrices, an error will be generated for some matrix types.

5320:   Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.

5322:   If `mat` is unchanged from the last call, this function returns immediately without recomputing the result.

5324:   If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`

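  Example usage, a minimal sketch (assumes `A` is an assembled matrix whose numerical values, but not nonzero structure, change between the two transposes):
.vb
  Mat B;

  PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &B)); /* create B and compute B = A^T */
  /* ... change the numerical values of A, keeping its nonzero structure ... */
  PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &B));   /* recompute only the numerical values of B */
  PetscCall(MatDestroy(&B));
.ve
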
5326: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5327:           `MatTransposeSymbolic()`, `MatCreateTranspose()`
5328: @*/
5329: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5330: {
5331:   PetscContainer  rB = NULL;
5332:   MatParentState *rb = NULL;

5334:   PetscFunctionBegin;
5337:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5338:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5339:   PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5340:   PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5341:   MatCheckPreallocated(mat, 1);
5342:   if (reuse == MAT_REUSE_MATRIX) {
5343:     PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5344:     PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5345:     PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5346:     PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5347:     if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5348:   }

5350:   PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5351:   if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5352:     PetscUseTypeMethod(mat, transpose, reuse, B);
5353:     PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5354:   }
5355:   PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));

5357:   if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5358:   if (reuse != MAT_INPLACE_MATRIX) {
5359:     PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5360:     PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5361:     rb->state        = ((PetscObject)mat)->state;
5362:     rb->nonzerostate = mat->nonzerostate;
5363:   }
5364:   PetscFunctionReturn(PETSC_SUCCESS);
5365: }

5367: /*@
5368:   MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.

5370:   Collective

5372:   Input Parameter:
5373: . A - the matrix to transpose

5375:   Output Parameter:
5376: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5377:       numerical portion.

5379:   Level: intermediate

5381:   Note:
5382:   This is not supported for many matrix types; use `MatTranspose()` in those cases

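  Example usage, a minimal sketch (assumes `A` is an assembled matrix of a type that supports this operation):
.vb
  Mat B;

  PetscCall(MatTransposeSymbolic(A, &B));           /* B has the nonzero structure of A^T; its values are not valid */
  PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &B)); /* now compute the numerical values of B = A^T */
  PetscCall(MatDestroy(&B));
.ve
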
5384: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5385: @*/
5386: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5387: {
5388:   PetscFunctionBegin;
5391:   PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5392:   PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5393:   PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5394:   PetscUseTypeMethod(A, transposesymbolic, B);
5395:   PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));

5397:   PetscCall(MatTransposeSetPrecursor(A, *B));
5398:   PetscFunctionReturn(PETSC_SUCCESS);
5399: }

5401: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5402: {
5403:   PetscContainer  rB;
5404:   MatParentState *rb;

5406:   PetscFunctionBegin;
5409:   PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5410:   PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5411:   PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5412:   PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5413:   PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5414:   PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5415:   PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5416:   PetscFunctionReturn(PETSC_SUCCESS);
5417: }

5419: /*@
5420:   MatIsTranspose - Test whether a matrix is another one's transpose,
5421:   or its own, in which case it tests symmetry.

5423:   Collective

5425:   Input Parameters:
5426: + A   - the matrix to test
5427: . B   - the matrix to test against, this can equal the first parameter
5428: - tol - tolerance, differences between entries smaller than this are counted as zero

5430:   Output Parameter:
5431: . flg - the result

5433:   Level: intermediate

5435:   Notes:
5436:   The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5437:   test involves parallel copies of the block off-diagonal parts of the matrix.

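  Example usage, a minimal sketch (assumes `A` and `B` are assembled `MATAIJ` matrices of compatible sizes):
.vb
  PetscBool flg;

  PetscCall(MatIsTranspose(A, B, 1.e-10, &flg)); /* is B equal to A^T up to the tolerance? */
  PetscCall(MatIsTranspose(A, A, 1.e-10, &flg)); /* passing the same matrix twice tests symmetry */
.ve
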
5439: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5440: @*/
5441: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5442: {
5443:   PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);

5445:   PetscFunctionBegin;
5448:   PetscAssertPointer(flg, 4);
5449:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5450:   PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5451:   *flg = PETSC_FALSE;
5452:   if (f && g) {
5453:     PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5454:     PetscCall((*f)(A, B, tol, flg));
5455:   } else {
5456:     MatType mattype;

5458:     PetscCall(MatGetType(f ? B : A, &mattype));
5459:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5460:   }
5461:   PetscFunctionReturn(PETSC_SUCCESS);
5462: }

5464: /*@
5465:   MatHermitianTranspose - Computes an in-place or out-of-place Hermitian (conjugate) transpose of a matrix.

5467:   Collective

5469:   Input Parameters:
5470: + mat   - the matrix to transpose and complex conjugate
5471: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`

5473:   Output Parameter:
5474: . B - the Hermitian transpose

5476:   Level: intermediate

5478: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5479: @*/
5480: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5481: {
5482:   PetscFunctionBegin;
5483:   PetscCall(MatTranspose(mat, reuse, B));
5484: #if defined(PETSC_USE_COMPLEX)
5485:   PetscCall(MatConjugate(*B));
5486: #endif
5487:   PetscFunctionReturn(PETSC_SUCCESS);
5488: }

5490: /*@
5491:   MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.

5493:   Collective

5495:   Input Parameters:
5496: + A   - the matrix to test
5497: . B   - the matrix to test against, this can equal the first parameter
5498: - tol - tolerance, differences between entries smaller than this are counted as zero

5500:   Output Parameter:
5501: . flg - the result

5503:   Level: intermediate

5505:   Notes:
5506:   Only available for `MATAIJ` matrices.

5508:   The sequential algorithm
5509:   has a running time of the order of the number of nonzeros; the parallel
5510:   test involves parallel copies of the block off-diagonal parts of the matrix.

5512: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5513: @*/
5514: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5515: {
5516:   PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);

5518:   PetscFunctionBegin;
5521:   PetscAssertPointer(flg, 4);
5522:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5523:   PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5524:   if (f && g) {
5525:     PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5526:     PetscCall((*f)(A, B, tol, flg));
5527:   }
5528:   PetscFunctionReturn(PETSC_SUCCESS);
5529: }

5531: /*@
5532:   MatPermute - Creates a new matrix with rows and columns permuted from the
5533:   original.

5535:   Collective

5537:   Input Parameters:
5538: + mat - the matrix to permute
5539: . row - row permutation, each processor supplies only the permutation for its rows
5540: - col - column permutation, each processor supplies only the permutation for its columns

5542:   Output Parameter:
5543: . B - the permuted matrix

5545:   Level: advanced

5547:   Note:
5548:   The index sets map from row/col of permuted matrix to row/col of original matrix.
5549:   The index sets should be on the same communicator as `mat` and have the same local sizes.

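  Example usage, a minimal sketch (assumes `A` is an assembled sequential matrix; the permutations come from `MatGetOrdering()`):
.vb
  IS  rperm, cperm;
  Mat B;

  PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm));
  PetscCall(MatPermute(A, rperm, cperm, &B));
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
.ve
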
5551:   Developer Note:
5552:   If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5553:   exploit the fact that row and col are permutations, consider implementing the
5554:   more general `MatCreateSubMatrix()` instead.

5556: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5557: @*/
5558: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5559: {
5560:   PetscFunctionBegin;
5565:   PetscAssertPointer(B, 4);
5566:   PetscCheckSameComm(mat, 1, row, 2);
5567:   if (row != col) PetscCheckSameComm(row, 2, col, 3);
5568:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5569:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5570:   PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5571:   MatCheckPreallocated(mat, 1);

5573:   if (mat->ops->permute) {
5574:     PetscUseTypeMethod(mat, permute, row, col, B);
5575:     PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5576:   } else {
5577:     PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5578:   }
5579:   PetscFunctionReturn(PETSC_SUCCESS);
5580: }

5582: /*@
5583:   MatEqual - Compares two matrices.

5585:   Collective

5587:   Input Parameters:
5588: + A - the first matrix
5589: - B - the second matrix

5591:   Output Parameter:
5592: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.

5594:   Level: intermediate

5596: .seealso: [](ch_matrices), `Mat`
5597: @*/
5598: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5599: {
5600:   PetscFunctionBegin;
5605:   PetscAssertPointer(flg, 3);
5606:   PetscCheckSameComm(A, 1, B, 2);
5607:   MatCheckPreallocated(A, 1);
5608:   MatCheckPreallocated(B, 2);
5609:   PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5610:   PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5611:   PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5612:              B->cmap->N);
5613:   if (A->ops->equal && A->ops->equal == B->ops->equal) {
5614:     PetscUseTypeMethod(A, equal, B, flg);
5615:   } else {
5616:     PetscCall(MatMultEqual(A, B, 10, flg));
5617:   }
5618:   PetscFunctionReturn(PETSC_SUCCESS);
5619: }

5621: /*@
5622:   MatDiagonalScale - Scales a matrix on the left and right by diagonal
5623:   matrices that are stored as vectors.  Either of the two scaling
5624:   matrices can be `NULL`.

5626:   Collective

5628:   Input Parameters:
5629: + mat - the matrix to be scaled
5630: . l   - the left scaling vector (or `NULL`)
5631: - r   - the right scaling vector (or `NULL`)

5633:   Level: intermediate

5635:   Note:
5636:   `MatDiagonalScale()` computes $A = LAR$, where
5637:   $L$ is a diagonal matrix (stored as a vector) and $R$ is a diagonal matrix (stored as a vector).
5638:   $L$ scales the rows of the matrix and $R$ scales the columns of the matrix.

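  Example usage, a minimal sketch (assumes `A` is an assembled matrix; the rows are scaled by 2 and the columns by 1/2):
.vb
  Vec l, r;

  PetscCall(MatCreateVecs(A, &r, &l)); /* r conforms with the columns of A, l with the rows */
  PetscCall(VecSet(l, 2.0));
  PetscCall(VecSet(r, 0.5));
  PetscCall(MatDiagonalScale(A, l, r));
  PetscCall(VecDestroy(&l));
  PetscCall(VecDestroy(&r));
.ve
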
5640: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5641: @*/
5642: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5643: {
5644:   PetscFunctionBegin;
5647:   if (l) {
5649:     PetscCheckSameComm(mat, 1, l, 2);
5650:   }
5651:   if (r) {
5653:     PetscCheckSameComm(mat, 1, r, 3);
5654:   }
5655:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5656:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5657:   MatCheckPreallocated(mat, 1);
5658:   if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);

5660:   PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5661:   PetscUseTypeMethod(mat, diagonalscale, l, r);
5662:   PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5663:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5664:   if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5665:   PetscFunctionReturn(PETSC_SUCCESS);
5666: }

5668: /*@
5669:   MatScale - Scales all elements of a matrix by a given number.

5671:   Logically Collective

5673:   Input Parameters:
5674: + mat - the matrix to be scaled
5675: - a   - the scaling value

5677:   Level: intermediate

5679: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5680: @*/
5681: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5682: {
5683:   PetscFunctionBegin;
5686:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5687:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5689:   MatCheckPreallocated(mat, 1);

5691:   PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5692:   if (a != (PetscScalar)1.0) {
5693:     PetscUseTypeMethod(mat, scale, a);
5694:     PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5695:   }
5696:   PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5697:   PetscFunctionReturn(PETSC_SUCCESS);
5698: }

5700: /*@
5701:   MatNorm - Calculates various norms of a matrix.

5703:   Collective

5705:   Input Parameters:
5706: + mat  - the matrix
5707: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`

5709:   Output Parameter:
5710: . nrm - the resulting norm

5712:   Level: intermediate

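  Example usage, a minimal sketch (assumes `A` is an assembled matrix):
.vb
  PetscReal nrm1, nrmf, nrminf;

  PetscCall(MatNorm(A, NORM_1, &nrm1));          /* largest absolute column sum */
  PetscCall(MatNorm(A, NORM_FROBENIUS, &nrmf));
  PetscCall(MatNorm(A, NORM_INFINITY, &nrminf)); /* largest absolute row sum */
.ve
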
5714: .seealso: [](ch_matrices), `Mat`
5715: @*/
5716: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5717: {
5718:   PetscFunctionBegin;
5721:   PetscAssertPointer(nrm, 3);

5723:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5724:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5725:   MatCheckPreallocated(mat, 1);

5727:   PetscUseTypeMethod(mat, norm, type, nrm);
5728:   PetscFunctionReturn(PETSC_SUCCESS);
5729: }

5731: /*
5732:      This variable is used to prevent counting of MatAssemblyBegin() that
5733:    are called from within a MatAssemblyEnd().
5734: */
5735: static PetscInt MatAssemblyEnd_InUse = 0;
5736: /*@
5737:   MatAssemblyBegin - Begins assembling the matrix.  This routine should
5738:   be called after completing all calls to `MatSetValues()`.

5740:   Collective

5742:   Input Parameters:
5743: + mat  - the matrix
5744: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`

5746:   Level: beginner

5748:   Notes:
5749:   `MatSetValues()` generally caches the values that belong to other MPI processes.  The matrix is ready to
5750:   use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.

5752:   Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5753:   in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5754:   using the matrix.

5756:   ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5757:   same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5758:   a global collective operation requiring all processes that share the matrix.

5760:   Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine is compressed
5761:   out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5762:   before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.

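  Example usage, a minimal sketch (assumes `A` has had its sizes, type, and preallocation set; a flush assembly is used to switch from `INSERT_VALUES` to `ADD_VALUES`):
.vb
  PetscInt rstart, rend;

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (PetscInt i = rstart; i < rend; i++) PetscCall(MatSetValue(A, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY)); /* flush before changing the insert mode */
  PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY));
  for (PetscInt i = rstart; i < rend; i++) PetscCall(MatSetValue(A, i, i, 1.0, ADD_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); /* final assembly before using A */
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve
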
5764: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5765: @*/
5766: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5767: {
5768:   PetscFunctionBegin;
5771:   MatCheckPreallocated(mat, 1);
5772:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5773:   if (mat->assembled) {
5774:     mat->was_assembled = PETSC_TRUE;
5775:     mat->assembled     = PETSC_FALSE;
5776:   }

5778:   if (!MatAssemblyEnd_InUse) {
5779:     PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5780:     PetscTryTypeMethod(mat, assemblybegin, type);
5781:     PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5782:   } else PetscTryTypeMethod(mat, assemblybegin, type);
5783:   PetscFunctionReturn(PETSC_SUCCESS);
5784: }

5786: /*@
5787:   MatAssembled - Indicates if a matrix has been assembled and is ready for
5788:   use; for example, in matrix-vector product.

5790:   Not Collective

5792:   Input Parameter:
5793: . mat - the matrix

5795:   Output Parameter:
5796: . assembled - `PETSC_TRUE` or `PETSC_FALSE`

5798:   Level: advanced

5800: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5801: @*/
5802: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5803: {
5804:   PetscFunctionBegin;
5806:   PetscAssertPointer(assembled, 2);
5807:   *assembled = mat->assembled;
5808:   PetscFunctionReturn(PETSC_SUCCESS);
5809: }

5811: /*@
5812:   MatAssemblyEnd - Completes assembling the matrix.  This routine should
5813:   be called after `MatAssemblyBegin()`.

5815:   Collective

5817:   Input Parameters:
5818: + mat  - the matrix
5819: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`

5821:   Options Database Keys:
5822: + -mat_view ::ascii_info             - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5823: . -mat_view ::ascii_info_detail      - Prints more detailed info
5824: . -mat_view                          - Prints matrix in ASCII format
5825: . -mat_view ::ascii_matlab           - Prints matrix in MATLAB format
5826: . -mat_view draw                     - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5827: . -display <name>                    - Sets display name (default is host)
5828: . -draw_pause <sec>                  - Sets number of seconds to pause after display
5829: . -mat_view socket                   - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5830: . -viewer_socket_machine <machine>   - Machine to use for socket
5831: . -viewer_socket_port <port>         - Port number to use for socket
5832: - -mat_view binary:filename[:append] - Save matrix to file in binary format

5834:   Level: beginner

5836: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5837: @*/
5838: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5839: {
5840:   static PetscInt inassm = 0;
5841:   PetscBool       flg    = PETSC_FALSE;

5843:   PetscFunctionBegin;

5847:   inassm++;
5848:   MatAssemblyEnd_InUse++;
5849:   if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5850:     PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5851:     PetscTryTypeMethod(mat, assemblyend, type);
5852:     PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5853:   } else PetscTryTypeMethod(mat, assemblyend, type);

5855:   /* Flush assembly is not a true assembly */
5856:   if (type != MAT_FLUSH_ASSEMBLY) {
5857:     if (mat->num_ass) {
5858:       if (!mat->symmetry_eternal) {
5859:         mat->symmetric = PETSC_BOOL3_UNKNOWN;
5860:         mat->hermitian = PETSC_BOOL3_UNKNOWN;
5861:       }
5862:       if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5863:       if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5864:     }
5865:     mat->num_ass++;
5866:     mat->assembled        = PETSC_TRUE;
5867:     mat->ass_nonzerostate = mat->nonzerostate;
5868:   }

5870:   mat->insertmode = NOT_SET_VALUES;
5871:   MatAssemblyEnd_InUse--;
5872:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5873:   if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5874:     PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));

5876:     if (mat->checksymmetryonassembly) {
5877:       PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5878:       if (flg) {
5879:         PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5880:       } else {
5881:         PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5882:       }
5883:     }
5884:     if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5885:   }
5886:   inassm--;
5887:   PetscFunctionReturn(PETSC_SUCCESS);
5888: }

5890: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5891: /*@
5892:   MatSetOption - Sets a parameter option for a matrix. Some options
5893:   may be specific to certain storage formats.  Some options
5894:   determine how values will be inserted (or added). Sorted,
5895:   row-oriented input will generally assemble the fastest. The default
5896:   is row-oriented.

5898:   Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`

5900:   Input Parameters:
5901: + mat - the matrix
5902: . op  - the option, one of those listed below (and possibly others),
5903: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)

5905:   Options Describing Matrix Structure:
5906: + `MAT_SPD`                         - symmetric positive definite
5907: . `MAT_SYMMETRIC`                   - symmetric in terms of both structure and value
5908: . `MAT_HERMITIAN`                   - transpose is the complex conjugation
5909: . `MAT_STRUCTURALLY_SYMMETRIC`      - symmetric nonzero structure
5910: . `MAT_SYMMETRY_ETERNAL`            - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5911: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5912: . `MAT_SPD_ETERNAL`                 - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix

5914:    These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that they
5915:    do not need to be computed (usually at a high cost).

5917:    Options For Use with `MatSetValues()`:
5918:    Insert a logically dense subblock, which can be
5919: . `MAT_ROW_ORIENTED`                - row-oriented (default)

5921:    These options reflect the data you pass in with `MatSetValues()`; it has
5922:    nothing to do with how the data is stored internally in the matrix
5923:    data structure.

5925:    When (re)assembling a matrix, we can restrict the input for
5926:    efficiency/debugging purposes.  These options include
5927: . `MAT_NEW_NONZERO_LOCATIONS`       - additional insertions will be allowed if they generate a new nonzero (slow)
5928: . `MAT_FORCE_DIAGONAL_ENTRIES`      - forces diagonal entries to be allocated
5929: . `MAT_IGNORE_OFF_PROC_ENTRIES`     - drops off-processor entries
5930: . `MAT_NEW_NONZERO_LOCATION_ERR`    - generates an error for new matrix entry
5931: . `MAT_USE_HASH_TABLE`              - uses a hash table to speed up matrix assembly
5932: . `MAT_NO_OFF_PROC_ENTRIES`         - you know each process will only set values for its own rows, will generate an error if
5933:         any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5934:         performance for very large process counts.
5935: - `MAT_SUBSET_OFF_PROC_ENTRIES`     - you know that the first assembly after setting this flag will set a superset
5936:         of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5937:         functions, instead sending only neighbor messages.

5939:   Level: intermediate

5941:   Notes:
5942:   Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED`, all processes that share the matrix must pass the same value in `flg`!

5944:   Some options are relevant only for particular matrix types and
5945:   are thus ignored by others.  Other options are not supported by
5946:   certain matrix types and will generate an error message if set.

5948:   If using Fortran to compute a matrix, one may need to
5949:   use the column-oriented option (or convert to the row-oriented
5950:   format).

5952:   `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5953:   that would generate a new entry in the nonzero structure is instead
5954:   ignored.  Thus, if memory has not already been allocated for this particular
5955:   data, then the insertion is ignored. For dense matrices, in which
5956:   the entire array is allocated, no entries are ever ignored.
5957:   Set after the first `MatAssemblyEnd()`. If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction

5959:   `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5960:   that would generate a new entry in the nonzero structure instead produces
5961:   an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction

5963:   `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5964:   that would generate a new entry that has not been preallocated will
5965:   instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5966:   only.) This is a useful flag when debugging matrix memory preallocation.
5967:   If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction

5969:   `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5970:   other processors should be dropped, rather than stashed.
5971:   This is useful if you know that the "owning" processor is also
5972:   always generating the correct matrix entries, so that PETSc need
5973:   not transfer duplicate entries generated on another processor.

5975:   `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
5976:   searches during matrix assembly. When this flag is set, the hash table
5977:   is created during the first matrix assembly. This hash table is
5978:   used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
5979:   to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
5980:   should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
5981:   supported by `MATMPIBAIJ` format only.

5983:   `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
5984:   are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`

5986:   `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
5987:   a zero location in the matrix

5989:   `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types

5991:   `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
5992:   zero row routines and thus improves performance for very large process counts.

5994:   `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
5995:   part of the matrix (since they should match the upper triangular part).

5997:   `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
5998:   single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
5999:   with finite difference schemes with non-periodic boundary conditions.

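  Example usage, a minimal sketch (assumes `A` has had its type and sizes set):
.vb
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));                  /* declare the values symmetric */
  PetscCall(MatSetOption(A, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));           /* the symmetry survives future changes to the values */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); /* error if the preallocation is exceeded */
.ve
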
6001:   Developer Note:
6002:   `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6003:   places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6004:   to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6005:   not changed.

6007: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6008: @*/
6009: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6010: {
6011:   PetscFunctionBegin;
6013:   if (op > 0) {
6016:   }

6018:   PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);

6020:   switch (op) {
6021:   case MAT_FORCE_DIAGONAL_ENTRIES:
6022:     mat->force_diagonals = flg;
6023:     PetscFunctionReturn(PETSC_SUCCESS);
6024:   case MAT_NO_OFF_PROC_ENTRIES:
6025:     mat->nooffprocentries = flg;
6026:     PetscFunctionReturn(PETSC_SUCCESS);
6027:   case MAT_SUBSET_OFF_PROC_ENTRIES:
6028:     mat->assembly_subset = flg;
6029:     if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6030: #if !defined(PETSC_HAVE_MPIUNI)
6031:       PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6032: #endif
6033:       mat->stash.first_assembly_done = PETSC_FALSE;
6034:     }
6035:     PetscFunctionReturn(PETSC_SUCCESS);
6036:   case MAT_NO_OFF_PROC_ZERO_ROWS:
6037:     mat->nooffproczerorows = flg;
6038:     PetscFunctionReturn(PETSC_SUCCESS);
6039:   case MAT_SPD:
6040:     if (flg) {
6041:       mat->spd                    = PETSC_BOOL3_TRUE;
6042:       mat->symmetric              = PETSC_BOOL3_TRUE;
6043:       mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6044:     } else {
6045:       mat->spd = PETSC_BOOL3_FALSE;
6046:     }
6047:     break;
6048:   case MAT_SYMMETRIC:
6049:     mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6050:     if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6051: #if !defined(PETSC_USE_COMPLEX)
6052:     mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6053: #endif
6054:     break;
6055:   case MAT_HERMITIAN:
6056:     mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6057:     if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6058: #if !defined(PETSC_USE_COMPLEX)
6059:     mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6060: #endif
6061:     break;
6062:   case MAT_STRUCTURALLY_SYMMETRIC:
6063:     mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6064:     break;
6065:   case MAT_SYMMETRY_ETERNAL:
6066:     PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6067:     mat->symmetry_eternal = flg;
6068:     if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6069:     break;
6070:   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6071:     PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6072:     mat->structural_symmetry_eternal = flg;
6073:     break;
6074:   case MAT_SPD_ETERNAL:
6075:     PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6076:     mat->spd_eternal = flg;
6077:     if (flg) {
6078:       mat->structural_symmetry_eternal = PETSC_TRUE;
6079:       mat->symmetry_eternal            = PETSC_TRUE;
6080:     }
6081:     break;
6082:   case MAT_STRUCTURE_ONLY:
6083:     mat->structure_only = flg;
6084:     break;
6085:   case MAT_SORTED_FULL:
6086:     mat->sortedfull = flg;
6087:     break;
6088:   default:
6089:     break;
6090:   }
6091:   PetscTryTypeMethod(mat, setoption, op, flg);
6092:   PetscFunctionReturn(PETSC_SUCCESS);
6093: }

6095: /*@
6096:   MatGetOption - Gets a parameter option that has been set for a matrix.

6098:   Logically Collective

6100:   Input Parameters:
6101: + mat - the matrix
6102: - op  - the option, this only responds to certain options, check the code for which ones

6104:   Output Parameter:
6105: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)

6107:   Level: intermediate

6109:   Notes:
6110:   Can only be called after `MatSetSizes()` and `MatSetType()` have been set.

6112:   Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6113:   `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`

6115: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6116:     `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6117: @*/
6118: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6119: {
6120:   PetscFunctionBegin;

6124:   PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6125:   PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");

6127:   switch (op) {
6128:   case MAT_NO_OFF_PROC_ENTRIES:
6129:     *flg = mat->nooffprocentries;
6130:     break;
6131:   case MAT_NO_OFF_PROC_ZERO_ROWS:
6132:     *flg = mat->nooffproczerorows;
6133:     break;
6134:   case MAT_SYMMETRIC:
6135:     SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6136:     break;
6137:   case MAT_HERMITIAN:
6138:     SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6139:     break;
6140:   case MAT_STRUCTURALLY_SYMMETRIC:
6141:     SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6142:     break;
6143:   case MAT_SPD:
6144:     SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6145:     break;
6146:   case MAT_SYMMETRY_ETERNAL:
6147:     *flg = mat->symmetry_eternal;
6148:     break;
6149:   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6150:     *flg = mat->structural_symmetry_eternal;
6151:     break;
6152:   default:
6153:     break;
6154:   }
6155:   PetscFunctionReturn(PETSC_SUCCESS);
6156: }

6158: /*@
6159:   MatZeroEntries - Zeros all entries of a matrix.  For sparse matrices
6160:   this routine retains the old nonzero structure.

6162:   Logically Collective

6164:   Input Parameter:
6165: . mat - the matrix

6167:   Level: intermediate

6169:   Note:
6170:   If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6171:   See the Performance chapter of the users manual for information on preallocating matrices.

6173: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6174: @*/
6175: PetscErrorCode MatZeroEntries(Mat mat)
6176: {
6177:   PetscFunctionBegin;
6180:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6181:   PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6182:   MatCheckPreallocated(mat, 1);

6184:   PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6185:   PetscUseTypeMethod(mat, zeroentries);
6186:   PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6187:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6188:   PetscFunctionReturn(PETSC_SUCCESS);
6189: }

6191: /*@
6192:   MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6193:   of a set of rows and columns of a matrix.

6195:   Collective

6197:   Input Parameters:
6198: + mat     - the matrix
6199: . numRows - the number of rows/columns to zero
6200: . rows    - the global row indices
6201: . diag    - value put in the diagonal of the eliminated rows
6202: . x       - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6203: - b       - optional vector of the right-hand side, that will be adjusted by provided solution entries

6205:   Level: intermediate

6207:   Notes:
6208:   This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.

6210:   For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6211:   The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated

6213:   If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6214:   Krylov method to take advantage of the known solution on the zeroed rows.

6216:   For the parallel case, all processes that share the matrix (i.e.,
6217:   those in the communicator used for matrix creation) MUST call this
6218:   routine, regardless of whether any rows being zeroed are owned by
6219:   them.

6221:   Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`; it merely zeros those entries in the matrix, but never
6222:   removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6223:   missing.

6225:   Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6226:   list only rows local to itself).

6228:   The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.

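  Example usage, a minimal sketch (assumes `rows[]` holds `nrows` global indices of Dirichlet rows and the corresponding entries of the solution vector `x` already contain the boundary values):
.vb
  PetscCall(MatZeroRowsColumns(A, nrows, rows, 1.0, x, b)); /* unit diagonal on eliminated rows; b is adjusted using x */
.ve
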
6230: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6231:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6232: @*/
6233: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6234: {
6235:   PetscFunctionBegin;
6238:   if (numRows) PetscAssertPointer(rows, 3);
6239:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6240:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6241:   MatCheckPreallocated(mat, 1);

6243:   PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6244:   PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6245:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6246:   PetscFunctionReturn(PETSC_SUCCESS);
6247: }

6249: /*@
6250:   MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6251:   of a set of rows and columns of a matrix.

6253:   Collective

6255:   Input Parameters:
6256: + mat  - the matrix
6257: . is   - the rows to zero
6258: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6259: . x    - optional vector of solutions for zeroed rows (other entries in vector are not used)
6260: - b    - optional vector of right-hand side, that will be adjusted by provided solution

6262:   Level: intermediate

6264:   Note:
6265:   See `MatZeroRowsColumns()` for details on how this routine operates.

6267: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6268:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6269: @*/
6270: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6271: {
6272:   PetscInt        numRows;
6273:   const PetscInt *rows;

6275:   PetscFunctionBegin;
6280:   PetscCall(ISGetLocalSize(is, &numRows));
6281:   PetscCall(ISGetIndices(is, &rows));
6282:   PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6283:   PetscCall(ISRestoreIndices(is, &rows));
6284:   PetscFunctionReturn(PETSC_SUCCESS);
6285: }

6287: /*@
6288:   MatZeroRows - Zeros all entries (except possibly the main diagonal)
6289:   of a set of rows of a matrix.

6291:   Collective

6293:   Input Parameters:
6294: + mat     - the matrix
6295: . numRows - the number of rows to zero
6296: . rows    - the global row indices
6297: . diag    - value put in the diagonal of the zeroed rows
6298: . x       - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6299: - b       - optional vector of right-hand side, that will be adjusted by provided solution entries

6301:   Level: intermediate

6303:   Notes:
6304:   This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.

6306:   For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.

6308:   If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6309:   Krylov method to take advantage of the known solution on the zeroed rows.

6311:   May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns)
6312:   from the matrix.

6314:   Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6315:   but does not release memory.  Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6316:   formats this does not alter the nonzero structure.

6318:   If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6319:   of the matrix is not changed; the values are
6320:   merely zeroed.

6322:   The user can set a value in the diagonal entry (or for the `MATAIJ`
6323:   format can optionally remove the main diagonal entry from the
6324:   nonzero structure as well, by passing 0.0 as `diag`).

6326:   For the parallel case, all processes that share the matrix (i.e.,
6327:   those in the communicator used for matrix creation) MUST call this
6328:   routine, regardless of whether any rows being zeroed are owned by
6329:   them.

6331:   Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6332:   list only rows local to itself).

6334:   You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6335:   owns that are to be zeroed. This saves a global synchronization in the implementation.

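  Example usage, a minimal sketch (the row indices are hypothetical; each process may list rows it does not own):
.vb
  PetscInt rows[] = {0, 1}; /* global indices of the rows to zero */

  PetscCall(MatZeroRows(A, 2, rows, 1.0, NULL, NULL)); /* place 1.0 on each zeroed diagonal, no right-hand side adjustment */
.ve
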
6337: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6338:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6339: @*/
6340: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6341: {
6342:   PetscFunctionBegin;
6345:   if (numRows) PetscAssertPointer(rows, 3);
6346:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6347:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6348:   MatCheckPreallocated(mat, 1);

6350:   PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6351:   PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6352:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6353:   PetscFunctionReturn(PETSC_SUCCESS);
6354: }

6356: /*@
6357:   MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6358:   of a set of rows of a matrix.

6360:   Collective

6362:   Input Parameters:
6363: + mat  - the matrix
6364: . is   - index set of rows to remove (if `NULL` then no row is removed)
6365: . diag - value put in all diagonals of eliminated rows
6366: . x    - optional vector of solutions for zeroed rows (other entries in vector are not used)
6367: - b    - optional vector of right-hand side, that will be adjusted by provided solution

6369:   Level: intermediate

6371:   Note:
6372:   See `MatZeroRows()` for details on how this routine operates.

6374: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6375:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6376: @*/
6377: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6378: {
6379:   PetscInt        numRows = 0;
6380:   const PetscInt *rows    = NULL;

6382:   PetscFunctionBegin;
6385:   if (is) {
6387:     PetscCall(ISGetLocalSize(is, &numRows));
6388:     PetscCall(ISGetIndices(is, &rows));
6389:   }
6390:   PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6391:   if (is) PetscCall(ISRestoreIndices(is, &rows));
6392:   PetscFunctionReturn(PETSC_SUCCESS);
6393: }

6395: /*@
6396:   MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6397:   of a set of rows of a matrix. These rows must be local to the process.

6399:   Collective

6401:   Input Parameters:
6402: + mat     - the matrix
6403: . numRows - the number of rows to remove
6404: . rows    - the grid coordinates (and component number when dof > 1) for matrix rows
6405: . diag    - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6406: . x       - optional vector of solutions for zeroed rows (other entries in vector are not used)
6407: - b       - optional vector of right-hand side, that will be adjusted by provided solution

6409:   Level: intermediate

6411:   Notes:
6412:   See `MatZeroRows()` for details on how this routine operates.

6414:   The grid coordinates are across the entire grid, not just the local portion

6416:   For periodic boundary conditions use negative indices for values to the left (below 0); these are
6417:   obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6418:   etc., to obtain values obtained by wrapping the values from the left edge. This does not work for anything but the
6419:   `DM_BOUNDARY_PERIODIC` boundary type.

6421:   For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6422:   a single value per point) you can skip filling those indices.

6424:   Fortran Note:
6425:   `idxm` and `idxn` should be declared as
6426: $     MatStencil idxm(4, m)
6427:   and the values inserted using
6428: .vb
6429:     idxm(MatStencil_i, 1) = i
6430:     idxm(MatStencil_j, 1) = j
6431:     idxm(MatStencil_k, 1) = k
6432:     idxm(MatStencil_c, 1) = c
6433:    etc
6434: .ve

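  Example usage, a minimal sketch in C (assumes `A` was obtained from a 2d `DMDA` with a single degree of freedom and `(i, j)` are the global grid indices of a boundary point owned by this process):
.vb
  MatStencil row = {0};

  row.i = i;
  row.j = j;
  PetscCall(MatZeroRowsStencil(A, 1, &row, 1.0, NULL, NULL));
.ve
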
6436: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsl()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6437:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6438: @*/
6439: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6440: {
6441:   PetscInt  dim    = mat->stencil.dim;
6442:   PetscInt  sdim   = dim - (1 - (PetscInt)mat->stencil.noc);
6443:   PetscInt *dims   = mat->stencil.dims + 1;
6444:   PetscInt *starts = mat->stencil.starts;
6445:   PetscInt *dxm    = (PetscInt *)rows;
6446:   PetscInt *jdxm, i, j, tmp, numNewRows = 0;

6448:   PetscFunctionBegin;
6451:   if (numRows) PetscAssertPointer(rows, 3);

6453:   PetscCall(PetscMalloc1(numRows, &jdxm));
6454:   for (i = 0; i < numRows; ++i) {
6455:     /* Skip unused dimensions (they are ordered k, j, i, c) */
6456:     for (j = 0; j < 3 - sdim; ++j) dxm++;
6457:     /* Local index in X dir */
6458:     tmp = *dxm++ - starts[0];
6459:     /* Loop over remaining dimensions */
6460:     for (j = 0; j < dim - 1; ++j) {
6461:       /* If nonlocal, set index to be negative */
6462:       if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6463:       /* Update local index */
6464:       else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6465:     }
6466:     /* Skip component slot if necessary */
6467:     if (mat->stencil.noc) dxm++;
6468:     /* Local row number */
6469:     if (tmp >= 0) jdxm[numNewRows++] = tmp;
6470:   }
6471:   PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6472:   PetscCall(PetscFree(jdxm));
6473:   PetscFunctionReturn(PETSC_SUCCESS);
6474: }

6476: /*@
6477:   MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6478:   of a set of rows and columns of a matrix.

6480:   Collective

6482:   Input Parameters:
6483: + mat     - the matrix
6484: . numRows - the number of rows/columns to remove
6485: . rows    - the grid coordinates (and component number when dof > 1) for matrix rows
6486: . diag    - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6487: . x       - optional vector of solutions for zeroed rows (other entries in vector are not used)
6488: - b       - optional vector of right-hand side, that will be adjusted by provided solution

6490:   Level: intermediate

6492:   Notes:
6493:   See `MatZeroRowsColumns()` for details on how this routine operates.

6495:   The grid coordinates are across the entire grid, not just the local portion

6497:   For periodic boundary conditions use negative indices for values to the left (below 0); these are
6498:   obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6499:   etc., to obtain values obtained by wrapping the values from the left edge. This does not work for anything but the
6500:   `DM_BOUNDARY_PERIODIC` boundary type.

6502:   For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6503:   a single value per point) you can skip filling those indices.

6505:   Fortran Note:
6506:   `idxm` and `idxn` should be declared as
6507: $     MatStencil idxm(4, m)
6508:   and the values inserted using
6509: .vb
6510:     idxm(MatStencil_i, 1) = i
6511:     idxm(MatStencil_j, 1) = j
6512:     idxm(MatStencil_k, 1) = k
6513:     idxm(MatStencil_c, 1) = c
6514:     etc
6515: .ve

6517: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6518:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6519: @*/
6520: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6521: {
6522:   PetscInt  dim    = mat->stencil.dim;
6523:   PetscInt  sdim   = dim - (1 - (PetscInt)mat->stencil.noc);
6524:   PetscInt *dims   = mat->stencil.dims + 1;
6525:   PetscInt *starts = mat->stencil.starts;
6526:   PetscInt *dxm    = (PetscInt *)rows;
6527:   PetscInt *jdxm, i, j, tmp, numNewRows = 0;

6529:   PetscFunctionBegin;
6532:   if (numRows) PetscAssertPointer(rows, 3);

6534:   PetscCall(PetscMalloc1(numRows, &jdxm));
6535:   for (i = 0; i < numRows; ++i) {
6536:     /* Skip unused dimensions (they are ordered k, j, i, c) */
6537:     for (j = 0; j < 3 - sdim; ++j) dxm++;
6538:     /* Local index in X dir */
6539:     tmp = *dxm++ - starts[0];
6540:     /* Loop over remaining dimensions */
6541:     for (j = 0; j < dim - 1; ++j) {
6542:       /* If nonlocal, set index to be negative */
6543:       if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6544:       /* Update local index */
6545:       else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6546:     }
6547:     /* Skip component slot if necessary */
6548:     if (mat->stencil.noc) dxm++;
6549:     /* Local row number */
6550:     if (tmp >= 0) jdxm[numNewRows++] = tmp;
6551:   }
6552:   PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6553:   PetscCall(PetscFree(jdxm));
6554:   PetscFunctionReturn(PETSC_SUCCESS);
6555: }
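
/*
  A minimal usage sketch (illustrative, not part of matrix.c): zeroing the row and column of one
  boundary grid point by its grid coordinates. It assumes `A` was obtained from DMCreateMatrix()
  on a 2d DM with a single degree of freedom per point, so the stencil information is already set.
*/
static PetscErrorCode ExampleZeroBoundaryPoint(Mat A, PetscInt i, PetscInt j, Vec x, Vec b)
{
  MatStencil row;

  PetscFunctionBegin;
  row.i = i; /* global grid index in the x direction */
  row.j = j; /* global grid index in the y direction */
  row.k = 0; /* unused in 2d */
  row.c = 0; /* single degree of freedom per point */
  PetscCall(MatZeroRowsColumnsStencil(A, 1, &row, 1.0, x, b));
  PetscFunctionReturn(PETSC_SUCCESS);
}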

6557: /*@C
6558:   MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6559:   of a set of rows of a matrix; using local numbering of rows.

6561:   Collective

6563:   Input Parameters:
6564: + mat     - the matrix
6565: . numRows - the number of rows to remove
6566: . rows    - the local row indices
6567: . diag    - value put in all diagonals of eliminated rows
6568: . x       - optional vector of solutions for zeroed rows (other entries in vector are not used)
6569: - b       - optional vector of the right-hand side, which will be adjusted by the provided solution

6571:   Level: intermediate

6573:   Notes:
6574:   Before calling `MatZeroRowsLocal()`, the user must first set the
6575:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.

6577:   See `MatZeroRows()` for details on how this routine operates.

6579: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6580:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6581: @*/
6582: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6583: {
6584:   PetscFunctionBegin;
6587:   if (numRows) PetscAssertPointer(rows, 3);
6588:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6589:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6590:   MatCheckPreallocated(mat, 1);

6592:   if (mat->ops->zerorowslocal) {
6593:     PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6594:   } else {
6595:     IS              is, newis;
6596:     const PetscInt *newRows;

6598:     PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6599:     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6600:     PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6601:     PetscCall(ISGetIndices(newis, &newRows));
6602:     PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6603:     PetscCall(ISRestoreIndices(newis, &newRows));
6604:     PetscCall(ISDestroy(&newis));
6605:     PetscCall(ISDestroy(&is));
6606:   }
6607:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6608:   PetscFunctionReturn(PETSC_SUCCESS);
6609: }
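
/*
  A minimal usage sketch (illustrative, not part of matrix.c): zeroing rows given in local
  numbering. It assumes the matrix already carries a local-to-global mapping, for example because
  it was obtained with DMCreateMatrix(); the row indices chosen here are purely illustrative.
*/
static PetscErrorCode ExampleZeroLocalRows(Mat A, Vec x, Vec b)
{
  const PetscInt rows[2] = {0, 1}; /* the first two locally numbered rows */

  PetscFunctionBegin;
  PetscCall(MatZeroRowsLocal(A, 2, rows, 1.0, x, b));
  PetscFunctionReturn(PETSC_SUCCESS);
}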

6611: /*@
6612:   MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6613:   of a set of rows of a matrix; using local numbering of rows.

6615:   Collective

6617:   Input Parameters:
6618: + mat  - the matrix
6619: . is   - index set of rows to remove
6620: . diag - value put in all diagonals of eliminated rows
6621: . x    - optional vector of solutions for zeroed rows (other entries in vector are not used)
6622: - b    - optional vector of the right-hand side, which will be adjusted by the provided solution

6624:   Level: intermediate

6626:   Notes:
6627:   Before calling `MatZeroRowsLocalIS()`, the user must first set the
6628:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.

6630:   See `MatZeroRows()` for details on how this routine operates.

6632: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6633:           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6634: @*/
6635: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6636: {
6637:   PetscInt        numRows;
6638:   const PetscInt *rows;

6640:   PetscFunctionBegin;
6644:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6645:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6646:   MatCheckPreallocated(mat, 1);

6648:   PetscCall(ISGetLocalSize(is, &numRows));
6649:   PetscCall(ISGetIndices(is, &rows));
6650:   PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6651:   PetscCall(ISRestoreIndices(is, &rows));
6652:   PetscFunctionReturn(PETSC_SUCCESS);
6653: }

6655: /*@
6656:   MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6657:   of a set of rows and columns of a matrix; using local numbering of rows.

6659:   Collective

6661:   Input Parameters:
6662: + mat     - the matrix
6663: . numRows - the number of rows to remove
6664: . rows    - the local row indices
6665: . diag    - value put in all diagonals of eliminated rows
6666: . x       - optional vector of solutions for zeroed rows (other entries in vector are not used)
6667: - b       - optional vector of the right-hand side, which will be adjusted by the provided solution

6669:   Level: intermediate

6671:   Notes:
6672:   Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6673:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.

6675:   See `MatZeroRowsColumns()` for details on how this routine operates.

6677: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6678:           `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6679: @*/
6680: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6681: {
6682:   IS              is, newis;
6683:   const PetscInt *newRows;

6685:   PetscFunctionBegin;
6688:   if (numRows) PetscAssertPointer(rows, 3);
6689:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6690:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6691:   MatCheckPreallocated(mat, 1);

6693:   PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6694:   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6695:   PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6696:   PetscCall(ISGetIndices(newis, &newRows));
6697:   PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6698:   PetscCall(ISRestoreIndices(newis, &newRows));
6699:   PetscCall(ISDestroy(&newis));
6700:   PetscCall(ISDestroy(&is));
6701:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6702:   PetscFunctionReturn(PETSC_SUCCESS);
6703: }

6705: /*@
6706:   MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6707:   of a set of rows and columns of a matrix; using local numbering of rows.

6709:   Collective

6711:   Input Parameters:
6712: + mat  - the matrix
6713: . is   - index set of rows to remove
6714: . diag - value put in all diagonals of eliminated rows
6715: . x    - optional vector of solutions for zeroed rows (other entries in vector are not used)
6716: - b    - optional vector of the right-hand side, which will be adjusted by the provided solution

6718:   Level: intermediate

6720:   Notes:
6721:   Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6722:   local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.

6724:   See `MatZeroRowsColumns()` for details on how this routine operates.

6726: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6727:           `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6728: @*/
6729: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6730: {
6731:   PetscInt        numRows;
6732:   const PetscInt *rows;

6734:   PetscFunctionBegin;
6738:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6739:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6740:   MatCheckPreallocated(mat, 1);

6742:   PetscCall(ISGetLocalSize(is, &numRows));
6743:   PetscCall(ISGetIndices(is, &rows));
6744:   PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6745:   PetscCall(ISRestoreIndices(is, &rows));
6746:   PetscFunctionReturn(PETSC_SUCCESS);
6747: }

6749: /*@C
6750:   MatGetSize - Returns the number of global rows and columns in a matrix.

6752:   Not Collective

6754:   Input Parameter:
6755: . mat - the matrix

6757:   Output Parameters:
6758: + m - the number of global rows
6759: - n - the number of global columns

6761:   Level: beginner

6763:   Note:
6764:   Both output parameters can be `NULL` on input.

6766: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6767: @*/
6768: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6769: {
6770:   PetscFunctionBegin;
6772:   if (m) *m = mat->rmap->N;
6773:   if (n) *n = mat->cmap->N;
6774:   PetscFunctionReturn(PETSC_SUCCESS);
6775: }

6777: /*@C
6778:   MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6779:   of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.

6781:   Not Collective

6783:   Input Parameter:
6784: . mat - the matrix

6786:   Output Parameters:
6787: + m - the number of local rows, use `NULL` to not obtain this value
6788: - n - the number of local columns, use `NULL` to not obtain this value

6790:   Level: beginner

6792: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6793: @*/
6794: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6795: {
6796:   PetscFunctionBegin;
6798:   if (m) PetscAssertPointer(m, 2);
6799:   if (n) PetscAssertPointer(n, 3);
6800:   if (m) *m = mat->rmap->n;
6801:   if (n) *n = mat->cmap->n;
6802:   PetscFunctionReturn(PETSC_SUCCESS);
6803: }
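
/*
  A minimal usage sketch (illustrative, not part of matrix.c): querying the global and local
  dimensions of an already set up matrix; each process prints its own local sizes.
*/
static PetscErrorCode ExampleQuerySizes(Mat A)
{
  PetscInt M, N, m, n;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &M, &N));      /* global numbers of rows and columns */
  PetscCall(MatGetLocalSize(A, &m, &n)); /* local numbers of rows and columns */
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "global %" PetscInt_FMT " x %" PetscInt_FMT ", local %" PetscInt_FMT " x %" PetscInt_FMT "\n", M, N, m, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}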

6805: /*@
6806:   MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the locally owned rows of a
6807:   vector that one multiplies this matrix by.

6809:   Not Collective, unless the matrix has not been allocated, in which case it is collective

6811:   Input Parameter:
6812: . mat - the matrix

6814:   Output Parameters:
6815: + m - the global index of the first local column, use `NULL` to not obtain this value
6816: - n - one more than the global index of the last local column, use `NULL` to not obtain this value

6818:   Level: developer

6820:   Note:
6821:   Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6822:   Layouts](sec_matlayout) for details on matrix layouts.

6824: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6825: @*/
6826: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6827: {
6828:   PetscFunctionBegin;
6831:   if (m) PetscAssertPointer(m, 2);
6832:   if (n) PetscAssertPointer(n, 3);
6833:   MatCheckPreallocated(mat, 1);
6834:   if (m) *m = mat->cmap->rstart;
6835:   if (n) *n = mat->cmap->rend;
6836:   PetscFunctionReturn(PETSC_SUCCESS);
6837: }

6839: /*@C
6840:   MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6841:   this MPI process.

6843:   Not Collective

6845:   Input Parameter:
6846: . mat - the matrix

6848:   Output Parameters:
6849: + m - the global index of the first local row, use `NULL` to not obtain this value
6850: - n - one more than the global index of the last local row, use `NULL` to not obtain this value

6852:   Level: beginner

6854:   Note:
6855:   For all matrices it returns the range of matrix rows associated with rows of a vector that
6856:   would contain the result of a matrix-vector product with this matrix. See [Matrix
6857:   Layouts](sec_matlayout) for details on matrix layouts.

6859: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`,
6860:           `PetscLayout`
6861: @*/
6862: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6863: {
6864:   PetscFunctionBegin;
6867:   if (m) PetscAssertPointer(m, 2);
6868:   if (n) PetscAssertPointer(n, 3);
6869:   MatCheckPreallocated(mat, 1);
6870:   if (m) *m = mat->rmap->rstart;
6871:   if (n) *n = mat->rmap->rend;
6872:   PetscFunctionReturn(PETSC_SUCCESS);
6873: }
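
/*
  A minimal usage sketch (illustrative, not part of matrix.c): using the ownership range to set
  values only in locally owned rows; assembling an identity matrix here is purely illustrative.
*/
static PetscErrorCode ExampleSetLocalDiagonal(Mat A)
{
  PetscInt rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (PetscInt row = rstart; row < rend; row++) {
    PetscScalar v = 1.0;

    PetscCall(MatSetValues(A, 1, &row, 1, &row, &v, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}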

6875: /*@C
6876:   MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6877:   `MATSCALAPACK`, returns the range of matrix rows owned by each process.

6879:   Not Collective, unless matrix has not been allocated

6881:   Input Parameter:
6882: . mat - the matrix

6884:   Output Parameter:
6885: . ranges - start of each process's portion plus one more than the total length at the end

6887:   Level: beginner

6889:   Note:
6890:   For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6891:   would contain the result of a matrix-vector product with this matrix. See [Matrix
6892:   Layouts](sec_matlayout) for details on matrix layouts.

6894: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6895: @*/
6896: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
6897: {
6898:   PetscFunctionBegin;
6901:   MatCheckPreallocated(mat, 1);
6902:   PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6903:   PetscFunctionReturn(PETSC_SUCCESS);
6904: }

6906: /*@C
6907:   MatGetOwnershipRangesColumn - Returns, for each process, the range of matrix columns associated with that process's rows of a
6908:   vector one multiplies this matrix by.

6910:   Not Collective, unless matrix has not been allocated

6912:   Input Parameter:
6913: . mat - the matrix

6915:   Output Parameter:
6916: . ranges - start of each process's portion plus one more than the total length at the end

6918:   Level: beginner

6920:   Note:
6921:   Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6922:   Layouts](sec_matlayout) for details on matrix layouts.

6924: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`
6925: @*/
6926: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
6927: {
6928:   PetscFunctionBegin;
6931:   MatCheckPreallocated(mat, 1);
6932:   PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
6933:   PetscFunctionReturn(PETSC_SUCCESS);
6934: }

6936: /*@C
6937:   MatGetOwnershipIS - Get row and column ownership of a matrix's values as index sets.

6939:   Not Collective

6941:   Input Parameter:
6942: . A - matrix

6944:   Output Parameters:
6945: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
6946: - cols - columns in which this process owns elements, use `NULL` to not obtain this value

6948:   Level: intermediate

6950:   Note:
6951:   For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
6952:   returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
6953:   `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
6954:   details on matrix layouts.

6956: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
6957: @*/
6958: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
6959: {
6960:   PetscErrorCode (*f)(Mat, IS *, IS *);

6962:   PetscFunctionBegin;
6963:   MatCheckPreallocated(A, 1);
6964:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
6965:   if (f) {
6966:     PetscCall((*f)(A, rows, cols));
6967:   } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6968:     if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
6969:     if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
6970:   }
6971:   PetscFunctionReturn(PETSC_SUCCESS);
6972: }

6974: /*@C
6975:   MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
6976:   Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
6977:   to complete the factorization.

6979:   Collective

6981:   Input Parameters:
6982: + fact - the factorized matrix obtained with `MatGetFactor()`
6983: . mat  - the matrix
6984: . row  - row permutation
6985: . col  - column permutation
6986: - info - structure containing
6987: .vb
6988:       levels - number of levels of fill.
6989:       expected fill - as ratio of original fill.
6990:       1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6991:                 missing diagonal entries)
6992: .ve

6994:   Level: developer

6996:   Notes:
6997:   See [Matrix Factorization](sec_matfactor) for additional information.

6999:   Most users should employ the `KSP` interface for linear solvers
7000:   instead of working directly with matrix algebra routines such as this.
7001:   See, e.g., `KSPCreate()`.

7003:   Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`

7005:   Developer Note:
7006:   The Fortran interface is not autogenerated as the
7007:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

7009: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
7010:           `MatGetOrdering()`, `MatFactorInfo`
7011: @*/
7012: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7013: {
7014:   PetscFunctionBegin;
7019:   PetscAssertPointer(info, 5);
7020:   PetscAssertPointer(fact, 1);
7021:   PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7022:   PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7023:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7024:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7025:   MatCheckPreallocated(mat, 2);

7027:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7028:   PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7029:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7030:   PetscFunctionReturn(PETSC_SUCCESS);
7031: }
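
/*
  A minimal usage sketch (illustrative, not part of matrix.c) of one possible ILU(k) workflow
  through this low-level interface; most users would instead use KSP with -pc_type ilu. The
  choice of MATSOLVERPETSC, the natural ordering, and the fill parameters are assumptions made
  for illustration only.
*/
static PetscErrorCode ExampleILUSolve(Mat A, Vec b, Vec x)
{
  Mat           F;
  IS            row, col;
  MatFactorInfo info;

  PetscFunctionBegin;
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
  PetscCall(MatFactorInfoInitialize(&info));
  info.levels = 1;   /* ILU(1): one level of fill */
  info.fill   = 2.0; /* expected fill as a ratio of the original nonzeros */
  PetscCall(MatILUFactorSymbolic(F, A, row, col, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
  PetscCall(MatDestroy(&F));
  PetscFunctionReturn(PETSC_SUCCESS);
}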

7033: /*@C
7034:   MatICCFactorSymbolic - Performs symbolic incomplete
7035:   Cholesky factorization for a symmetric matrix.  Use
7036:   `MatCholeskyFactorNumeric()` to complete the factorization.

7038:   Collective

7040:   Input Parameters:
7041: + fact - the factorized matrix obtained with `MatGetFactor()`
7042: . mat  - the matrix to be factored
7043: . perm - row and column permutation
7044: - info - structure containing
7045: .vb
7046:       levels - number of levels of fill.
7047:       expected fill - as ratio of original fill.
7048: .ve

7050:   Level: developer

7052:   Notes:
7053:   Most users should employ the `KSP` interface for linear solvers
7054:   instead of working directly with matrix algebra routines such as this.
7055:   See, e.g., `KSPCreate()`.

7057:   This uses the definition of level of fill as in Y. Saad {cite}`saad2003`

7059:   Developer Note:
7060:   The Fortran interface is not autogenerated as the
7061:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

7063: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7064: @*/
7065: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7066: {
7067:   PetscFunctionBegin;
7071:   PetscAssertPointer(info, 4);
7072:   PetscAssertPointer(fact, 1);
7073:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7074:   PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7075:   PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7076:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7077:   MatCheckPreallocated(mat, 2);

7079:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7080:   PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7081:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7082:   PetscFunctionReturn(PETSC_SUCCESS);
7083: }

7085: /*@C
7086:   MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7087:   points to an array of valid matrices, they may be reused to store the new
7088:   submatrices.

7090:   Collective

7092:   Input Parameters:
7093: + mat   - the matrix
7094: . n     - the number of submatrices to be extracted (on this processor, may be zero)
7095: . irow  - index set of rows to extract
7096: . icol  - index set of columns to extract
7097: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

7099:   Output Parameter:
7100: . submat - the array of submatrices

7102:   Level: advanced

7104:   Notes:
7105:   `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7106:   (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7107:   to extract a parallel submatrix.

7109:   Some matrix types place restrictions on the row and column
7110:   indices, such as that they be sorted or that they be equal to each other.

7112:   The index sets may not have duplicate entries.

7114:   When extracting submatrices from a parallel matrix, each processor can
7115:   form a different submatrix by setting the rows and columns of its
7116:   individual index sets according to the local submatrix desired.

7118:   When finished using the submatrices, the user should destroy
7119:   them with `MatDestroySubMatrices()`.

7121:   `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7122:   original matrix has not changed from that last call to `MatCreateSubMatrices()`.

7124:   This routine creates the matrices in submat; you should NOT create them before
7125:   calling it. It also allocates the array of matrix pointers submat.

7127:   For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7128:   request one row/column in a block, they must request all rows/columns that are in
7129:   that block. For example, if the block size is 2 you cannot request just row 0 and
7130:   column 0.

7132:   Fortran Note:
7133:   The Fortran interface is slightly different from that given below; it
7134:   requires one to pass in as `submat` a `Mat` (integer) array of size at least n+1.

7136: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7137: @*/
7138: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7139: {
7140:   PetscInt  i;
7141:   PetscBool eq;

7143:   PetscFunctionBegin;
7146:   if (n) {
7147:     PetscAssertPointer(irow, 3);
7149:     PetscAssertPointer(icol, 4);
7151:   }
7152:   PetscAssertPointer(submat, 6);
7153:   if (n && scall == MAT_REUSE_MATRIX) {
7154:     PetscAssertPointer(*submat, 6);
7156:   }
7157:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7158:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7159:   MatCheckPreallocated(mat, 1);
7160:   PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7161:   PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7162:   PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7163:   for (i = 0; i < n; i++) {
7164:     (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7165:     PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7166:     if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7167: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7168:     if (mat->boundtocpu && mat->bindingpropagates) {
7169:       PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7170:       PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7171:     }
7172: #endif
7173:   }
7174:   PetscFunctionReturn(PETSC_SUCCESS);
7175: }
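
/*
  A minimal usage sketch (illustrative, not part of matrix.c): each process extracts one
  sequential submatrix, here simply its block of locally owned rows against the same global
  indices as columns; real applications choose the index sets from their domain decomposition.
*/
static PetscErrorCode ExampleExtractLocalBlock(Mat A)
{
  IS       is;
  Mat     *submat;
  PetscInt rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
  PetscCall(MatCreateSubMatrices(A, 1, &is, &is, MAT_INITIAL_MATRIX, &submat));
  /* ... work with the sequential matrix submat[0] here ... */
  PetscCall(MatDestroySubMatrices(1, &submat));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(PETSC_SUCCESS);
}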

7177: /*@C
7178:   MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).

7180:   Collective

7182:   Input Parameters:
7183: + mat   - the matrix
7184: . n     - the number of submatrices to be extracted
7185: . irow  - index set of rows to extract
7186: . icol  - index set of columns to extract
7187: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

7189:   Output Parameter:
7190: . submat - the array of submatrices

7192:   Level: advanced

7194:   Note:
7195:   This is used by `PCGASM`

7197: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7198: @*/
7199: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7200: {
7201:   PetscInt  i;
7202:   PetscBool eq;

7204:   PetscFunctionBegin;
7207:   if (n) {
7208:     PetscAssertPointer(irow, 3);
7210:     PetscAssertPointer(icol, 4);
7212:   }
7213:   PetscAssertPointer(submat, 6);
7214:   if (n && scall == MAT_REUSE_MATRIX) {
7215:     PetscAssertPointer(*submat, 6);
7217:   }
7218:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7219:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7220:   MatCheckPreallocated(mat, 1);

7222:   PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7223:   PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7224:   PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7225:   for (i = 0; i < n; i++) {
7226:     PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7227:     if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7228:   }
7229:   PetscFunctionReturn(PETSC_SUCCESS);
7230: }

7232: /*@C
7233:   MatDestroyMatrices - Destroys an array of matrices.

7235:   Collective

7237:   Input Parameters:
7238: + n   - the number of local matrices
7239: - mat - the matrices (this is a pointer to the array of matrices)

7241:   Level: advanced

7243:   Note:
7244:   Frees not only the matrices, but also the array that contains the matrices

7246:   Fortran Note:
7247:   This does not free the array.

7249: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7250: @*/
7251: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7252: {
7253:   PetscInt i;

7255:   PetscFunctionBegin;
7256:   if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7257:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7258:   PetscAssertPointer(mat, 2);

7260:   for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));

7262:   /* memory is allocated even if n = 0 */
7263:   PetscCall(PetscFree(*mat));
7264:   PetscFunctionReturn(PETSC_SUCCESS);
7265: }

7267: /*@C
7268:   MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.

7270:   Collective

7272:   Input Parameters:
7273: + n   - the number of local matrices
7274: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7275:                        sequence of `MatCreateSubMatrices()`)

7277:   Level: advanced

7279:   Note:
7280:   Frees not only the matrices, but also the array that contains the matrices

7282:   Fortran Note:
7283:   This does not free the array.

7285: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7286: @*/
7287: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7288: {
7289:   Mat mat0;

7291:   PetscFunctionBegin;
7292:   if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7293:   /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7294:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7295:   PetscAssertPointer(mat, 2);

7297:   mat0 = (*mat)[0];
7298:   if (mat0 && mat0->ops->destroysubmatrices) {
7299:     PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7300:   } else {
7301:     PetscCall(MatDestroyMatrices(n, mat));
7302:   }
7303:   PetscFunctionReturn(PETSC_SUCCESS);
7304: }

7306: /*@
7307:   MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process

7309:   Collective

7311:   Input Parameter:
7312: . mat - the matrix

7314:   Output Parameter:
7315: . matstruct - the sequential matrix with the nonzero structure of `mat`

7317:   Level: developer

7319: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7320: @*/
7321: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7322: {
7323:   PetscFunctionBegin;
7325:   PetscAssertPointer(matstruct, 2);

7328:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7329:   MatCheckPreallocated(mat, 1);

7331:   PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7332:   PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7333:   PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7334:   PetscFunctionReturn(PETSC_SUCCESS);
7335: }

7337: /*@C
7338:   MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.

7340:   Collective

7342:   Input Parameter:
7343: . mat - the matrix

7345:   Level: advanced

7347:   Note:
7348:   This is not needed, one can just call `MatDestroy()`

7350: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7351: @*/
7352: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7353: {
7354:   PetscFunctionBegin;
7355:   PetscAssertPointer(mat, 1);
7356:   PetscCall(MatDestroy(mat));
7357:   PetscFunctionReturn(PETSC_SUCCESS);
7358: }

7360: /*@
7361:   MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7362:   replaces the index sets by larger ones that represent submatrices with
7363:   additional overlap.

7365:   Collective

7367:   Input Parameters:
7368: + mat - the matrix
7369: . n   - the number of index sets
7370: . is  - the array of index sets (these index sets will be changed during the call)
7371: - ov  - the additional overlap requested

7373:   Options Database Key:
7374: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)

7376:   Level: developer

7378:   Note:
7379:   The computed overlap preserves the matrix block sizes when the blocks are square.
7380:   That is, if a matrix nonzero for a given block would increase the overlap, all columns associated with
7381:   that block are included in the overlap, regardless of whether each specific column would increase the overlap.

7383: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7384: @*/
7385: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7386: {
7387:   PetscInt i, bs, cbs;

7389:   PetscFunctionBegin;
7393:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7394:   if (n) {
7395:     PetscAssertPointer(is, 3);
7397:   }
7398:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7399:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7400:   MatCheckPreallocated(mat, 1);

7402:   if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7403:   PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7404:   PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7405:   PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7406:   PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7407:   if (bs == cbs) {
7408:     for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7409:   }
7410:   PetscFunctionReturn(PETSC_SUCCESS);
7411: }
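
/*
  A minimal usage sketch (illustrative, not part of matrix.c): growing an index set by one level
  of overlap, as an additive Schwarz setup might do. Starting from the locally owned rows is an
  assumption made for illustration.
*/
static PetscErrorCode ExampleGrowOverlap(Mat A, IS *is)
{
  PetscInt rstart, rend;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, is));
  PetscCall(MatIncreaseOverlap(A, 1, is, 1)); /* replaces *is with an enlarged index set */
  PetscFunctionReturn(PETSC_SUCCESS);
}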

7413: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);

7415: /*@
7416:   MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7417:   a sub communicator, replaces the index sets by larger ones that represent submatrices with
7418:   additional overlap.

7420:   Collective

7422:   Input Parameters:
7423: + mat - the matrix
7424: . n   - the number of index sets
7425: . is  - the array of index sets (these index sets will be changed during the call)
7426: - ov  - the additional overlap requested

7428:   Options Database Key:
7429: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)

7431:   Level: developer

7433: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7434: @*/
7435: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7436: {
7437:   PetscInt i;

7439:   PetscFunctionBegin;
7442:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7443:   if (n) {
7444:     PetscAssertPointer(is, 3);
7446:   }
7447:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7448:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7449:   MatCheckPreallocated(mat, 1);
7450:   if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7451:   PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7452:   for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7453:   PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7454:   PetscFunctionReturn(PETSC_SUCCESS);
7455: }

7457: /*@
7458:   MatGetBlockSize - Returns the matrix block size.

7460:   Not Collective

7462:   Input Parameter:
7463: . mat - the matrix

7465:   Output Parameter:
7466: . bs - block size

7468:   Level: intermediate

7470:   Notes:
7471:   The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.

7473:   If the block size has not been set yet this routine returns 1.

7475: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7476: @*/
7477: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7478: {
7479:   PetscFunctionBegin;
7481:   PetscAssertPointer(bs, 2);
7482:   *bs = PetscAbs(mat->rmap->bs);
7483:   PetscFunctionReturn(PETSC_SUCCESS);
7484: }

7486: /*@
7487:   MatGetBlockSizes - Returns the matrix block row and column sizes.

7489:   Not Collective

7491:   Input Parameter:
7492: . mat - the matrix

7494:   Output Parameters:
7495: + rbs - row block size
7496: - cbs - column block size

7498:   Level: intermediate

7500:   Notes:
7501:   The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7502:   If you pass a different block size for the columns than the rows, the row block size determines the square block storage.

7504:   If a block size has not been set yet this routine returns 1.

7506: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7507: @*/
7508: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7509: {
7510:   PetscFunctionBegin;
7512:   if (rbs) PetscAssertPointer(rbs, 2);
7513:   if (cbs) PetscAssertPointer(cbs, 3);
7514:   if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7515:   if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7516:   PetscFunctionReturn(PETSC_SUCCESS);
7517: }

7519: /*@
7520:   MatSetBlockSize - Sets the matrix block size.

7522:   Logically Collective

7524:   Input Parameters:
7525: + mat - the matrix
7526: - bs  - block size

7528:   Level: intermediate

7530:   Notes:
7531:   The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7532:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or it will default to 1), and the block size cannot be changed later.

7534:   For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7535:   is compatible with the matrix local sizes.

7537: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7538: @*/
7539: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7540: {
7541:   PetscFunctionBegin;
7544:   PetscCall(MatSetBlockSizes(mat, bs, bs));
7545:   PetscFunctionReturn(PETSC_SUCCESS);
7546: }
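
/*
  A minimal usage sketch (illustrative, not part of matrix.c): creating an AIJ matrix with three
  degrees of freedom per mesh node and declaring the block size before setup; all sizes here are
  assumptions made for illustration.
*/
static PetscErrorCode ExampleCreateBlockedMatrix(MPI_Comm comm, PetscInt nLocalNodes, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, 3 * nLocalNodes, 3 * nLocalNodes, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*A, MATAIJ));
  PetscCall(MatSetBlockSize(*A, 3)); /* declared before MatSetUp()/preallocation */
  PetscCall(MatSetUp(*A));
  PetscFunctionReturn(PETSC_SUCCESS);
}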

7548: typedef struct {
7549:   PetscInt         n;
7550:   IS              *is;
7551:   Mat             *mat;
7552:   PetscObjectState nonzerostate;
7553:   Mat              C;
7554: } EnvelopeData;

7556: static PetscErrorCode EnvelopeDataDestroy(void *ptr)
7557: {
7558:   EnvelopeData *edata = (EnvelopeData *)ptr;

7560:   PetscFunctionBegin;
7561:   for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7562:   PetscCall(PetscFree(edata->is));
7563:   PetscCall(PetscFree(edata));
7564:   PetscFunctionReturn(PETSC_SUCCESS);
7565: }

7567: /*@
7568:   MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7569:   the sizes of these blocks in the matrix. An individual block may lie over several processes.

7571:   Collective

7573:   Input Parameter:
7574: . mat - the matrix

7576:   Level: intermediate

7578:   Notes:
7579:   There can be zeros within the blocks

7581:   The blocks can overlap between processes, including lying on more than two processes

7583: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7584: @*/
7585: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7586: {
7587:   PetscInt           n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7588:   PetscInt          *diag, *odiag, sc;
7589:   VecScatter         scatter;
7590:   PetscScalar       *seqv;
7591:   const PetscScalar *parv;
7592:   const PetscInt    *ia, *ja;
7593:   PetscBool          set, flag, done;
7594:   Mat                AA = mat, A;
7595:   MPI_Comm           comm;
7596:   PetscMPIInt        rank, size, tag;
7597:   MPI_Status         status;
7598:   PetscContainer     container;
7599:   EnvelopeData      *edata;
7600:   Vec                seq, par;
7601:   IS                 isglobal;

7603:   PetscFunctionBegin;
7605:   PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7606:   if (!set || !flag) {
7607:     /* TODO: only needs nonzero structure of transpose */
7608:     PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7609:     PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7610:   }
7611:   PetscCall(MatAIJGetLocalMat(AA, &A));
7612:   PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7613:   PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");

7615:   PetscCall(MatGetLocalSize(mat, &n, NULL));
7616:   PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7617:   PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7618:   PetscCallMPI(MPI_Comm_size(comm, &size));
7619:   PetscCallMPI(MPI_Comm_rank(comm, &rank));

7621:   PetscCall(PetscMalloc2(n, &sizes, n, &starts));

7623:   if (rank > 0) {
7624:     PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7625:     PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7626:   }
7627:   PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7628:   for (i = 0; i < n; i++) {
7629:     env = PetscMax(env, ja[ia[i + 1] - 1]);
7630:     II  = rstart + i;
7631:     if (env == II) {
7632:       starts[lblocks]  = tbs;
7633:       sizes[lblocks++] = 1 + II - tbs;
7634:       tbs              = 1 + II;
7635:     }
7636:   }
7637:   if (rank < size - 1) {
7638:     PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7639:     PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7640:   }

7642:   PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7643:   if (!set || !flag) PetscCall(MatDestroy(&AA));
7644:   PetscCall(MatDestroy(&A));

7646:   PetscCall(PetscNew(&edata));
7647:   PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7648:   edata->n = lblocks;
7649:   /* create IS needed for extracting blocks from the original matrix */
7650:   PetscCall(PetscMalloc1(lblocks, &edata->is));
7651:   for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));

7653:   /* Create the resulting inverse matrix structure with preallocation information */
7654:   PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7655:   PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7656:   PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7657:   PetscCall(MatSetType(edata->C, MATAIJ));

7659:   /* Communicate the start and end of each row, from each block to the correct rank */
7660:   /* TODO: Use PetscSF instead of VecScatter */
7661:   for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7662:   PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7663:   PetscCall(VecGetArrayWrite(seq, &seqv));
7664:   for (PetscInt i = 0; i < lblocks; i++) {
7665:     for (PetscInt j = 0; j < sizes[i]; j++) {
7666:       seqv[cnt]     = starts[i];
7667:       seqv[cnt + 1] = starts[i] + sizes[i];
7668:       cnt += 2;
7669:     }
7670:   }
7671:   PetscCall(VecRestoreArrayWrite(seq, &seqv));
7672:   PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7673:   sc -= cnt;
7674:   PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7675:   PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7676:   PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7677:   PetscCall(ISDestroy(&isglobal));
7678:   PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7679:   PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7680:   PetscCall(VecScatterDestroy(&scatter));
7681:   PetscCall(VecDestroy(&seq));
7682:   PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7683:   PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7684:   PetscCall(VecGetArrayRead(par, &parv));
7685:   cnt = 0;
7686:   PetscCall(MatGetSize(mat, NULL, &n));
7687:   for (PetscInt i = 0; i < mat->rmap->n; i++) {
7688:     PetscInt start, end, d = 0, od = 0;

7690:     start = (PetscInt)PetscRealPart(parv[cnt]);
7691:     end   = (PetscInt)PetscRealPart(parv[cnt + 1]);
7692:     cnt += 2;

7694:     if (start < cstart) {
7695:       od += cstart - start + n - cend;
7696:       d += cend - cstart;
7697:     } else if (start < cend) {
7698:       od += n - cend;
7699:       d += cend - start;
7700:     } else od += n - start;
7701:     if (end <= cstart) {
7702:       od -= cstart - end + n - cend;
7703:       d -= cend - cstart;
7704:     } else if (end < cend) {
7705:       od -= n - cend;
7706:       d -= cend - end;
7707:     } else od -= n - end;

7709:     odiag[i] = od;
7710:     diag[i]  = d;
7711:   }
7712:   PetscCall(VecRestoreArrayRead(par, &parv));
7713:   PetscCall(VecDestroy(&par));
7714:   PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7715:   PetscCall(PetscFree2(diag, odiag));
7716:   PetscCall(PetscFree2(sizes, starts));

7718:   PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7719:   PetscCall(PetscContainerSetPointer(container, edata));
7720:   PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode(*)(void *))EnvelopeDataDestroy));
7721:   PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7722:   PetscCall(PetscObjectDereference((PetscObject)container));
7723:   PetscFunctionReturn(PETSC_SUCCESS);
7724: }

7726: /*@
7727:   MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`

7729:   Collective

7731:   Input Parameters:
7732: + A     - the matrix
7733: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine

7735:   Output Parameter:
7736: . C - matrix with inverted block diagonal of `A`

7738:   Level: advanced

7740:   Note:
7741:   For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.

7743: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7744: @*/
7745: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7746: {
7747:   PetscContainer   container;
7748:   EnvelopeData    *edata;
7749:   PetscObjectState nonzerostate;

7751:   PetscFunctionBegin;
7752:   PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7753:   if (!container) {
7754:     PetscCall(MatComputeVariableBlockEnvelope(A));
7755:     PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7756:   }
7757:   PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7758:   PetscCall(MatGetNonzeroState(A, &nonzerostate));
7759:   PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7760:   PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");

7762:   PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7763:   *C = edata->C;

7765:   for (PetscInt i = 0; i < edata->n; i++) {
7766:     Mat          D;
7767:     PetscScalar *dvalues;

7769:     PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7770:     PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7771:     PetscCall(MatSeqDenseInvert(D));
7772:     PetscCall(MatDenseGetArray(D, &dvalues));
7773:     PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7774:     PetscCall(MatDestroy(&D));
7775:   }
7776:   PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7777:   PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7778:   PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7779:   PetscFunctionReturn(PETSC_SUCCESS);
7780: }

7782: /*@
7783:   MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size

7785:   Not Collective

7787:   Input Parameters:
7788: + mat     - the matrix
7789: . nblocks - the number of blocks on this process; each block can only exist on a single process
7790: - bsizes  - the block sizes

7792:   Level: intermediate

7794:   Notes:
7795:   Currently used by `PCVPBJACOBI` for `MATAIJ` matrices

7797:   Each variable point-block set of degrees of freedom must live on a single MPI process. That is, a point block cannot straddle two MPI processes.

7799: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7800:           `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7801: @*/
7802: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7803: {
7804:   PetscInt ncnt = 0, nlocal;

7806:   PetscFunctionBegin;
7808:   PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7809:   PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7810:   for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7811:   PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7812:   PetscCall(PetscFree(mat->bsizes));
7813:   mat->nblocks = nblocks;
7814:   PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7815:   PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7816:   PetscFunctionReturn(PETSC_SUCCESS);
7817: }
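
/*
  A minimal usage sketch (illustrative, not part of matrix.c): describing two variable-sized
  diagonal point-blocks (of sizes 2 and 3) on a process that owns exactly 5 rows, as PCVPBJACOBI
  would consume; the block sizes must sum to the local number of rows.
*/
static PetscErrorCode ExampleSetVariableBlocks(Mat A)
{
  const PetscInt bsizes[2] = {2, 3};

  PetscFunctionBegin;
  PetscCall(MatSetVariableBlockSizes(A, 2, bsizes));
  PetscFunctionReturn(PETSC_SUCCESS);
}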

7819: /*@C
7820:   MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix, which need not be of the same size

7822:   Not Collective; No Fortran Support

7824:   Input Parameter:
7825: . mat - the matrix

7827:   Output Parameters:
7828: + nblocks - the number of blocks on this process
7829: - bsizes  - the block sizes

7831:   Level: intermediate

7833: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7834: @*/
7835: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7836: {
7837:   PetscFunctionBegin;
7839:   if (nblocks) *nblocks = mat->nblocks;
7840:   if (bsizes) *bsizes = mat->bsizes;
7841:   PetscFunctionReturn(PETSC_SUCCESS);
7842: }

7844: /*@
7845:   MatSetBlockSizes - Sets the matrix block row and column sizes.

7847:   Logically Collective

7849:   Input Parameters:
7850: + mat - the matrix
7851: . rbs - row block size
7852: - cbs - column block size

7854:   Level: intermediate

7856:   Notes:
7857:   Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7858:   If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7859:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.

7861:   For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7862:   are compatible with the matrix local sizes.

7864:   The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.

7866: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7867: @*/
7868: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7869: {
7870:   PetscFunctionBegin;
7874:   PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7875:   if (mat->rmap->refcnt) {
7876:     ISLocalToGlobalMapping l2g  = NULL;
7877:     PetscLayout            nmap = NULL;

7879:     PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7880:     if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7881:     PetscCall(PetscLayoutDestroy(&mat->rmap));
7882:     mat->rmap          = nmap;
7883:     mat->rmap->mapping = l2g;
7884:   }
7885:   if (mat->cmap->refcnt) {
7886:     ISLocalToGlobalMapping l2g  = NULL;
7887:     PetscLayout            nmap = NULL;

7889:     PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7890:     if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7891:     PetscCall(PetscLayoutDestroy(&mat->cmap));
7892:     mat->cmap          = nmap;
7893:     mat->cmap->mapping = l2g;
7894:   }
7895:   PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7896:   PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7897:   PetscFunctionReturn(PETSC_SUCCESS);
7898: }

7900: /*@
7901:   MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices

7903:   Logically Collective

7905:   Input Parameters:
7906: + mat     - the matrix
7907: . fromRow - matrix from which to copy row block size
7908: - fromCol - matrix from which to copy column block size (can be same as fromRow)

7910:   Level: developer

7912: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7913: @*/
7914: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7915: {
7916:   PetscFunctionBegin;
7920:   if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7921:   if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7922:   PetscFunctionReturn(PETSC_SUCCESS);
7923: }

7925: /*@
7926:   MatResidual - Default routine to calculate the residual r = b - Ax

7928:   Collective

7930:   Input Parameters:
7931: + mat - the matrix
7932: . b   - the right-hand-side
7933: - x   - the approximate solution

7935:   Output Parameter:
7936: . r - location to store the residual

7938:   Level: developer

7940: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
7941: @*/
7942: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
7943: {
7944:   PetscFunctionBegin;
7950:   MatCheckPreallocated(mat, 1);
7951:   PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
7952:   if (!mat->ops->residual) {
7953:     PetscCall(MatMult(mat, x, r));
7954:     PetscCall(VecAYPX(r, -1.0, b));
7955:   } else {
7956:     PetscUseTypeMethod(mat, residual, b, x, r);
7957:   }
7958:   PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
7959:   PetscFunctionReturn(PETSC_SUCCESS);
7960: }

7962: /*MC
7963:     MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix

7965:     Synopsis:
7966:     MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)

7968:     Not Collective

7970:     Input Parameters:
7971: +   A - the matrix
7972: .   shift -  0 or 1 indicating we want the indices starting at 0 or 1
7973: .   symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7974: -   inodecompressed - `PETSC_TRUE` or `PETSC_FALSE`  indicating if the nonzero structure of the
7975:                  inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7976:                  always used.

7978:     Output Parameters:
7979: +   n - number of local rows in the (possibly compressed) matrix
7980: .   ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7981: .   ja - the column indices
7982: -   done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7983:            are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set

7985:     Level: developer

7987:     Note:
7988:     Use  `MatRestoreRowIJF90()` when you no longer need access to the data

7990: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
7991: M*/

7993: /*MC
7994:     MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`

7996:     Synopsis:
7997:     MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)

7999:     Not Collective

8001:     Input Parameters:
8002: +   A - the  matrix
8003: .   shift -  0 or 1 indicating we want the indices starting at 0 or 1
8004: .   symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8005: .   inodecompressed - `PETSC_TRUE` or `PETSC_FALSE`  indicating if the nonzero structure of the
8006:                  inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8007:                  always used.
8008: .   n - number of local rows in the (possibly compressed) matrix
8009: .   ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8010: .   ja - the column indices
8011: -   done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8012:            are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set

8014:     Level: developer

8016: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
8017: M*/

8019: /*@C
8020:   MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix

8022:   Collective

8024:   Input Parameters:
8025: + mat             - the matrix
8026: . shift           - 0 or 1 indicating we want the indices starting at 0 or 1
8027: . symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8028: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE`  indicating if the nonzero structure of the
8029:                  inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8030:                  always used.

8032:   Output Parameters:
8033: + n    - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8034: . ia   - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8035: . ja   - the column indices, use `NULL` if not needed
8036: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8037:            are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set

8039:   Level: developer

8041:   Notes:
8042:   You CANNOT change any of the ia[] or ja[] values.

8044:   Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
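
  Example Usage:
  A minimal C sketch (assuming `mat` is an existing assembled `MATSEQAIJ` matrix); `done` must be checked before the arrays are used:
.vb
  PetscInt        n;
  const PetscInt *ia, *ja;
  PetscBool       done;

  MatGetRowIJ(mat, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done);
  if (done) {
    /* ia[0], ..., ia[n] and ja[] hold the compressed row structure */
  }
  MatRestoreRowIJ(mat, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done);
.ve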

8046:   Fortran Notes:
8047:   Use
8048: .vb
8049:     PetscInt, pointer :: ia(:),ja(:)
8050:     call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8051:     ! Access the ith and jth entries via ia(i) and ja(j)
8052: .ve

8054:   `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`

8056: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8057: @*/
8058: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8059: {
8060:   PetscFunctionBegin;
8063:   if (n) PetscAssertPointer(n, 5);
8064:   if (ia) PetscAssertPointer(ia, 6);
8065:   if (ja) PetscAssertPointer(ja, 7);
8066:   if (done) PetscAssertPointer(done, 8);
8067:   MatCheckPreallocated(mat, 1);
8068:   if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8069:   else {
8070:     if (done) *done = PETSC_TRUE;
8071:     PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8072:     PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8073:     PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8074:   }
8075:   PetscFunctionReturn(PETSC_SUCCESS);
8076: }

8078: /*@C
8079:   MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.

8081:   Collective

8083:   Input Parameters:
8084: + mat             - the matrix
8085: . shift           - 0 or 1 indicating we want the indices starting at 0 or 1
8086: . symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8087:                 symmetrized
8088: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8089:                  inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8090:                  always used.
8091: . n               - number of columns in the (possibly compressed) matrix
8092: . ia              - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
8093: - ja              - the row indices

8095:   Output Parameter:
8096: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned

8098:   Level: developer

8100: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8101: @*/
8102: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8103: {
8104:   PetscFunctionBegin;
8107:   PetscAssertPointer(n, 5);
8108:   if (ia) PetscAssertPointer(ia, 6);
8109:   if (ja) PetscAssertPointer(ja, 7);
8110:   PetscAssertPointer(done, 8);
8111:   MatCheckPreallocated(mat, 1);
8112:   if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8113:   else {
8114:     *done = PETSC_TRUE;
8115:     PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8116:   }
8117:   PetscFunctionReturn(PETSC_SUCCESS);
8118: }

8120: /*@C
8121:   MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.

8123:   Collective

8125:   Input Parameters:
8126: + mat             - the matrix
8127: . shift           - 0 or 1 indicating we want the indices starting at 0 or 1
8128: . symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8129: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8130:                  inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8131:                  always used.
8132: . n               - size of (possibly compressed) matrix
8133: . ia              - the row pointers
8134: - ja              - the column indices

8136:   Output Parameter:
8137: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating that the values have been returned

8139:   Level: developer

8141:   Note:
8142:   This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8143:   use of the array after it has been restored. If you pass `NULL`, it will
8144:   not zero the pointers.  Use of ia or ja after `MatRestoreRowIJ()` is invalid.

8146:   Fortran Note:
8147:   `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`

8149: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8150: @*/
8151: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8152: {
8153:   PetscFunctionBegin;
8156:   if (ia) PetscAssertPointer(ia, 6);
8157:   if (ja) PetscAssertPointer(ja, 7);
8158:   if (done) PetscAssertPointer(done, 8);
8159:   MatCheckPreallocated(mat, 1);

8161:   if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8162:   else {
8163:     if (done) *done = PETSC_TRUE;
8164:     PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8165:     if (n) *n = 0;
8166:     if (ia) *ia = NULL;
8167:     if (ja) *ja = NULL;
8168:   }
8169:   PetscFunctionReturn(PETSC_SUCCESS);
8170: }

8172: /*@C
8173:   MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.

8175:   Collective

8177:   Input Parameters:
8178: + mat             - the matrix
8179: . shift           - 0 or 1 indicating we want the indices starting at 0 or 1
8180: . symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8181: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8182:                  inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8183:                  always used.

8185:   Output Parameters:
8186: + n    - size of (possibly compressed) matrix
8187: . ia   - the column pointers
8188: . ja   - the row indices
8189: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating that the values have been returned

8191:   Level: developer

8193: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8194: @*/
8195: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8196: {
8197:   PetscFunctionBegin;
8200:   if (ia) PetscAssertPointer(ia, 6);
8201:   if (ja) PetscAssertPointer(ja, 7);
8202:   PetscAssertPointer(done, 8);
8203:   MatCheckPreallocated(mat, 1);

8205:   if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8206:   else {
8207:     *done = PETSC_TRUE;
8208:     PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8209:     if (n) *n = 0;
8210:     if (ia) *ia = NULL;
8211:     if (ja) *ja = NULL;
8212:   }
8213:   PetscFunctionReturn(PETSC_SUCCESS);
8214: }

8216: /*@
8217:   MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8218:   `MatGetColumnIJ()`.

8220:   Collective

8222:   Input Parameters:
8223: + mat        - the matrix
8224: . ncolors    - maximum color value
8225: . n          - number of entries in colorarray
8226: - colorarray - array indicating color for each column

8228:   Output Parameter:
8229: . iscoloring - coloring generated using colorarray information

8231:   Level: developer

8233: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8234: @*/
8235: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8236: {
8237:   PetscFunctionBegin;
8240:   PetscAssertPointer(colorarray, 4);
8241:   PetscAssertPointer(iscoloring, 5);
8242:   MatCheckPreallocated(mat, 1);

8244:   if (!mat->ops->coloringpatch) {
8245:     PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8246:   } else {
8247:     PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8248:   }
8249:   PetscFunctionReturn(PETSC_SUCCESS);
8250: }

8252: /*@
8253:   MatSetUnfactored - Resets a factored matrix to be treated as unfactored.

8255:   Logically Collective

8257:   Input Parameter:
8258: . mat - the factored matrix to be reset

8260:   Level: developer

8262:   Notes:
8263:   This routine should be used only with factored matrices formed by in-place
8264:   factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8265:   format).  This option can save memory, for example, when solving nonlinear
8266:   systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8267:   ILU(0) preconditioner.

8269:   One can specify in-place ILU(0) factorization by calling
8270: .vb
8271:      PCSetType(pc,PCILU);
8272:      PCFactorSetUseInPlace(pc,PETSC_TRUE);
8273: .ve
8274:   or by using the options -pc_type ilu -pc_factor_in_place

8276:   In-place factorization ILU(0) can also be used as a local
8277:   solver for the blocks within the block Jacobi or additive Schwarz
8278:   methods (runtime option: -sub_pc_factor_in_place).  See Users-Manual: ch_pc
8279:   for details on setting local solver options.

8281:   Most users should employ the `KSP` interface for linear solvers
8282:   instead of working directly with matrix algebra routines such as this.
8283:   See, e.g., `KSPCreate()`.

8285: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8286: @*/
8287: PetscErrorCode MatSetUnfactored(Mat mat)
8288: {
8289:   PetscFunctionBegin;
8292:   MatCheckPreallocated(mat, 1);
8293:   mat->factortype = MAT_FACTOR_NONE;
8294:   if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8295:   PetscUseTypeMethod(mat, setunfactored);
8296:   PetscFunctionReturn(PETSC_SUCCESS);
8297: }

8299: /*MC
8300:     MatDenseGetArrayF90 - Accesses a matrix array from Fortran

8302:     Synopsis:
8303:     MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)

8305:     Not Collective

8307:     Input Parameter:
8308: .   x - matrix

8310:     Output Parameters:
8311: +   xx_v - the Fortran pointer to the array
8312: -   ierr - error code

8314:     Example of Usage:
8315: .vb
8316:       PetscScalar, pointer :: xx_v(:,:)
8317:       ....
8318:       call MatDenseGetArrayF90(x,xx_v,ierr)
8319:       a = xx_v(3,1)
8320:       call MatDenseRestoreArrayF90(x,xx_v,ierr)
8321: .ve

8323:     Level: advanced

8325: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8326: M*/

8328: /*MC
8329:     MatDenseRestoreArrayF90 - Restores a matrix array that has been
8330:     accessed with `MatDenseGetArrayF90()`.

8332:     Synopsis:
8333:     MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)

8335:     Not Collective

8337:     Input Parameters:
8338: +   x - matrix
8339: -   xx_v - the Fortran90 pointer to the array

8341:     Output Parameter:
8342: .   ierr - error code

8344:     Example of Usage:
8345: .vb
8346:        PetscScalar, pointer :: xx_v(:,:)
8347:        ....
8348:        call MatDenseGetArrayF90(x,xx_v,ierr)
8349:        a = xx_v(3,1)
8350:        call MatDenseRestoreArrayF90(x,xx_v,ierr)
8351: .ve

8353:     Level: advanced

8355: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8356: M*/

8358: /*MC
8359:     MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.

8361:     Synopsis:
8362:     MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)

8364:     Not Collective

8366:     Input Parameter:
8367: .   x - matrix

8369:     Output Parameters:
8370: +   xx_v - the Fortran pointer to the array
8371: -   ierr - error code

8373:     Example of Usage:
8374: .vb
8375:       PetscScalar, pointer :: xx_v(:)
8376:       ....
8377:       call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8378:       a = xx_v(3)
8379:       call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8380: .ve

8382:     Level: advanced

8384: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8385: M*/

8387: /*MC
8388:     MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8389:     accessed with `MatSeqAIJGetArrayF90()`.

8391:     Synopsis:
8392:     MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)

8394:     Not Collective

8396:     Input Parameters:
8397: +   x - matrix
8398: -   xx_v - the Fortran90 pointer to the array

8400:     Output Parameter:
8401: .   ierr - error code

8403:     Example of Usage:
8404: .vb
8405:        PetscScalar, pointer :: xx_v(:)
8406:        ....
8407:        call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8408:        a = xx_v(3)
8409:        call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8410: .ve

8412:     Level: advanced

8414: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8415: M*/

8417: /*@
8418:   MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8419:   as the original matrix.

8421:   Collective

8423:   Input Parameters:
8424: + mat   - the original matrix
8425: . isrow - parallel `IS` containing the rows this processor should obtain
8426: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8427: - cll   - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

8429:   Output Parameter:
8430: . newmat - the new submatrix, of the same type as the original matrix

8432:   Level: advanced

8434:   Notes:
8435:   The submatrix can be multiplied by vectors that have the same layout as `iscol`.

8437:   Some matrix types place restrictions on the row and column indices, such
8438:   as requiring that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8439:   for example, if the block size is 3 one cannot select rows 0 and 2 without also selecting row 1.

8441:   The index sets may not have duplicate entries.

8443:   The first time this is called you should use a `cll` of `MAT_INITIAL_MATRIX`,
8444:   and the `MatCreateSubMatrix()` routine will create `newmat` for you. Any additional calls
8445:   to this routine with a `mat` of the same nonzero structure and with a `cll` of `MAT_REUSE_MATRIX`
8446:   will reuse the matrix generated the first time.  You should call `MatDestroy()` on `newmat` when
8447:   you are finished using it.

8449:   The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8450:   the input matrix.

8452:   If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).

8454:   If `isrow` and `iscol` have a nontrivial block-size then the resulting matrix has this block-size as well. This feature
8455:   is used by `PCFIELDSPLIT` to allow easy nesting of its use.

8457:   Example usage:
8458:   Consider the following 8x8 matrix with 34 non-zero values, which is
8459:   assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8460:   proc1 owns 3 rows, and proc2 owns 2 rows. This division can be shown
8461:   as follows
8462: .vb
8463:             1  2  0  |  0  3  0  |  0  4
8464:     Proc0   0  5  6  |  7  0  0  |  8  0
8465:             9  0 10  | 11  0  0  | 12  0
8466:     -------------------------------------
8467:            13  0 14  | 15 16 17  |  0  0
8468:     Proc1   0 18  0  | 19 20 21  |  0  0
8469:             0  0  0  | 22 23  0  | 24  0
8470:     -------------------------------------
8471:     Proc2  25 26 27  |  0  0 28  | 29  0
8472:            30  0  0  | 31 32 33  |  0 34
8473: .ve

8475:   Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6].  The resulting submatrix is

8477: .vb
8478:             2  0  |  0  3  0  |  0
8479:     Proc0   5  6  |  7  0  0  |  8
8480:     -------------------------------
8481:     Proc1  18  0  | 19 20 21  |  0
8482:     -------------------------------
8483:     Proc2  26 27  |  0  0 28  | 29
8484:             0  0  | 31 32 33  |  0
8485: .ve
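
  A minimal calling sketch (assuming `A` is an existing assembled matrix and `isrow`/`iscol` are suitable index sets created by the caller):
.vb
  Mat S = NULL;

  MatCreateSubMatrix(A, isrow, iscol, MAT_INITIAL_MATRIX, &S);
  /* ... change values in A, keeping its nonzero structure ... */
  MatCreateSubMatrix(A, isrow, iscol, MAT_REUSE_MATRIX, &S);
  MatDestroy(&S);
.ve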

8487: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8488: @*/
8489: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8490: {
8491:   PetscMPIInt size;
8492:   Mat        *local;
8493:   IS          iscoltmp;
8494:   PetscBool   flg;

8496:   PetscFunctionBegin;
8500:   PetscAssertPointer(newmat, 5);
8503:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8504:   PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");

8506:   MatCheckPreallocated(mat, 1);
8507:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));

8509:   if (!iscol || isrow == iscol) {
8510:     PetscBool   stride;
8511:     PetscMPIInt grabentirematrix = 0, grab;
8512:     PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8513:     if (stride) {
8514:       PetscInt first, step, n, rstart, rend;
8515:       PetscCall(ISStrideGetInfo(isrow, &first, &step));
8516:       if (step == 1) {
8517:         PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8518:         if (rstart == first) {
8519:           PetscCall(ISGetLocalSize(isrow, &n));
8520:           if (n == rend - rstart) grabentirematrix = 1;
8521:         }
8522:       }
8523:     }
8524:     PetscCall(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8525:     if (grab) {
8526:       PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8527:       if (cll == MAT_INITIAL_MATRIX) {
8528:         *newmat = mat;
8529:         PetscCall(PetscObjectReference((PetscObject)mat));
8530:       }
8531:       PetscFunctionReturn(PETSC_SUCCESS);
8532:     }
8533:   }

8535:   if (!iscol) {
8536:     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8537:   } else {
8538:     iscoltmp = iscol;
8539:   }

8541:   /* if original matrix is on just one processor then use submatrix generated */
8542:   if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8543:     PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8544:     goto setproperties;
8545:   } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8546:     PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8547:     *newmat = *local;
8548:     PetscCall(PetscFree(local));
8549:     goto setproperties;
8550:   } else if (!mat->ops->createsubmatrix) {
8551:     /* Create a new matrix type that implements the operation using the full matrix */
8552:     PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8553:     switch (cll) {
8554:     case MAT_INITIAL_MATRIX:
8555:       PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8556:       break;
8557:     case MAT_REUSE_MATRIX:
8558:       PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8559:       break;
8560:     default:
8561:       SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8562:     }
8563:     PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8564:     goto setproperties;
8565:   }

8567:   PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8568:   PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8569:   PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));

8571: setproperties:
8572:   PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8573:   if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8574:   if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8575:   if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8576:   PetscFunctionReturn(PETSC_SUCCESS);
8577: }

8579: /*@
8580:   MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix

8582:   Not Collective

8584:   Input Parameters:
8585: + A - the matrix we wish to propagate options from
8586: - B - the matrix we wish to propagate options to

8588:   Level: beginner

8590:   Note:
8591:   Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
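
  Example Usage:
  A minimal sketch (assuming `A` and `B` are existing matrices):
.vb
  MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE);
  MatPropagateSymmetryOptions(A, B);  /* B is now also marked as symmetric */
.ve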

8593: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8594: @*/
8595: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8596: {
8597:   PetscFunctionBegin;
8600:   B->symmetry_eternal            = A->symmetry_eternal;
8601:   B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8602:   B->symmetric                   = A->symmetric;
8603:   B->structurally_symmetric      = A->structurally_symmetric;
8604:   B->spd                         = A->spd;
8605:   B->hermitian                   = A->hermitian;
8606:   PetscFunctionReturn(PETSC_SUCCESS);
8607: }

8609: /*@
8610:   MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8611:   used during the assembly process to store values that belong to
8612:   other processors.

8614:   Not Collective

8616:   Input Parameters:
8617: + mat   - the matrix
8618: . size  - the initial size of the stash.
8619: - bsize - the initial size of the block-stash(if used).

8621:   Options Database Keys:
8622: + -matstash_initial_size <size> or <size0,size1,...sizep-1>            - set initial size
8623: - -matstash_block_initial_size <bsize>  or <bsize0,bsize1,...bsizep-1> - set initial block size

8625:   Level: intermediate

8627:   Notes:
8628:   The block-stash is used for values set with `MatSetValuesBlocked()` while
8629:   the stash is used for values set with `MatSetValues()`

8631:   Run with the option -info and look for output of the form
8632:   MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8633:   to determine the appropriate value, MM, to use for size and
8634:   MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8635:   to determine the value, BMM, to use for `bsize`
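
  Example Usage:
  A minimal sketch (assuming `mat` is an existing matrix and the sizes are values suggested by the -info output described above):
.vb
  MatStashSetInitialSize(mat, 10000, 100);
.ve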

8637: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8638: @*/
8639: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8640: {
8641:   PetscFunctionBegin;
8644:   PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8645:   PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8646:   PetscFunctionReturn(PETSC_SUCCESS);
8647: }

8649: /*@
8650:   MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8651:   the matrix

8653:   Neighbor-wise Collective

8655:   Input Parameters:
8656: + A - the matrix
8657: . x - the vector to be multiplied by the interpolation operator
8658: - y - the vector to be added to the result

8660:   Output Parameter:
8661: . w - the resulting vector

8663:   Level: intermediate

8665:   Notes:
8666:   `w` may be the same vector as `y`.

8668:   This allows one to use either the restriction or interpolation (its transpose)
8669:   matrix to do the interpolation

8671: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8672: @*/
8673: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8674: {
8675:   PetscInt M, N, Ny;

8677:   PetscFunctionBegin;
8682:   PetscCall(MatGetSize(A, &M, &N));
8683:   PetscCall(VecGetSize(y, &Ny));
8684:   if (M == Ny) {
8685:     PetscCall(MatMultAdd(A, x, y, w));
8686:   } else {
8687:     PetscCall(MatMultTransposeAdd(A, x, y, w));
8688:   }
8689:   PetscFunctionReturn(PETSC_SUCCESS);
8690: }

8692: /*@
8693:   MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8694:   the matrix

8696:   Neighbor-wise Collective

8698:   Input Parameters:
8699: + A - the matrix
8700: - x - the vector to be interpolated

8702:   Output Parameter:
8703: . y - the resulting vector

8705:   Level: intermediate

8707:   Note:
8708:   This allows one to use either the restriction or interpolation (its transpose)
8709:   matrix to do the interpolation
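
  Example Usage:
  A minimal sketch (assuming `P` is an existing interpolation matrix with as many rows as the fine vector `xf` and as many columns as the coarse vector `xc`):
.vb
  MatInterpolate(P, xc, xf);  /* applies P or P^T depending on the sizes of xc and xf */
.ve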

8711: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8712: @*/
8713: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8714: {
8715:   PetscInt M, N, Ny;

8717:   PetscFunctionBegin;
8721:   PetscCall(MatGetSize(A, &M, &N));
8722:   PetscCall(VecGetSize(y, &Ny));
8723:   if (M == Ny) {
8724:     PetscCall(MatMult(A, x, y));
8725:   } else {
8726:     PetscCall(MatMultTranspose(A, x, y));
8727:   }
8728:   PetscFunctionReturn(PETSC_SUCCESS);
8729: }

8731: /*@
8732:   MatRestrict - $y = A*x$ or $A^T*x$

8734:   Neighbor-wise Collective

8736:   Input Parameters:
8737: + A - the matrix
8738: - x - the vector to be restricted

8740:   Output Parameter:
8741: . y - the resulting vector

8743:   Level: intermediate

8745:   Note:
8746:   This allows one to use either the restriction or interpolation (its transpose)
8747:   matrix to do the restriction

8749: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8750: @*/
8751: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8752: {
8753:   PetscInt M, N, Nx;

8755:   PetscFunctionBegin;
8759:   PetscCall(MatGetSize(A, &M, &N));
8760:   PetscCall(VecGetSize(x, &Nx));
8761:   if (M == Nx) {
8762:     PetscCall(MatMultTranspose(A, x, y));
8763:   } else {
8764:     PetscCall(MatMult(A, x, y));
8765:   }
8766:   PetscFunctionReturn(PETSC_SUCCESS);
8767: }

8769: /*@
8770:   MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`

8772:   Neighbor-wise Collective

8774:   Input Parameters:
8775: + A - the matrix
8776: . x - the input dense matrix to be multiplied
8777: - w - the input dense matrix to be added to the result

8779:   Output Parameter:
8780: . y - the output dense matrix

8782:   Level: intermediate

8784:   Note:
8785:   This allows one to use either the restriction or interpolation (its transpose)
8786:   matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8787:   otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.

8789: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8790: @*/
8791: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8792: {
8793:   PetscInt  M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8794:   PetscBool trans = PETSC_TRUE;
8795:   MatReuse  reuse = MAT_INITIAL_MATRIX;

8797:   PetscFunctionBegin;
8803:   PetscCall(MatGetSize(A, &M, &N));
8804:   PetscCall(MatGetSize(x, &Mx, &Nx));
8805:   if (N == Mx) trans = PETSC_FALSE;
8806:   else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8807:   Mo = trans ? N : M;
8808:   if (*y) {
8809:     PetscCall(MatGetSize(*y, &My, &Ny));
8810:     if (Mo == My && Nx == Ny) {
8811:       reuse = MAT_REUSE_MATRIX;
8812:     } else {
8813:       PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8814:       PetscCall(MatDestroy(y));
8815:     }
8816:   }

8818:   if (w && *y == w) { /* this is to minimize changes in PCMG */
8819:     PetscBool flg;

8821:     PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8822:     if (w) {
8823:       PetscInt My, Ny, Mw, Nw;

8825:       PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8826:       PetscCall(MatGetSize(*y, &My, &Ny));
8827:       PetscCall(MatGetSize(w, &Mw, &Nw));
8828:       if (!flg || My != Mw || Ny != Nw) w = NULL;
8829:     }
8830:     if (!w) {
8831:       PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8832:       PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8833:       PetscCall(PetscObjectDereference((PetscObject)w));
8834:     } else {
8835:       PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8836:     }
8837:   }
8838:   if (!trans) {
8839:     PetscCall(MatMatMult(A, x, reuse, PETSC_DEFAULT, y));
8840:   } else {
8841:     PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DEFAULT, y));
8842:   }
8843:   if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8844:   PetscFunctionReturn(PETSC_SUCCESS);
8845: }

8847: /*@
8848:   MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`

8850:   Neighbor-wise Collective

8852:   Input Parameters:
8853: + A - the matrix
8854: - x - the input dense matrix

8856:   Output Parameter:
8857: . y - the output dense matrix

8859:   Level: intermediate

8861:   Note:
8862:   This allows one to use either the restriction or interpolation (its transpose)
8863:   matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8864:   otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.

8866: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8867: @*/
8868: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8869: {
8870:   PetscFunctionBegin;
8871:   PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8872:   PetscFunctionReturn(PETSC_SUCCESS);
8873: }

8875: /*@
8876:   MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`

8878:   Neighbor-wise Collective

8880:   Input Parameters:
8881: + A - the matrix
8882: - x - the input dense matrix

8884:   Output Parameter:
8885: . y - the output dense matrix

8887:   Level: intermediate

8889:   Note:
8890:   This allows one to use either the restriction or interpolation (its transpose)
8891:   matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8892:   otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.

8894: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8895: @*/
8896: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8897: {
8898:   PetscFunctionBegin;
8899:   PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8900:   PetscFunctionReturn(PETSC_SUCCESS);
8901: }

8903: /*@
8904:   MatGetNullSpace - retrieves the null space of a matrix.

8906:   Logically Collective

8908:   Input Parameters:
8909: + mat    - the matrix
8910: - nullsp - the null space object

8912:   Level: developer

8914: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8915: @*/
8916: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8917: {
8918:   PetscFunctionBegin;
8920:   PetscAssertPointer(nullsp, 2);
8921:   *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8922:   PetscFunctionReturn(PETSC_SUCCESS);
8923: }

8925: /*@C
8926:   MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices

8928:   Logically Collective

8930:   Input Parameters:
8931: + n   - the number of matrices
8932: - mat - the array of matrices

8934:   Output Parameter:
8935: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space

8937:   Level: developer

8939:   Note:
8940:   Call `MatRestoreNullSpaces()` to provide these to another array of matrices, as sketched below
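
  Example Usage:
  A minimal sketch (assuming `mats` is an existing array of `n` matrices), for example when the matrices must be rebuilt but their null space information should be preserved:
.vb
  MatNullSpace *nsp;

  MatGetNullSpaces(n, mats, &nsp);
  /* ... recreate or convert the matrices in mats[] ... */
  MatRestoreNullSpaces(n, mats, &nsp);
.ve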

8942: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8943:           `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
8944: @*/
8945: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8946: {
8947:   PetscFunctionBegin;
8948:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8949:   PetscAssertPointer(mat, 2);
8950:   PetscAssertPointer(nullsp, 3);

8952:   PetscCall(PetscCalloc1(3 * n, nullsp));
8953:   for (PetscInt i = 0; i < n; i++) {
8955:     (*nullsp)[i] = mat[i]->nullsp;
8956:     PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
8957:     (*nullsp)[n + i] = mat[i]->nearnullsp;
8958:     PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
8959:     (*nullsp)[2 * n + i] = mat[i]->transnullsp;
8960:     PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
8961:   }
8962:   PetscFunctionReturn(PETSC_SUCCESS);
8963: }

8965: /*@C
8966:   MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices

8968:   Logically Collective

8970:   Input Parameters:
8971: + n      - the number of matrices
8972: . mat    - the array of matrices
8973: - nullsp - an array of null spaces, `NULL` if the null space does not exist

8975:   Level: developer

8977:   Note:
8978:   Call `MatGetNullSpaces()` to create `nullsp`

8980: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8981:           `MatNullSpaceRemove()`, `MatGetNullSpaces()`
8982: @*/
8983: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8984: {
8985:   PetscFunctionBegin;
8986:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8987:   PetscAssertPointer(mat, 2);
8988:   PetscAssertPointer(nullsp, 3);
8989:   PetscAssertPointer(*nullsp, 3);

8991:   for (PetscInt i = 0; i < n; i++) {
8993:     PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
8994:     PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
8995:     PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
8996:     PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
8997:     PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
8998:     PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
8999:   }
9000:   PetscCall(PetscFree(*nullsp));
9001:   PetscFunctionReturn(PETSC_SUCCESS);
9002: }

9004: /*@
9005:   MatSetNullSpace - attaches a null space to a matrix.

9007:   Logically Collective

9009:   Input Parameters:
9010: + mat    - the matrix
9011: - nullsp - the null space object

9013:   Level: advanced

9015:   Notes:
9016:   This null space is used by the `KSP` linear solvers to solve singular systems.

9018:   Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`

9020:   For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9021:   to zero but the linear system will still be solved in a least squares sense.

9023:   The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9024:   for a matrix $A$ mapping $R^n$ to $R^m$ (m rows, n columns), $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
9025:   Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$.  Hence the linear system $A x = b$ has a solution only if $b$ is in $R(A)$ (or, equivalently, $b$ is orthogonal to
9026:   $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution
9027:   the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$ where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
9028:   This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.

9030:   If the matrix is known to be symmetric because it is a `MATSBAIJ` matrix or because one has called
9031:   `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`), this
9032:   routine also automatically calls `MatSetTransposeNullSpace()`.

9034:   The user should call `MatNullSpaceDestroy()`.
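
  Example Usage:
  A minimal sketch attaching the constant null space (assuming `mat` is an existing matrix):
.vb
  MatNullSpace nullsp;

  MatNullSpaceCreate(PetscObjectComm((PetscObject)mat), PETSC_TRUE, 0, NULL, &nullsp);
  MatSetNullSpace(mat, nullsp);
  MatNullSpaceDestroy(&nullsp);
.ve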

9036: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9037:           `KSPSetPCSide()`
9038: @*/
9039: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9040: {
9041:   PetscFunctionBegin;
9044:   if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9045:   PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9046:   mat->nullsp = nullsp;
9047:   if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9048:   PetscFunctionReturn(PETSC_SUCCESS);
9049: }

9051: /*@
9052:   MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.

9054:   Logically Collective

9056:   Input Parameters:
9057: + mat    - the matrix
9058: - nullsp - the null space object

9060:   Level: developer

9062: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9063: @*/
9064: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9065: {
9066:   PetscFunctionBegin;
9069:   PetscAssertPointer(nullsp, 2);
9070:   *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9071:   PetscFunctionReturn(PETSC_SUCCESS);
9072: }

9074: /*@
9075:   MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix

9077:   Logically Collective

9079:   Input Parameters:
9080: + mat    - the matrix
9081: - nullsp - the null space object

9083:   Level: advanced

9085:   Notes:
9086:   This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.

9088:   See `MatSetNullSpace()`

9090: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9091: @*/
9092: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9093: {
9094:   PetscFunctionBegin;
9097:   if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9098:   PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9099:   mat->transnullsp = nullsp;
9100:   PetscFunctionReturn(PETSC_SUCCESS);
9101: }

9103: /*@
9104:   MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9105:   This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.

9107:   Logically Collective

9109:   Input Parameters:
9110: + mat    - the matrix
9111: - nullsp - the null space object

9113:   Level: advanced

9115:   Notes:
9116:   Overwrites any previous near null space that may have been attached

9118:   You can remove the near null space by calling this routine with a `nullsp` of `NULL`

9120: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9121: @*/
9122: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9123: {
9124:   PetscFunctionBegin;
9128:   MatCheckPreallocated(mat, 1);
9129:   if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9130:   PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9131:   mat->nearnullsp = nullsp;
9132:   PetscFunctionReturn(PETSC_SUCCESS);
9133: }

9135: /*@
9136:   MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`

9138:   Not Collective

9140:   Input Parameter:
9141: . mat - the matrix

9143:   Output Parameter:
9144: . nullsp - the null space object, `NULL` if not set

9146:   Level: advanced

9148: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9149: @*/
9150: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9151: {
9152:   PetscFunctionBegin;
9155:   PetscAssertPointer(nullsp, 2);
9156:   MatCheckPreallocated(mat, 1);
9157:   *nullsp = mat->nearnullsp;
9158:   PetscFunctionReturn(PETSC_SUCCESS);
9159: }

9161: /*@C
9162:   MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.

9164:   Collective

9166:   Input Parameters:
9167: + mat  - the matrix
9168: . row  - row/column permutation
9169: - info - information on desired factorization process

9171:   Level: developer

9173:   Notes:
9174:   Probably really in-place only when the level of fill is zero; otherwise it allocates
9175:   new space to store the factored matrix and frees the previous memory.

9177:   Most users should employ the `KSP` interface for linear solvers
9178:   instead of working directly with matrix algebra routines such as this.
9179:   See, e.g., `KSPCreate()`.

9181:   Developer Note:
9182:   The Fortran interface is not autogenerated as the
9183:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

9185: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9186: @*/
9187: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9188: {
9189:   PetscFunctionBegin;
9193:   PetscAssertPointer(info, 3);
9194:   PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9195:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9196:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9197:   MatCheckPreallocated(mat, 1);
9198:   PetscUseTypeMethod(mat, iccfactor, row, info);
9199:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9200:   PetscFunctionReturn(PETSC_SUCCESS);
9201: }

9203: /*@
9204:   MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9205:   ghosted ones.

9207:   Not Collective

9209:   Input Parameters:
9210: + mat  - the matrix
9211: - diag - the diagonal values, including ghost ones

9213:   Level: developer

9215:   Notes:
9216:   Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices

9218:   This allows one to avoid the communication that would otherwise be needed when performing the scaling with `MatDiagonalScale()`

9220: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9221: @*/
9222: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9223: {
9224:   PetscMPIInt size;

9226:   PetscFunctionBegin;

9231:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9232:   PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9233:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9234:   if (size == 1) {
9235:     PetscInt n, m;
9236:     PetscCall(VecGetSize(diag, &n));
9237:     PetscCall(MatGetSize(mat, NULL, &m));
9238:     PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9239:     PetscCall(MatDiagonalScale(mat, NULL, diag));
9240:   } else {
9241:     PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9242:   }
9243:   PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9244:   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9245:   PetscFunctionReturn(PETSC_SUCCESS);
9246: }

9248: /*@
9249:   MatGetInertia - Gets the inertia from a factored matrix

9251:   Collective

9253:   Input Parameter:
9254: . mat - the matrix

9256:   Output Parameters:
9257: + nneg  - number of negative eigenvalues
9258: . nzero - number of zero eigenvalues
9259: - npos  - number of positive eigenvalues

9261:   Level: advanced

9263:   Note:
9264:   Matrix must have been factored by `MatCholeskyFactor()`
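
  Example Usage:
  A minimal sketch (assuming `F` is a matrix that has already been factored with `MatCholeskyFactor()`):
.vb
  PetscInt nneg, nzero, npos;

  MatGetInertia(F, &nneg, &nzero, &npos);
.ve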

9266: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9267: @*/
9268: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9269: {
9270:   PetscFunctionBegin;
9273:   PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9274:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9275:   PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9276:   PetscFunctionReturn(PETSC_SUCCESS);
9277: }

9279: /*@C
9280:   MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors

9282:   Neighbor-wise Collective

9284:   Input Parameters:
9285: + mat - the factored matrix obtained with `MatGetFactor()`
9286: - b   - the right-hand-side vectors

9288:   Output Parameter:
9289: . x - the result vectors

9291:   Level: developer

9293:   Note:
9294:   The vectors `b` and `x` cannot be the same.  I.e., one cannot
9295:   call `MatSolves`(A,x,x).

9297: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9298: @*/
9299: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9300: {
9301:   PetscFunctionBegin;
9304:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9305:   PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9306:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);

9308:   MatCheckPreallocated(mat, 1);
9309:   PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9310:   PetscUseTypeMethod(mat, solves, b, x);
9311:   PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9312:   PetscFunctionReturn(PETSC_SUCCESS);
9313: }

9315: /*@
9316:   MatIsSymmetric - Test whether a matrix is symmetric

9318:   Collective

9320:   Input Parameters:
9321: + A   - the matrix to test
9322: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)

9324:   Output Parameter:
9325: . flg - the result

9327:   Level: intermediate

9329:   Notes:
9330:   For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results

9332:   If the matrix does not yet know whether it is symmetric this can be an expensive operation; see also `MatIsSymmetricKnown()`

9334:   One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9335:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
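
  Example Usage:
  A minimal sketch (assuming `A` is an existing assembled matrix):
.vb
  PetscBool symm;

  MatIsSymmetric(A, 0.0, &symm);  /* exact comparison with the transpose */
.ve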

9337: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9338:           `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9339: @*/
9340: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9341: {
9342:   PetscFunctionBegin;
9344:   PetscAssertPointer(flg, 3);
9345:   if (A->symmetric != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->symmetric);
9346:   else {
9347:     if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9348:     else PetscCall(MatIsTranspose(A, A, tol, flg));
9349:     if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9350:   }
9351:   PetscFunctionReturn(PETSC_SUCCESS);
9352: }

9354: /*@
9355:   MatIsHermitian - Test whether a matrix is Hermitian

9357:   Collective

9359:   Input Parameters:
9360: + A   - the matrix to test
9361: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)

9363:   Output Parameter:
9364: . flg - the result

9366:   Level: intermediate

9368:   Notes:
9369:   For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results

9371:   If the matrix does not yet know whether it is Hermitian this can be an expensive operation; see also `MatIsHermitianKnown()`

9373:   One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9374:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)

9376: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9377:           `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9378: @*/
9379: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9380: {
9381:   PetscFunctionBegin;
9383:   PetscAssertPointer(flg, 3);
9384:   if (A->hermitian != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->hermitian);
9385:   else {
9386:     if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9387:     else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9388:     if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9389:   }
9390:   PetscFunctionReturn(PETSC_SUCCESS);
9391: }

9393: /*@
9394:   MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state

9396:   Not Collective

9398:   Input Parameter:
9399: . A - the matrix to check

9401:   Output Parameters:
9402: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9403: - flg - the result (only valid if set is `PETSC_TRUE`)

9405:   Level: advanced

9407:   Notes:
9408:   Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9409:   if you want it explicitly checked

9411:   One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9412:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
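
  Example Usage:
  A minimal sketch (assuming `A` is an existing matrix):
.vb
  PetscBool set, symm;

  MatIsSymmetricKnown(A, &set, &symm);
  if (set && symm) {
    /* the matrix is known to be symmetric */
  }
.ve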

9414: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9415: @*/
9416: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9417: {
9418:   PetscFunctionBegin;
9420:   PetscAssertPointer(set, 2);
9421:   PetscAssertPointer(flg, 3);
9422:   if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9423:     *set = PETSC_TRUE;
9424:     *flg = PetscBool3ToBool(A->symmetric);
9425:   } else {
9426:     *set = PETSC_FALSE;
9427:   }
9428:   PetscFunctionReturn(PETSC_SUCCESS);
9429: }

9431: /*@
9432:   MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state

9434:   Not Collective

9436:   Input Parameter:
9437: . A - the matrix to check

9439:   Output Parameters:
9440: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9441: - flg - the result (only valid if set is `PETSC_TRUE`)

9443:   Level: advanced

9445:   Notes:
9446:   Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).

9448:   One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9449:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)

9451: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9452: @*/
9453: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9454: {
9455:   PetscFunctionBegin;
9457:   PetscAssertPointer(set, 2);
9458:   PetscAssertPointer(flg, 3);
9459:   if (A->spd != PETSC_BOOL3_UNKNOWN) {
9460:     *set = PETSC_TRUE;
9461:     *flg = PetscBool3ToBool(A->spd);
9462:   } else {
9463:     *set = PETSC_FALSE;
9464:   }
9465:   PetscFunctionReturn(PETSC_SUCCESS);
9466: }
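
/*
   A minimal usage sketch (hypothetical snippet, not part of the PETSc source or API): declare a
   matrix SPD up front so that MatIsSPDKnown() reports it without any computation, and mark the
   property as eternal if later value changes are known to preserve it. Assumes A is a valid Mat.
*/
static inline PetscErrorCode ExampleDeclareSPD(Mat A)
{
  PetscBool set, flg;

  PetscFunctionBegin;
  PetscCall(MatSetOption(A, MAT_SPD, PETSC_TRUE));
  PetscCall(MatSetOption(A, MAT_SPD_ETERNAL, PETSC_TRUE)); /* survives future changes to the values */
  PetscCall(MatIsSPDKnown(A, &set, &flg)); /* now set == PETSC_TRUE and flg == PETSC_TRUE */
  PetscFunctionReturn(PETSC_SUCCESS);
}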

9468: /*@
9469:   MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state

9471:   Not Collective

9473:   Input Parameter:
9474: . A - the matrix to check

9476:   Output Parameters:
9477: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9478: - flg - the result (only valid if set is `PETSC_TRUE`)

9480:   Level: advanced

9482:   Notes:
9483:   Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9484:   if you want it explicitly checked

9486:   One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9487:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)

9489: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9490: @*/
9491: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9492: {
9493:   PetscFunctionBegin;
9495:   PetscAssertPointer(set, 2);
9496:   PetscAssertPointer(flg, 3);
9497:   if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9498:     *set = PETSC_TRUE;
9499:     *flg = PetscBool3ToBool(A->hermitian);
9500:   } else {
9501:     *set = PETSC_FALSE;
9502:   }
9503:   PetscFunctionReturn(PETSC_SUCCESS);
9504: }

9506: /*@
9507:   MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric

9509:   Collective

9511:   Input Parameter:
9512: . A - the matrix to test

9514:   Output Parameter:
9515: . flg - the result

9517:   Level: intermediate

9519:   Notes:
9520:   If the matrix does not yet know whether it is structurally symmetric, this can be an expensive operation; see also `MatIsStructurallySymmetricKnown()`

9522:   One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9523:   symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)

9525: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9526: @*/
9527: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9528: {
9529:   PetscFunctionBegin;
9531:   PetscAssertPointer(flg, 2);
9532:   if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9533:     *flg = PetscBool3ToBool(A->structurally_symmetric);
9534:   } else {
9535:     PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9536:     PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9537:   }
9538:   PetscFunctionReturn(PETSC_SUCCESS);
9539: }

9541: /*@
9542:   MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state

9544:   Not Collective

9546:   Input Parameter:
9547: . A - the matrix to check

9549:   Output Parameters:
9550: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9551: - flg - the result (only valid if set is `PETSC_TRUE`)

9553:   Level: advanced

9555:   Notes:
9556:   One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9557:   symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)

9559:   Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)

9561: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9562: @*/
9563: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9564: {
9565:   PetscFunctionBegin;
9567:   PetscAssertPointer(set, 2);
9568:   PetscAssertPointer(flg, 3);
9569:   if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9570:     *set = PETSC_TRUE;
9571:     *flg = PetscBool3ToBool(A->structurally_symmetric);
9572:   } else {
9573:     *set = PETSC_FALSE;
9574:   }
9575:   PetscFunctionReturn(PETSC_SUCCESS);
9576: }

9578: /*@
9579:   MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. values that need
9580:   to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process

9582:   Not Collective

9584:   Input Parameter:
9585: . mat - the matrix

9587:   Output Parameters:
9588: + nstash    - the size of the stash
9589: . reallocs  - the number of additional mallocs incurred
9590: . bnstash   - the size of the block stash
9591: - breallocs - the number of additional mallocs incurred in the block stash

9593:   Level: advanced

9595: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9596: @*/
9597: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9598: {
9599:   PetscFunctionBegin;
9600:   PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9601:   PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9602:   PetscFunctionReturn(PETSC_SUCCESS);
9603: }
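
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): report the
   stash sizes after assembly to judge whether MatStashSetInitialSize() should be used to avoid
   reallocations. Assumes mat has completed MatAssemblyBegin()/MatAssemblyEnd().
*/
static inline PetscErrorCode ExampleReportStash(Mat mat)
{
  PetscInt nstash, reallocs, bnstash, breallocs;

  PetscFunctionBegin;
  PetscCall(MatStashGetInfo(mat, &nstash, &reallocs, &bnstash, &breallocs));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "stash %" PetscInt_FMT " (reallocs %" PetscInt_FMT "), block stash %" PetscInt_FMT " (reallocs %" PetscInt_FMT ")\n", nstash, reallocs, bnstash, breallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}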

9605: /*@C
9606:   MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9607:   parallel layout (`PetscLayout`) as the matrix rows and columns

9609:   Collective

9611:   Input Parameter:
9612: . mat - the matrix

9614:   Output Parameters:
9615: + right - (optional) vector that the matrix can be multiplied against
9616: - left  - (optional) vector that the matrix vector product can be stored in

9618:   Level: advanced

9620:   Notes:
9621:   The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.

9623:   These are new vectors that are not owned by the `Mat`; they should be destroyed with `VecDestroy()` when no longer needed

9625: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9626: @*/
9627: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9628: {
9629:   PetscFunctionBegin;
9632:   if (mat->ops->getvecs) {
9633:     PetscUseTypeMethod(mat, getvecs, right, left);
9634:   } else {
9635:     if (right) {
9636:       PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9637:       PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9638:       PetscCall(VecSetType(*right, mat->defaultvectype));
9639: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9640:       if (mat->boundtocpu && mat->bindingpropagates) {
9641:         PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9642:         PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9643:       }
9644: #endif
9645:     }
9646:     if (left) {
9647:       PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9648:       PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9649:       PetscCall(VecSetType(*left, mat->defaultvectype));
9650: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9651:       if (mat->boundtocpu && mat->bindingpropagates) {
9652:         PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9653:         PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9654:       }
9655: #endif
9656:     }
9657:   }
9658:   PetscFunctionReturn(PETSC_SUCCESS);
9659: }
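
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): create
   layout-compatible work vectors, apply the matrix once, and clean up. Assumes mat is assembled.
*/
static inline PetscErrorCode ExampleApplyOnce(Mat mat)
{
  Vec x, y;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(mat, &x, &y)); /* x matches the column layout, y matches the row layout */
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(mat, x, y)); /* y = mat * x */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscFunctionReturn(PETSC_SUCCESS);
}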

9661: /*@C
9662:   MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9663:   with default values.

9665:   Not Collective

9667:   Input Parameter:
9668: . info - the `MatFactorInfo` data structure

9670:   Level: developer

9672:   Notes:
9673:   The solvers are generally used through the `KSP` and `PC` objects, for example
9674:   `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`

9676:   Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed

9678:   Developer Note:
9679:   The Fortran interface is not autogenerated as the
9680:   interface definition cannot be generated correctly [due to `MatFactorInfo`]

9682: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9683: @*/
9684: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9685: {
9686:   PetscFunctionBegin;
9687:   PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9688:   PetscFunctionReturn(PETSC_SUCCESS);
9689: }
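
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): initialize a
   MatFactorInfo, adjust one entry, and drive an LU factorization with the built-in solver.
   Assumes A is a square, assembled sequential matrix (e.g. MATSEQAIJ) supported by MATSOLVERPETSC.
*/
static inline PetscErrorCode ExampleLUWithInfo(Mat A, Mat *F)
{
  MatFactorInfo info;
  IS            rowperm, colperm;

  PetscFunctionBegin;
  PetscCall(MatFactorInfoInitialize(&info));
  info.fill = 2.0; /* expected fill ratio; tune for the problem at hand */
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, F));
  PetscCall(MatLUFactorSymbolic(*F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(*F, A, &info));
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscFunctionReturn(PETSC_SUCCESS);
}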

9691: /*@
9692:   MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed

9694:   Collective

9696:   Input Parameters:
9697: + mat - the factored matrix
9698: - is  - the index set defining the Schur indices (0-based)

9700:   Level: advanced

9702:   Notes:
9703:   Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.

9705:   You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.

9707:   This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`

9709: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9710:           `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9711: @*/
9712: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9713: {
9714:   PetscErrorCode (*f)(Mat, IS);

9716:   PetscFunctionBegin;
9721:   PetscCheckSameComm(mat, 1, is, 2);
9722:   PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9723:   PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9724:   PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9725:   PetscCall(MatDestroy(&mat->schur));
9726:   PetscCall((*f)(mat, is));
9727:   PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9728:   PetscFunctionReturn(PETSC_SUCCESS);
9729: }
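
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): select the
   last nschur indices as the Schur block before factoring with MUMPS, then solve a Schur
   complement system. Assumes PETSc was configured with MUMPS, A is a square, assembled sequential
   matrix, rhs and sol have length nschur, and the assumed call order
   (set the Schur IS before the symbolic/numeric factorization) applies.
*/
static inline PetscErrorCode ExampleSchurSolve(Mat A, PetscInt nschur, Vec rhs, Vec sol)
{
  Mat           F;
  IS            is, rowperm, colperm;
  MatFactorInfo info;
  PetscInt      n;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &n, NULL));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nschur, n - nschur, 1, &is)); /* last nschur indices, 0-based */
  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
  PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F));
  PetscCall(MatFactorSetSchurIS(F, is)); /* must be set before the factorization is computed */
  PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatFactorSolveSchurComplement(F, rhs, sol));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(MatDestroy(&F));
  PetscFunctionReturn(PETSC_SUCCESS);
}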

9731: /*@
9732:   MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step

9734:   Logically Collective

9736:   Input Parameters:
9737: + F      - the factored matrix obtained by calling `MatGetFactor()`
9738: . S      - location where to return the Schur complement, can be `NULL`
9739: - status - the status of the Schur complement matrix, can be `NULL`

9741:   Level: advanced

9743:   Notes:
9744:   You must call `MatFactorSetSchurIS()` before calling this routine.

9746:   This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`

9748:   The routine provides a copy of the Schur matrix stored within the solver data structures.
9749:   The caller must destroy the object when it is no longer needed.
9750:   If `MatFactorInvertSchurComplement()` has been called, the routine returns the inverse of the Schur complement.

9752:   Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)

9754:   See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.

9756:   Developer Note:
9757:   The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9758:   matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.

9760: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9761: @*/
9762: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9763: {
9764:   PetscFunctionBegin;
9766:   if (S) PetscAssertPointer(S, 2);
9767:   if (status) PetscAssertPointer(status, 3);
9768:   if (S) {
9769:     PetscErrorCode (*f)(Mat, Mat *);

9771:     PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9772:     if (f) {
9773:       PetscCall((*f)(F, S));
9774:     } else {
9775:       PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9776:     }
9777:   }
9778:   if (status) *status = F->schur_status;
9779:   PetscFunctionReturn(PETSC_SUCCESS);
9780: }

9782: /*@
9783:   MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix

9785:   Logically Collective

9787:   Input Parameters:
9788: + F      - the factored matrix obtained by calling `MatGetFactor()`
9789: . S      - location where to return the Schur complement, can be `NULL`
9790: - status - the status of the Schur complement matrix, can be `NULL`

9792:   Level: advanced

9794:   Notes:
9795:   You must call `MatFactorSetSchurIS()` before calling this routine.

9797:   Schur complement mode is currently implemented for sequential matrices with solver type `MATSOLVERMUMPS`

9799:   The routine returns the Schur complement stored within the data structures of the solver.

9801:   If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.

9803:   The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.

9805:   Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix

9807:   See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.

9809: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9810: @*/
9811: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9812: {
9813:   PetscFunctionBegin;
9815:   if (S) {
9816:     PetscAssertPointer(S, 2);
9817:     *S = F->schur;
9818:   }
9819:   if (status) {
9820:     PetscAssertPointer(status, 3);
9821:     *status = F->schur_status;
9822:   }
9823:   PetscFunctionReturn(PETSC_SUCCESS);
9824: }
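
/*
   A minimal usage sketch (hypothetical snippet, not part of the PETSc source or API): borrow the
   Schur complement stored in the factor, inspect it, and hand it back without making a copy.
   Assumes MatFactorSetSchurIS() was called on F before it was factored.
*/
static inline PetscErrorCode ExampleViewSchur(Mat F)
{
  Mat                  S;
  MatFactorSchurStatus status;

  PetscFunctionBegin;
  PetscCall(MatFactorGetSchurComplement(F, &S, &status)); /* no copy: S references data inside F */
  PetscCall(MatView(S, PETSC_VIEWER_STDOUT_SELF));
  PetscCall(MatFactorRestoreSchurComplement(F, &S, status)); /* do not call MatDestroy() on S */
  PetscFunctionReturn(PETSC_SUCCESS);
}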

9826: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9827: {
9828:   Mat S = F->schur;

9830:   PetscFunctionBegin;
9831:   switch (F->schur_status) {
9832:   case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9833:   case MAT_FACTOR_SCHUR_INVERTED:
9834:     if (S) {
9835:       S->ops->solve             = NULL;
9836:       S->ops->matsolve          = NULL;
9837:       S->ops->solvetranspose    = NULL;
9838:       S->ops->matsolvetranspose = NULL;
9839:       S->ops->solveadd          = NULL;
9840:       S->ops->solvetransposeadd = NULL;
9841:       S->factortype             = MAT_FACTOR_NONE;
9842:       PetscCall(PetscFree(S->solvertype));
9843:     }
9844:   case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9845:     break;
9846:   default:
9847:     SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9848:   }
9849:   PetscFunctionReturn(PETSC_SUCCESS);
9850: }

9852: /*@
9853:   MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`

9855:   Logically Collective

9857:   Input Parameters:
9858: + F      - the factored matrix obtained by calling `MatGetFactor()`
9859: . S      - location where the Schur complement is stored
9860: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)

9862:   Level: advanced

9864: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9865: @*/
9866: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9867: {
9868:   PetscFunctionBegin;
9870:   if (S) {
9872:     *S = NULL;
9873:   }
9874:   F->schur_status = status;
9875:   PetscCall(MatFactorUpdateSchurStatus_Private(F));
9876:   PetscFunctionReturn(PETSC_SUCCESS);
9877: }

9879: /*@
9880:   MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step

9882:   Logically Collective

9884:   Input Parameters:
9885: + F   - the factored matrix obtained by calling `MatGetFactor()`
9886: . rhs - location where the right-hand side of the Schur complement system is stored
9887: - sol - location where the solution of the Schur complement system has to be returned

9889:   Level: advanced

9891:   Notes:
9892:   The sizes of the vectors should match the size of the Schur complement

9894:   Must be called after `MatFactorSetSchurIS()`

9896: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9897: @*/
9898: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9899: {
9900:   PetscFunctionBegin;
9907:   PetscCheckSameComm(F, 1, rhs, 2);
9908:   PetscCheckSameComm(F, 1, sol, 3);
9909:   PetscCall(MatFactorFactorizeSchurComplement(F));
9910:   switch (F->schur_status) {
9911:   case MAT_FACTOR_SCHUR_FACTORED:
9912:     PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9913:     break;
9914:   case MAT_FACTOR_SCHUR_INVERTED:
9915:     PetscCall(MatMultTranspose(F->schur, rhs, sol));
9916:     break;
9917:   default:
9918:     SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9919:   }
9920:   PetscFunctionReturn(PETSC_SUCCESS);
9921: }

9923: /*@
9924:   MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step

9926:   Logically Collective

9928:   Input Parameters:
9929: + F   - the factored matrix obtained by calling `MatGetFactor()`
9930: . rhs - location where the right-hand side of the Schur complement system is stored
9931: - sol - location where the solution of the Schur complement system has to be returned

9933:   Level: advanced

9935:   Notes:
9936:   The sizes of the vectors should match the size of the Schur complement

9938:   Must be called after `MatFactorSetSchurIS()`

9940: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9941: @*/
9942: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9943: {
9944:   PetscFunctionBegin;
9951:   PetscCheckSameComm(F, 1, rhs, 2);
9952:   PetscCheckSameComm(F, 1, sol, 3);
9953:   PetscCall(MatFactorFactorizeSchurComplement(F));
9954:   switch (F->schur_status) {
9955:   case MAT_FACTOR_SCHUR_FACTORED:
9956:     PetscCall(MatSolve(F->schur, rhs, sol));
9957:     break;
9958:   case MAT_FACTOR_SCHUR_INVERTED:
9959:     PetscCall(MatMult(F->schur, rhs, sol));
9960:     break;
9961:   default:
9962:     SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9963:   }
9964:   PetscFunctionReturn(PETSC_SUCCESS);
9965: }

9967: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9968: #if PetscDefined(HAVE_CUDA)
9969: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9970: #endif

9972: /* Schur status updated in the interface */
9973: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9974: {
9975:   Mat S = F->schur;

9977:   PetscFunctionBegin;
9978:   if (S) {
9979:     PetscMPIInt size;
9980:     PetscBool   isdense, isdensecuda;

9982:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9983:     PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9984:     PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9985:     PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9986:     PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9987:     PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9988:     if (isdense) {
9989:       PetscCall(MatSeqDenseInvertFactors_Private(S));
9990:     } else if (isdensecuda) {
9991: #if defined(PETSC_HAVE_CUDA)
9992:       PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
9993: #endif
9994:     }
9995:     // HIP??????????????
9996:     PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
9997:   }
9998:   PetscFunctionReturn(PETSC_SUCCESS);
9999: }

10001: /*@
10002:   MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step

10004:   Logically Collective

10006:   Input Parameter:
10007: . F - the factored matrix obtained by calling `MatGetFactor()`

10009:   Level: advanced

10011:   Notes:
10012:   Must be called after `MatFactorSetSchurIS()`.

10014:   Call `MatFactorGetSchurComplement()` or  `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.

10016: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10017: @*/
10018: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10019: {
10020:   PetscFunctionBegin;
10023:   if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
10024:   PetscCall(MatFactorFactorizeSchurComplement(F));
10025:   PetscCall(MatFactorInvertSchurComplement_Private(F));
10026:   F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10027:   PetscFunctionReturn(PETSC_SUCCESS);
10028: }

10030: /*@
10031:   MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step

10033:   Logically Collective

10035:   Input Parameter:
10036: . F - the factored matrix obtained by calling `MatGetFactor()`

10038:   Level: advanced

10040:   Note:
10041:   Must be called after `MatFactorSetSchurIS()`

10043: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10044: @*/
10045: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10046: {
10047:   MatFactorInfo info;

10049:   PetscFunctionBegin;
10052:   if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10053:   PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10054:   PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10055:   if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10056:     PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10057:   } else {
10058:     PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10059:   }
10060:   PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10061:   F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10062:   PetscFunctionReturn(PETSC_SUCCESS);
10063: }

10065: /*@
10066:   MatPtAP - Creates the matrix product $C = P^T * A * P$

10068:   Neighbor-wise Collective

10070:   Input Parameters:
10071: + A     - the matrix
10072: . P     - the projection matrix
10073: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10074: - fill  - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DEFAULT` if you do not have a good estimate;
10075:           if the result is a dense matrix this is irrelevant

10077:   Output Parameter:
10078: . C - the product matrix

10080:   Level: intermediate

10082:   Notes:
10083:   C will be created and must be destroyed by the user with `MatDestroy()`.

10085:   An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done

10087:   Developer Note:
10088:   For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.

10090: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10091: @*/
10092: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10093: {
10094:   PetscFunctionBegin;
10095:   if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10096:   PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

10098:   if (scall == MAT_INITIAL_MATRIX) {
10099:     PetscCall(MatProductCreate(A, P, NULL, C));
10100:     PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10101:     PetscCall(MatProductSetAlgorithm(*C, "default"));
10102:     PetscCall(MatProductSetFill(*C, fill));

10104:     (*C)->product->api_user = PETSC_TRUE;
10105:     PetscCall(MatProductSetFromOptions(*C));
10106:     PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10107:     PetscCall(MatProductSymbolic(*C));
10108:   } else { /* scall == MAT_REUSE_MATRIX */
10109:     PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10110:   }

10112:   PetscCall(MatProductNumeric(*C));
10113:   (*C)->symmetric = A->symmetric;
10114:   (*C)->spd       = A->spd;
10115:   PetscFunctionReturn(PETSC_SUCCESS);
10116: }
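
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): form a
   Galerkin coarse operator Ac = P^T A P once, then recompute it cheaply after the values of A
   change. Assumes A and P are assembled with compatible layouts.
*/
static inline PetscErrorCode ExampleGalerkin(Mat A, Mat P, Mat *Ac)
{
  PetscFunctionBegin;
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, Ac)); /* symbolic + numeric phases */
  /* ... values of A change, nonzero pattern unchanged ... */
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_DEFAULT, Ac)); /* numeric phase only */
  PetscFunctionReturn(PETSC_SUCCESS);
}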

10118: /*@
10119:   MatRARt - Creates the matrix product $C = R * A * R^T$

10121:   Neighbor-wise Collective

10123:   Input Parameters:
10124: + A     - the matrix
10125: . R     - the projection matrix
10126: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10127: - fill  - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DEFAULT` if you do not have a good estimate;
10128:           if the result is a dense matrix this is irrelevant

10130:   Output Parameter:
10131: . C - the product matrix

10133:   Level: intermediate

10135:   Notes:
10136:   C will be created and must be destroyed by the user with `MatDestroy()`.

10138:   An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done

10140:   This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10141:   which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10142:   parallel `MatRARt()` is implemented via explicit transpose of `R`, which could be very expensive.
10143:   We recommend using `MatPtAP()`.

10145: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10146: @*/
10147: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10148: {
10149:   PetscFunctionBegin;
10150:   if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10151:   PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

10153:   if (scall == MAT_INITIAL_MATRIX) {
10154:     PetscCall(MatProductCreate(A, R, NULL, C));
10155:     PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10156:     PetscCall(MatProductSetAlgorithm(*C, "default"));
10157:     PetscCall(MatProductSetFill(*C, fill));

10159:     (*C)->product->api_user = PETSC_TRUE;
10160:     PetscCall(MatProductSetFromOptions(*C));
10161:     PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10162:     PetscCall(MatProductSymbolic(*C));
10163:   } else { /* scall == MAT_REUSE_MATRIX */
10164:     PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10165:   }

10167:   PetscCall(MatProductNumeric(*C));
10168:   if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10169:   PetscFunctionReturn(PETSC_SUCCESS);
10170: }

10172: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10173: {
10174:   PetscBool flg = PETSC_TRUE;

10176:   PetscFunctionBegin;
10177:   PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10178:   if (scall == MAT_INITIAL_MATRIX) {
10179:     PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10180:     PetscCall(MatProductCreate(A, B, NULL, C));
10181:     PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10182:     PetscCall(MatProductSetFill(*C, fill));
10183:   } else { /* scall == MAT_REUSE_MATRIX */
10184:     Mat_Product *product = (*C)->product;

10186:     PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10187:     if (flg && product && product->type != ptype) {
10188:       PetscCall(MatProductClear(*C));
10189:       product = NULL;
10190:     }
10191:     PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10192:     if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10193:       PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10194:       PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10195:       product        = (*C)->product;
10196:       product->fill  = fill;
10197:       product->clear = PETSC_TRUE;
10198:     } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10199:       flg = PETSC_FALSE;
10200:       PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10201:     }
10202:   }
10203:   if (flg) {
10204:     (*C)->product->api_user = PETSC_TRUE;
10205:     PetscCall(MatProductSetType(*C, ptype));
10206:     PetscCall(MatProductSetFromOptions(*C));
10207:     PetscCall(MatProductSymbolic(*C));
10208:   }
10209:   PetscCall(MatProductNumeric(*C));
10210:   PetscFunctionReturn(PETSC_SUCCESS);
10211: }

10213: /*@
10214:   MatMatMult - Performs matrix-matrix multiplication C=A*B.

10216:   Neighbor-wise Collective

10218:   Input Parameters:
10219: + A     - the left matrix
10220: . B     - the right matrix
10221: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10222: - fill  - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if you do not have a good estimate;
10223:           if the result is a dense matrix this is irrelevant

10225:   Output Parameter:
10226: . C - the product matrix

10228:   Notes:
10229:   Unless scall is `MAT_REUSE_MATRIX` C will be created.

10231:   `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10232:   call to this function with `MAT_INITIAL_MATRIX`.

10234:   To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value actually needed.

10236:   In the special case where matrix B (and hence C) are dense you can create the correctly sized matrix C yourself and then call this routine with `MAT_REUSE_MATRIX`,
10237:   rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix C is sparse.

10239:   Example of Usage:
10240: .vb
10241:      MatProductCreate(A,B,NULL,&C);
10242:      MatProductSetType(C,MATPRODUCT_AB);
10243:      MatProductSymbolic(C);
10244:      MatProductNumeric(C); // compute C=A * B
10245:      MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10246:      MatProductNumeric(C);
10247:      MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10248:      MatProductNumeric(C);
10249: .ve

10251:   Level: intermediate

10253: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10254: @*/
10255: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10256: {
10257:   PetscFunctionBegin;
10258:   PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10259:   PetscFunctionReturn(PETSC_SUCCESS);
10260: }
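
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): compute
   C = A*B, then reuse C after the values of A or B change while their nonzero patterns stay fixed.
   Assumes A and B are assembled with compatible sizes.
*/
static inline PetscErrorCode ExampleProductReuse(Mat A, Mat B, Mat *C)
{
  PetscFunctionBegin;
  PetscCall(MatMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, C));
  /* ... values of A and/or B change, same nonzero pattern ... */
  PetscCall(MatMatMult(A, B, MAT_REUSE_MATRIX, PETSC_DEFAULT, C)); /* reuses the symbolic data in C */
  PetscFunctionReturn(PETSC_SUCCESS);
}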

10262: /*@
10263:   MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.

10265:   Neighbor-wise Collective

10267:   Input Parameters:
10268: + A     - the left matrix
10269: . B     - the right matrix
10270: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10271: - fill  - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known

10273:   Output Parameter:
10274: . C - the product matrix

10276:   Options Database Key:
10277: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10278:               first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10279:               the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.

10281:   Level: intermediate

10283:   Notes:
10284:   C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.

10286:   `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call

10288:   To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10289:   actually needed.

10291:   This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10292:   and for pairs of `MATMPIDENSE` matrices.

10294:   This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`

10296: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10297: @*/
10298: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10299: {
10300:   PetscFunctionBegin;
10301:   PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10302:   if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10303:   PetscFunctionReturn(PETSC_SUCCESS);
10304: }

10306: /*@
10307:   MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.

10309:   Neighbor-wise Collective

10311:   Input Parameters:
10312: + A     - the left matrix
10313: . B     - the right matrix
10314: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10315: - fill  - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known

10317:   Output Parameter:
10318: . C - the product matrix

10320:   Level: intermediate

10322:   Notes:
10323:   `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.

10325:   `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.

10327:   This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`

10329:   To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10330:   actually needed.

10332:   This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10333:   which inherit from `MATSEQAIJ`.  `C` will be of the same type as the input matrices.

10335: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10336: @*/
10337: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10338: {
10339:   PetscFunctionBegin;
10340:   PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10341:   PetscFunctionReturn(PETSC_SUCCESS);
10342: }
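
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): build the
   normal-equations operator N = A^T A, as used for least-squares problems. Assumes A is assembled.
*/
static inline PetscErrorCode ExampleNormalEquations(Mat A, Mat *N)
{
  PetscFunctionBegin;
  PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, PETSC_DEFAULT, N));
  PetscCall(MatSetOption(*N, MAT_SYMMETRIC, PETSC_TRUE)); /* A^T A is symmetric */
  PetscFunctionReturn(PETSC_SUCCESS);
}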

10344: /*@
10345:   MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.

10347:   Neighbor-wise Collective

10349:   Input Parameters:
10350: + A     - the left matrix
10351: . B     - the middle matrix
10352: . C     - the right matrix
10353: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10354: - fill  - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B) + nnz(C)), use `PETSC_DEFAULT` if you do not have a good estimate;
10355:           if the result is a dense matrix this is irrelevant

10357:   Output Parameter:
10358: . D - the product matrix

10360:   Level: intermediate

10362:   Notes:
10363:   Unless `scall` is `MAT_REUSE_MATRIX` D will be created.

10365:   `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call

10367:   This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`

10369:   To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10370:   actually needed.

10372:   If you have many matrices with the same non-zero structure to multiply, you
10373:   should use `MAT_REUSE_MATRIX` in all calls but the first

10375: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10376: @*/
10377: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10378: {
10379:   PetscFunctionBegin;
10380:   if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10381:   PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

10383:   if (scall == MAT_INITIAL_MATRIX) {
10384:     PetscCall(MatProductCreate(A, B, C, D));
10385:     PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10386:     PetscCall(MatProductSetAlgorithm(*D, "default"));
10387:     PetscCall(MatProductSetFill(*D, fill));

10389:     (*D)->product->api_user = PETSC_TRUE;
10390:     PetscCall(MatProductSetFromOptions(*D));
10391:     PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10392:                ((PetscObject)C)->type_name);
10393:     PetscCall(MatProductSymbolic(*D));
10394:   } else { /* user may change input matrices when REUSE */
10395:     PetscCall(MatProductReplaceMats(A, B, C, *D));
10396:   }
10397:   PetscCall(MatProductNumeric(*D));
10398:   PetscFunctionReturn(PETSC_SUCCESS);
10399: }

10401: /*@
10402:   MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.

10404:   Collective

10406:   Input Parameters:
10407: + mat      - the matrix
10408: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10409: . subcomm  - MPI communicator split from the communicator in which mat resides (or `MPI_COMM_NULL` if nsubcomm is used)
10410: - reuse    - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

10412:   Output Parameter:
10413: . matredundant - redundant matrix

10415:   Level: advanced

10417:   Notes:
10418:   `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10419:   original matrix has not changed since the last call to `MatCreateRedundantMatrix()`.

10421:   This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10422:   calling it.

10424:   `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.

10426: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10427: @*/
10428: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10429: {
10430:   MPI_Comm       comm;
10431:   PetscMPIInt    size;
10432:   PetscInt       mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10433:   Mat_Redundant *redund     = NULL;
10434:   PetscSubcomm   psubcomm   = NULL;
10435:   MPI_Comm       subcomm_in = subcomm;
10436:   Mat           *matseq;
10437:   IS             isrow, iscol;
10438:   PetscBool      newsubcomm = PETSC_FALSE;

10440:   PetscFunctionBegin;
10442:   if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10443:     PetscAssertPointer(*matredundant, 5);
10445:   }

10447:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10448:   if (size == 1 || nsubcomm == 1) {
10449:     if (reuse == MAT_INITIAL_MATRIX) {
10450:       PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10451:     } else {
10452:       PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10453:       PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10454:     }
10455:     PetscFunctionReturn(PETSC_SUCCESS);
10456:   }

10458:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10459:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10460:   MatCheckPreallocated(mat, 1);

10462:   PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10463:   if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10464:     /* create psubcomm, then get subcomm */
10465:     PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10466:     PetscCallMPI(MPI_Comm_size(comm, &size));
10467:     PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);

10469:     PetscCall(PetscSubcommCreate(comm, &psubcomm));
10470:     PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10471:     PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10472:     PetscCall(PetscSubcommSetFromOptions(psubcomm));
10473:     PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10474:     newsubcomm = PETSC_TRUE;
10475:     PetscCall(PetscSubcommDestroy(&psubcomm));
10476:   }

10478:   /* get isrow, iscol and a local sequential matrix matseq[0] */
10479:   if (reuse == MAT_INITIAL_MATRIX) {
10480:     mloc_sub = PETSC_DECIDE;
10481:     nloc_sub = PETSC_DECIDE;
10482:     if (bs < 1) {
10483:       PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10484:       PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10485:     } else {
10486:       PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10487:       PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10488:     }
10489:     PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10490:     rstart = rend - mloc_sub;
10491:     PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10492:     PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10493:     PetscCall(ISSetIdentity(iscol));
10494:   } else { /* reuse == MAT_REUSE_MATRIX */
10495:     PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10496:     /* retrieve subcomm */
10497:     PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10498:     redund = (*matredundant)->redundant;
10499:     isrow  = redund->isrow;
10500:     iscol  = redund->iscol;
10501:     matseq = redund->matseq;
10502:   }
10503:   PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));

10505:   /* get matredundant over subcomm */
10506:   if (reuse == MAT_INITIAL_MATRIX) {
10507:     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));

10509:     /* create a supporting struct and attach it to C for reuse */
10510:     PetscCall(PetscNew(&redund));
10511:     (*matredundant)->redundant = redund;
10512:     redund->isrow              = isrow;
10513:     redund->iscol              = iscol;
10514:     redund->matseq             = matseq;
10515:     if (newsubcomm) {
10516:       redund->subcomm = subcomm;
10517:     } else {
10518:       redund->subcomm = MPI_COMM_NULL;
10519:     }
10520:   } else {
10521:     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10522:   }
10523: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10524:   if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10525:     PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10526:     PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10527:   }
10528: #endif
10529:   PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10530:   PetscFunctionReturn(PETSC_SUCCESS);
10531: }

10533: /*@C
10534:   MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10535:   a given `Mat`. Each submatrix can span multiple procs.

10537:   Collective

10539:   Input Parameters:
10540: + mat     - the matrix
10541: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10542: - scall   - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

10544:   Output Parameter:
10545: . subMat - parallel sub-matrices each spanning a given `subcomm`

10547:   Level: advanced

10549:   Notes:
10550:   The submatrix partition across processors is dictated by `subComm`, a
10551:   communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10552:   is not restricted to be grouped with consecutive original MPI processes.

10554:   Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10555:   maps directly to the layout of the original matrix [wrt the local
10556:   row,col partitioning]. So the original 'DiagonalMat' naturally maps
10557:   into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10558:   the `subMat`. However, the offDiagMat loses some columns - and this is
10559:   reconstructed with `MatSetValues()`

10561:   This is used by `PCBJACOBI` when a single block spans multiple MPI processes.

10563: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10564: @*/
10565: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10566: {
10567:   PetscMPIInt commsize, subCommSize;

10569:   PetscFunctionBegin;
10570:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10571:   PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10572:   PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);

10574:   PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10575:   PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10576:   PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10577:   PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10578:   PetscFunctionReturn(PETSC_SUCCESS);
10579: }

10581: /*@
10582:   MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering

10584:   Not Collective

10586:   Input Parameters:
10587: + mat   - matrix to extract local submatrix from
10588: . isrow - local row indices for submatrix
10589: - iscol - local column indices for submatrix

10591:   Output Parameter:
10592: . submat - the submatrix

10594:   Level: intermediate

10596:   Notes:
10597:   `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.

10599:   Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`.  Its communicator may be
10600:   the same as `mat`, it may be `PETSC_COMM_SELF`, or some other subcommunicator of `mat`'s.

10602:   `submat` always implements `MatSetValuesLocal()`.  If `isrow` and `iscol` have the same block size, then
10603:   `MatSetValuesBlockedLocal()` will also be implemented.

10605:   `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10606:   Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.

10608: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10609: @*/
10610: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10611: {
10612:   PetscFunctionBegin;
10616:   PetscCheckSameComm(isrow, 2, iscol, 3);
10617:   PetscAssertPointer(submat, 4);
10618:   PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");

10620:   if (mat->ops->getlocalsubmatrix) {
10621:     PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10622:   } else {
10623:     PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10624:   }
10625:   PetscFunctionReturn(PETSC_SUCCESS);
10626: }
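
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): add one entry
   into a local submatrix using local indices, then restore it. Assumes mat has a local-to-global
   mapping (e.g. it came from DMCreateMatrix()), isrow/iscol are valid local index sets, and the
   indices 0,0 exist in the submatrix's local numbering.
*/
static inline PetscErrorCode ExampleLocalBlockInsert(Mat mat, IS isrow, IS iscol)
{
  Mat         sub;
  PetscInt    row = 0, col = 0;
  PetscScalar v   = 1.0;

  PetscFunctionBegin;
  PetscCall(MatGetLocalSubMatrix(mat, isrow, iscol, &sub));
  PetscCall(MatSetValuesLocal(sub, 1, &row, 1, &col, &v, ADD_VALUES)); /* always supported on sub */
  PetscCall(MatRestoreLocalSubMatrix(mat, isrow, iscol, &sub));
  /* MatAssemblyBegin()/MatAssemblyEnd() on mat are still required before the values are used */
  PetscFunctionReturn(PETSC_SUCCESS);
}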

10628: /*@
10629:   MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`

10631:   Not Collective

10633:   Input Parameters:
10634: + mat    - matrix to extract local submatrix from
10635: . isrow  - local row indices for submatrix
10636: . iscol  - local column indices for submatrix
10637: - submat - the submatrix

10639:   Level: intermediate

10641: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10642: @*/
10643: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10644: {
10645:   PetscFunctionBegin;
10649:   PetscCheckSameComm(isrow, 2, iscol, 3);
10650:   PetscAssertPointer(submat, 4);

10653:   if (mat->ops->restorelocalsubmatrix) {
10654:     PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10655:   } else {
10656:     PetscCall(MatDestroy(submat));
10657:   }
10658:   *submat = NULL;
10659:   PetscFunctionReturn(PETSC_SUCCESS);
10660: }

10662: /*@
10663:   MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix

10665:   Collective

10667:   Input Parameter:
10668: . mat - the matrix

10670:   Output Parameter:
10671: . is - if any rows have zero diagonals this contains the list of them

10673:   Level: developer

10675: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10676: @*/
10677: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10678: {
10679:   PetscFunctionBegin;
10682:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10683:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");

10685:   if (!mat->ops->findzerodiagonals) {
10686:     Vec                diag;
10687:     const PetscScalar *a;
10688:     PetscInt          *rows;
10689:     PetscInt           rStart, rEnd, r, nrow = 0;

10691:     PetscCall(MatCreateVecs(mat, &diag, NULL));
10692:     PetscCall(MatGetDiagonal(mat, diag));
10693:     PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10694:     PetscCall(VecGetArrayRead(diag, &a));
10695:     for (r = 0; r < rEnd - rStart; ++r)
10696:       if (a[r] == 0.0) ++nrow;
10697:     PetscCall(PetscMalloc1(nrow, &rows));
10698:     nrow = 0;
10699:     for (r = 0; r < rEnd - rStart; ++r)
10700:       if (a[r] == 0.0) rows[nrow++] = r + rStart;
10701:     PetscCall(VecRestoreArrayRead(diag, &a));
10702:     PetscCall(VecDestroy(&diag));
10703:     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10704:   } else {
10705:     PetscUseTypeMethod(mat, findzerodiagonals, is);
10706:   }
10707:   PetscFunctionReturn(PETSC_SUCCESS);
10708: }
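
/*
   A minimal usage sketch (hypothetical helper, not part of the PETSc source or API): locate rows
   whose diagonal entry is zero (or absent), zero out those rows, and place 1.0 on their diagonals
   so the matrix can be factored. Assumes mat is assembled and square.
*/
static inline PetscErrorCode ExampleFixZeroDiagonals(Mat mat)
{
  IS is;

  PetscFunctionBegin;
  PetscCall(MatFindZeroDiagonals(mat, &is));
  PetscCall(MatZeroRowsIS(mat, is, 1.0, NULL, NULL)); /* zero those rows and put 1.0 on their diagonals */
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(PETSC_SUCCESS);
}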

10710: /*@
10711:   MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)

10713:   Collective

10715:   Input Parameter:
10716: . mat - the matrix

10718:   Output Parameter:
10719: . is - contains the list of rows with off block diagonal entries

10721:   Level: developer

10723: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10724: @*/
10725: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10726: {
10727:   PetscFunctionBegin;
10730:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10731:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");

10733:   PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10734:   PetscFunctionReturn(PETSC_SUCCESS);
10735: }

10737: /*@C
10738:   MatInvertBlockDiagonal - Inverts the block diagonal entries.

10740:   Collective; No Fortran Support

10742:   Input Parameter:
10743: . mat - the matrix

10745:   Output Parameter:
10746: . values - the block inverses in column major order (FORTRAN-like)

10748:   Level: advanced

10750:   Notes:
10751:   The size of the blocks is determined by the block size of the matrix.

10753:   The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case

10755:   The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size

10757: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10758: @*/
10759: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10760: {
10761:   PetscFunctionBegin;
10763:   PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10764:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10765:   PetscUseTypeMethod(mat, invertblockdiagonal, values);
10766:   PetscFunctionReturn(PETSC_SUCCESS);
10767: }

10769: /*@
10770:   MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.

10772:   Collective; No Fortran Support

10774:   Input Parameters:
10775: + mat     - the matrix
10776: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10777: - bsizes  - the size of each block on the process, set with `MatSetVariableBlockSizes()`

10779:   Output Parameter:
10780: . values - the block inverses in column major order (FORTRAN-like)

10782:   Level: advanced

10784:   Notes:
10785:   Use `MatInvertBlockDiagonal()` if all blocks have the same size

10787:   The blocks never overlap between two MPI processes; use `MatInvertVariableBlockEnvelope()` for that case

10789: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10790: @*/
10791: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10792: {
10793:   PetscFunctionBegin;
10795:   PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10796:   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10797:   PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10798:   PetscFunctionReturn(PETSC_SUCCESS);
10799: }
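
/*
   Usage sketch (hypothetical helper; assumes the variable block sizes were already provided with
   MatSetVariableBlockSizes() and are passed in here as nblocks/bsizes): the caller allocates the
   output array, whose length is the sum of bsizes[i]^2.
*/
static PetscErrorCode InvertVariableBlocks(Mat A, PetscInt nblocks, const PetscInt bsizes[])
{
  PetscScalar *values;
  PetscInt     i, len = 0;

  PetscFunctionBegin;
  for (i = 0; i < nblocks; i++) len += bsizes[i] * bsizes[i];
  PetscCall(PetscMalloc1(len, &values));
  PetscCall(MatInvertVariableBlockDiagonal(A, nblocks, bsizes, values));
  /* ... use the inverted blocks, stored contiguously block by block in column-major order ... */
  PetscCall(PetscFree(values));
  PetscFunctionReturn(PETSC_SUCCESS);
}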

10801: /*@
10802:   MatInvertBlockDiagonalMat - Sets the values of matrix `C` to the inverted block diagonal of matrix `A`

10804:   Collective

10806:   Input Parameters:
10807: + A - the matrix
10808: - C - matrix to hold the inverted block diagonal of `A`.  This matrix must already be created and may have its type set.

10810:   Level: advanced

10812:   Note:
10813:   The blocksize of the matrix is used to determine the blocks on the diagonal of `C`

10815: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10816: @*/
10817: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10818: {
10819:   const PetscScalar *vals;
10820:   PetscInt          *dnnz;
10821:   PetscInt           m, rstart, rend, bs, i, j;

10823:   PetscFunctionBegin;
10824:   PetscCall(MatInvertBlockDiagonal(A, &vals));
10825:   PetscCall(MatGetBlockSize(A, &bs));
10826:   PetscCall(MatGetLocalSize(A, &m, NULL));
10827:   PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10828:   PetscCall(PetscMalloc1(m / bs, &dnnz));
10829:   for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10830:   PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10831:   PetscCall(PetscFree(dnnz));
10832:   PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10833:   PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10834:   for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10835:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10836:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10837:   PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10838:   PetscFunctionReturn(PETSC_SUCCESS);
10839: }
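
/*
   Usage sketch (hypothetical helper; assumes `A` is an assembled matrix with its block size set):
   create an empty matrix C of an explicit type and let MatInvertBlockDiagonalMat() fill it with
   the inverted diagonal blocks of A.
*/
static PetscErrorCode BuildBlockJacobiMatrix(Mat A, Mat *C)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), C));
  PetscCall(MatSetType(*C, MATAIJ)); /* choosing AIJ here; other types may also work */
  PetscCall(MatInvertBlockDiagonalMat(A, *C));
  PetscFunctionReturn(PETSC_SUCCESS);
}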

10841: /*@C
10842:   MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10843:   via `MatTransposeColoringCreate()`.

10845:   Collective

10847:   Input Parameter:
10848: . c - coloring context

10850:   Level: intermediate

10852: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10853: @*/
10854: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10855: {
10856:   MatTransposeColoring matcolor = *c;

10858:   PetscFunctionBegin;
10859:   if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10860:   if (--((PetscObject)matcolor)->refct > 0) {
10861:     matcolor = NULL;
10862:     PetscFunctionReturn(PETSC_SUCCESS);
10863:   }

10865:   PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10866:   PetscCall(PetscFree(matcolor->rows));
10867:   PetscCall(PetscFree(matcolor->den2sp));
10868:   PetscCall(PetscFree(matcolor->colorforcol));
10869:   PetscCall(PetscFree(matcolor->columns));
10870:   if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10871:   PetscCall(PetscHeaderDestroy(c));
10872:   PetscFunctionReturn(PETSC_SUCCESS);
10873: }

10875: /*@
10876:   MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10877:   a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10878:   `MatTransposeColoring` to sparse `B`.

10880:   Collective

10882:   Input Parameters:
10883: + coloring - coloring context created with `MatTransposeColoringCreate()`
10884: - B        - sparse matrix

10886:   Output Parameter:
10887: . Btdense - dense matrix $B^T$

10889:   Level: developer

10891:   Note:
10892:   These are used internally for some implementations of `MatRARt()`

10894: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10895: @*/
10896: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10897: {
10898:   PetscFunctionBegin;

10903:   PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10904:   PetscFunctionReturn(PETSC_SUCCESS);
10905: }

10907: /*@
10908:   MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10909:   a `MatTransposeColoring` context has been created, and a dense matrix $C_{den} = A*B^T_{dense}$
10910:   in which $B^T_{dense}$ was obtained from `MatTransColoringApplySpToDen()`, recovers the sparse
10911:   matrix $C_{sp}$ from $C_{den}$.

10913:   Collective

10915:   Input Parameters:
10916: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10917: - Cden        - matrix product of a sparse matrix and a dense matrix Btdense

10919:   Output Parameter:
10920: . Csp - sparse matrix

10922:   Level: developer

10924:   Note:
10925:   These are used internally for some implementations of `MatRARt()`

10927: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10928: @*/
10929: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10930: {
10931:   PetscFunctionBegin;

10936:   PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10937:   PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10938:   PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10939:   PetscFunctionReturn(PETSC_SUCCESS);
10940: }

10942: /*@
10943:   MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.

10945:   Collective

10947:   Input Parameters:
10948: + mat        - the matrix product C
10949: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`

10951:   Output Parameter:
10952: . color - the new coloring context

10954:   Level: intermediate

10956: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10957:           `MatTransColoringApplyDenToSp()`
10958: @*/
10959: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10960: {
10961:   MatTransposeColoring c;
10962:   MPI_Comm             comm;

10964:   PetscFunctionBegin;
10965:   PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10966:   PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10967:   PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));

10969:   c->ctype = iscoloring->ctype;
10970:   PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);

10972:   *color = c;
10973:   PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10974:   PetscFunctionReturn(PETSC_SUCCESS);
10975: }
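
/*
   Usage sketch (hypothetical helper; these routines are normally used internally by MatRARt()
   implementations): build a coloring of the product matrix C and create the MatTransposeColoring
   context from it. MatTransColoringApplySpToDen() and MatTransColoringApplyDenToSp() would then be
   applied with appropriately sized dense work matrices, and the context released with
   MatTransposeColoringDestroy().
*/
static PetscErrorCode CreateTransposeColoring(Mat C, MatTransposeColoring *tc)
{
  MatColoring mc;
  ISColoring  iscoloring;

  PetscFunctionBegin;
  PetscCall(MatColoringCreate(C, &mc));
  PetscCall(MatColoringSetType(mc, MATCOLORINGSL));
  PetscCall(MatColoringSetFromOptions(mc));
  PetscCall(MatColoringApply(mc, &iscoloring));
  PetscCall(MatColoringDestroy(&mc));
  PetscCall(MatTransposeColoringCreate(C, iscoloring, tc));
  PetscCall(ISColoringDestroy(&iscoloring));
  PetscFunctionReturn(PETSC_SUCCESS);
}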

10977: /*@
10978:   MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If no
10979:   new nonzero locations have been added to (or removed from) the matrix since the previous call, the value will be
10980:   the same; otherwise it will be larger.

10982:   Not Collective

10984:   Input Parameter:
10985: . mat - the matrix

10987:   Output Parameter:
10988: . state - the current state

10990:   Level: intermediate

10992:   Notes:
10993:   You can only compare states from two different calls on the SAME matrix; you cannot compare states between
10994:   different matrices.

10996:   Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix

10998:   Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix; do not compare object pointers.

11000: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11001: @*/
11002: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11003: {
11004:   PetscFunctionBegin;
11006:   *state = mat->nonzerostate;
11007:   PetscFunctionReturn(PETSC_SUCCESS);
11008: }
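
/*
   Usage sketch (hypothetical helper): detect whether nonzero locations have been added to or
   removed from the SAME matrix since a previous check, caching the state between calls.
*/
static PetscErrorCode HasNonzeroPatternChanged(Mat A, PetscObjectState *savedstate, PetscBool *changed)
{
  PetscObjectState current;

  PetscFunctionBegin;
  PetscCall(MatGetNonzeroState(A, &current));
  *changed    = (PetscBool)(current != *savedstate);
  *savedstate = current; /* remember for the next check */
  PetscFunctionReturn(PETSC_SUCCESS);
}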

11010: /*@
11011:   MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11012:   matrices from each processor

11014:   Collective

11016:   Input Parameters:
11017: + comm   - the communicator the parallel matrix will live on
11018: . seqmat - the input sequential matrix (one per MPI process)
11019: . n      - number of local columns (or `PETSC_DECIDE`)
11020: - reuse  - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

11022:   Output Parameter:
11023: . mpimat - the parallel matrix generated

11025:   Level: developer

11027:   Note:
11028:   The number of columns of the matrix on EACH MPI process MUST be the same.

11030: .seealso: [](ch_matrices), `Mat`
11031: @*/
11032: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11033: {
11034:   PetscMPIInt size;

11036:   PetscFunctionBegin;
11037:   PetscCallMPI(MPI_Comm_size(comm, &size));
11038:   if (size == 1) {
11039:     if (reuse == MAT_INITIAL_MATRIX) {
11040:       PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11041:     } else {
11042:       PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11043:     }
11044:     PetscFunctionReturn(PETSC_SUCCESS);
11045:   }

11047:   PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");

11049:   PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11050:   PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11051:   PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11052:   PetscFunctionReturn(PETSC_SUCCESS);
11053: }
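
/*
   Usage sketch (hypothetical helper; assumes every MPI process passes a sequential matrix with the
   SAME number of columns): stack the per-process sequential matrices into one parallel matrix,
   letting PETSc decide the local column layout.
*/
static PetscErrorCode ConcatenateLocalMatrices(MPI_Comm comm, Mat seqmat, Mat *mpimat)
{
  PetscFunctionBegin;
  PetscCall(MatCreateMPIMatConcatenateSeqMat(comm, seqmat, PETSC_DECIDE, MAT_INITIAL_MATRIX, mpimat));
  /* on later calls with the same nonzero pattern, MAT_REUSE_MATRIX reuses *mpimat */
  PetscFunctionReturn(PETSC_SUCCESS);
}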

11055: /*@
11056:   MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.

11058:   Collective

11060:   Input Parameters:
11061: + A - the matrix to create subdomains from
11062: - N - requested number of subdomains

11064:   Output Parameters:
11065: + n   - number of subdomains resulting on this MPI process
11066: - iss - `IS` list with indices of subdomains on this MPI process

11068:   Level: advanced

11070:   Note:
11071:   The number of subdomains must be smaller than the communicator size

11073: .seealso: [](ch_matrices), `Mat`, `IS`
11074: @*/
11075: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11076: {
11077:   MPI_Comm    comm, subcomm;
11078:   PetscMPIInt size, rank, color;
11079:   PetscInt    rstart, rend, k;

11081:   PetscFunctionBegin;
11082:   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11083:   PetscCallMPI(MPI_Comm_size(comm, &size));
11084:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
11085:   PetscCheck(N >= 1 && N < (PetscInt)size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11086:   *n    = 1;
11087:   k     = ((PetscInt)size) / N + ((PetscInt)size % N > 0); /* There are up to k ranks to a color */
11088:   color = rank / k;
11089:   PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11090:   PetscCall(PetscMalloc1(1, iss));
11091:   PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11092:   PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11093:   PetscCallMPI(MPI_Comm_free(&subcomm));
11094:   PetscFunctionReturn(PETSC_SUCCESS);
11095: }
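
/*
   Usage sketch (hypothetical helper): create N coalesced subdomains, for example to feed a
   domain-decomposition preconditioner, then release the IS list returned on this process.
*/
static PetscErrorCode UseCoalescedSubdomains(Mat A, PetscInt N)
{
  PetscInt n, i;
  IS      *iss;

  PetscFunctionBegin;
  PetscCall(MatSubdomainsCreateCoalesce(A, N, &n, &iss));
  /* ... hand the n index sets to the consumer (e.g. a PCGASM-style preconditioner) ... */
  for (i = 0; i < n; i++) PetscCall(ISDestroy(&iss[i]));
  PetscCall(PetscFree(iss));
  PetscFunctionReturn(PETSC_SUCCESS);
}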

11097: /*@
11098:   MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.

11100:   If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11101:   If they are not the same, uses `MatMatMatMult()`.

11103:   Once the coarse grid problem is constructed, the routine corrects for interpolation operators
11104:   that are not of full rank, which can legitimately happen in the case of non-nested
11105:   geometric multigrid.

11107:   Input Parameters:
11108: + restrct     - restriction operator
11109: . dA          - fine grid matrix
11110: . interpolate - interpolation operator
11111: . reuse       - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11112: - fill        - expected fill, use `PETSC_DEFAULT` if you do not have a good estimate

11114:   Output Parameter:
11115: . A - the Galerkin coarse matrix

11117:   Options Database Key:
11118: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used

11120:   Level: developer

11122: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11123: @*/
11124: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11125: {
11126:   IS  zerorows;
11127:   Vec diag;

11129:   PetscFunctionBegin;
11130:   PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)dA), PETSC_ERR_SUP, "Inplace product not supported");
11131:   /* Construct the coarse grid matrix */
11132:   if (interpolate == restrct) {
11133:     PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11134:   } else {
11135:     PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11136:   }

11138:   /* If the interpolation matrix is not of full rank, A will have zero rows.
11139:      This can legitimately happen in the case of non-nested geometric multigrid.
11140:      In that event, we set the rows of the matrix to the rows of the identity,
11141:      ignoring the equations (as the RHS will also be zero). */

11143:   PetscCall(MatFindZeroRows(*A, &zerorows));

11145:   if (zerorows != NULL) { /* if there are any zero rows */
11146:     PetscCall(MatCreateVecs(*A, &diag, NULL));
11147:     PetscCall(MatGetDiagonal(*A, diag));
11148:     PetscCall(VecISSet(diag, zerorows, 1.0));
11149:     PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11150:     PetscCall(VecDestroy(&diag));
11151:     PetscCall(ISDestroy(&zerorows));
11152:   }
11153:   PetscFunctionReturn(PETSC_SUCCESS);
11154: }
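
/*
   Usage sketch (hypothetical helper; R and P are the restriction and interpolation operators of
   one multigrid level and Af the fine-grid matrix): form the coarse operator Ac = R*Af*P, which
   reduces to P^T*Af*P when R == P.
*/
static PetscErrorCode BuildCoarseOperator(Mat R, Mat Af, Mat P, Mat *Ac)
{
  PetscFunctionBegin;
  PetscCall(MatGalerkin(R, Af, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, Ac));
  /* with MAT_REUSE_MATRIX the previously created *Ac is refilled on later calls */
  PetscFunctionReturn(PETSC_SUCCESS);
}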

11156: /*@C
11157:   MatSetOperation - Allows user to set a matrix operation for any matrix type

11159:   Logically Collective

11161:   Input Parameters:
11162: + mat - the matrix
11163: . op  - the name of the operation
11164: - f   - the function that provides the operation

11166:   Level: developer

11168:   Example Usage:
11169: .vb
11170:   extern PetscErrorCode usermult(Mat, Vec, Vec);

11172:   PetscCall(MatCreateXXX(comm, ..., &A));
11173:   PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11174: .ve

11176:   Notes:
11177:   See the file `include/petscmat.h` for a complete list of matrix
11178:   operations, which all have the form MATOP_<OPERATION>, where
11179:   <OPERATION> is the name (in all capital letters) of the
11180:   user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).

11182:   All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11183:   sequence as the usual matrix interface routines, since they are intended to be accessed
11184:   through those routines, e.g.,
11186: .vb
11187:   MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11188: .ve

11190:   In particular, each function MUST return `PETSC_SUCCESS` on success and a nonzero error
11191:   code on failure.

11193:   This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.

11195: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11196: @*/
11197: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11198: {
11199:   PetscFunctionBegin;
11201:   if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11202:   (((void (**)(void))mat->ops)[op]) = f;
11203:   PetscFunctionReturn(PETSC_SUCCESS);
11204: }
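
/*
   Usage sketch expanding the example in the manual page above (the callback body is purely
   illustrative): the replacement routine follows the standard MatMult() calling sequence and must
   return PETSC_SUCCESS on success.
*/
static PetscErrorCode UserMult(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(VecCopy(x, y)); /* pretend A acts as the identity */
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode InstallUserMult(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatSetOperation(A, MATOP_MULT, (void (*)(void))UserMult));
  PetscFunctionReturn(PETSC_SUCCESS);
}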

11206: /*@C
11207:   MatGetOperation - Gets a matrix operation for any matrix type.

11209:   Not Collective

11211:   Input Parameters:
11212: + mat - the matrix
11213: - op  - the name of the operation

11215:   Output Parameter:
11216: . f - the function that provides the operation

11218:   Level: developer

11220:   Example Usage:
11221: .vb
11222:   PetscErrorCode (*usermult)(Mat, Vec, Vec);

11224:   MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11225: .ve

11227:   Notes:
11228:   See the file `include/petscmat.h` for a complete list of matrix
11229:   operations, which all have the form MATOP_<OPERATION>, where
11230:   <OPERATION> is the name (in all capital letters) of the
11231:   user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).

11233:   This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.

11235: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11236: @*/
11237: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11238: {
11239:   PetscFunctionBegin;
11241:   *f = (((void (**)(void))mat->ops)[op]);
11242:   PetscFunctionReturn(PETSC_SUCCESS);
11243: }

11245: /*@
11246:   MatHasOperation - Determines whether the given matrix supports the particular operation.

11248:   Not Collective

11250:   Input Parameters:
11251: + mat - the matrix
11252: - op  - the operation, for example, `MATOP_GET_DIAGONAL`

11254:   Output Parameter:
11255: . has - either `PETSC_TRUE` or `PETSC_FALSE`

11257:   Level: advanced

11259:   Note:
11260:   See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.

11262: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11263: @*/
11264: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11265: {
11266:   PetscFunctionBegin;
11268:   PetscAssertPointer(has, 3);
11269:   if (mat->ops->hasoperation) {
11270:     PetscUseTypeMethod(mat, hasoperation, op, has);
11271:   } else {
11272:     if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11273:     else {
11274:       *has = PETSC_FALSE;
11275:       if (op == MATOP_CREATE_SUBMATRIX) {
11276:         PetscMPIInt size;

11278:         PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11279:         if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11280:       }
11281:     }
11282:   }
11283:   PetscFunctionReturn(PETSC_SUCCESS);
11284: }
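
/*
   Usage sketch (hypothetical helper): query an optional operation before relying on it, so the
   caller can fall back to another strategy when the matrix type does not provide it.
*/
static PetscErrorCode MultTransposeIfAvailable(Mat A, Vec x, Vec y, PetscBool *done)
{
  PetscFunctionBegin;
  PetscCall(MatHasOperation(A, MATOP_MULT_TRANSPOSE, done));
  if (*done) PetscCall(MatMultTranspose(A, x, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}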

11286: /*@
11287:   MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent

11289:   Collective

11291:   Input Parameter:
11292: . mat - the matrix

11294:   Output Parameter:
11295: . cong - either `PETSC_TRUE` or `PETSC_FALSE`

11297:   Level: beginner

11299: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11300: @*/
11301: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11302: {
11303:   PetscFunctionBegin;
11306:   PetscAssertPointer(cong, 2);
11307:   if (!mat->rmap || !mat->cmap) {
11308:     *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11309:     PetscFunctionReturn(PETSC_SUCCESS);
11310:   }
11311:   if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11312:     PetscCall(PetscLayoutSetUp(mat->rmap));
11313:     PetscCall(PetscLayoutSetUp(mat->cmap));
11314:     PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11315:     if (*cong) mat->congruentlayouts = 1;
11316:     else mat->congruentlayouts = 0;
11317:   } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11318:   PetscFunctionReturn(PETSC_SUCCESS);
11319: }
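
/*
   Usage sketch (hypothetical helper): operations that touch the diagonal typically require the row
   and column layouts to match; check this before proceeding.
*/
static PetscErrorCode CheckSquareLayout(Mat A)
{
  PetscBool cong;

  PetscFunctionBegin;
  PetscCall(MatHasCongruentLayouts(A, &cong));
  PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Matrix must have congruent row and column layouts");
  PetscFunctionReturn(PETSC_SUCCESS);
}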

11321: PetscErrorCode MatSetInf(Mat A)
11322: {
11323:   PetscFunctionBegin;
11324:   PetscUseTypeMethod(A, setinf);
11325:   PetscFunctionReturn(PETSC_SUCCESS);
11326: }

11328: /*@
11329:   MatCreateGraph - Creates a scalar matrix (that is, a matrix with one vertex for each block vertex of the original matrix) for use in graph algorithms,
11330:   possibly removing small values from the graph structure.

11332:   Collective

11334:   Input Parameters:
11335: + A       - the matrix
11336: . sym     - `PETSC_TRUE` indicates that the graph should be symmetrized
11337: . scale   - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11338: . filter  - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11339: . num_idx - size of 'index' array
11340: - index   - array of block indices to use for graph strength of connection weight

11342:   Output Parameter:
11343: . graph - the resulting graph

11345:   Level: advanced

11347: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11348: @*/
11349: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11350: {
11351:   PetscFunctionBegin;
11355:   PetscAssertPointer(graph, 7);
11356:   PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11357:   PetscFunctionReturn(PETSC_SUCCESS);
11358: }

11360: /*@
11361:   MatEliminateZeros - Eliminates the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11362:   meaning the same memory is used for the matrix and no new memory is allocated.

11364:   Collective

11366:   Input Parameters:
11367: + A    - the matrix
11368: - keep - if the diagonal coefficient of a given row of `A` is zero, indicates whether it should be left in the structure or eliminated as well

11370:   Level: intermediate

11372:   Developer Note:
11373:   The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data, so the
11374:   tail ends of the arrays in the data structure are left unused.

11376: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11377: @*/
11378: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11379: {
11380:   PetscFunctionBegin;
11382:   PetscUseTypeMethod(A, eliminatezeros, keep);
11383:   PetscFunctionReturn(PETSC_SUCCESS);
11384: }
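
/*
   Usage sketch (hypothetical helper; `A` is assumed to be an assembled sparse matrix): drop stored
   nondiagonal zeros from the nonzero structure while keeping zero diagonal entries in place.
*/
static PetscErrorCode CompressNonzeroStructure(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatEliminateZeros(A, PETSC_TRUE)); /* PETSC_TRUE: keep zero diagonal entries */
  PetscFunctionReturn(PETSC_SUCCESS);
}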