Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_ADot, MAT_ANorm;
19: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
20: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
21: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
22: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
23: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
24: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
25: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
26: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
27: PetscLogEvent MAT_TransposeColoringCreate;
28: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
29: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
30: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
31: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
32: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
33: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
34: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
35: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
36: PetscLogEvent MAT_GetMultiProcBlock;
37: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
38: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
39: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
40: PetscLogEvent MAT_CreateGraph;
41: PetscLogEvent MAT_SetValuesBatch;
42: PetscLogEvent MAT_ViennaCLCopyToGPU;
43: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
44: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
45: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
46: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
47: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
48: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
50: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
52: /*@
53: MatSetRandom - Sets all components of a matrix to random numbers.
55: Logically Collective
57: Input Parameters:
58: + x - the matrix
59: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL` and
60: it will create one internally.
62: Example:
63: .vb
64: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
65: MatSetRandom(x,rctx);
66: PetscRandomDestroy(rctx);
67: .ve
69: Level: intermediate
71: Notes:
72: For sparse matrices that have been preallocated but not been assembled, it randomly selects appropriate locations,
74: for sparse matrices that already have nonzero locations, it fills the locations with random numbers.
76: It generates an error if used on unassembled sparse matrices that have not been preallocated.
78: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
79: @*/
PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
{
  PetscRandom randObj = NULL; /* internally created generator; stays NULL when caller supplies rctx */

  PetscFunctionBegin;
  MatCheckPreallocated(x, 1);

  if (!rctx) {
    /* no generator supplied: create a default one on the matrix's communicator,
       using the matrix's default random type, configurable from the options database */
    MPI_Comm comm;
    PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
    PetscCall(PetscRandomCreate(comm, &randObj));
    PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
    PetscCall(PetscRandomSetFromOptions(randObj));
    rctx = randObj;
  }
  PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
  PetscUseTypeMethod(x, setrandom, rctx);
  PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));

  /* the type-specific setrandom may have inserted new values; finalize the assembly */
  PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
  /* no-op when the caller supplied rctx (randObj remained NULL) */
  PetscCall(PetscRandomDestroy(&randObj));
  PetscFunctionReturn(PETSC_SUCCESS);
}
108: /*@
109: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
111: Logically Collective
113: Input Parameter:
114: . A - A matrix in unassembled, hash table form
116: Output Parameter:
117: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
119: Example:
120: .vb
121: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
122: PetscCall(MatCopyHashToXAIJ(A, B));
123: .ve
125: Level: advanced
127: Notes:
128: If `B` is `A`, then the hash table data structure will be destroyed. `B` is assembled
130: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
131: @*/
PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
{
  PetscFunctionBegin;
  /* delegate to the type-specific implementation; PetscUseTypeMethod errors if A's type provides none */
  PetscUseTypeMethod(A, copyhashtoxaij, B);
  PetscFunctionReturn(PETSC_SUCCESS);
}
140: /*@
141: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
143: Logically Collective
145: Input Parameter:
146: . mat - the factored matrix
148: Output Parameters:
149: + pivot - the pivot value computed
150: - row - the row that the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
151: that share the matrix
153: Level: advanced
155: Notes:
156: This routine does not work for factorizations done with external packages.
158: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
160: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
162: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
163: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
164: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
165: @*/
PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
{
  PetscFunctionBegin;
  PetscAssertPointer(pivot, 2);
  PetscAssertPointer(row, 3);
  /* simply report the values recorded on the Mat when the zero pivot was detected */
  *pivot = mat->factorerror_zeropivot_value;
  *row = mat->factorerror_zeropivot_row;
  PetscFunctionReturn(PETSC_SUCCESS);
}
177: /*@
178: MatFactorGetError - gets the error code from a factorization
180: Logically Collective
182: Input Parameter:
183: . mat - the factored matrix
185: Output Parameter:
186: . err - the error code
188: Level: advanced
190: Note:
191: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
193: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
194: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
195: @*/
PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
{
  PetscFunctionBegin;
  PetscAssertPointer(err, 2);
  /* report the error type recorded during the last factorization (MAT_FACTOR_NOERROR if none) */
  *err = mat->factorerrortype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
205: /*@
206: MatFactorClearError - clears the error code in a factorization
208: Logically Collective
210: Input Parameter:
211: . mat - the factored matrix
213: Level: developer
215: Note:
216: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
218: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
219: `MatGetErrorCode()`, `MatFactorError`
220: @*/
PetscErrorCode MatFactorClearError(Mat mat)
{
  PetscFunctionBegin;
  /* reset all recorded factorization-error state so the matrix can be factored again */
  mat->factorerrortype = MAT_FACTOR_NOERROR;
  mat->factorerror_zeropivot_value = 0.0;
  mat->factorerror_zeropivot_row = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
231: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
232: {
233: Vec r, l;
234: const PetscScalar *al;
235: PetscInt i, nz, gnz, N, n, st;
237: PetscFunctionBegin;
238: PetscCall(MatCreateVecs(mat, &r, &l));
239: if (!cols) { /* nonzero rows */
240: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
241: PetscCall(MatGetSize(mat, &N, NULL));
242: PetscCall(MatGetLocalSize(mat, &n, NULL));
243: PetscCall(VecSet(l, 0.0));
244: PetscCall(VecSetRandom(r, NULL));
245: PetscCall(MatMult(mat, r, l));
246: PetscCall(VecGetArrayRead(l, &al));
247: } else { /* nonzero columns */
248: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
249: PetscCall(MatGetSize(mat, NULL, &N));
250: PetscCall(MatGetLocalSize(mat, NULL, &n));
251: PetscCall(VecSet(r, 0.0));
252: PetscCall(VecSetRandom(l, NULL));
253: PetscCall(MatMultTranspose(mat, l, r));
254: PetscCall(VecGetArrayRead(r, &al));
255: }
256: if (tol <= 0.0) {
257: for (i = 0, nz = 0; i < n; i++)
258: if (al[i] != 0.0) nz++;
259: } else {
260: for (i = 0, nz = 0; i < n; i++)
261: if (PetscAbsScalar(al[i]) > tol) nz++;
262: }
263: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
264: if (gnz != N) {
265: PetscInt *nzr;
266: PetscCall(PetscMalloc1(nz, &nzr));
267: if (nz) {
268: if (tol < 0) {
269: for (i = 0, nz = 0; i < n; i++)
270: if (al[i] != 0.0) nzr[nz++] = i + st;
271: } else {
272: for (i = 0, nz = 0; i < n; i++)
273: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
274: }
275: }
276: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
277: } else *nonzero = NULL;
278: if (!cols) { /* nonzero rows */
279: PetscCall(VecRestoreArrayRead(l, &al));
280: } else {
281: PetscCall(VecRestoreArrayRead(r, &al));
282: }
283: PetscCall(VecDestroy(&l));
284: PetscCall(VecDestroy(&r));
285: PetscFunctionReturn(PETSC_SUCCESS);
286: }
288: /*@
289: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
291: Input Parameter:
292: . mat - the matrix
294: Output Parameter:
295: . keptrows - the rows that are not completely zero
297: Level: intermediate
299: Note:
300: `keptrows` is set to `NULL` if all rows are nonzero.
302: Developer Note:
303: If `keptrows` is not `NULL`, it must be sorted.
305: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
306: @*/
307: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
308: {
309: PetscFunctionBegin;
312: PetscAssertPointer(keptrows, 2);
313: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
314: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
315: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
316: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
317: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
318: PetscFunctionReturn(PETSC_SUCCESS);
319: }
321: /*@
322: MatFindZeroRows - Locate all rows that are completely zero in the matrix
324: Input Parameter:
325: . mat - the matrix
327: Output Parameter:
328: . zerorows - the rows that are completely zero
330: Level: intermediate
332: Note:
333: `zerorows` is set to `NULL` if no rows are zero.
335: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
336: @*/
337: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
338: {
339: IS keptrows;
340: PetscInt m, n;
342: PetscFunctionBegin;
345: PetscAssertPointer(zerorows, 2);
346: PetscCall(MatFindNonzeroRows(mat, &keptrows));
347: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
348: In keeping with this convention, we set zerorows to NULL if there are no zero
349: rows. */
350: if (keptrows == NULL) {
351: *zerorows = NULL;
352: } else {
353: PetscCall(MatGetOwnershipRange(mat, &m, &n));
354: PetscCall(ISComplement(keptrows, m, n, zerorows));
355: PetscCall(ISDestroy(&keptrows));
356: }
357: PetscFunctionReturn(PETSC_SUCCESS);
358: }
360: /*@
361: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
363: Not Collective
365: Input Parameter:
366: . A - the matrix
368: Output Parameter:
369: . a - the diagonal part (which is a SEQUENTIAL matrix)
371: Level: advanced
373: Notes:
374: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
376: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
378: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
379: @*/
380: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
381: {
382: PetscFunctionBegin;
385: PetscAssertPointer(a, 2);
386: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
387: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
388: else {
389: PetscMPIInt size;
391: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
392: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
393: *a = A;
394: }
395: PetscFunctionReturn(PETSC_SUCCESS);
396: }
398: /*@
399: MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
401: Collective
403: Input Parameter:
404: . mat - the matrix
406: Output Parameter:
407: . trace - the sum of the diagonal entries
409: Level: advanced
411: .seealso: [](ch_matrices), `Mat`
412: @*/
413: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
414: {
415: Vec diag;
417: PetscFunctionBegin;
419: PetscAssertPointer(trace, 2);
420: PetscCall(MatCreateVecs(mat, &diag, NULL));
421: PetscCall(MatGetDiagonal(mat, diag));
422: PetscCall(VecSum(diag, trace));
423: PetscCall(VecDestroy(&diag));
424: PetscFunctionReturn(PETSC_SUCCESS);
425: }
427: /*@
428: MatRealPart - Zeros out the imaginary part of the matrix
430: Logically Collective
432: Input Parameter:
433: . mat - the matrix
435: Level: advanced
437: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
438: @*/
PetscErrorCode MatRealPart(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* in-place: the type-specific implementation zeros the imaginary part of every entry */
  PetscUseTypeMethod(mat, realpart);
  PetscFunctionReturn(PETSC_SUCCESS);
}
451: /*@C
452: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
454: Collective
456: Input Parameter:
457: . mat - the matrix
459: Output Parameters:
460: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
461: - ghosts - the global indices of the ghost points
463: Level: advanced
465: Note:
466: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
468: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
469: @*/
470: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
471: {
472: PetscFunctionBegin;
475: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
476: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
477: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
478: else {
479: if (nghosts) *nghosts = 0;
480: if (ghosts) *ghosts = NULL;
481: }
482: PetscFunctionReturn(PETSC_SUCCESS);
483: }
485: /*@
486: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
488: Logically Collective
490: Input Parameter:
491: . mat - the matrix
493: Level: advanced
495: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
496: @*/
PetscErrorCode MatImaginaryPart(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* in-place: the type-specific implementation moves each entry's imaginary part
     to the real part and zeros the imaginary part */
  PetscUseTypeMethod(mat, imaginarypart);
  PetscFunctionReturn(PETSC_SUCCESS);
}
509: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
510: /*@C
511: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
512: for each row that you get to ensure that your application does
513: not bleed memory.
515: Not Collective
517: Input Parameters:
518: + mat - the matrix
519: - row - the row to get
521: Output Parameters:
522: + ncols - if not `NULL`, the number of nonzeros in `row`
523: . cols - if not `NULL`, the column numbers
524: - vals - if not `NULL`, the numerical values
526: Level: advanced
528: Notes:
529: This routine is provided for people who need to have direct access
530: to the structure of a matrix. We hope that we provide enough
531: high-level matrix routines that few users will need it.
533: `MatGetRow()` always returns 0-based column indices, regardless of
534: whether the internal representation is 0-based (default) or 1-based.
536: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
537: not wish to extract these quantities.
539: The user can only examine the values extracted with `MatGetRow()`;
540: the values CANNOT be altered. To change the matrix entries, one
541: must use `MatSetValues()`.
543: You can only have one call to `MatGetRow()` outstanding for a particular
544: matrix at a time, per processor. `MatGetRow()` can only obtain rows
545: associated with the given processor, it cannot get rows from the
546: other processors; for that we suggest using `MatCreateSubMatrices()`, then
547: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
548: is in the global number of rows.
550: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
552: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
554: Fortran Note:
555: .vb
556: PetscInt, pointer :: cols(:)
557: PetscScalar, pointer :: vals(:)
558: .ve
560: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
561: @*/
PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscInt incols; /* local count: gives the type method somewhere to write even when ncols is NULL */

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* row is a global index but must lie in this process's ownership range */
  PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
  PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
  /* the casts drop const: implementations hand out internal (possibly temporary) arrays,
     which is why callers must pair this with MatRestoreRow() */
  PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
  if (ncols) *ncols = incols;
  PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
580: /*@
581: MatConjugate - replaces the matrix values with their complex conjugates
583: Logically Collective
585: Input Parameter:
586: . mat - the matrix
588: Level: advanced
590: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
591: @*/
PetscErrorCode MatConjugate(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  /* conjugation is a no-op for real scalars, and for matrices known to be both
     symmetric and Hermitian (whose entries are therefore real) */
  if (PetscDefined(USE_COMPLEX) && !(mat->symmetric == PETSC_BOOL3_TRUE && mat->hermitian == PETSC_BOOL3_TRUE)) {
    PetscUseTypeMethod(mat, conjugate);
    /* values changed: bump the object state so dependent objects (e.g. preconditioners) re-setup */
    PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
604: /*@C
605: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
607: Not Collective
609: Input Parameters:
610: + mat - the matrix
611: . row - the row to get
612: . ncols - the number of nonzeros
613: . cols - the columns of the nonzeros
614: - vals - if nonzero the column values
616: Level: advanced
618: Notes:
619: This routine should be called after you have finished examining the entries.
621: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
622: use of the array after it has been restored. If you pass `NULL`, it will
623: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
625: Fortran Note:
626: .vb
627: PetscInt, pointer :: cols(:)
628: PetscScalar, pointer :: vals(:)
629: .ve
631: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
632: @*/
PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscFunctionBegin;
  if (ncols) PetscAssertPointer(ncols, 3);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  /* optional type method: some matrix types need no cleanup after MatGetRow() */
  PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
  /* zero the caller's outputs to catch accidental use after restore */
  if (ncols) *ncols = 0;
  if (cols) *cols = NULL;
  if (vals) *vals = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
646: /*@
647: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
648: You should call `MatRestoreRowUpperTriangular()` after calling` MatGetRow()` and `MatRestoreRow()` to disable the flag.
650: Not Collective
652: Input Parameter:
653: . mat - the matrix
655: Level: advanced
657: Note:
658: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
660: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
661: @*/
PetscErrorCode MatGetRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* optional type method: only matrix types that store just the upper triangle
     (e.g. MATSBAIJ) need to react to this flag */
  PetscTryTypeMethod(mat, getrowuppertriangular);
  PetscFunctionReturn(PETSC_SUCCESS);
}
674: /*@
675: MatRestoreRowUpperTriangular - Disable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
677: Not Collective
679: Input Parameter:
680: . mat - the matrix
682: Level: advanced
684: Note:
685: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
687: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
688: @*/
PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* optional type method: clears the flag set by MatGetRowUpperTriangular() */
  PetscTryTypeMethod(mat, restorerowuppertriangular);
  PetscFunctionReturn(PETSC_SUCCESS);
}
701: /*@
702: MatSetOptionsPrefix - Sets the prefix used for searching for all
703: `Mat` options in the database.
705: Logically Collective
707: Input Parameters:
708: + A - the matrix
709: - prefix - the prefix to prepend to all option names
711: Level: advanced
713: Notes:
714: A hyphen (-) must NOT be given at the beginning of the prefix name.
715: The first character of all runtime options is AUTOMATICALLY the hyphen.
717: This is NOT used for options for the factorization of the matrix. Normally the
718: prefix is automatically passed in from the PC calling the factorization. To set
719: it directly use `MatSetOptionsPrefixFactor()`
721: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
722: @*/
PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
  /* optional hook so a matrix type can propagate the prefix to its internal objects */
  PetscTryMethod(A, "MatSetOptionsPrefix_C", (Mat, const char[]), (A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
732: /*@
733: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for
734: matrices created with `MatGetFactor()`
736: Logically Collective
738: Input Parameters:
739: + A - the matrix
740: - prefix - the prefix to prepend to all option names for the factored matrix
742: Level: developer
744: Notes:
745: A hyphen (-) must NOT be given at the beginning of the prefix name.
746: The first character of all runtime options is AUTOMATICALLY the hyphen.
748: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
749: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
751: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
752: @*/
PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  if (prefix) {
    PetscAssertPointer(prefix, 2);
    PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
    /* guard against self-assignment before freeing the old prefix */
    if (prefix != A->factorprefix) {
      PetscCall(PetscFree(A->factorprefix));
      PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
    }
  } else PetscCall(PetscFree(A->factorprefix)); /* NULL clears the factor prefix */
  PetscFunctionReturn(PETSC_SUCCESS);
}
768: /*@
769: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for
770: matrices created with `MatGetFactor()`
772: Logically Collective
774: Input Parameters:
775: + A - the matrix
776: - prefix - the prefix to prepend to all option names for the factored matrix
778: Level: developer
780: Notes:
781: A hyphen (-) must NOT be given at the beginning of the prefix name.
782: The first character of all runtime options is AUTOMATICALLY the hyphen.
784: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
785: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
787: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
788: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
789: `MatSetOptionsPrefix()`
790: @*/
PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
{
  size_t len1, len2, new_len;

  PetscFunctionBegin;
  if (!prefix) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to append */
  /* no existing prefix: appending degenerates to setting */
  if (!A->factorprefix) {
    PetscCall(MatSetOptionsPrefixFactor(A, prefix));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");

  /* grow the existing string and copy the new prefix (including its NUL) onto the end */
  PetscCall(PetscStrlen(A->factorprefix, &len1));
  PetscCall(PetscStrlen(prefix, &len2));
  new_len = len1 + len2 + 1;
  PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
  PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
  PetscFunctionReturn(PETSC_SUCCESS);
}
812: /*@
813: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
814: matrix options in the database.
816: Logically Collective
818: Input Parameters:
819: + A - the matrix
820: - prefix - the prefix to prepend to all option names
822: Level: advanced
824: Note:
825: A hyphen (-) must NOT be given at the beginning of the prefix name.
826: The first character of all runtime options is AUTOMATICALLY the hyphen.
828: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
829: @*/
PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
  /* optional hook so a matrix type can propagate the appended prefix to its internal objects */
  PetscTryMethod(A, "MatAppendOptionsPrefix_C", (Mat, const char[]), (A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
839: /*@
840: MatGetOptionsPrefix - Gets the prefix used for searching for all
841: matrix options in the database.
843: Not Collective
845: Input Parameter:
846: . A - the matrix
848: Output Parameter:
849: . prefix - pointer to the prefix string used
851: Level: advanced
853: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
854: @*/
PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
{
  PetscFunctionBegin;
  PetscAssertPointer(prefix, 2);
  /* retrieve the prefix stored on the underlying PetscObject (borrowed pointer) */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
864: /*@
865: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
867: Not Collective
869: Input Parameter:
870: . A - the matrix
872: Output Parameter:
873: . state - the object state
875: Level: advanced
877: Note:
878: Object state is an integer which gets increased every time
879: the object is changed. By saving and later querying the object state
880: one can determine whether information about the object is still current.
882: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
884: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
885: @*/
PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
{
  PetscFunctionBegin;
  PetscAssertPointer(state, 2);
  /* thin wrapper over the generic PetscObject state query */
  PetscCall(PetscObjectStateGet((PetscObject)A, state));
  PetscFunctionReturn(PETSC_SUCCESS);
}
895: /*@
896: MatResetPreallocation - Reset matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
898: Collective
900: Input Parameter:
901: . A - the matrix
903: Level: beginner
905: Notes:
906: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY` the matrix data structures represent the nonzeros assigned to the
907: matrix. If that space is less than the preallocated space that extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
908: makes all of the preallocation space available
910: Current values in the matrix are lost in this call
912: Currently only supported for `MATAIJ` matrices.
914: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
915: @*/
PetscErrorCode MatResetPreallocation(Mat A)
{
  PetscFunctionBegin;
  /* Dispatch to the type-specific implementation registered as "MatResetPreallocation_C";
     PetscUseMethod() raises an error for types that do not provide one (per the header
     comment, currently only MATAIJ matrices are supported) */
  PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
925: /*@
926: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
928: Collective
930: Input Parameter:
931: . A - the matrix
933: Level: intermediate
935: Notes:
936: The matrix will again delete the hash table data structures after following calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
938: Currently only supported for `MATAIJ` matrices.
940: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
941: @*/
PetscErrorCode MatResetHash(Mat A)
{
  PetscFunctionBegin;
  /* Refuse when values have been cached but not yet assembled; resetting here would silently drop them */
  PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
  /* Never assembled: nothing to reset (presumably the hash table from creation is still in place) */
  if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
  /* Type-specific reset registered as "MatResetHash_C"; errors for unsupported types (only MATAIJ per the header comment) */
  PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
  /* These flags are used to determine whether certain setups occur */
  A->was_assembled = PETSC_FALSE;
  A->assembled     = PETSC_FALSE;
  /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
958: /*@
959: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
961: Collective
963: Input Parameter:
964: . A - the matrix
966: Level: advanced
968: Notes:
969: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
970: setting values in the matrix.
972: This routine is called internally by other `Mat` functions when needed so rarely needs to be called by users
974: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
975: @*/
PetscErrorCode MatSetUp(Mat A)
{
  PetscFunctionBegin;
  /* No type chosen yet: default to AIJ, sequential or parallel depending on communicator size */
  if (!((PetscObject)A)->type_name) {
    PetscMPIInt size;

    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
    PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
  }
  /* Run the type-specific setup only if preallocation has not happened yet */
  if (!A->preallocated) PetscTryTypeMethod(A, setup);
  /* Finalize the row/column layouts so local/global sizes are consistent */
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  A->preallocated = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
993: #if defined(PETSC_HAVE_SAWS)
994: #include <petscviewersaws.h>
995: #endif
/*
   If threadsafety is on extraneous matrices may be printed

   This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
*/
#if !defined(PETSC_HAVE_THREADSAFETY)
/* Re-entrancy depth counter: nonzero while MatView() is active; MatViewFromOptions() checks it to skip viewing */
static PetscInt insidematview = 0;
#endif
1006: /*@
1007: MatViewFromOptions - View properties of the matrix based on options set in the options database
1009: Collective
1011: Input Parameters:
1012: + A - the matrix
1013: . obj - optional additional object that provides the options prefix to use
1014: - name - command line option
1016: Options Database Key:
1017: . -name [viewertype][:...] - option name and values. See `PetscObjectViewFromOptions()` for the possible arguments
1019: Level: intermediate
1021: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1022: @*/
PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
#if !defined(PETSC_HAVE_THREADSAFETY)
  /* Suppress viewing of temporary matrices assembled inside an active MatView() call
     (see the insidematview counter above) */
  if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
#endif
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1034: /*@
1035:   MatView - display information about a matrix in a variety of ways
1037: Collective on viewer
1039: Input Parameters:
1040: + mat - the matrix
1041: - viewer - visualization context
1043: Options Database Keys:
1044: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1045: . -mat_view ::ascii_info_detail - Prints more detailed info
1046: . -mat_view - Prints matrix in ASCII format
1047: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1048: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1049: . -display name - Sets display name (default is host)
1050: . -draw_pause sec - Sets number of seconds to pause after display
1051: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1052: . -viewer_socket_machine machine - -
1053: . -viewer_socket_port port - -
1054: . -mat_view binary - save matrix to file in binary format
1055: - -viewer_binary_filename name - -
1057: Level: beginner
1059: Notes:
1060: The available visualization contexts include
1061: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1062: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1063: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1064: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1066: The user can open alternative visualization contexts with
1067: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1068: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1069: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1070: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1072: The user can call `PetscViewerPushFormat()` to specify the output
1073: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1074: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1075: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1076: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1077: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1078: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1079: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1080: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1081: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1083: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes,
1084: the program will seemingly hang and take hours for larger matrices, for larger matrices one should use the binary format.
1086: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1088: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1089: viewer is used.
1091: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1092: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1094: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1095: and then use the following mouse functions.
1096: .vb
1097: left mouse: zoom in
1098: middle mouse: zoom out
1099: right mouse: continue with the simulation
1100: .ve
1102: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1103: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1104: @*/
PetscErrorCode MatView(Mat mat, PetscViewer viewer)
{
  PetscInt          rows, cols, rbs, cbs;
  PetscBool         isascii, isstring, issaws;
  PetscViewerFormat format;
  PetscMPIInt       size;

  PetscFunctionBegin;
  /* Default viewer: stdout on the matrix's communicator */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
  /* A load-balance report is meaningless on a single rank */
  if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);

#if !defined(PETSC_HAVE_THREADSAFETY)
  /* Mark re-entrancy so MatViewFromOptions() skips matrices assembled inside this call */
  insidematview++;
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
  /* Factored matrices may only be viewed as ASCII info/info_detail summaries */
  PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");

  PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
  if (isascii) {
    /* Not an error to view an unready matrix: print a message and return quietly,
       taking care to undo the re-entrancy counter and event logging */
    if (!mat->preallocated) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
#if !defined(PETSC_HAVE_THREADSAFETY)
      insidematview--;
#endif
      PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    if (!mat->assembled) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
#if !defined(PETSC_HAVE_THREADSAFETY)
      insidematview--;
#endif
      PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatNullSpace nullsp, transnullsp;

      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(MatGetSize(mat, &rows, &cols));
      PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
      /* Only mention block sizes when they are nontrivial */
      if (rbs != 1 || cbs != 1) {
        if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
        else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
      } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
      if (mat->factortype) {
        MatSolverType solver;
        PetscCall(MatFactorGetSolverType(mat, &solver));
        PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
      }
      if (mat->ops->getinfo) {
        PetscBool is_constant_or_diagonal;

        // Don't print nonzero information for constant or diagonal matrices, it just adds noise to the output
        PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &is_constant_or_diagonal, MATCONSTANTDIAGONAL, MATDIAGONAL, ""));
        if (!is_constant_or_diagonal) {
          MatInfo info;

          PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
          PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
          if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
        }
      }
      PetscCall(MatGetNullSpace(mat, &nullsp));
      PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
      if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
      if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
      PetscCall(MatGetNearNullSpace(mat, &nullsp));
      if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(MatProductView(mat, viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      /* In detail mode, also dump the variable block sizes wrapped in a temporary IS */
      if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
        IS tmp;

        PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
        PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(ISView(tmp, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
        PetscCall(ISDestroy(&tmp));
      }
    }
  } else if (issaws) {
#if defined(PETSC_HAVE_SAWS)
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)mat));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    /* Publish once to SAWs, from rank 0 only */
    if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
#endif
  } else if (isstring) {
    const char *type;
    PetscCall(MatGetType(mat, &type));
    PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
    PetscTryTypeMethod(mat, view, viewer);
  }
  /* Delegate the actual content viewing to the matrix implementation; the "native"
     viewer is preferred for NATIVE/LOAD_BALANCE formats when available */
  if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscUseTypeMethod(mat, viewnative, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (mat->ops->view) {
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscUseTypeMethod(mat, view, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
  }
  if (isascii) {
    /* Re-query the format: the type-specific view above may have pushed/popped formats */
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
  }
  PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
#if !defined(PETSC_HAVE_THREADSAFETY)
  insidematview--;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
#if defined(PETSC_USE_DEBUG)
#include <../src/sys/totalview/tv_data_display.h>
/* TotalView debugger display hook: registers a summary (sizes and type name) of a
   Mat so the debugger shows something readable instead of the raw struct */
PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
{
  TV_add_row("Local rows", "int", &mat->rmap->n);
  TV_add_row("Local columns", "int", &mat->cmap->n);
  TV_add_row("Global rows", "int", &mat->rmap->N);
  TV_add_row("Global columns", "int", &mat->cmap->N);
  TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
  return TV_format_OK;
}
#endif
1244: /*@
1245: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1246: with `MatView()`. The matrix format is determined from the options database.
1247: Generates a parallel MPI matrix if the communicator has more than one
1248: processor. The default matrix type is `MATAIJ`.
1250: Collective
1252: Input Parameters:
1253: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1254: or some related function before a call to `MatLoad()`
1255: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1257: Options Database Key:
1258: . -matload_block_size bs - set block size
1260: Level: beginner
1262: Notes:
1263: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1264: `Mat` before calling this routine if you wish to set it from the options database.
1266: `MatLoad()` automatically loads into the options database any options
1267: given in the file filename.info where filename is the name of the file
1268: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1269: file will be ignored if you use the -viewer_binary_skip_info option.
1271: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1272: sets the default matrix type AIJ and sets the local and global sizes.
1273: If type and/or size is already set, then the same are used.
1275: In parallel, each processor can load a subset of rows (or the
1276: entire matrix). This routine is especially useful when a large
1277: matrix is stored on disk and only part of it is desired on each
1278: processor. For example, a parallel solver may access only some of
1279: the rows from each processor. The algorithm used here reads
1280: relatively small blocks of data rather than reading the entire
1281: matrix and then subsetting it.
1283: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1284: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1285: or the sequence like
1286: .vb
1287: `PetscViewer` v;
1288: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1289: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1290: `PetscViewerSetFromOptions`(v);
1291: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1292: `PetscViewerFileSetName`(v,"datafile");
1293: .ve
1294: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1295: .vb
1296: -viewer_type {binary, hdf5}
1297: .ve
1299: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1300: and src/mat/tutorials/ex10.c with the second approach.
1302: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1303: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1304: Multiple objects, both matrices and vectors, can be stored within the same file.
1305: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1307: Most users should not need to know the details of the binary storage
1308: format, since `MatLoad()` and `MatView()` completely hide these details.
1309: But for anyone who is interested, the standard binary matrix storage
1310: format is
1312: .vb
1313: PetscInt MAT_FILE_CLASSID
1314: PetscInt number of rows
1315: PetscInt number of columns
1316: PetscInt total number of nonzeros
1317: PetscInt *number nonzeros in each row
1318: PetscInt *column indices of all nonzeros (starting index is zero)
1319: PetscScalar *values of all nonzeros
1320: .ve
1321: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1322: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1323: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1325: PETSc automatically does the byte swapping for
1326: machines that store the bytes reversed. Thus if you write your own binary
1327: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1328: and `PetscBinaryWrite()` to see how this may be done.
1330: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1331: Each processor's chunk is loaded independently by its owning MPI process.
1332: Multiple objects, both matrices and vectors, can be stored within the same file.
1333: They are looked up by their PetscObject name.
1335:   As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1336: by default the same structure and naming of the AIJ arrays and column count
1337: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1338: .vb
1339: save example.mat A b -v7.3
1340: .ve
1341: can be directly read by this routine (see Reference 1 for details).
1343: Depending on your MATLAB version, this format might be a default,
1344: otherwise you can set it as default in Preferences.
1346: Unless -nocompression flag is used to save the file in MATLAB,
1347: PETSc must be configured with ZLIB package.
1349: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1351: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1353: Corresponding `MatView()` is not yet implemented.
1355: The loaded matrix is actually a transpose of the original one in MATLAB,
1356: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1357: With this format, matrix is automatically transposed by PETSc,
1358: unless the matrix is marked as SPD or symmetric
1359: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1361: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1363: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1364: @*/
PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
{
  PetscBool flg;

  PetscFunctionBegin;
  /* Default to AIJ when the caller has not already chosen a type */
  if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));

  /* -matload_symmetric: mark the loaded matrix as symmetric, eternally (survives changes) */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
  if (flg) {
    PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
  }
  /* -matload_spd: mark the loaded matrix as symmetric positive definite */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
  if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));

  /* The actual read is type- and viewer-specific; errors if the type has no load method */
  PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
  PetscUseTypeMethod(mat, load, viewer);
  PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1391: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1392: {
1393: Mat_Redundant *redund = *redundant;
1395: PetscFunctionBegin;
1396: if (redund) {
1397: if (redund->matseq) { /* via MatCreateSubMatrices() */
1398: PetscCall(ISDestroy(&redund->isrow));
1399: PetscCall(ISDestroy(&redund->iscol));
1400: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1401: } else {
1402: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1403: PetscCall(PetscFree(redund->sbuf_j));
1404: PetscCall(PetscFree(redund->sbuf_a));
1405: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1406: PetscCall(PetscFree(redund->rbuf_j[i]));
1407: PetscCall(PetscFree(redund->rbuf_a[i]));
1408: }
1409: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1410: }
1412: PetscCall(PetscCommDestroy(&redund->subcomm));
1413: PetscCall(PetscFree(redund));
1414: }
1415: PetscFunctionReturn(PETSC_SUCCESS);
1416: }
1418: /*@
1419: MatDestroy - Frees space taken by a matrix.
1421: Collective
1423: Input Parameter:
1424: . A - the matrix
1426: Level: beginner
1428: Developer Note:
1429: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1430: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1431: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1432: if changes are needed here.
1434: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1435: @*/
PetscErrorCode MatDestroy(Mat *A)
{
  PetscFunctionBegin;
  if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
  /* Other references remain: just drop ours and NULL the caller's pointer */
  if (--((PetscObject)*A)->refct > 0) {
    *A = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
  /* type-specific teardown first, while the generic fields below are still intact */
  PetscTryTypeMethod(*A, destroy);

  PetscCall(PetscFree((*A)->factorprefix));
  PetscCall(PetscFree((*A)->defaultvectype));
  PetscCall(PetscFree((*A)->defaultrandtype));
  PetscCall(PetscFree((*A)->bsizes));
  PetscCall(PetscFree((*A)->solvertype));
  for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
  /* Break the self-reference so MatDestroy_Redundant() does not try to destroy this matrix recursively */
  if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
  PetscCall(MatDestroy_Redundant(&(*A)->redundant));
  PetscCall(MatProductClear(*A));
  PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
  PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
  PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
  PetscCall(MatDestroy(&(*A)->schur));
  PetscCall(VecDestroy(&(*A)->dot_vec));
  PetscCall(PetscLayoutDestroy(&(*A)->rmap));
  PetscCall(PetscLayoutDestroy(&(*A)->cmap));
  /* Frees the header and sets *A = NULL */
  PetscCall(PetscHeaderDestroy(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1470: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1471: /*@
1472: MatSetValues - Inserts or adds a block of values into a matrix.
1473: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1474: MUST be called after all calls to `MatSetValues()` have been completed.
1476: Not Collective
1478: Input Parameters:
1479: + mat - the matrix
1480: . m - the number of rows
1481: . idxm - the global indices of the rows
1482: . n - the number of columns
1483: . idxn - the global indices of the columns
1484: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1485: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1486: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1488: Level: beginner
1490: Notes:
1491: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1492: options cannot be mixed without intervening calls to the assembly
1493: routines.
1495: `MatSetValues()` uses 0-based row and column numbers in Fortran
1496: as well as in C.
1498: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1499: simply ignored. This allows easily inserting element stiffness matrices
1500: with homogeneous Dirichlet boundary conditions that you don't want represented
1501: in the matrix.
1503: Efficiency Alert:
1504: The routine `MatSetValuesBlocked()` may offer much better efficiency
1505: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1507: Fortran Notes:
1508: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1509: .vb
1510: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1511: .ve
1513: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1515: Developer Note:
1516: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1517: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1519: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1520: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1521: @*/
PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscFunctionBeginHot;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  MatCheckPreallocated(mat, 1);

  /* The first call fixes the insert mode; later calls must match until the next assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");

  /* Debug-only validation: reject factored matrices, Inf/NaN entries (when erroriffailure
     is set), and row/column indices past the global sizes; v is read row-major m x n here */
  if (PetscDefined(USE_DEBUG)) {
    PetscInt i, j;

    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    if (v) {
      for (i = 0; i < m; i++) {
        for (j = 0; j < n; j++) {
          if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
#if defined(PETSC_USE_COMPLEX)
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
#else
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
#endif
        }
      }
    }
    for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
    for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
  }

  /* Setting values un-assembles the matrix; remember it was assembled so assembly can be incremental */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1565: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1566: /*@
1567: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1568: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1569: MUST be called after all calls to `MatSetValues()` have been completed.
1571: Not Collective
1573: Input Parameters:
1574: + mat - the matrix
1575: . ism - the rows to provide
1576: . isn - the columns to provide
1577: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1578: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1579: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1581: Level: beginner
1583: Notes:
1584: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1586: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1587: options cannot be mixed without intervening calls to the assembly
1588: routines.
1590: `MatSetValues()` uses 0-based row and column numbers in Fortran
1591: as well as in C.
1593: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1594: simply ignored. This allows easily inserting element stiffness matrices
1595: with homogeneous Dirichlet boundary conditions that you don't want represented
1596: in the matrix.
1598: Fortran Note:
1599: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1601: Efficiency Alert:
1602: The routine `MatSetValuesBlocked()` may offer much better efficiency
1603: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1605: This is currently not optimized for any particular `ISType`
1607: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1608: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1609: @*/
1610: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1611: {
1612: PetscInt m, n;
1613: const PetscInt *rows, *cols;
1615: PetscFunctionBeginHot;
1617: PetscCall(ISGetIndices(ism, &rows));
1618: PetscCall(ISGetIndices(isn, &cols));
1619: PetscCall(ISGetLocalSize(ism, &m));
1620: PetscCall(ISGetLocalSize(isn, &n));
1621: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1622: PetscCall(ISRestoreIndices(ism, &rows));
1623: PetscCall(ISRestoreIndices(isn, &cols));
1624: PetscFunctionReturn(PETSC_SUCCESS);
1625: }
1627: /*@
1628: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1629: values into a matrix
1631: Not Collective
1633: Input Parameters:
1634: + mat - the matrix
1635: . row - the (block) row to set
1636: - v - a one-dimensional array that contains the values. For `MATBAIJ` they are implicitly stored as a two-dimensional array, by default in row-major order.
1637: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1639: Level: intermediate
1641: Notes:
1642: The values, `v`, are column-oriented (for the block version) and sorted
1644: All the nonzero values in `row` must be provided
1646: The matrix must have previously had its column indices set, likely by having been assembled.
1648: `row` must belong to this MPI process
1650: Fortran Note:
1651: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1653: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1654: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1655: @*/
1656: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1657: {
1658: PetscInt globalrow;
1660: PetscFunctionBegin;
1663: PetscAssertPointer(v, 3);
1664: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1665: PetscCall(MatSetValuesRow(mat, globalrow, v));
1666: PetscFunctionReturn(PETSC_SUCCESS);
1667: }
1669: /*@
1670: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1671: values into a matrix
1673: Not Collective
1675: Input Parameters:
1676: + mat - the matrix
1677: . row - the (block) row to set
1678: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1680: Level: advanced
1682: Notes:
1683: The values, `v`, are column-oriented for the block version.
1685: All the nonzeros in `row` must be provided
1687: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.
1689: `row` must belong to this process
1691: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1692: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1693: @*/
/* Insert an entire (block) row of values; always uses INSERT_VALUES semantics */
PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  PetscAssertPointer(v, 3);
  /* This routine always inserts, so it may not be interleaved with ADD_VALUES
     calls without an intervening assembly */
  PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  mat->insertmode = INSERT_VALUES;
  /* Touching values on an assembled matrix drops it back to the unassembled state */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, setvaluesrow, row, v);
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1715: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1716: /*@
1717: MatSetValuesStencil - Inserts or adds a block of values into a matrix.
1718: Using structured grid indexing
1720: Not Collective
1722: Input Parameters:
1723: + mat - the matrix
1724: . m - number of rows being entered
1725: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1726: . n - number of columns being entered
1727: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1728: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1729: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1730: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1732: Level: beginner
1734: Notes:
1735: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1737: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1738: options cannot be mixed without intervening calls to the assembly
1739: routines.
1741: The grid coordinates are across the entire grid, not just the local portion
1743: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1744: as well as in C.
1746: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1748: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1749: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1751: The columns and rows in the stencil passed in MUST be contained within the
1752: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1753: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1754: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1755: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1757: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
1758: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
1759: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
1760: `DM_BOUNDARY_PERIODIC` boundary type.
1762: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1763: a single value per point) you can skip filling those indices.
1765: Inspired by the structured grid interface to the HYPRE package
1766: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1768: Fortran Note:
1769: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1771: Efficiency Alert:
1772: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1773: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1775: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1776: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1777: @*/
/* Translate structured-grid (MatStencil) row/column indices into local indices
   and forward the values to MatSetValuesLocal() */
PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  /* Translated indices go into the stack buffer when they fit, otherwise heap */
  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* Flatten each row stencil into one local index. Each MatStencil is walked as
     consecutive PetscInt entries (cf. the Fortran note: 4 entries per stencil);
     the 3 - sdim unused leading dimensions are skipped, then the remaining grid
     coordinates (shifted by the ghost starts) are combined row-major over dims[].
     Any coordinate below its ghost start makes the result negative, and negative
     indices are ignored downstream by the MatSetValues() family. */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++;
    tmp = *dxm++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    if (mat->stencil.noc) dxm++; /* skip the unused component slot when dof == 1 */
    jdxm[i] = tmp;
  }
  /* Identical translation for the column stencils */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    if (mat->stencil.noc) dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn)); /* no-ops when the stack buffer was used */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1824: /*@
1825: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
1826: Using structured grid indexing
1828: Not Collective
1830: Input Parameters:
1831: + mat - the matrix
1832: . m - number of rows being entered
1833: . idxm - grid coordinates for matrix rows being entered
1834: . n - number of columns being entered
1835: . idxn - grid coordinates for matrix columns being entered
1836: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1837: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1838: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1840: Level: beginner
1842: Notes:
1843: By default the values, `v`, are row-oriented and unsorted.
1844: See `MatSetOption()` for other options.
1846: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1847: options cannot be mixed without intervening calls to the assembly
1848: routines.
1850: The grid coordinates are across the entire grid, not just the local portion
1852: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1853: as well as in C.
1855: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1857: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1858: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1860: The columns and rows in the stencil passed in MUST be contained within the
1861: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1862: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1863: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1864: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1866: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1867: simply ignored. This allows easily inserting element stiffness matrices
1868: with homogeneous Dirichlet boundary conditions that you don't want represented
1869: in the matrix.
1871: Inspired by the structured grid interface to the HYPRE package
1872: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1874: Fortran Notes:
1875: `idxm` and `idxn` should be declared as
1876: .vb
1877: MatStencil idxm(4,m),idxn(4,n)
1878: .ve
1879: and the values inserted using
1880: .vb
1881: idxm(MatStencil_i,1) = i
1882: idxm(MatStencil_j,1) = j
1883: idxm(MatStencil_k,1) = k
1884: etc
1885: .ve
1887: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1889: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1890: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1891: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1892: @*/
/* Translate structured-grid (MatStencil) block row/column indices into local
   block indices and forward the values to MatSetValuesBlockedLocal() */
PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);
  /* Translated indices go into the stack buffer when they fit, otherwise heap */
  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* Flatten each row stencil into one local block index. Each MatStencil is
     walked as consecutive PetscInt entries; the 3 - sdim unused leading
     dimensions are skipped, and only sdim grid coordinates are combined
     (block indices carry no component, so the trailing entry is always
     skipped below). Any coordinate below its ghost start makes the result
     negative, and negative indices are ignored downstream (see Notes). */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++;
    tmp = *dxm++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    dxm++; /* always skip the component slot */
    jdxm[i] = tmp;
  }
  /* Identical translation for the column stencils */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn)); /* no-ops when the stack buffer was used */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1940: /*@
1941: MatSetStencil - Sets the grid information for setting values into a matrix via
1942: `MatSetValuesStencil()`
1944: Not Collective
1946: Input Parameters:
1947: + mat - the matrix
1948: . dim - dimension of the grid 1, 2, or 3
1949: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1950: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1951: - dof - number of degrees of freedom per node
1953: Level: beginner
1955: Notes:
1956: Inspired by the structured grid interface to the HYPRE package
1957: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1959: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1960: user.
1962: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1963: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1964: @*/
1965: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1966: {
1967: PetscFunctionBegin;
1969: PetscAssertPointer(dims, 3);
1970: PetscAssertPointer(starts, 4);
1972: mat->stencil.dim = dim + (dof > 1);
1973: for (PetscInt i = 0; i < dim; i++) {
1974: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1975: mat->stencil.starts[i] = starts[dim - i - 1];
1976: }
1977: mat->stencil.dims[dim] = dof;
1978: mat->stencil.starts[dim] = 0;
1979: mat->stencil.noc = (PetscBool)(dof == 1);
1980: PetscFunctionReturn(PETSC_SUCCESS);
1981: }
1983: /*@
1984: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1986: Not Collective
1988: Input Parameters:
1989: + mat - the matrix
1990: . m - the number of block rows
1991: . idxm - the global block indices
1992: . n - the number of block columns
1993: . idxn - the global block indices
1994: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1995: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1996: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
1998: Level: intermediate
2000: Notes:
2001: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2002: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2004: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2005: NOT the total number of rows/columns; for example, if the block size is 2 and
2006: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2007: The values in `idxm` would be 1 2; that is the first index for each block divided by
2008: the block size.
2010: You must call `MatSetBlockSize()` when constructing this matrix (before
2011: preallocating it).
2013: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2015: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2016: options cannot be mixed without intervening calls to the assembly
2017: routines.
2019: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2020: as well as in C.
2022: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
2023: simply ignored. This allows easily inserting element stiffness matrices
2024: with homogeneous Dirichlet boundary conditions that you don't want represented
2025: in the matrix.
2027: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2028: internal searching must be done to determine where to place the
2029: data in the matrix storage space. By instead inserting blocks of
2030: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2031: reduced.
2033: Example:
2034: .vb
2035: Suppose m=n=2 and block size(bs) = 2 The array is
2037: 1 2 | 3 4
2038: 5 6 | 7 8
2039: - - - | - - -
2040: 9 10 | 11 12
2041: 13 14 | 15 16
2043: v[] should be passed in like
2044: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2046: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2047: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2048: .ve
2050: Fortran Notes:
2051: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2052: .vb
2053: call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
2054: .ve
2056: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2058: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2059: @*/
/* Insert or add an m-by-n array of blocks using global block indices */
PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscFunctionBeginHot;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  MatCheckPreallocated(mat, 1);
  /* The first set-values call fixes the insert mode; mixing ADD_VALUES and
     INSERT_VALUES without an intervening assembly is an error */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  if (PetscDefined(USE_DEBUG)) {
    /* Debug-only bounds check: each block index scaled by the block size must
       lie inside the global matrix dimensions */
    PetscInt rbs, cbs, M, N, i;
    PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
    PetscCall(MatGetSize(mat, &M, &N));
    for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
    for (i = 0; i < n; i++)
      PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
  }
  /* Touching values on an assembled matrix drops it back to the unassembled state */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvaluesblocked) PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
  else {
    /* Fallback for types without a blocked implementation: expand each block
       index into bs (resp. cbs) point indices and call MatSetValues() */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
    PetscInt i, j, bs, cbs;
    PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
    /* expanded indices go into the stack buffer when they fit, otherwise heap */
    if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      iidxm = buf;
      iidxn = buf + m * bs;
    } else {
      PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
      iidxm = bufr;
      iidxn = bufc;
    }
    for (i = 0; i < m; i++) {
      for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
    }
    if (m != n || bs != cbs || idxm != idxn) {
      for (i = 0; i < n; i++) {
        for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
      }
    } else iidxn = iidxm; /* rows and columns coincide; reuse the expanded row indices */
    PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
    PetscCall(PetscFree2(bufr, bufc)); /* no-ops when the stack buffer was used */
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2117: /*@
2118: MatGetValues - Gets a block of local values from a matrix.
2120: Not Collective; can only return values that are owned by the given process
2122: Input Parameters:
2123: + mat - the matrix
2124: . v - a logically two-dimensional array for storing the values
2125: . m - the number of rows
2126: . idxm - the global indices of the rows
2127: . n - the number of columns
2128: - idxn - the global indices of the columns
2130: Level: advanced
2132: Notes:
2133: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2134: The values, `v`, are then returned in a row-oriented format,
2135: analogous to that used by default in `MatSetValues()`.
2137: `MatGetValues()` uses 0-based row and column numbers in
2138: Fortran as well as in C.
2140: `MatGetValues()` requires that the matrix has been assembled
2141: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2142: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2143: without intermediate matrix assembly.
2145: Negative row or column indices will be ignored and those locations in `v` will be
2146: left unchanged.
2148: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2149: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2150: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2152: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2153: @*/
/* Read back an m-by-n array of entries (global indices, locally owned rows only) */
PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* nothing requested */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);
  /* values can only be read from a fully assembled, unfactored matrix */
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
  PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2173: /*@
2174: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2175: defined previously by `MatSetLocalToGlobalMapping()`
2177: Not Collective
2179: Input Parameters:
2180: + mat - the matrix
2181: . nrow - number of rows
2182: . irow - the row local indices
2183: . ncol - number of columns
2184: - icol - the column local indices
2186: Output Parameter:
2187: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2188: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2190: Level: advanced
2192: Notes:
2193: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2195: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2196: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2197: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2198: with `MatSetLocalToGlobalMapping()`.
2200: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2201: `MatSetValuesLocal()`, `MatGetValues()`
2202: @*/
/* Read back entries addressed with the local numbering set by MatSetLocalToGlobalMapping() */
PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
  PetscAssertPointer(irow, 3);
  PetscAssertPointer(icol, 5);
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
  if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
  else {
    /* Fallback for types without a local implementation: translate local
       indices to global ones and use MatGetValues() */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
    /* translated indices go into the stack buffer when they fit, otherwise heap */
    if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      irowm = buf;
      icolm = buf + nrow;
    } else {
      PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
      irowm = bufr;
      icolm = bufc;
    }
    /* both mappings are required for the translation */
    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
    PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
    PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
    PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
    PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
    PetscCall(PetscFree2(bufr, bufc)); /* no-ops when the stack buffer was used */
  }
  PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2240: /*@
2241: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2242: the same size. Currently, this can only be called once and creates the given matrix.
2244: Not Collective
2246: Input Parameters:
2247: + mat - the matrix
2248: . nb - the number of blocks
2249: . bs - the number of rows (and columns) in each block
2250: . rows - a concatenation of the rows for each block
2251: - v - a concatenation of logically two-dimensional arrays of values
2253: Level: advanced
2255: Notes:
2256: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2258: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2260: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2261: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2262: @*/
2263: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2264: {
2265: PetscFunctionBegin;
2268: PetscAssertPointer(rows, 4);
2269: PetscAssertPointer(v, 5);
2270: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2272: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2273: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2274: else {
2275: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2276: }
2277: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2278: PetscFunctionReturn(PETSC_SUCCESS);
2279: }
2281: /*@
2282: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2283: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2284: using a local (per-processor) numbering.
2286: Not Collective
2288: Input Parameters:
2289: + x - the matrix
2290: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2291: - cmapping - column mapping
2293: Level: intermediate
2295: Note:
2296: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2298: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2299: @*/
2300: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2301: {
2302: PetscFunctionBegin;
2307: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2308: else {
2309: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2310: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2311: }
2312: PetscFunctionReturn(PETSC_SUCCESS);
2313: }
2315: /*@
2316: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2318: Not Collective
2320: Input Parameter:
2321: . A - the matrix
2323: Output Parameters:
2324: + rmapping - row mapping
2325: - cmapping - column mapping
2327: Level: advanced
2329: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2330: @*/
2331: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2332: {
2333: PetscFunctionBegin;
2336: if (rmapping) {
2337: PetscAssertPointer(rmapping, 2);
2338: *rmapping = A->rmap->mapping;
2339: }
2340: if (cmapping) {
2341: PetscAssertPointer(cmapping, 3);
2342: *cmapping = A->cmap->mapping;
2343: }
2344: PetscFunctionReturn(PETSC_SUCCESS);
2345: }
2347: /*@
2348: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2350: Logically Collective
2352: Input Parameters:
2353: + A - the matrix
2354: . rmap - row layout
2355: - cmap - column layout
2357: Level: advanced
2359: Note:
2360: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2362: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2363: @*/
/* Replace the matrix's row and column layouts with the provided ones */
PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
{
  PetscFunctionBegin;
  /* the layouts are shared via PetscLayoutReference(), not copied */
  PetscCall(PetscLayoutReference(rmap, &A->rmap));
  PetscCall(PetscLayoutReference(cmap, &A->cmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2373: /*@
2374: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2376: Not Collective
2378: Input Parameter:
2379: . A - the matrix
2381: Output Parameters:
2382: + rmap - row layout
2383: - cmap - column layout
2385: Level: advanced
2387: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2388: @*/
PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
{
  PetscFunctionBegin;
  /* Either output may be passed as NULL when the caller does not want it;
     borrowed references are returned, the caller must not destroy them */
  if (rmap) {
    PetscAssertPointer(rmap, 2);
    *rmap = A->rmap;
  }
  if (cmap) {
    PetscAssertPointer(cmap, 3);
    *cmap = A->cmap;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2405: /*@
2406: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2407: using a local numbering of the rows and columns.
2409: Not Collective
2411: Input Parameters:
2412: + mat - the matrix
2413: . nrow - number of rows
2414: . irow - the row local indices
2415: . ncol - number of columns
2416: . icol - the column local indices
2417: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2418: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2419: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2421: Level: intermediate
2423: Notes:
2424: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2426: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2427: options cannot be mixed without intervening calls to the assembly
2428: routines.
2430: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2431: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2433: Fortran Notes:
2434: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2435: .vb
2436: call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2437: .ve
2439: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2441: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2442: `MatGetValuesLocal()`
2443: @*/
PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(irow, 3);
  PetscAssertPointer(icol, 5);
  /* The first call fixes the insert mode; ADD_VALUES and INSERT_VALUES cannot
     be mixed without an intervening assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  /* Inserting values puts an assembled matrix back into the unassembled state */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
  else {
    /* Fallback: map local indices to global ones and call MatSetValues().
       Small index sets reuse a stack buffer; larger sets go to the heap. */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
    const PetscInt *irowm, *icolm;

    if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      bufr = buf;
      bufc = buf + nrow; /* rows in the front, columns right after */
      irowm = bufr;
      icolm = bufc;
    } else {
      PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
      irowm = bufr;
      icolm = bufc;
    }
    if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
    else irowm = irow; /* no row mapping: indices are used as given */
    if (mat->cmap->mapping) {
      /* Reuse the already-mapped row indices when rows and columns share the
         same mapping and the same index array */
      if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
      else icolm = irowm;
    } else icolm = icol;
    PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
    if (bufr != buf) PetscCall(PetscFree2(bufr, bufc)); /* only free the heap path */
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2493: /*@
2494: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2495: using a local ordering of the nodes a block at a time.
2497: Not Collective
2499: Input Parameters:
2500: + mat - the matrix
2501: . nrow - number of rows
2502: . irow - the row local indices
2503: . ncol - number of columns
2504: . icol - the column local indices
2505: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2506: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2507: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2509: Level: intermediate
2511: Notes:
2512: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2513:   before using this routine.
2515: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2516: options cannot be mixed without intervening calls to the assembly
2517: routines.
2519: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2520: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2522: Fortran Notes:
2523: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2524: .vb
2525: call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2526: .ve
2528: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2530: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2531: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2532: @*/
PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(irow, 3);
  PetscAssertPointer(icol, 5);
  /* The first call fixes the insert mode; ADD_VALUES and INSERT_VALUES cannot
     be mixed without an intervening assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  /* Inserting values puts an assembled matrix back into the unassembled state */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled = PETSC_FALSE;
  }
  /* Debug-only sanity check: the matrix row block size must agree with the
     block size of the row local-to-global mapping */
  if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
    PetscInt irbs, rbs;
    PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
    PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
  }
  /* Same check for the column block size */
  if (PetscUnlikelyDebug(mat->cmap->mapping)) {
    PetscInt icbs, cbs;
    PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
    PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
  else {
    /* Fallback: map local block indices to global block indices and call
       MatSetValuesBlocked(). Small sets reuse a stack buffer. */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
    const PetscInt *irowm, *icolm;

    if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
      bufr = buf;
      bufc = buf + nrow; /* rows in the front, columns right after */
      irowm = bufr;
      icolm = bufc;
    } else {
      PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
      irowm = bufr;
      icolm = bufc;
    }
    if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
    else irowm = irow; /* no row mapping: indices are used as given */
    if (mat->cmap->mapping) {
      /* Reuse the already-mapped row indices when rows and columns share the
         same mapping and the same index array */
      if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
      else icolm = irowm;
    } else icolm = icol;
    PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
    if (bufr != buf) PetscCall(PetscFree2(bufr, bufc)); /* only free the heap path */
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2594: /*@
2595:   MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2597: Collective
2599: Input Parameters:
2600: + mat - the matrix
2601: - x - the vector to be multiplied
2603: Output Parameter:
2604: . y - the result
2606: Level: developer
2608: Note:
2609: The vectors `x` and `y` cannot be the same. I.e., one cannot
2610: call `MatMultDiagonalBlock`(A,y,y).
2612: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2613: @*/
PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  MatCheckPreallocated(mat, 1);

  /* Dispatch to the type's implementation; no generic fallback exists for this operation */
  PetscUseTypeMethod(mat, multdiagonalblock, x, y);
  PetscCall(PetscObjectStateIncrease((PetscObject)y)); /* y was modified */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2632: /*@
2633: MatMult - Computes the matrix-vector product, $y = Ax$.
2635: Neighbor-wise Collective
2637: Input Parameters:
2638: + mat - the matrix
2639: - x - the vector to be multiplied
2641: Output Parameter:
2642: . y - the result
2644: Level: beginner
2646: Note:
2647: The vectors `x` and `y` cannot be the same. I.e., one cannot
2648: call `MatMult`(A,y,y).
2650: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2651: @*/
PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
{
  PetscFunctionBegin;
  VecCheckAssembled(x);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  /* x must be compatible with the column space, y with the row space,
     both globally and per-process */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
  PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
  PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
  PetscCall(VecSetErrorIfLocked(y, 3)); /* the output must be writable */
  /* NOTE(review): VecValidValues_Internal presumably screens for Inf/NaN when
     error-if-failure is enabled -- input checked here, output checked below */
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  PetscCall(VecLockReadPush(x)); /* x is read-only for the duration of the product */
  PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
  PetscUseTypeMethod(mat, mult, x, y);
  PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2680: /*@
2681: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2683: Neighbor-wise Collective
2685: Input Parameters:
2686: + mat - the matrix
2687: - x - the vector to be multiplied
2689: Output Parameter:
2690: . y - the result
2692: Level: beginner
2694: Notes:
2695: The vectors `x` and `y` cannot be the same. I.e., one cannot
2696: call `MatMultTranspose`(A,y,y).
2698: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiple,
2699: use `MatMultHermitianTranspose()`
2701: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2702: @*/
PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
{
  PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;

  PetscFunctionBegin;
  VecCheckAssembled(x);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  /* For A^T: x lives in the row space, y in the column space (reverse of MatMult) */
  PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
  PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
  PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
  PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  /* Select the operation: prefer the type's multtranspose; for a matrix known
     to be symmetric (A^T == A) fall back to the plain multiply */
  if (!mat->ops->multtranspose) {
    if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
    PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
  } else op = mat->ops->multtranspose;
  PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
  PetscCall(VecLockReadPush(x)); /* x is read-only for the duration */
  PetscCall((*op)(mat, x, y));
  PetscCall(VecLockReadPop(x));
  PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)y)); /* y was modified */
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2738: /*@
2739: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2741: Neighbor-wise Collective
2743: Input Parameters:
2744: + mat - the matrix
2745: - x - the vector to be multiplied
2747: Output Parameter:
2748: . y - the result
2750: Level: beginner
2752: Notes:
2753: The vectors `x` and `y` cannot be the same. I.e., one cannot
2754: call `MatMultHermitianTranspose`(A,y,y).
2756: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2758: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2760: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2761: @*/
PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  /* For A^H: x lives in the row space, y in the column space */
  PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
  PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
  PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
  PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
#if defined(PETSC_USE_COMPLEX)
  /* Use the type's Hermitian-transpose multiply, or the plain multiply when the
     matrix is known Hermitian (A^H == A) */
  if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
    PetscCall(VecLockReadPush(x));
    if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
    else PetscUseTypeMethod(mat, mult, x, y);
    PetscCall(VecLockReadPop(x));
  } else {
    /* Generic fallback: A^H x = conj(A^T conj(x)) */
    Vec w;
    PetscCall(VecDuplicate(x, &w));
    PetscCall(VecCopy(x, w));
    PetscCall(VecConjugate(w));
    PetscCall(MatMultTranspose(mat, w, y));
    PetscCall(VecDestroy(&w));
    PetscCall(VecConjugate(y));
  }
  PetscCall(PetscObjectStateIncrease((PetscObject)y));
#else
  /* With real scalars the Hermitian transpose is just the transpose */
  PetscCall(MatMultTranspose(mat, x, y));
#endif
  PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2803: /*@
2804: MatMultAdd - Computes $v3 = v2 + A * v1$.
2806: Neighbor-wise Collective
2808: Input Parameters:
2809: + mat - the matrix
2810: . v1 - the vector to be multiplied by `mat`
2811: - v2 - the vector to be added to the result
2813: Output Parameter:
2814: . v3 - the result
2816: Level: beginner
2818: Note:
2819: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2820: call `MatMultAdd`(A,v1,v2,v1).
2822: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2823: @*/
PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* v1 lives in the column space; v2 and v3 in the row space. The global
     checks on v2/v3 are deliberately disabled (see commented lines below). */
  PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
  /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
     PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
  PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
  PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
  PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
  PetscCall(VecLockReadPush(v1)); /* v1 is read-only for the duration */
  PetscUseTypeMethod(mat, multadd, v1, v2, v3);
  PetscCall(VecLockReadPop(v1));
  PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
  PetscCall(PetscObjectStateIncrease((PetscObject)v3)); /* v3 was modified */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2852: /*@
2853: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2855: Neighbor-wise Collective
2857: Input Parameters:
2858: + mat - the matrix
2859: . v1 - the vector to be multiplied by the transpose of the matrix
2860: - v2 - the vector to be added to the result
2862: Output Parameter:
2863: . v3 - the result
2865: Level: beginner
2867: Note:
2868: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2869: call `MatMultTransposeAdd`(A,v1,v2,v1).
2871: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2872: @*/
2873: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2874: {
2875: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2877: PetscFunctionBegin;
2884: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2885: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2886: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2887: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2888: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2889: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2890: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2891: MatCheckPreallocated(mat, 1);
2893: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2894: PetscCall(VecLockReadPush(v1));
2895: PetscCall((*op)(mat, v1, v2, v3));
2896: PetscCall(VecLockReadPop(v1));
2897: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2898: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2899: PetscFunctionReturn(PETSC_SUCCESS);
2900: }
2902: /*@
2903: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2905: Neighbor-wise Collective
2907: Input Parameters:
2908: + mat - the matrix
2909: . v1 - the vector to be multiplied by the Hermitian transpose
2910: - v2 - the vector to be added to the result
2912: Output Parameter:
2913: . v3 - the result
2915: Level: beginner
2917: Note:
2918: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2919: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2921: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2922: @*/
PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
  /* v1 lives in the row space; v2 and v3 in the column space */
  PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
  PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
  PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
  PetscCall(VecLockReadPush(v1)); /* v1 is read-only for the duration */
  if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
  else {
    /* Generic fallback: v3 = v2 + conj(A^T conj(v1)) */
    Vec w, z;
    PetscCall(VecDuplicate(v1, &w));
    PetscCall(VecCopy(v1, w));
    PetscCall(VecConjugate(w));
    PetscCall(VecDuplicate(v3, &z));
    PetscCall(MatMultTranspose(mat, w, z));
    PetscCall(VecDestroy(&w));
    PetscCall(VecConjugate(z));
    /* VecWAXPY requires distinct output; use AXPY when v2 aliases v3 */
    if (v2 != v3) PetscCall(VecWAXPY(v3, 1.0, v2, z));
    else PetscCall(VecAXPY(v3, 1.0, z));
    PetscCall(VecDestroy(&z));
  }
  PetscCall(VecLockReadPop(v1));
  PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
  PetscCall(PetscObjectStateIncrease((PetscObject)v3)); /* v3 was modified */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2962: PetscErrorCode MatADot_Default(Mat mat, Vec x, Vec y, PetscScalar *val)
2963: {
2964: PetscFunctionBegin;
2965: if (!mat->dot_vec) PetscCall(MatCreateVecs(mat, &mat->dot_vec, NULL));
2966: PetscCall(MatMult(mat, x, mat->dot_vec));
2967: PetscCall(VecDot(mat->dot_vec, y, val));
2968: PetscFunctionReturn(PETSC_SUCCESS);
2969: }
2971: PetscErrorCode MatANorm_Default(Mat mat, Vec x, PetscReal *val)
2972: {
2973: PetscScalar sval;
2975: PetscFunctionBegin;
2976: PetscCall(MatADot_Default(mat, x, x, &sval));
2977: PetscCheck(PetscRealPart(sval) >= 0.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix argument is not positive definite");
2978: PetscCheck(PetscAbsReal(PetscImaginaryPart(sval)) < 100 * PETSC_MACHINE_EPSILON, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix argument is not Hermitian");
2979: *val = PetscSqrtReal(PetscRealPart(sval));
2980: PetscFunctionReturn(PETSC_SUCCESS);
2981: }
2983: /*@
2984: MatADot - Computes the inner product with respect to a matrix, i.e., $(x, y)_A = y^H A x$ where $A$ is symmetric (Hermitian when using complex)
2985: positive definite.
2987: Collective
2989: Input Parameters:
2990: + mat - matrix used to define the inner product
2991: . x - first vector
2992: - y - second vector
2994: Output Parameter:
2995: . val - the dot product with respect to `A`
2997: Level: intermediate
2999: Note:
3000: For complex vectors, `MatADot()` computes
3001: $$
3002: val = (x,y)_A = y^H A x,
3003: $$
3004: where $y^H$ denotes the conjugate transpose of `y`. Note that this corresponds to the "mathematicians" complex
3005: inner product where the SECOND argument gets the complex conjugate.
3007: .seealso: [](ch_matrices), `Mat`, `MatANorm()`, `VecDot()`, `VecNorm()`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`
3008: @*/
PetscErrorCode MatADot(Mat mat, Vec x, Vec y, PetscScalar *val)
{
  PetscFunctionBegin;
  VecCheckAssembled(x);
  VecCheckAssembled(y);
  PetscAssertPointer(val, 4);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* x is multiplied by A (column space); y is dotted with A x (row space) */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
  PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
  PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  /* Both vectors are read-only for the duration of the operation */
  PetscCall(VecLockReadPush(x));
  PetscCall(VecLockReadPush(y));
  PetscCall(PetscLogEventBegin(MAT_ADot, mat, x, y, 0));
  PetscUseTypeMethod(mat, adot, x, y, val);
  PetscCall(PetscLogEventEnd(MAT_ADot, mat, x, y, 0));
  PetscCall(VecLockReadPop(y));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3041: /*@
3042: MatANorm - Computes the norm with respect to a matrix, i.e., $(x, x)_A^{1/2} = (x^H A x)^{1/2}$ where $A$ is symmetric (Hermitian when using complex)
3043: positive definite.
3045: Collective
3047: Input Parameters:
3048: + mat - matrix used to define norm
3049: - x - the vector to compute the norm of
3051: Output Parameter:
3052: . val - the norm with respect to `A`
3054: Level: intermediate
3056: Note:
3057: For complex vectors, `MatANorm()` computes
3058: $$
3059: val = (x,x)_A^{1/2} = (x^H A x)^{1/2},
3060: $$
3061: where $x^H$ denotes the conjugate transpose of `x`.
3063: .seealso: [](ch_matrices), `Mat`, `MatADot()`, `VecDot()`, `VecNorm()`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`
3064: @*/
PetscErrorCode MatANorm(Mat mat, Vec x, PetscReal *val)
{
  PetscFunctionBegin;
  VecCheckAssembled(x);
  PetscAssertPointer(val, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* x appears on both sides of x^H A x, so it must be compatible with both the
     column and the row space (i.e. the matrix must be square) */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
  PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
  PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  PetscCall(VecLockReadPush(x)); /* x is read-only for the duration */
  PetscCall(PetscLogEventBegin(MAT_ANorm, mat, x, 0, 0));
  PetscUseTypeMethod(mat, anorm, x, val);
  PetscCall(PetscLogEventEnd(MAT_ANorm, mat, x, 0, 0));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3091: /*@
3092: MatGetFactorType - gets the type of factorization a matrix is
3094: Not Collective
3096: Input Parameter:
3097: . mat - the matrix
3099: Output Parameter:
3100: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3102: Level: intermediate
3104: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3105: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3106: @*/
3107: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3108: {
3109: PetscFunctionBegin;
3112: PetscAssertPointer(t, 2);
/* simply report the factorization type cached on the matrix; no communication, no dispatch */
3113: *t = mat->factortype;
3114: PetscFunctionReturn(PETSC_SUCCESS);
3115: }
3117: /*@
3118: MatSetFactorType - sets the type of factorization a matrix is
3120: Logically Collective
3122: Input Parameters:
3123: + mat - the matrix
3124: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3126: Level: intermediate
3128: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3129: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3130: @*/
3131: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3132: {
3133: PetscFunctionBegin;
/* directly overwrite the cached factorization type; note no object-state increase
   is performed here, unlike the factorization routines themselves */
3136: mat->factortype = t;
3137: PetscFunctionReturn(PETSC_SUCCESS);
3138: }
3140: /*@
3141: MatGetInfo - Returns information about matrix storage (number of
3142: nonzeros, memory, etc.).
3144: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3146: Input Parameters:
3147: + mat - the matrix
3148: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3150: Output Parameter:
3151: . info - matrix information context
3153: Options Database Key:
3154: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3156: Level: intermediate
3158: Notes:
3159: The `MatInfo` context contains a variety of matrix data, including
3160: number of nonzeros allocated and used, number of mallocs during
3161: matrix assembly, etc. Additional information for factored matrices
3162: is provided (such as the fill ratio, number of mallocs during
3163: factorization, etc.).
3165: Example:
3166: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3167: data within the `MatInfo` context. For example,
3168: .vb
3169: MatInfo info;
3170: Mat A;
3171: double mal, nz_a, nz_u;
3173: MatGetInfo(A, MAT_LOCAL, &info);
3174: mal = info.mallocs;
3175: nz_a = info.nz_allocated;
3176: .ve
3178: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3179: @*/
3180: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3181: {
3182: PetscFunctionBegin;
3185: PetscAssertPointer(info, 3);
3186: MatCheckPreallocated(mat, 1);
/* each matrix type fills in the MatInfo struct itself; the call is collective
   only when flag is MAT_GLOBAL_MAX or MAT_GLOBAL_SUM (reductions inside getinfo) */
3187: PetscUseTypeMethod(mat, getinfo, flag, info);
3188: PetscFunctionReturn(PETSC_SUCCESS);
3189: }
3191: /*
3192: This is used by external packages where it is not easy to get the info from the actual
3193: matrix factorization.
3194: */
3195: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3196: {
3197: PetscFunctionBegin;
/* external solver packages cannot report storage statistics, so return an all-zero
   MatInfo rather than leaving the caller's struct uninitialized */
3198: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3199: PetscFunctionReturn(PETSC_SUCCESS);
3200: }
3202: /*@
3203: MatLUFactor - Performs in-place LU factorization of matrix.
3205: Collective
3207: Input Parameters:
3208: + mat - the matrix
3209: . row - row permutation
3210: . col - column permutation
3211: - info - options for factorization, includes
3212: .vb
3213: fill - expected fill as ratio of original fill.
3214: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3215: Run with the option -info to determine an optimal value to use
3216: .ve
3218: Level: developer
3220: Notes:
3221: Most users should employ the `KSP` interface for linear solvers
3222: instead of working directly with matrix algebra routines such as this.
3223: See, e.g., `KSPCreate()`.
3225: This changes the state of the matrix to a factored matrix; it cannot be used
3226: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3228: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3229: when not using `KSP`.
3231: Fortran Note:
3232: A valid (non-null) `info` argument must be provided
3234: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3235: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3236: @*/
3237: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3238: {
/* stack-allocated defaults used when the caller passes info == NULL */
3239: MatFactorInfo tinfo;
3241: PetscFunctionBegin;
3245: if (info) PetscAssertPointer(info, 4);
3247: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3248: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3249: MatCheckPreallocated(mat, 1);
/* NULL info means "use default factorization options" */
3250: if (!info) {
3251: PetscCall(MatFactorInfoInitialize(&tinfo));
3252: info = &tinfo;
3253: }
3255: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3256: PetscUseTypeMethod(mat, lufactor, row, col, info);
3257: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
/* mat itself was mutated into its factored form, so bump its object state */
3258: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3259: PetscFunctionReturn(PETSC_SUCCESS);
3260: }
3262: /*@
3263: MatILUFactor - Performs in-place ILU factorization of matrix.
3265: Collective
3267: Input Parameters:
3268: + mat - the matrix
3269: . row - row permutation
3270: . col - column permutation
3271: - info - structure containing
3272: .vb
3273: levels - number of levels of fill.
3274: expected fill - as ratio of original fill.
3275: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3276: missing diagonal entries)
3277: .ve
3279: Level: developer
3281: Notes:
3282: Most users should employ the `KSP` interface for linear solvers
3283: instead of working directly with matrix algebra routines such as this.
3284: See, e.g., `KSPCreate()`.
3286: Probably really in-place only when level of fill is zero, otherwise allocates
3287: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3288: when not using `KSP`.
3290: Fortran Note:
3291: A valid (non-null) `info` argument must be provided
3293: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3294: @*/
3295: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3296: {
3297: PetscFunctionBegin;
/* unlike MatLUFactor(), info is asserted unconditionally here: no default
   MatFactorInfo is substituted for a NULL argument */
3301: PetscAssertPointer(info, 4);
3303: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3304: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3305: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3306: MatCheckPreallocated(mat, 1);
3308: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3309: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3310: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
/* mat was replaced in place by its (approximate) factors */
3311: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3312: PetscFunctionReturn(PETSC_SUCCESS);
3313: }
3315: /*@
3316: MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
3317: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3319: Collective
3321: Input Parameters:
3322: + fact - the factor matrix obtained with `MatGetFactor()`
3323: . mat - the matrix
3324: . row - the row permutation
3325: . col - the column permutation
3326: - info - options for factorization, includes
3327: .vb
3328: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3329: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3330: .ve
3332: Level: developer
3334: Notes:
3335: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3337: Most users should employ the simplified `KSP` interface for linear solvers
3338: instead of working directly with matrix algebra routines such as this.
3339: See, e.g., `KSPCreate()`.
3341: Fortran Note:
3342: A valid (non-null) `info` argument must be provided
3344: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3345: @*/
3346: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3347: {
/* stack-allocated defaults used when the caller passes info == NULL */
3348: MatFactorInfo tinfo;
3350: PetscFunctionBegin;
3355: if (info) PetscAssertPointer(info, 5);
3358: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3359: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3360: MatCheckPreallocated(mat, 2);
3361: if (!info) {
3362: PetscCall(MatFactorInfoInitialize(&tinfo));
3363: info = &tinfo;
3364: }
/* skip logging the symbolic event for factor types whose symbolic phase is trivial
   (the work will be attributed to the numeric/combined event instead) */
3366: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
/* the symbolic-factorization method lives on the factor matrix, not on mat */
3367: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3368: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3369: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3370: PetscFunctionReturn(PETSC_SUCCESS);
3371: }
3373: /*@
3374: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3375: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3377: Collective
3379: Input Parameters:
3380: + fact - the factor matrix obtained with `MatGetFactor()`
3381: . mat - the matrix
3382: - info - options for factorization
3384: Level: developer
3386: Notes:
3387: See `MatLUFactor()` for in-place factorization. See
3388: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3390: Most users should employ the `KSP` interface for linear solvers
3391: instead of working directly with matrix algebra routines such as this.
3392: See, e.g., `KSPCreate()`.
3394: Fortran Note:
3395: A valid (non-null) `info` argument must be provided
3397: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3398: @*/
3399: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3400: {
/* stack-allocated defaults used when the caller passes info == NULL */
3401: MatFactorInfo tinfo;
3403: PetscFunctionBegin;
3408: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
/* fact must have been obtained for a matrix of identical global dimensions */
3409: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3410: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3412: MatCheckPreallocated(mat, 2);
3413: if (!info) {
3414: PetscCall(MatFactorInfoInitialize(&tinfo));
3415: info = &tinfo;
3416: }
/* when the symbolic phase was trivial the entire factorization cost is charged
   to the combined MAT_LUFactor event rather than MAT_LUFactorNumeric */
3418: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3419: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3420: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3421: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3422: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
/* allow the user to inspect the computed factor with -mat_factor_view */
3423: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3424: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3425: PetscFunctionReturn(PETSC_SUCCESS);
3426: }
3428: /*@
3429: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3430: symmetric matrix.
3432: Collective
3434: Input Parameters:
3435: + mat - the matrix
3436: . perm - row and column permutations
3437: - info - expected fill as ratio of original fill
3439: Level: developer
3441: Notes:
3442: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3443: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3445: Most users should employ the `KSP` interface for linear solvers
3446: instead of working directly with matrix algebra routines such as this.
3447: See, e.g., `KSPCreate()`.
3449: Fortran Note:
3450: A valid (non-null) `info` argument must be provided
3452: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3453: `MatGetOrdering()`
3454: @*/
3455: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3456: {
/* stack-allocated defaults used when the caller passes info == NULL */
3457: MatFactorInfo tinfo;
3459: PetscFunctionBegin;
3462: if (info) PetscAssertPointer(info, 3);
/* Cholesky only makes sense for square (symmetric) operators */
3464: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3465: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3466: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3467: MatCheckPreallocated(mat, 1);
3468: if (!info) {
3469: PetscCall(MatFactorInfoInitialize(&tinfo));
3470: info = &tinfo;
3471: }
3473: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3474: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3475: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
/* mat itself was mutated into its factored form, so bump its object state */
3476: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3477: PetscFunctionReturn(PETSC_SUCCESS);
3478: }
3480: /*@
3481: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3482: of a symmetric matrix.
3484: Collective
3486: Input Parameters:
3487: + fact - the factor matrix obtained with `MatGetFactor()`
3488: . mat - the matrix
3489: . perm - row and column permutations
3490: - info - options for factorization, includes
3491: .vb
3492: fill - expected fill as ratio of original fill.
3493: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3494: Run with the option -info to determine an optimal value to use
3495: .ve
3497: Level: developer
3499: Notes:
3500: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3501: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3503: Most users should employ the `KSP` interface for linear solvers
3504: instead of working directly with matrix algebra routines such as this.
3505: See, e.g., `KSPCreate()`.
3507: Fortran Note:
3508: A valid (non-null) `info` argument must be provided
3510: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3511: `MatGetOrdering()`
3512: @*/
3513: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3514: {
/* stack-allocated defaults used when the caller passes info == NULL */
3515: MatFactorInfo tinfo;
3517: PetscFunctionBegin;
3521: if (info) PetscAssertPointer(info, 4);
3524: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3525: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3526: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3527: MatCheckPreallocated(mat, 2);
3528: if (!info) {
3529: PetscCall(MatFactorInfoInitialize(&tinfo));
3530: info = &tinfo;
3531: }
/* skip logging when the factor type's symbolic phase is trivial */
3533: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
/* dispatch through the factor matrix, which knows the solver package */
3534: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3535: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3536: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3537: PetscFunctionReturn(PETSC_SUCCESS);
3538: }
3540: /*@
3541: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3542: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3543: `MatCholeskyFactorSymbolic()`.
3545: Collective
3547: Input Parameters:
3548: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3549: . mat - the initial matrix that is to be factored
3550: - info - options for factorization
3552: Level: developer
3554: Note:
3555: Most users should employ the `KSP` interface for linear solvers
3556: instead of working directly with matrix algebra routines such as this.
3557: See, e.g., `KSPCreate()`.
3559: Fortran Note:
3560: A valid (non-null) `info` argument must be provided
3562: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3563: @*/
3564: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3565: {
/* stack-allocated defaults used when the caller passes info == NULL */
3566: MatFactorInfo tinfo;
3568: PetscFunctionBegin;
3573: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
/* fact must have been obtained for a matrix of identical global dimensions */
3574: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3575: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3576: MatCheckPreallocated(mat, 2);
3577: if (!info) {
3578: PetscCall(MatFactorInfoInitialize(&tinfo));
3579: info = &tinfo;
3580: }
/* trivial symbolic phase => attribute the whole cost to MAT_CholeskyFactor */
3582: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3583: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3584: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3585: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3586: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
/* allow the user to inspect the computed factor with -mat_factor_view */
3587: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3588: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3589: PetscFunctionReturn(PETSC_SUCCESS);
3590: }
3592: /*@
3593: MatQRFactor - Performs in-place QR factorization of matrix.
3595: Collective
3597: Input Parameters:
3598: + mat - the matrix
3599: . col - column permutation
3600: - info - options for factorization, includes
3601: .vb
3602: fill - expected fill as ratio of original fill.
3603: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3604: Run with the option -info to determine an optimal value to use
3605: .ve
3607: Level: developer
3609: Notes:
3610: Most users should employ the `KSP` interface for linear solvers
3611: instead of working directly with matrix algebra routines such as this.
3612: See, e.g., `KSPCreate()`.
3614: This changes the state of the matrix to a factored matrix; it cannot be used
3615: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3617: Fortran Note:
3618: A valid (non-null) `info` argument must be provided
3620: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3621: `MatSetUnfactored()`
3622: @*/
3623: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3624: {
3625: PetscFunctionBegin;
3628: if (info) PetscAssertPointer(info, 3);
3630: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3631: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3632: MatCheckPreallocated(mat, 1);
3633: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
/* QR has no slot in the Mat function table; it is dispatched via the composed
   "MatQRFactor_C" method, so only types that compose it support this call */
3634: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3635: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3636: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3637: PetscFunctionReturn(PETSC_SUCCESS);
3638: }
3640: /*@
3641: MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
3642: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3644: Collective
3646: Input Parameters:
3647: + fact - the factor matrix obtained with `MatGetFactor()`
3648: . mat - the matrix
3649: . col - column permutation
3650: - info - options for factorization, includes
3651: .vb
3652: fill - expected fill as ratio of original fill.
3653: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3654: Run with the option -info to determine an optimal value to use
3655: .ve
3657: Level: developer
3659: Note:
3660: Most users should employ the `KSP` interface for linear solvers
3661: instead of working directly with matrix algebra routines such as this.
3662: See, e.g., `KSPCreate()`.
3664: Fortran Note:
3665: A valid (non-null) `info` argument must be provided
3667: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3668: @*/
3669: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3670: {
/* stack-allocated defaults used when the caller passes info == NULL */
3671: MatFactorInfo tinfo;
3673: PetscFunctionBegin;
3677: if (info) PetscAssertPointer(info, 4);
3680: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3681: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3682: MatCheckPreallocated(mat, 2);
3683: if (!info) {
3684: PetscCall(MatFactorInfoInitialize(&tinfo));
3685: info = &tinfo;
3686: }
/* skip logging when the symbolic phase is trivial; dispatch via the composed
   "MatQRFactorSymbolic_C" method on the factor matrix */
3688: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3689: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3690: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3691: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3692: PetscFunctionReturn(PETSC_SUCCESS);
3693: }
3695: /*@
3696: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3697: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3699: Collective
3701: Input Parameters:
3702: + fact - the factor matrix obtained with `MatGetFactor()`
3703: . mat - the matrix
3704: - info - options for factorization
3706: Level: developer
3708: Notes:
3709: See `MatQRFactor()` for in-place factorization.
3711: Most users should employ the `KSP` interface for linear solvers
3712: instead of working directly with matrix algebra routines such as this.
3713: See, e.g., `KSPCreate()`.
3715: Fortran Note:
3716: A valid (non-null) `info` argument must be provided
3718: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3719: @*/
3720: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3721: {
/* stack-allocated defaults used when the caller passes info == NULL */
3722: MatFactorInfo tinfo;
3724: PetscFunctionBegin;
3729: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
/* fact must have been obtained for a matrix of identical global dimensions */
3730: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3731: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3733: MatCheckPreallocated(mat, 2);
3734: if (!info) {
3735: PetscCall(MatFactorInfoInitialize(&tinfo));
3736: info = &tinfo;
3737: }
/* trivial symbolic phase => attribute the whole cost to MAT_QRFactor;
   dispatch via the composed "MatQRFactorNumeric_C" method */
3739: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3740: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3741: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3742: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3743: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
/* allow the user to inspect the computed factor with -mat_factor_view */
3744: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3745: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3746: PetscFunctionReturn(PETSC_SUCCESS);
3747: }
3749: /*@
3750: MatSolve - Solves $A x = b$, given a factored matrix.
3752: Neighbor-wise Collective
3754: Input Parameters:
3755: + mat - the factored matrix
3756: - b - the right-hand-side vector
3758: Output Parameter:
3759: . x - the result vector
3761: Level: developer
3763: Notes:
3764: The vectors `b` and `x` cannot be the same. I.e., one cannot
3765: call `MatSolve`(A,x,x).
3767: Most users should employ the `KSP` interface for linear solvers
3768: instead of working directly with matrix algebra routines such as this.
3769: See, e.g., `KSPCreate()`.
3771: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3772: @*/
3773: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3774: {
3775: PetscFunctionBegin;
3780: PetscCheckSameComm(mat, 1, b, 2);
3781: PetscCheckSameComm(mat, 1, x, 3);
/* in-place solve is not supported: x must be a distinct vector from b */
3782: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3783: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3784: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3785: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
/* a 0x0 system has nothing to solve; return early without logging */
3786: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3787: MatCheckPreallocated(mat, 1);
3789: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
/* VecFlag() marks x according to the factorization error state -- presumably
   filling it with a sentinel (Inf) on failure; verify against VecFlag() */
3790: PetscCall(VecFlag(x, mat->factorerrortype));
/* if the factorization previously failed, skip the solve and just report */
3791: if (mat->factorerrortype) PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3792: else PetscUseTypeMethod(mat, solve, b, x);
3793: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3794: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3795: PetscFunctionReturn(PETSC_SUCCESS);
3796: }
/* Fallback used when a matrix type provides no matsolve/matsolvetranspose:
   solve A X = B (or A^T X = B when trans) one dense column at a time using the
   type's vector solve. B and X must be dense (possibly converted to GPU dense). */
3798: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3799: {
3800: Vec b, x;
3801: PetscInt N, i;
3802: PetscErrorCode (*f)(Mat, Vec, Vec);
3803: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3805: PetscFunctionBegin;
/* if the factorization already failed, fill X with a sentinel and bail out */
3806: if (A->factorerrortype) {
3807: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3808: PetscCall(MatSetInf(X));
3809: PetscFunctionReturn(PETSC_SUCCESS);
3810: }
/* for a symmetric A with no solvetranspose, the plain solve is equivalent */
3811: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3812: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
/* when A may run on a device, convert host-dense B/X to the matching device
   dense type so the per-column solves avoid host<->device traffic */
3813: PetscCall(MatBoundToCPU(A, &Abound));
3814: if (!Abound) {
3815: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3816: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3817: }
3818: #if PetscDefined(HAVE_CUDA)
3819: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3820: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3821: #elif PetscDefined(HAVE_HIP)
3822: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3823: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3824: #endif
/* solve column-by-column: read column i of B, write column i of X */
3825: PetscCall(MatGetSize(B, NULL, &N));
3826: for (i = 0; i < N; i++) {
3827: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3828: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3829: PetscCall((*f)(A, b, x));
3830: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3831: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3832: }
/* convert back so the caller sees the original host-dense types */
3833: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3834: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3835: PetscFunctionReturn(PETSC_SUCCESS);
3836: }
3838: /*@
3839: MatMatSolve - Solves $A X = B$, given a factored matrix.
3841: Neighbor-wise Collective
3843: Input Parameters:
3844: + A - the factored matrix
3845: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3847: Output Parameter:
3848: . X - the result matrix (dense matrix)
3850: Level: developer
3852: Note:
3853: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3854: otherwise, `B` and `X` cannot be the same.
3856: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3857: @*/
3858: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3859: {
3860: PetscFunctionBegin;
3865: PetscCheckSameComm(A, 1, B, 2);
3866: PetscCheckSameComm(A, 1, X, 3);
/* no X != B check here: per the docstring, X == B is allowed for dense B
   (except with MATSOLVERMKL_CPARDISO) */
3867: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3868: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3869: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
/* empty 0x0 system: nothing to do */
3870: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3871: MatCheckPreallocated(A, 1);
3873: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
/* prefer the type's blocked matsolve; otherwise fall back to the generic
   column-by-column implementation */
3874: if (!A->ops->matsolve) {
3875: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3876: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3877: } else PetscUseTypeMethod(A, matsolve, B, X);
3878: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3879: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3880: PetscFunctionReturn(PETSC_SUCCESS);
3881: }
3883: /*@
3884: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3886: Neighbor-wise Collective
3888: Input Parameters:
3889: + A - the factored matrix
3890: - B - the right-hand-side matrix (`MATDENSE` matrix)
3892: Output Parameter:
3893: . X - the result matrix (dense matrix)
3895: Level: developer
3897: Note:
3898: The matrices `B` and `X` cannot be the same. I.e., one cannot
3899: call `MatMatSolveTranspose`(A,X,X).
3901: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3902: @*/
3903: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3904: {
3905: PetscFunctionBegin;
3910: PetscCheckSameComm(A, 1, B, 2);
3911: PetscCheckSameComm(A, 1, X, 3);
3912: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3913: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3914: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3915: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
  /* NOTE(review): the check is >= so X may have MORE columns than B, though the message says "same" -- confirm intent */
3916: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
  /* 0 x 0 system: nothing to solve */
3917: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3918: MatCheckPreallocated(A, 1);
3920: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
  /* fall back to the generic implementation (transpose flag set) when no specialized matsolvetranspose exists */
3921: if (!A->ops->matsolvetranspose) {
3922: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3923: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3924: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3925: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3926: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3927: PetscFunctionReturn(PETSC_SUCCESS);
3928: }
3930: /*@
3931: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3933: Neighbor-wise Collective
3935: Input Parameters:
3936: + A - the factored matrix
3937: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3939: Output Parameter:
3940: . X - the result matrix (dense matrix)
3942: Level: developer
3944: Note:
3945: For MUMPS, it only supports centralized sparse compressed column format on the host processor for right-hand side matrix. User must create `Bt` in sparse compressed row
3946: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3948: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3949: @*/
3950: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3951: {
3952: PetscFunctionBegin;
3957: PetscCheckSameComm(A, 1, Bt, 2);
3958: PetscCheckSameComm(A, 1, X, 3);
3960: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
  /* Bt holds B transposed, so B's row count is Bt's COLUMN count and B's columns are Bt's rows */
3961: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3962: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3963: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3964: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
  /* unlike MatMatSolve(), there is no generic fallback here, so A must really be factored */
3965: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3966: MatCheckPreallocated(A, 1);
3968: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3969: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3970: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3971: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3972: PetscFunctionReturn(PETSC_SUCCESS);
3973: }
3975: /*@
3976: MatForwardSolve - Solves $ L x = b $, given a factored matrix, $A = LU $, or
3977: $U^T*D^(1/2) x = b$, given a factored symmetric matrix, $A = U^T*D*U$,
3979: Neighbor-wise Collective
3981: Input Parameters:
3982: + mat - the factored matrix
3983: - b - the right-hand-side vector
3985: Output Parameter:
3986: . x - the result vector
3988: Level: developer
3990: Notes:
3991: `MatSolve()` should be used for most applications, as it performs
3992: a forward solve followed by a backward solve.
3994: The vectors `b` and `x` cannot be the same, i.e., one cannot
3995: call `MatForwardSolve`(A,x,x).
3997: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3998: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
3999: `MatForwardSolve()` solves $U^T*D y = b$, and
4000: `MatBackwardSolve()` solves $U x = y$.
4001: Thus they do not provide a symmetric preconditioner.
4003: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
4004: @*/
4005: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
4006: {
4007: PetscFunctionBegin;
4012: PetscCheckSameComm(mat, 1, b, 2);
4013: PetscCheckSameComm(mat, 1, x, 3);
4014: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  /* size checks: x conforms to mat's columns, b to mat's rows (globally and locally) */
4015: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4016: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4017: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
  /* 0 x 0 system: nothing to do */
4018: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4019: MatCheckPreallocated(mat, 1);
4021: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
4022: PetscUseTypeMethod(mat, forwardsolve, b, x);
4023: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
4024: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4025: PetscFunctionReturn(PETSC_SUCCESS);
4026: }
4028: /*@
4029: MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$.
4030: $D^(1/2) U x = b$, given a factored symmetric matrix, $A = U^T*D*U$,
4032: Neighbor-wise Collective
4034: Input Parameters:
4035: + mat - the factored matrix
4036: - b - the right-hand-side vector
4038: Output Parameter:
4039: . x - the result vector
4041: Level: developer
4043: Notes:
4044: `MatSolve()` should be used for most applications, as it performs
4045: a forward solve followed by a backward solve.
4047: The vectors `b` and `x` cannot be the same. I.e., one cannot
4048: call `MatBackwardSolve`(A,x,x).
4050: For matrix in `MATSEQBAIJ` format with block size larger than 1,
4051: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
4052: `MatForwardSolve()` solves $U^T*D y = b$, and
4053: `MatBackwardSolve()` solves $U x = y$.
4054: Thus they do not provide a symmetric preconditioner.
4056: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
4057: @*/
4058: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
4059: {
4060: PetscFunctionBegin;
4065: PetscCheckSameComm(mat, 1, b, 2);
4066: PetscCheckSameComm(mat, 1, x, 3);
4067: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  /* size checks mirror MatForwardSolve(): x against columns, b against rows */
4068: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4069: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4070: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
  /* 0 x 0 system: nothing to do */
4071: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4072: MatCheckPreallocated(mat, 1);
4074: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
4075: PetscUseTypeMethod(mat, backwardsolve, b, x);
4076: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
4077: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4078: PetscFunctionReturn(PETSC_SUCCESS);
4079: }
4081: /*@
4082: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
4084: Neighbor-wise Collective
4086: Input Parameters:
4087: + mat - the factored matrix
4088: . b - the right-hand-side vector
4089: - y - the vector to be added to
4091: Output Parameter:
4092: . x - the result vector
4094: Level: developer
4096: Note:
4097: The vectors `b` and `x` cannot be the same. I.e., one cannot
4098: call `MatSolveAdd`(A,x,y,x).
4100: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
4101: @*/
4102: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4103: {
4104: PetscScalar one = 1.0;
4105: Vec tmp;
4107: PetscFunctionBegin;
4113: PetscCheckSameComm(mat, 1, b, 2);
4114: PetscCheckSameComm(mat, 1, y, 3);
4115: PetscCheckSameComm(mat, 1, x, 4);
4116: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4117: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4118: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4119: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4120: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4121: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4122: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4123: MatCheckPreallocated(mat, 1);
4125: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4126: PetscCall(VecFlag(x, mat->factorerrortype));
  /* a failed factorization short-circuits the solve; the error was flagged into x above */
4127: if (mat->factorerrortype) {
4128: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4129: } else if (mat->ops->solveadd) {
4130: PetscUseTypeMethod(mat, solveadd, b, y, x);
4131: } else {
4132: /* do the solve then the add manually */
4133: if (x != y) {
4134: PetscCall(MatSolve(mat, b, x));
4135: PetscCall(VecAXPY(x, one, y));
4136: } else {
  /* x aliases y: preserve y's contents in tmp before MatSolve() overwrites x */
4137: PetscCall(VecDuplicate(x, &tmp));
4138: PetscCall(VecCopy(x, tmp));
4139: PetscCall(MatSolve(mat, b, x));
4140: PetscCall(VecAXPY(x, one, tmp));
4141: PetscCall(VecDestroy(&tmp));
4142: }
4143: }
4144: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4145: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4146: PetscFunctionReturn(PETSC_SUCCESS);
4147: }
4149: /*@
4150: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4152: Neighbor-wise Collective
4154: Input Parameters:
4155: + mat - the factored matrix
4156: - b - the right-hand-side vector
4158: Output Parameter:
4159: . x - the result vector
4161: Level: developer
4163: Notes:
4164: The vectors `b` and `x` cannot be the same. I.e., one cannot
4165: call `MatSolveTranspose`(A,x,x).
4167: Most users should employ the `KSP` interface for linear solvers
4168: instead of working directly with matrix algebra routines such as this.
4169: See, e.g., `KSPCreate()`.
4171: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4172: @*/
4173: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4174: {
  /* if no solvetranspose exists but mat is marked symmetric, A^T = A so the plain solve can be used instead */
4175: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4177: PetscFunctionBegin;
4182: PetscCheckSameComm(mat, 1, b, 2);
4183: PetscCheckSameComm(mat, 1, x, 3);
4184: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  /* transposed system: x conforms to mat's ROWS and b to mat's COLUMNS (reverse of MatSolve) */
4185: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4186: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4187: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4188: MatCheckPreallocated(mat, 1);
4189: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4190: PetscCall(VecFlag(x, mat->factorerrortype));
4191: if (mat->factorerrortype) {
4192: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4193: } else {
4194: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4195: PetscCall((*f)(mat, b, x));
4196: }
4197: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4198: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4199: PetscFunctionReturn(PETSC_SUCCESS);
4200: }
4202: /*@
4203: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$
4204: factored matrix.
4206: Neighbor-wise Collective
4208: Input Parameters:
4209: + mat - the factored matrix
4210: . b - the right-hand-side vector
4211: - y - the vector to be added to
4213: Output Parameter:
4214: . x - the result vector
4216: Level: developer
4218: Note:
4219: The vectors `b` and `x` cannot be the same. I.e., one cannot
4220: call `MatSolveTransposeAdd`(A,x,y,x).
4222: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4223: @*/
4224: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4225: {
4226: PetscScalar one = 1.0;
4227: Vec tmp;
  /* symmetric matrices may use the plain solveadd since A^T = A; see the same fallback in MatSolveTranspose() */
4228: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4230: PetscFunctionBegin;
4236: PetscCheckSameComm(mat, 1, b, 2);
4237: PetscCheckSameComm(mat, 1, y, 3);
4238: PetscCheckSameComm(mat, 1, x, 4);
4239: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4240: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4241: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4242: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4243: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4244: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4245: MatCheckPreallocated(mat, 1);
4247: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4248: PetscCall(VecFlag(x, mat->factorerrortype));
4249: if (mat->factorerrortype) {
4250: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4251: } else if (f) {
4252: PetscCall((*f)(mat, b, y, x));
4253: } else {
4254: /* do the solve then the add manually */
4255: if (x != y) {
4256: PetscCall(MatSolveTranspose(mat, b, x));
4257: PetscCall(VecAXPY(x, one, y));
4258: } else {
  /* x aliases y: save y's contents before the solve clobbers x */
4259: PetscCall(VecDuplicate(x, &tmp));
4260: PetscCall(VecCopy(x, tmp));
4261: PetscCall(MatSolveTranspose(mat, b, x));
4262: PetscCall(VecAXPY(x, one, tmp));
4263: PetscCall(VecDestroy(&tmp));
4264: }
4265: }
4266: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4267: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4268: PetscFunctionReturn(PETSC_SUCCESS);
4269: }
4271: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4272: /*@
4273: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4275: Neighbor-wise Collective
4277: Input Parameters:
4278: + mat - the matrix
4279: . b - the right-hand side
4280: . omega - the relaxation factor
4281: . flag - flag indicating the type of SOR (see below)
4282: . shift - diagonal shift
4283: . its - the number of iterations
4284: - lits - the number of local iterations
4286: Output Parameter:
4287: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4289: SOR Flags:
4290: + `SOR_FORWARD_SWEEP` - forward SOR
4291: . `SOR_BACKWARD_SWEEP` - backward SOR
4292: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4293: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4294: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4295: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4296: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4297: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies upper/lower triangular part of matrix to vector (with `omega`)
4298: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4300: Level: developer
4302: Notes:
4303: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4304: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4305: on each processor.
4307: Application programmers will not generally use `MatSOR()` directly,
4308: but instead will employ `PCSOR` or `PCEISENSTAT`
4310: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with inodes, this does a block SOR smoothing, otherwise it does a pointwise smoothing.
4311: For `MATAIJ` matrices with inodes, the block sizes are determined by the inode sizes, not the block size set with `MatSetBlockSize()`
4313: Vectors `x` and `b` CANNOT be the same
4315: The flags are implemented as bitwise inclusive or operations.
4316: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4317: to specify a zero initial guess for SSOR.
4319: Developer Note:
4320: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4322: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4323: @*/
4324: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4325: {
4326: PetscFunctionBegin;
4331: PetscCheckSameComm(mat, 1, b, 2);
4332: PetscCheckSameComm(mat, 1, x, 8);
  /* SOR operates on the assembled, unfactored matrix, unlike the solve routines above */
4333: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4334: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4335: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4336: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4337: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4338: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4339: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4340: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4342: MatCheckPreallocated(mat, 1);
4343: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4344: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4345: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4346: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4347: PetscFunctionReturn(PETSC_SUCCESS);
4348: }
4350: /*
4351: Default matrix copy routine.
4352: Copies A's entries into B row by row when the nonzero patterns match,
4353: otherwise delegates to MatAYPX(B, 0, A, str), which computes B = A.
4354: */
4353: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4354: {
4355: PetscInt i, rstart = 0, rend = 0, nz;
4356: const PetscInt *cwork;
4357: const PetscScalar *vwork;
4359: PetscFunctionBegin;
  /* clear B first so stale values do not survive the INSERT_VALUES pass */
4360: if (B->assembled) PetscCall(MatZeroEntries(B));
4361: if (str == SAME_NONZERO_PATTERN) {
4362: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4363: for (i = rstart; i < rend; i++) {
4364: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4365: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4366: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4367: }
4368: } else {
4369: PetscCall(MatAYPX(B, 0.0, A, str));
4370: }
4371: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4372: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4373: PetscFunctionReturn(PETSC_SUCCESS);
4374: }
4376: /*@
4377: MatCopy - Copies a matrix to another matrix.
4379: Collective
4381: Input Parameters:
4382: + A - the matrix
4383: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4385: Output Parameter:
4386: . B - where the copy is put
4388: Level: intermediate
4390: Notes:
4391: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4393: `MatCopy()` copies the matrix entries of a matrix to another existing
4394: matrix (after first zeroing the second matrix). A related routine is
4395: `MatConvert()`, which first creates a new matrix and then copies the data.
4397: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4398: @*/
4399: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4400: {
4401: PetscInt i;
4403: PetscFunctionBegin;
4408: PetscCheckSameComm(A, 1, B, 2);
4409: MatCheckPreallocated(B, 2);
4410: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4411: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4412: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4413: A->cmap->N, B->cmap->N);
4414: MatCheckPreallocated(A, 1);
  /* self-copy is a no-op */
4415: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4417: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
  /* prefer the type's specialized copy; otherwise use the generic row-by-row routine */
4418: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4419: else PetscCall(MatCopy_Basic(A, B, str));
  /* propagate the DMDA stencil metadata; when noc is set there is no extra component dimension, hence the -1 */
4421: B->stencil.dim = A->stencil.dim;
4422: B->stencil.noc = A->stencil.noc;
4423: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4424: B->stencil.dims[i] = A->stencil.dims[i];
4425: B->stencil.starts[i] = A->stencil.starts[i];
4426: }
4428: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4429: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4430: PetscFunctionReturn(PETSC_SUCCESS);
4431: }
4433: /*@
4434: MatConvert - Converts a matrix to another matrix, either of the same
4435: or different type.
4437: Collective
4439: Input Parameters:
4440: + mat - the matrix
4441: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4442: same type as the original matrix.
4443: - reuse - denotes if the destination matrix is to be created or reused.
4444: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4445: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4447: Output Parameter:
4448: . M - pointer to place new matrix
4450: Level: intermediate
4452: Notes:
4453: `MatConvert()` first creates a new matrix and then copies the data from
4454: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4455: entries of one matrix to another already existing matrix context.
4457: Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4458: the MPI communicator of the generated matrix is always the same as the communicator
4459: of the input matrix.
4461: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4462: @*/
4463: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4464: {
4465: PetscBool sametype, issame, flg;
4466: PetscBool3 issymmetric, ishermitian, isspd;
4467: char convname[256], mtype[256];
4468: Mat B;
4470: PetscFunctionBegin;
4473: PetscAssertPointer(M, 4);
4474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4476: MatCheckPreallocated(mat, 1);
  /* allow the target type to be overridden from the options database */
4478: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4479: if (flg) newtype = mtype;
4481: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4482: PetscCall(PetscStrcmp(newtype, "same", &issame));
4483: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4484: if (reuse == MAT_REUSE_MATRIX) {
4486: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4487: }
  /* converting in place to the type it already has: nothing to do */
4489: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4490: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4491: PetscFunctionReturn(PETSC_SUCCESS);
4492: }
4494: /* Cache Mat options because some converters use MatHeaderReplace() */
4495: issymmetric = mat->symmetric;
4496: ishermitian = mat->hermitian;
4497: isspd = mat->spd;
4499: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4500: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4501: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4502: } else {
4503: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4504: const char *prefix[3] = {"seq", "mpi", ""};
4505: PetscInt i;
4506: /*
4507: Order of precedence:
4508: 0) See if newtype is a superclass of the current matrix.
4509: 1) See if a specialized converter is known to the current matrix.
4510: 2) See if a specialized converter is known to the desired matrix class.
4511: 3) See if a good general converter is registered for the desired class
4512: (as of 6/27/03 only MATMPIADJ falls into this category).
4513: 4) See if a good general converter is known for the current matrix.
4514: 5) Use a really basic converter.
4515: */
4517: /* 0) See if newtype is a superclass of the current matrix.
4518: i.e mat is mpiaij and newtype is aij */
4519: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4520: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4521: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4522: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4523: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4524: if (flg) {
4525: if (reuse == MAT_INPLACE_MATRIX) {
4526: PetscCall(PetscInfo(mat, "Early return\n"));
4527: PetscFunctionReturn(PETSC_SUCCESS);
4528: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4529: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4530: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4531: PetscFunctionReturn(PETSC_SUCCESS);
4532: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4533: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4534: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4535: PetscFunctionReturn(PETSC_SUCCESS);
4536: }
4537: }
4538: }
4539: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4540: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
  /* converter functions are registered as "MatConvert_<fromtype>_<totype>_C" on the source matrix */
4541: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4542: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4543: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4544: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4545: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4546: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4547: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4548: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4549: if (conv) goto foundconv;
4550: }
4552: /* 2) See if a specialized converter is known to the desired matrix class. */
  /* a temporary B of the target type is built only to query its registered converters */
4553: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4554: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4555: PetscCall(MatSetType(B, newtype));
4556: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4557: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4558: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4559: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4560: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4561: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4562: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4563: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4564: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4565: if (conv) {
4566: PetscCall(MatDestroy(&B));
4567: goto foundconv;
4568: }
4569: }
4571: /* 3) See if a good general converter is registered for the desired class */
4572: conv = B->ops->convertfrom;
4573: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4574: PetscCall(MatDestroy(&B));
4575: if (conv) goto foundconv;
4577: /* 4) See if a good general converter is known for the current matrix */
4578: if (mat->ops->convert) conv = mat->ops->convert;
4579: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4580: if (conv) goto foundconv;
4582: /* 5) Use a really basic converter. */
4583: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4584: conv = MatConvert_Basic;
4586: foundconv:
4587: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4588: PetscCall((*conv)(mat, newtype, reuse, M));
4589: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4590: /* the block sizes must be same if the mappings are copied over */
4591: (*M)->rmap->bs = mat->rmap->bs;
4592: (*M)->cmap->bs = mat->cmap->bs;
4593: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4594: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4595: (*M)->rmap->mapping = mat->rmap->mapping;
4596: (*M)->cmap->mapping = mat->cmap->mapping;
4597: }
  /* carry the DMDA stencil metadata over to the converted matrix, as in MatCopy() */
4598: (*M)->stencil.dim = mat->stencil.dim;
4599: (*M)->stencil.noc = mat->stencil.noc;
4600: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4601: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4602: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4603: }
4604: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4605: }
4606: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4608: /* Reset Mat options */
4609: if (issymmetric != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PetscBool3ToBool(issymmetric)));
4610: if (ishermitian != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PetscBool3ToBool(ishermitian)));
4611: if (isspd != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SPD, PetscBool3ToBool(isspd)));
4612: PetscFunctionReturn(PETSC_SUCCESS);
4613: }
4615: /*@
4616: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4618: Not Collective
4620: Input Parameter:
4621: . mat - the matrix, must be a factored matrix
4623: Output Parameter:
4624: . type - the string name of the package (do not free this string)
4626: Level: intermediate
4628: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4629: @*/
4630: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4631: {
4632: PetscErrorCode (*conv)(Mat, MatSolverType *);
4634: PetscFunctionBegin;
4637: PetscAssertPointer(type, 2);
4638: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4639: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4640: if (conv) PetscCall((*conv)(mat, type));
4641: else *type = MATSOLVERPETSC;
4642: PetscFunctionReturn(PETSC_SUCCESS);
4643: }
/* One node per matrix type registered for a given solver package: holds the
   factor-creation callbacks for that (package, mtype) pair, indexed by
   MatFactorType - 1. (The "Specifc" spelling is a long-standing typo kept
   since the name is used throughout this file.) */
typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
struct _MatSolverTypeForSpecifcType {
  MatType mtype;
  /* no entry for MAT_FACTOR_NONE */
  PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
  MatSolverTypeForSpecifcType next;
};

/* One node per registered solver package (e.g. "petsc", "superlu"); `handlers`
   is the list of matrix types the package supports. */
typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
struct _MatSolverTypeHolder {
  char *name;
  MatSolverTypeForSpecifcType handlers;
  MatSolverTypeHolder next;
};

/* Head of the global solver registry, populated by MatSolverTypeRegister()
   and torn down by MatSolverTypeDestroy() */
static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4662: /*@C
4663: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4665: Logically Collective, No Fortran Support
4667: Input Parameters:
4668: + package - name of the package, for example `petsc` or `superlu`
4669: . mtype - the matrix type that works with this package
4670: . ftype - the type of factorization supported by the package
4671: - createfactor - routine that will create the factored matrix ready to be used
4673: Level: developer
4675: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4676: `MatGetFactor()`
4677: @*/
PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
{
  MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
  PetscBool flg;
  MatSolverTypeForSpecifcType inext, iprev = NULL;

  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  /* Empty registry: create the first holder with a single handler and return */
  if (!next) {
    PetscCall(PetscNew(&MatSolverTypeHolders));
    PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
    PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
    PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
    MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Walk the registered packages looking for a case-insensitive name match */
  while (next) {
    PetscCall(PetscStrcasecmp(package, next->name, &flg));
    if (flg) {
      PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
      inext = next->handlers;
      /* Package already known: look for the matrix type within its handler list */
      while (inext) {
        PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
        if (flg) {
          /* (package, mtype) pair exists: overwrite the slot for this factor type.
             Slot index is ftype - 1 because MAT_FACTOR_NONE has no entry. */
          inext->createfactor[(int)ftype - 1] = createfactor;
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        iprev = inext;
        inext = inext->next;
      }
      /* Matrix type not yet registered for this package: append a new handler */
      PetscCall(PetscNew(&iprev->next));
      PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
      iprev->next->createfactor[(int)ftype - 1] = createfactor;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    prev = next;
    next = next->next;
  }
  /* Package not found at all: append a new holder at the tail of the registry */
  PetscCall(PetscNew(&prev->next));
  PetscCall(PetscStrallocpy(package, &prev->next->name));
  PetscCall(PetscNew(&prev->next->handlers));
  PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
  prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
  PetscFunctionReturn(PETSC_SUCCESS);
}
4724: /*@C
4725: MatSolverTypeGet - Gets the function that creates the factor matrix if it exist
4727: Input Parameters:
4728: + type - name of the package, for example `petsc` or `superlu`, if this is 'NULL', then the first result that satisfies the other criteria is returned
4729: . ftype - the type of factorization supported by the type
4730: - mtype - the matrix type that works with this type
4732: Output Parameters:
4733: + foundtype - `PETSC_TRUE` if the type was registered
4734: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4735: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4737: Calling sequence of `createfactor`:
4738: + A - the matrix providing the factor matrix
4739: . ftype - the `MatFactorType` of the factor requested
4740: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4742: Level: developer
4744: Note:
4745: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4746: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4747: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4749: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4750: `MatInitializePackage()`
4751: @*/
PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
{
  MatSolverTypeHolder next = MatSolverTypeHolders;
  PetscBool flg;
  MatSolverTypeForSpecifcType inext;

  PetscFunctionBegin;
  /* all outputs are optional; initialize the ones provided to "not found" */
  if (foundtype) *foundtype = PETSC_FALSE;
  if (foundmtype) *foundmtype = PETSC_FALSE;
  if (createfactor) *createfactor = NULL;

  if (type) {
    /* Specific package requested: find it by name (case-insensitive), then
       match mtype as a prefix so derived matrix types can use base-class entries */
    while (next) {
      PetscCall(PetscStrcasecmp(type, next->name, &flg));
      if (flg) {
        if (foundtype) *foundtype = PETSC_TRUE;
        inext = next->handlers;
        while (inext) {
          PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
          if (flg) {
            if (foundmtype) *foundmtype = PETSC_TRUE;
            /* slot may still be NULL if this factor type was never registered */
            if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
            PetscFunctionReturn(PETSC_SUCCESS);
          }
          inext = inext->next;
        }
      }
      next = next->next;
    }
  } else {
    /* No package named: first pass requires an exact mtype match AND a
       registered routine for the requested factor type */
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
        if (flg && inext->createfactor[(int)ftype - 1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype) *foundmtype = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        inext = inext->next;
      }
      next = next->next;
    }
    /* try with base classes inext->mtype */
    /* Second pass: relax the exact match to a prefix match (base classes) */
    next = MatSolverTypeHolders;
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
        if (flg && inext->createfactor[(int)ftype - 1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype) *foundmtype = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        inext = inext->next;
      }
      next = next->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
4816: PetscErrorCode MatSolverTypeDestroy(void)
4817: {
4818: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4819: MatSolverTypeForSpecifcType inext, iprev;
4821: PetscFunctionBegin;
4822: while (next) {
4823: PetscCall(PetscFree(next->name));
4824: inext = next->handlers;
4825: while (inext) {
4826: PetscCall(PetscFree(inext->mtype));
4827: iprev = inext;
4828: inext = inext->next;
4829: PetscCall(PetscFree(iprev));
4830: }
4831: prev = next;
4832: next = next->next;
4833: PetscCall(PetscFree(prev));
4834: }
4835: MatSolverTypeHolders = NULL;
4836: PetscFunctionReturn(PETSC_SUCCESS);
4837: }
4839: /*@
4840: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4842: Logically Collective
4844: Input Parameter:
4845: . mat - the matrix
4847: Output Parameter:
4848: . flg - `PETSC_TRUE` if uses the ordering
4850: Level: developer
4852: Note:
4853: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4854: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4856: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4857: @*/
4858: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4859: {
4860: PetscFunctionBegin;
4861: *flg = mat->canuseordering;
4862: PetscFunctionReturn(PETSC_SUCCESS);
4863: }
4865: /*@
4866: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4868: Logically Collective
4870: Input Parameters:
4871: + mat - the matrix obtained with `MatGetFactor()`
4872: - ftype - the factorization type to be used
4874: Output Parameter:
4875: . otype - the preferred ordering type
4877: Level: developer
4879: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4880: @*/
4881: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4882: {
4883: PetscFunctionBegin;
4884: *otype = mat->preferredordering[ftype];
4885: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4886: PetscFunctionReturn(PETSC_SUCCESS);
4887: }
4889: /*@
4890: MatGetFactor - Returns a matrix suitable to calls to routines such as `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatILUFactorSymbolic()`,
4891: `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactorNumeric()`, `MatILUFactorNumeric()`, and
4892: `MatICCFactorNumeric()`
4894: Collective
4896: Input Parameters:
4897: + mat - the matrix
4898: . type - name of solver type, for example, `superlu_dist`, `petsc` (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies
4899: the other criteria is returned
4900: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4902: Output Parameter:
4903: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4905: Options Database Keys:
4906: + -pc_factor_mat_solver_type type - choose the type at run time. When using `KSP` solvers
4907: . -pc_factor_mat_factor_on_host (true|false) - do matrix factorization on host (with device matrices). Default is doing it on device
4908: - -pc_factor_mat_solve_on_host (true|false) - do matrix solve on host (with device matrices). Default is doing it on device
4910: Level: intermediate
4912: Notes:
4913: Some of the packages, such as MUMPS, have options for controlling the factorization, these are in the form `-prefix_mat_packagename_packageoption`
4914: (for example, `-mat_mumps_icntl_6 1`) where `prefix` is normally set automatically from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly,
4915: without using a `PC`, one can set the prefix by
4916: calling `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4918: Some PETSc matrix formats have alternative solvers available that are provided by alternative packages
4919: such as PaStiX, SuperLU_DIST, MUMPS etc. PETSc must have been configured to use the external solver,
4920: using the corresponding `./configure` option such as `--download-package` or `--with-package-dir`.
4922: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4923: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4924: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4926: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4927: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4929: Developer Note:
4930: This should actually be called `MatCreateFactor()` since it creates a new factor object
4932: The `MatGetFactor()` implementations should not be accessing the PETSc options database or making other decisions about solver options,
4933: that should be delayed until the later operations. This is to ensure the correct options prefix has been set in the factor matrix.
4935: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4936: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`,
4937: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`,
4938: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatILUFactorSymbolic()`,
4939: `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactorNumeric()`, `MatILUFactorNumeric()`,
4940: `MatICCFactorNumeric()`
4941: @*/
PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
{
  PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
  PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);

  PetscFunctionBegin;
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* A shell matrix may supply its own getfactor operation; if so, delegate to it */
  PetscCall(MatIsShell(mat, &shell));
  if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
  if (hasop) {
    PetscUseTypeMethod(mat, getfactor, type, ftype, f);
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* Look up the factor-creation routine in the solver registry (type may be NULL,
     in which case the first registered match is used) */
  PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
  if (!foundtype) {
    if (type) {
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
              ((PetscObject)mat)->type_name, type);
    } else {
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
    }
  }
  PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
  PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);

  PetscCall((*conv)(mat, ftype, f));
  /* propagate the options prefix set via MatSetOptionsPrefixFactor() onto the factor */
  if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
4978: /*@
4979: MatGetFactorAvailable - Returns a flag if matrix supports particular type and factor type
4981: Not Collective
4983: Input Parameters:
4984: + mat - the matrix
4985: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's default)
4986: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4988: Output Parameter:
. flg - `PETSC_TRUE` if the factorization is available
4991: Level: intermediate
4993: Notes:
4994: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4995: such as pastix, superlu, mumps etc.
4997: PETSc must have been ./configure to use the external solver, using the option --download-package
4999: Developer Note:
5000: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
5002: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
5003: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
5004: @*/
5005: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
5006: {
5007: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
5009: PetscFunctionBegin;
5011: PetscAssertPointer(flg, 4);
5013: *flg = PETSC_FALSE;
5014: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
5016: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5017: MatCheckPreallocated(mat, 1);
5019: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
5020: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
5021: PetscFunctionReturn(PETSC_SUCCESS);
5022: }
5024: /*@
5025: MatDuplicate - Duplicates a matrix including the non-zero structure.
5027: Collective
5029: Input Parameters:
5030: + mat - the matrix
5031: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
5032: See the manual page for `MatDuplicateOption()` for an explanation of these options.
5034: Output Parameter:
5035: . M - pointer to place new matrix
5037: Level: intermediate
5039: Notes:
5040: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
5042: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
5044: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
5046: When original mat is a product of matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
5047: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
5048: User should not use `MatDuplicate()` to create new matrix `M` if `M` is intended to be reused as the product of matrix operation.
5050: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
5051: @*/
PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
{
  Mat B;
  VecType vtype;
  PetscInt i;
  PetscObject dm, container_h, container_d;
  PetscErrorCodeFn *viewf;

  PetscFunctionBegin;
  PetscAssertPointer(M, 3);
  PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* the type-specific duplicate does the actual copy; logged under MAT_Convert */
  PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, duplicate, op, M);
  PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
  B = *M;

  /* carry over a custom viewer installed on the original, plus its vector type */
  PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
  if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
  PetscCall(MatGetVecs(mat, &vtype));
  if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
  PetscCall(MatGetVecType(mat, &vtype));
  PetscCall(MatSetVecType(B, vtype));

  /* copy the stencil description (used by MatSetValuesStencil());
     the trailing entry is skipped when noc (no-components) is set */
  B->stencil.dim = mat->stencil.dim;
  B->stencil.noc = mat->stencil.noc;
  for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
    B->stencil.dims[i]   = mat->stencil.dims[i];
    B->stencil.starts[i] = mat->stencil.starts[i];
  }

  B->nooffproczerorows = mat->nooffproczerorows;
  B->nooffprocentries  = mat->nooffprocentries;

  /* share (by composition, not copy) the attached DM and COO assembly structures */
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
  if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
  if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
  if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
  /* symmetry/hermitian/SPD flags only remain valid when the values were copied */
  if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
  PetscCall(PetscObjectStateIncrease((PetscObject)B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
5099: /*@
5100: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
5102: Logically Collective
5104: Input Parameter:
5105: . mat - the matrix
5107: Output Parameter:
5108: . v - the diagonal of the matrix
5110: Level: intermediate
5112: Note:
5113: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5114: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5115: is larger than `ndiag`, the values of the remaining entries are unspecified.
5117: Currently only correct in parallel for square matrices.
5119: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5120: @*/
5121: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5122: {
5123: PetscFunctionBegin;
5127: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5128: MatCheckPreallocated(mat, 1);
5129: if (PetscDefined(USE_DEBUG)) {
5130: PetscInt nv, row, col, ndiag;
5132: PetscCall(VecGetLocalSize(v, &nv));
5133: PetscCall(MatGetLocalSize(mat, &row, &col));
5134: ndiag = PetscMin(row, col);
5135: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5136: }
5138: PetscUseTypeMethod(mat, getdiagonal, v);
5139: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5140: PetscFunctionReturn(PETSC_SUCCESS);
5141: }
5143: /*@
5144: MatGetRowMin - Gets the minimum value (of the real part) of each
5145: row of the matrix
5147: Logically Collective
5149: Input Parameter:
5150: . mat - the matrix
5152: Output Parameters:
+ v - the vector for storing the minimums
5154: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5156: Level: intermediate
5158: Note:
  The result of this call is the same as if one converted the matrix to dense format
  and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5162: This code is only implemented for a couple of matrix formats.
5164: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5165: `MatGetRowMax()`
5166: @*/
5167: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5168: {
5169: PetscFunctionBegin;
5173: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5175: if (!mat->cmap->N) {
5176: PetscCall(VecSet(v, PETSC_MAX_REAL));
5177: if (idx) {
5178: PetscInt i, m = mat->rmap->n;
5179: for (i = 0; i < m; i++) idx[i] = -1;
5180: }
5181: } else {
5182: MatCheckPreallocated(mat, 1);
5183: }
5184: PetscUseTypeMethod(mat, getrowmin, v, idx);
5185: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5186: PetscFunctionReturn(PETSC_SUCCESS);
5187: }
5189: /*@
5190: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5191: row of the matrix
5193: Logically Collective
5195: Input Parameter:
5196: . mat - the matrix
5198: Output Parameters:
5199: + v - the vector for storing the minimums
5200: - idx - the indices of the column found for each row (or `NULL` if not needed)
5202: Level: intermediate
5204: Notes:
5205: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5206: row is 0 (the first column).
5208: This code is only implemented for a couple of matrix formats.
5210: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5211: @*/
5212: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5213: {
5214: PetscFunctionBegin;
5218: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5219: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5221: if (!mat->cmap->N) {
5222: PetscCall(VecSet(v, 0.0));
5223: if (idx) {
5224: PetscInt i, m = mat->rmap->n;
5225: for (i = 0; i < m; i++) idx[i] = -1;
5226: }
5227: } else {
5228: MatCheckPreallocated(mat, 1);
5229: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5230: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5231: }
5232: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5233: PetscFunctionReturn(PETSC_SUCCESS);
5234: }
5236: /*@
5237: MatGetRowMax - Gets the maximum value (of the real part) of each
5238: row of the matrix
5240: Logically Collective
5242: Input Parameter:
5243: . mat - the matrix
5245: Output Parameters:
5246: + v - the vector for storing the maximums
5247: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5249: Level: intermediate
5251: Notes:
  The result of this call is the same as if one converted the matrix to dense format
  and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5255: This code is only implemented for a couple of matrix formats.
5257: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5258: @*/
5259: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5260: {
5261: PetscFunctionBegin;
5265: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5267: if (!mat->cmap->N) {
5268: PetscCall(VecSet(v, PETSC_MIN_REAL));
5269: if (idx) {
5270: PetscInt i, m = mat->rmap->n;
5271: for (i = 0; i < m; i++) idx[i] = -1;
5272: }
5273: } else {
5274: MatCheckPreallocated(mat, 1);
5275: PetscUseTypeMethod(mat, getrowmax, v, idx);
5276: }
5277: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5278: PetscFunctionReturn(PETSC_SUCCESS);
5279: }
5281: /*@
5282: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5283: row of the matrix
5285: Logically Collective
5287: Input Parameter:
5288: . mat - the matrix
5290: Output Parameters:
5291: + v - the vector for storing the maximums
5292: - idx - the indices of the column found for each row (or `NULL` if not needed)
5294: Level: intermediate
5296: Notes:
5297: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5298: row is 0 (the first column).
5300: This code is only implemented for a couple of matrix formats.
5302: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5303: @*/
5304: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5305: {
5306: PetscFunctionBegin;
5310: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5312: if (!mat->cmap->N) {
5313: PetscCall(VecSet(v, 0.0));
5314: if (idx) {
5315: PetscInt i, m = mat->rmap->n;
5316: for (i = 0; i < m; i++) idx[i] = -1;
5317: }
5318: } else {
5319: MatCheckPreallocated(mat, 1);
5320: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5321: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5322: }
5323: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5324: PetscFunctionReturn(PETSC_SUCCESS);
5325: }
5327: /*@
5328: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5330: Logically Collective
5332: Input Parameter:
5333: . mat - the matrix
5335: Output Parameter:
5336: . v - the vector for storing the sum
5338: Level: intermediate
  Note:
  This code is only implemented for a couple of matrix formats.
5342: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5343: @*/
5344: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5345: {
5346: PetscFunctionBegin;
5350: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5352: if (!mat->cmap->N) PetscCall(VecSet(v, 0.0));
5353: else {
5354: MatCheckPreallocated(mat, 1);
5355: PetscUseTypeMethod(mat, getrowsumabs, v);
5356: }
5357: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5358: PetscFunctionReturn(PETSC_SUCCESS);
5359: }
5361: /*@
5362: MatGetRowSum - Gets the sum of each row of the matrix
5364: Logically or Neighborhood Collective
5366: Input Parameter:
5367: . mat - the matrix
5369: Output Parameter:
5370: . v - the vector for storing the sum of rows
5372: Level: intermediate
5374: Note:
5375: This code is slow since it is not currently specialized for different formats
5377: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5378: @*/
5379: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5380: {
5381: Vec ones;
5383: PetscFunctionBegin;
5387: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5388: MatCheckPreallocated(mat, 1);
5389: PetscCall(MatCreateVecs(mat, &ones, NULL));
5390: PetscCall(VecSet(ones, 1.));
5391: PetscCall(MatMult(mat, ones, v));
5392: PetscCall(VecDestroy(&ones));
5393: PetscFunctionReturn(PETSC_SUCCESS);
5394: }
5396: /*@
5397: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5398: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5400: Collective
5402: Input Parameter:
5403: . mat - the matrix to provide the transpose
5405: Output Parameter:
5406: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5408: Level: advanced
5410: Note:
5411: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5412: routine allows bypassing that call.
5414: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5415: @*/
5416: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5417: {
5418: MatParentState *rb = NULL;
5420: PetscFunctionBegin;
5421: PetscCall(PetscNew(&rb));
5422: rb->id = ((PetscObject)mat)->id;
5423: rb->state = 0;
5424: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5425: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5426: PetscFunctionReturn(PETSC_SUCCESS);
5427: }
/* Shared driver for MatTranspose() and MatHermitianTranspose(); conjugate selects whether
   the result is additionally complex-conjugated. Handles all three MatReuse modes and
   skips recomputation when the cached parent state shows mat is unchanged. */
5429: static PetscErrorCode MatTranspose_Private(Mat mat, MatReuse reuse, Mat *B, PetscBool conjugate)
5430: {
5431: PetscContainer rB = NULL;
5432: MatParentState *rb = NULL;
5433: PetscErrorCode (*f)(Mat, MatReuse, Mat *) = NULL;
5435: PetscFunctionBegin;
5438: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5439: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5440: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5441: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5442: MatCheckPreallocated(mat, 1);
/* For reuse, *B must carry the "MatTransposeParent" record set by MatTranspose()/MatTransposeSetPrecursor(),
   and it must refer to this mat; if mat's state is unchanged the cached values are still valid. */
5443: if (reuse == MAT_REUSE_MATRIX) {
5444: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5445: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5446: PetscCall(PetscContainerGetPointer(rB, &rb));
5447: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5448: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5449: }
/* Prefer a type-provided Hermitian-transpose implementation when conjugation is requested */
5451: if (conjugate) {
5452: f = mat->ops->hermitiantranspose;
5453: if (f) PetscCall((*f)(mat, reuse, B));
5454: }
/* Fall back to transpose + MatConjugate(); an in-place Hermitian transpose of a known-Hermitian
   matrix is a no-op and is skipped entirely, as is an in-place transpose of a known-symmetric matrix */
5455: if (!f && !(reuse == MAT_INPLACE_MATRIX && mat->hermitian == PETSC_BOOL3_TRUE && conjugate)) {
5456: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5457: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5458: PetscUseTypeMethod(mat, transpose, reuse, B);
5459: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5460: }
5461: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5462: if (conjugate) PetscCall(MatConjugate(*B));
5463: }
/* Record (or refresh) on *B which matrix and which state it is the transpose of */
5465: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5466: if (reuse != MAT_INPLACE_MATRIX) {
5467: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5468: PetscCall(PetscContainerGetPointer(rB, &rb));
5469: rb->state = ((PetscObject)mat)->state;
5470: rb->nonzerostate = mat->nonzerostate;
5471: }
5472: PetscFunctionReturn(PETSC_SUCCESS);
5473: }
5475: /*@
5476: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5478: Collective
5480: Input Parameters:
5481: + mat - the matrix to transpose
5482: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5484: Output Parameter:
5485: . B - the transpose of the matrix
5487: Level: intermediate
5489: Notes:
5490: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5492: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5493: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5495: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5497: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5498: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5500: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5502: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5504: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5505: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5506: @*/
5507: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5508: {
5509: PetscFunctionBegin;
/* PETSC_FALSE: plain transpose, no complex conjugation; see MatHermitianTranspose() for the conjugated variant */
5510: PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_FALSE));
5511: PetscFunctionReturn(PETSC_SUCCESS);
5512: }
5514: /*@
5515: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5517: Collective
5519: Input Parameter:
5520: . A - the matrix to transpose
5522: Output Parameter:
5523: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5524: numerical portion.
5526: Level: intermediate
5528: Note:
5529: This is not supported for many matrix types, use `MatTranspose()` in those cases
5531: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5532: @*/
5533: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5534: {
5535: PetscFunctionBegin;
5538: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5539: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5540: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5541: PetscUseTypeMethod(A, transposesymbolic, B);
5542: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5544: PetscCall(MatTransposeSetPrecursor(A, *B));
5545: PetscFunctionReturn(PETSC_SUCCESS);
5546: }
5548: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5549: {
5550: PetscContainer rB;
5551: MatParentState *rb;
5553: PetscFunctionBegin;
5556: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5557: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5558: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5559: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5560: PetscCall(PetscContainerGetPointer(rB, &rb));
5561: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5562: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5563: PetscFunctionReturn(PETSC_SUCCESS);
5564: }
5566: /*@
5567: MatIsTranspose - Test whether a matrix is another one's transpose,
5568: or its own, in which case it tests symmetry.
5570: Collective
5572: Input Parameters:
5573: + A - the matrix to test
5574: . B - the matrix to test against, this can equal the first parameter
5575: - tol - tolerance, differences between entries smaller than this are counted as zero
5577: Output Parameter:
5578: . flg - the result
5580: Level: intermediate
5582: Notes:
5583: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5584: test involves parallel copies of the block off-diagonal parts of the matrix.
5586: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5587: @*/
5588: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5589: {
5590: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5592: PetscFunctionBegin;
5595: PetscAssertPointer(flg, 4);
5596: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5597: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5598: *flg = PETSC_FALSE;
5599: if (f && g) {
5600: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5601: PetscCall((*f)(A, B, tol, flg));
5602: } else {
5603: MatType mattype;
5605: PetscCall(MatGetType(f ? B : A, &mattype));
5606: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5607: }
5608: PetscFunctionReturn(PETSC_SUCCESS);
5609: }
5611: /*@
5612: MatHermitianTranspose - Computes the Hermitian (complex conjugate) transpose of a matrix, either in-place or out-of-place.
5614: Collective
5616: Input Parameters:
5617: + mat - the matrix to transpose and complex conjugate
5618: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5620: Output Parameter:
5621: . B - the Hermitian transpose
5623: Level: intermediate
5625: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5626: @*/
5627: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5628: {
5629: PetscFunctionBegin;
/* PETSC_TRUE: transpose with complex conjugation; see MatTranspose() for the plain transpose */
5630: PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_TRUE));
5631: PetscFunctionReturn(PETSC_SUCCESS);
5632: }
5634: /*@
5635: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose,
5637: Collective
5639: Input Parameters:
5640: + A - the matrix to test
5641: . B - the matrix to test against, this can equal the first parameter
5642: - tol - tolerance, differences between entries smaller than this are counted as zero
5644: Output Parameter:
5645: . flg - the result
5647: Level: intermediate
5649: Notes:
5650: Only available for `MATAIJ` matrices.
5652: The sequential algorithm
5653: has a running time of the order of the number of nonzeros; the parallel
5654: test involves parallel copies of the block off-diagonal parts of the matrix.
5656: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5657: @*/
5658: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5659: {
5660: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5662: PetscFunctionBegin;
5665: PetscAssertPointer(flg, 4);
5666: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5667: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5668: if (f && g) {
5669: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5670: PetscCall((*f)(A, B, tol, flg));
5671: } else {
5672: MatType mattype;
5674: PetscCall(MatGetType(f ? B : A, &mattype));
5675: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for Hermitian transpose", mattype);
5676: }
5677: PetscFunctionReturn(PETSC_SUCCESS);
5678: }
5680: /*@
5681: MatPermute - Creates a new matrix with rows and columns permuted from the
5682: original.
5684: Collective
5686: Input Parameters:
5687: + mat - the matrix to permute
5688: . row - row permutation, each processor supplies only the permutation for its rows
5689: - col - column permutation, each processor supplies only the permutation for its columns
5691: Output Parameter:
5692: . B - the permuted matrix
5694: Level: advanced
5696: Note:
5697: The index sets map from row/col of permuted matrix to row/col of original matrix.
5698: The index sets should be on the same communicator as mat and have the same local sizes.
5700: Developer Note:
5701: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5702: exploit the fact that row and col are permutations, consider implementing the
5703: more general `MatCreateSubMatrix()` instead.
5705: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5706: @*/
5707: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5708: {
5709: PetscFunctionBegin;
5714: PetscAssertPointer(B, 4);
5715: PetscCheckSameComm(mat, 1, row, 2);
5716: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5717: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5718: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5719: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5720: MatCheckPreallocated(mat, 1);
5722: if (mat->ops->permute) {
5723: PetscUseTypeMethod(mat, permute, row, col, B);
5724: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5725: } else {
5726: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5727: }
5728: PetscFunctionReturn(PETSC_SUCCESS);
5729: }
5731: /*@
5732: MatEqual - Compares two matrices.
5734: Collective
5736: Input Parameters:
5737: + A - the first matrix
5738: - B - the second matrix
5740: Output Parameter:
5741: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5743: Level: intermediate
5745: Note:
5746: If either of the matrix is "matrix-free", meaning the matrix entries are not stored explicitly then equality is determined by comparing
5747: the results of several matrix-vector product using randomly created vectors, see `MatMultEqual()`.
5749: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5750: @*/
5751: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5752: {
5753: PetscFunctionBegin;
5758: PetscAssertPointer(flg, 3);
5759: PetscCheckSameComm(A, 1, B, 2);
5760: MatCheckPreallocated(A, 1);
5761: MatCheckPreallocated(B, 2);
5762: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5763: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5764: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5765: B->cmap->N);
5766: if (A->ops->equal && A->ops->equal == B->ops->equal) PetscUseTypeMethod(A, equal, B, flg);
5767: else PetscCall(MatMultEqual(A, B, 10, flg));
5768: PetscFunctionReturn(PETSC_SUCCESS);
5769: }
5771: /*@
5772: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5773: matrices that are stored as vectors. Either of the two scaling
5774: matrices can be `NULL`.
5776: Collective
5778: Input Parameters:
5779: + mat - the matrix to be scaled
5780: . l - the left scaling vector (or `NULL`)
5781: - r - the right scaling vector (or `NULL`)
5783: Level: intermediate
5785: Note:
5786: `MatDiagonalScale()` computes $A = LAR$, where
5787: L = a diagonal matrix (stored as a vector), R = a diagonal matrix (stored as a vector)
5788: The L scales the rows of the matrix, the R scales the columns of the matrix.
5790: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5791: @*/
5792: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5793: {
5794: PetscBool flg = PETSC_FALSE;
5796: PetscFunctionBegin;
5799: if (l) {
5801: PetscCheckSameComm(mat, 1, l, 2);
5802: }
5803: if (r) {
5805: PetscCheckSameComm(mat, 1, r, 3);
5806: }
5807: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5808: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5809: MatCheckPreallocated(mat, 1);
/* with no scaling vectors the operation is the identity */
5810: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5812: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5813: PetscUseTypeMethod(mat, diagonalscale, l, r);
5814: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5815: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
/* Unequal left/right scaling generally destroys symmetry-type properties; update the cached
   flags, and reject the operation outright for SBAIJ formats that store only one triangle. */
5816: if (l != r && (PetscBool3ToBool(mat->symmetric) || PetscBool3ToBool(mat->hermitian))) {
/* symmetry survives only if l and r hold identical values */
5817: if (!PetscDefined(USE_COMPLEX) || PetscBool3ToBool(mat->symmetric)) {
5818: if (l && r) PetscCall(VecEqual(l, r, &flg));
5819: if (!flg) {
5820: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
5821: PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be the same");
5822: mat->symmetric = mat->spd = PETSC_BOOL3_FALSE;
5823: if (!PetscDefined(USE_COMPLEX)) mat->hermitian = PETSC_BOOL3_FALSE;
5824: else mat->hermitian = PETSC_BOOL3_UNKNOWN;
5825: }
5826: }
/* the Hermitian property survives only if r equals the complex conjugate of l */
5827: if (PetscDefined(USE_COMPLEX) && PetscBool3ToBool(mat->hermitian)) {
5828: flg = PETSC_FALSE;
5829: if (l && r) {
5830: Vec conjugate;
5832: PetscCall(VecDuplicate(l, &conjugate));
5833: PetscCall(VecCopy(l, conjugate));
5834: PetscCall(VecConjugate(conjugate));
5835: PetscCall(VecEqual(conjugate, r, &flg));
5836: PetscCall(VecDestroy(&conjugate));
5837: }
5838: if (!flg) {
5839: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
5840: PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format and Hermitian matrix, left and right scaling vectors must be conjugate one of the other");
5841: mat->hermitian = PETSC_BOOL3_FALSE;
5842: mat->symmetric = mat->spd = PETSC_BOOL3_UNKNOWN;
5843: }
5844: }
5845: }
5846: PetscFunctionReturn(PETSC_SUCCESS);
5847: }
5849: /*@
5850: MatScale - Scales all elements of a matrix by a given number.
5852: Logically Collective
5854: Input Parameters:
5855: + mat - the matrix to be scaled
5856: - a - the scaling value
5858: Level: intermediate
5860: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5861: @*/
5862: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5863: {
5864: PetscFunctionBegin;
5867: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5868: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5870: MatCheckPreallocated(mat, 1);
5872: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5873: if (a != (PetscScalar)1.0) {
5874: PetscUseTypeMethod(mat, scale, a);
5875: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5876: }
5877: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5878: PetscFunctionReturn(PETSC_SUCCESS);
5879: }
5881: /*@
5882: MatNorm - Calculates various norms of a matrix.
5884: Collective
5886: Input Parameters:
5887: + mat - the matrix
5888: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5890: Output Parameter:
5891: . nrm - the resulting norm
5893: Level: intermediate
5895: .seealso: [](ch_matrices), `Mat`
5896: @*/
5897: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5898: {
5899: PetscFunctionBegin;
5902: PetscAssertPointer(nrm, 3);
5904: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5905: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5906: MatCheckPreallocated(mat, 1);
5908: PetscUseTypeMethod(mat, norm, type, nrm);
5909: PetscFunctionReturn(PETSC_SUCCESS);
5910: }
5912: /*
5913: This variable is used to prevent counting of MatAssemblyBegin() that
5914: are called from within a MatAssemblyEnd().
5915: */
5916: static PetscInt MatAssemblyEnd_InUse = 0;
5917: /*@
5918: MatAssemblyBegin - Begins assembling the matrix. This routine should
5919: be called after completing all calls to `MatSetValues()`.
5921: Collective
5923: Input Parameters:
5924: + mat - the matrix
5925: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5927: Level: beginner
5929: Notes:
5930: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5931: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5933: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5934: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5935: using the matrix.
5937: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5938: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5939: a global collective operation requiring all processes that share the matrix.
5941: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5942: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5943: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5945: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5946: @*/
5947: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5948: {
5949: PetscFunctionBegin;
5952: MatCheckPreallocated(mat, 1);
5953: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
/* re-assembling: remember the matrix was previously assembled and mark it unusable until MatAssemblyEnd() */
5954: if (mat->assembled) {
5955: mat->was_assembled = PETSC_TRUE;
5956: mat->assembled = PETSC_FALSE;
5957: }
/* log the event only when not called from inside MatAssemblyEnd(), to avoid double counting */
5959: if (!MatAssemblyEnd_InUse) {
5960: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5961: PetscTryTypeMethod(mat, assemblybegin, type);
5962: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5963: } else PetscTryTypeMethod(mat, assemblybegin, type);
5964: PetscFunctionReturn(PETSC_SUCCESS);
5965: }
5967: /*@
5968: MatAssembled - Indicates if a matrix has been assembled and is ready for
5969: use; for example, in matrix-vector product.
5971: Not Collective
5973: Input Parameter:
5974: . mat - the matrix
5976: Output Parameter:
5977: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5979: Level: advanced
5981: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5982: @*/
5983: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5984: {
5985: PetscFunctionBegin;
5987: PetscAssertPointer(assembled, 2);
5988: *assembled = mat->assembled;
5989: PetscFunctionReturn(PETSC_SUCCESS);
5990: }
5992: /*@
5993: MatAssemblyEnd - Completes assembling the matrix. This routine should
5994: be called after `MatAssemblyBegin()`.
5996: Collective
5998: Input Parameters:
5999: + mat - the matrix
6000: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
6002: Options Database Key:
6003: . -mat_view [viewertype][:...] - option name and values. See `MatViewFromOptions()`/`PetscObjectViewFromOptions()` for the possible arguments
6005: Level: beginner
6007: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`,
6008: `MatViewFromOptions()`, `PetscObjectViewFromOptions()`
6009: @*/
6010: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
6011: {
/* inassm tracks recursive MatAssemblyEnd() calls so viewing/symmetry checks run only at the outermost level */
6012: static PetscInt inassm = 0;
6013: PetscBool flg = PETSC_FALSE;
6015: PetscFunctionBegin;
6019: inassm++;
6020: MatAssemblyEnd_InUse++;
/* log the event only on the outermost call; nested calls would otherwise double count */
6021: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
6022: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
6023: PetscTryTypeMethod(mat, assemblyend, type);
6024: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
6025: } else PetscTryTypeMethod(mat, assemblyend, type);
6027: /* Flush assembly is not a true assembly */
6028: if (type != MAT_FLUSH_ASSEMBLY) {
/* after the first assembly, cached symmetry/SPD properties become stale unless marked eternal */
6029: if (mat->num_ass) {
6030: if (!mat->symmetry_eternal) {
6031: mat->symmetric = PETSC_BOOL3_UNKNOWN;
6032: mat->hermitian = PETSC_BOOL3_UNKNOWN;
6033: }
6034: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
6035: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
6036: }
6037: mat->num_ass++;
6038: mat->assembled = PETSC_TRUE;
6039: mat->ass_nonzerostate = mat->nonzerostate;
6040: }
6042: mat->insertmode = NOT_SET_VALUES;
6043: MatAssemblyEnd_InUse--;
6044: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
/* only the outermost final assembly triggers optional viewing and diagnostic checks */
6045: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
6046: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6048: if (mat->checksymmetryonassembly) {
6049: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
6050: if (flg) {
6051: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
6052: } else {
6053: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
6054: }
6055: }
6056: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
6057: }
6058: inassm--;
6059: PetscFunctionReturn(PETSC_SUCCESS);
6060: }
6062: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
6063: /*@
6064: MatSetOption - Sets a parameter option for a matrix. Some options
6065: may be specific to certain storage formats. Some options
6066: determine how values will be inserted (or added). Sorted,
6067: row-oriented input will generally assemble the fastest. The default
6068: is row-oriented.
6070: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
6072: Input Parameters:
6073: + mat - the matrix
6074: . op - the option, one of those listed below (and possibly others),
6075: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6077: Options Describing Matrix Structure:
6078: + `MAT_SPD` - symmetric positive definite
6079: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
6080: . `MAT_HERMITIAN` - transpose is the complex conjugation
6081: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
6082: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
6083: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
6084: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
6086: These are not really options of the matrix, they are knowledge about the structure of the matrix that users may provide so that they
6087: do not need to be computed (usually at a high cost)
6089: Options For Use with `MatSetValues()`:
6090: Insert a logically dense subblock, which can be
6091: . `MAT_ROW_ORIENTED` - row-oriented (default)
6093: These options reflect the data you pass in with `MatSetValues()`; it has
6094: nothing to do with how the data is stored internally in the matrix
6095: data structure.
6097: When (re)assembling a matrix, we can restrict the input for
6098: efficiency/debugging purposes. These options include
6099: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
6100: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
6101: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
6102: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
6103: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
6104: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
6105: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
6106: performance for very large process counts.
6107: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
6108: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
6109: functions, instead sending only neighbor messages.
6111: Level: intermediate
6113: Notes:
6114: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
6116: Some options are relevant only for particular matrix types and
6117: are thus ignored by others. Other options are not supported by
6118: certain matrix types and will generate an error message if set.
6120: If using Fortran to compute a matrix, one may need to
6121: use the column-oriented option (or convert to the row-oriented
6122: format).
6124: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
6125: that would generate a new entry in the nonzero structure is instead
6126: ignored. Thus, if memory has not already been allocated for this particular
6127: data, then the insertion is ignored. For dense matrices, in which
6128: the entire array is allocated, no entries are ever ignored.
6129: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6131: `MAT_NEW_NONZERO_LOCATION_ERR` set to PETSC_TRUE indicates that any add or insertion
6132: that would generate a new entry in the nonzero structure instead produces
6133: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6135: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6136: that would generate a new entry that has not been preallocated will
6137: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6138: only.) This is a useful flag when debugging matrix memory preallocation.
6139: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6141: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6142: other processors should be dropped, rather than stashed.
6143: This is useful if you know that the "owning" processor is also
6144: always generating the correct matrix entries, so that PETSc need
6145: not transfer duplicate entries generated on another processor.
6147: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6148: searches during matrix assembly. When this flag is set, the hash table
6149: is created during the first matrix assembly. This hash table is
6150: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6151: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6152: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6153: supported by `MATMPIBAIJ` format only.
6155: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6156: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6158: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6159: a zero location in the matrix
6161: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6163: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6164: zero row routines and thus improves performance for very large process counts.
6166: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6167: part of the matrix (since they should match the upper triangular part).
6169: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6170: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6171: with finite difference schemes with non-periodic boundary conditions.
6173: Developer Note:
6174: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6175: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6176: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6177: not changed.
6179: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6180: @*/
PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
{
  PetscFunctionBegin;
  /* Positive option values are logically collective.
     NOTE(review): this branch body is empty in this view -- presumably the
     collective validation of op/flg lives here; confirm against the full source. */
  if (op > 0) {
  }

  /* Reject option values outside the valid MatOption enum range */
  PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);

  /* Options stored directly on the Mat header are handled here. Cases that
     return immediately need no type-specific action; cases that break fall
     through to the type's setoption() method at the bottom. */
  switch (op) {
  case MAT_FORCE_DIAGONAL_ENTRIES:
    mat->force_diagonals = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ENTRIES:
    mat->nooffprocentries = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SUBSET_OFF_PROC_ENTRIES:
    mat->assembly_subset = flg;
    if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
      /* Turning the optimization off discards the cached assembly communication pattern */
#if !defined(PETSC_HAVE_MPIUNI)
      PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
#endif
      mat->stash.first_assembly_done = PETSC_FALSE;
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ZERO_ROWS:
    mat->nooffproczerorows = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SPD:
    if (flg) {
      /* SPD implies symmetry (and, in real arithmetic, Hermitian-ness) */
      mat->spd                    = PETSC_BOOL3_TRUE;
      mat->symmetric              = PETSC_BOOL3_TRUE;
      mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
      mat->hermitian = PETSC_BOOL3_TRUE;
#endif
    } else {
      /* Clearing SPD says nothing about symmetry, so leave those flags alone */
      mat->spd = PETSC_BOOL3_FALSE;
    }
    break;
  case MAT_SYMMETRIC:
    mat->symmetric = PetscBoolToBool3(flg);
    /* Value symmetry implies structural symmetry */
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    /* In real arithmetic symmetric and Hermitian coincide */
    mat->hermitian = PetscBoolToBool3(flg);
#endif
    break;
  case MAT_HERMITIAN:
    mat->hermitian = PetscBoolToBool3(flg);
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    mat->symmetric = PetscBoolToBool3(flg);
#endif
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    mat->structurally_symmetric = PetscBoolToBool3(flg);
    break;
  case MAT_SYMMETRY_ETERNAL:
    /* Eternal flags only make sense once the underlying property is known */
    PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
    mat->symmetry_eternal = flg;
    if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
    break;
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
    PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
    mat->structural_symmetry_eternal = flg;
    break;
  case MAT_SPD_ETERNAL:
    PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
    mat->spd_eternal = flg;
    if (flg) {
      /* Eternal SPD implies the symmetry properties are eternal too */
      mat->structural_symmetry_eternal = PETSC_TRUE;
      mat->symmetry_eternal            = PETSC_TRUE;
    }
    break;
  case MAT_STRUCTURE_ONLY:
    mat->structure_only = flg;
    break;
  case MAT_SORTED_FULL:
    mat->sortedfull = flg;
    break;
  default:
    break;
  }
  /* Give the matrix type a chance to act on the option (no error if it does not) */
  PetscTryTypeMethod(mat, setoption, op, flg);
  PetscFunctionReturn(PETSC_SUCCESS);
}
6270: /*@
6271: MatGetOption - Gets a parameter option that has been set for a matrix.
6273: Logically Collective
6275: Input Parameters:
6276: + mat - the matrix
6277: - op - the option, this only responds to certain options, check the code for which ones
6279: Output Parameter:
. flg - the current value of the option, `PETSC_TRUE` (on) or `PETSC_FALSE` (off)
6282: Level: intermediate
6284: Notes:
6285: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6287: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6288: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6290: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6291: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6292: @*/
6293: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6294: {
6295: PetscFunctionBegin;
6299: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6300: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6302: switch (op) {
6303: case MAT_NO_OFF_PROC_ENTRIES:
6304: *flg = mat->nooffprocentries;
6305: break;
6306: case MAT_NO_OFF_PROC_ZERO_ROWS:
6307: *flg = mat->nooffproczerorows;
6308: break;
6309: case MAT_SYMMETRIC:
6310: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6311: break;
6312: case MAT_HERMITIAN:
6313: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6314: break;
6315: case MAT_STRUCTURALLY_SYMMETRIC:
6316: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6317: break;
6318: case MAT_SPD:
6319: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6320: break;
6321: case MAT_SYMMETRY_ETERNAL:
6322: *flg = mat->symmetry_eternal;
6323: break;
6324: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6325: *flg = mat->symmetry_eternal;
6326: break;
6327: default:
6328: break;
6329: }
6330: PetscFunctionReturn(PETSC_SUCCESS);
6331: }
6333: /*@
6334: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6335: this routine retains the old nonzero structure.
6337: Logically Collective
6339: Input Parameter:
6340: . mat - the matrix
6342: Level: intermediate
6344: Note:
6345: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6346: See the Performance chapter of the users manual for information on preallocating matrices.
6348: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6349: @*/
PetscErrorCode MatZeroEntries(Mat mat)
{
  PetscFunctionBegin;
  /* Zeroing a factored matrix would destroy the factorization */
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* Disallow zeroing while values from MatSetValues() are pending assembly */
  PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
  /* Dispatch to the type-specific implementation; errors if the type has none */
  PetscUseTypeMethod(mat, zeroentries);
  PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
  /* Record that the matrix values changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6366: /*@
6367: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6368: of a set of rows and columns of a matrix.
6370: Collective
6372: Input Parameters:
6373: + mat - the matrix
6374: . numRows - the number of rows/columns to zero
6375: . rows - the global row indices
6376: . diag - value put in the diagonal of the eliminated rows
6377: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6378: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6380: Level: intermediate
6382: Notes:
6383: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6385: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6386: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6388: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6389: Krylov method to take advantage of the known solution on the zeroed rows.
6391: For the parallel case, all processes that share the matrix (i.e.,
6392: those in the communicator used for matrix creation) MUST call this
6393: routine, regardless of whether any rows being zeroed are owned by
6394: them.
6396: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6397: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6398: missing.
6400: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6401: list only rows local to itself).
6403: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6405: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6406: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6407: @*/
PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when there is nothing to zero on this process */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* Dispatch to the type-specific implementation; errors if unsupported by the type */
  PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
  /* Record that the matrix values changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6424: /*@
6425: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6426: of a set of rows and columns of a matrix.
6428: Collective
6430: Input Parameters:
6431: + mat - the matrix
6432: . is - the rows to zero
6433: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6434: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6435: - b - optional vector of right-hand side, that will be adjusted by provided solution
6437: Level: intermediate
6439: Note:
6440: See `MatZeroRowsColumns()` for details on how this routine operates.
6442: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6443: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6444: @*/
6445: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6446: {
6447: PetscInt numRows;
6448: const PetscInt *rows;
6450: PetscFunctionBegin;
6455: PetscCall(ISGetLocalSize(is, &numRows));
6456: PetscCall(ISGetIndices(is, &rows));
6457: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6458: PetscCall(ISRestoreIndices(is, &rows));
6459: PetscFunctionReturn(PETSC_SUCCESS);
6460: }
6462: /*@
6463: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6464: of a set of rows of a matrix.
6466: Collective
6468: Input Parameters:
6469: + mat - the matrix
6470: . numRows - the number of rows to zero
6471: . rows - the global row indices
6472: . diag - value put in the diagonal of the zeroed rows
6473: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6474: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6476: Level: intermediate
6478: Notes:
6479: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6481: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6483: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6484: Krylov method to take advantage of the known solution on the zeroed rows.
May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns)
6487: from the matrix.
6489: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6490: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6491: formats this does not alter the nonzero structure.
6493: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) the nonzero structure
6494: of the matrix is not changed the values are
6495: merely zeroed.
6497: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6498: formats can optionally remove the main diagonal entry from the
6499: nonzero structure as well, by passing 0.0 as the final argument).
6501: For the parallel case, all processes that share the matrix (i.e.,
6502: those in the communicator used for matrix creation) MUST call this
6503: routine, regardless of whether any rows being zeroed are owned by
6504: them.
6506: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6507: list only rows local to itself).
6509: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6510: owns that are to be zeroed. This saves a global synchronization in the implementation.
6512: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6513: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6514: @*/
PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when there is nothing to zero on this process */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* Dispatch to the type-specific implementation; errors if unsupported by the type */
  PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
  /* Record that the matrix values changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6531: /*@
6532: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6533: of a set of rows of a matrix indicated by an `IS`
6535: Collective
6537: Input Parameters:
6538: + mat - the matrix
6539: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6540: . diag - value put in all diagonals of eliminated rows
6541: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6542: - b - optional vector of right-hand side, that will be adjusted by provided solution
6544: Level: intermediate
6546: Note:
6547: See `MatZeroRows()` for details on how this routine operates.
6549: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6550: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6551: @*/
6552: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6553: {
6554: PetscInt numRows = 0;
6555: const PetscInt *rows = NULL;
6557: PetscFunctionBegin;
6560: if (is) {
6562: PetscCall(ISGetLocalSize(is, &numRows));
6563: PetscCall(ISGetIndices(is, &rows));
6564: }
6565: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6566: if (is) PetscCall(ISRestoreIndices(is, &rows));
6567: PetscFunctionReturn(PETSC_SUCCESS);
6568: }
6570: /*@
6571: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6572: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6574: Collective
6576: Input Parameters:
6577: + mat - the matrix
6578: . numRows - the number of rows to remove
6579: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6580: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6581: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6582: - b - optional vector of right-hand side, that will be adjusted by provided solution
6584: Level: intermediate
6586: Notes:
6587: See `MatZeroRows()` for details on how this routine operates.
6589: The grid coordinates are across the entire grid, not just the local portion
For periodic boundary conditions use negative indices for values to the left (below 0; these are
obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
`DM_BOUNDARY_PERIODIC` boundary type.
For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
a single value per point) you can skip filling those indices.
6599: Fortran Note:
6600: `idxm` and `idxn` should be declared as
6601: .vb
6602: MatStencil idxm(4, m)
6603: .ve
6604: and the values inserted using
6605: .vb
6606: idxm(MatStencil_i, 1) = i
6607: idxm(MatStencil_j, 1) = j
6608: idxm(MatStencil_k, 1) = k
6609: idxm(MatStencil_c, 1) = c
6610: etc
6611: .ve
6613: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6614: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6615: @*/
PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscInt  dim  = mat->stencil.dim;
  /* Number of stencil slots actually used: noc == PETSC_TRUE means there is no component slot */
  PetscInt  sdim = dim - (1 - (PetscInt)mat->stencil.noc);
  PetscInt *dims = mat->stencil.dims + 1;
  PetscInt *starts = mat->stencil.starts;
  /* Walk the MatStencil entries as a flat array of PetscInt (each entry is 4 ints: k, j, i, c) */
  PetscInt *dxm = (PetscInt *)rows;
  PetscInt *jdxm, i, j, tmp, numNewRows = 0;

  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3);

  PetscCall(PetscMalloc1(numRows, &jdxm));
  for (i = 0; i < numRows; ++i) {
    /* Skip unused dimensions (they are ordered k, j, i, c) */
    for (j = 0; j < 3 - sdim; ++j) dxm++;
    /* Local index in X dir */
    tmp = *dxm++ - starts[0];
    /* Loop over remaining dimensions, folding each coordinate into a single local index */
    for (j = 0; j < dim - 1; ++j) {
      /* If nonlocal, set index to be negative */
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
      /* Update local index */
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    /* Skip component slot if necessary */
    if (mat->stencil.noc) dxm++;
    /* Local row number; entries that fell outside this process's part of the grid (tmp < 0) are dropped */
    if (tmp >= 0) jdxm[numNewRows++] = tmp;
  }
  /* Zero the surviving rows using local numbering */
  PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
  PetscCall(PetscFree(jdxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6653: /*@
6654: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6655: of a set of rows and columns of a matrix.
6657: Collective
6659: Input Parameters:
6660: + mat - the matrix
6661: . numRows - the number of rows/columns to remove
6662: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6663: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6664: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6665: - b - optional vector of right-hand side, that will be adjusted by provided solution
6667: Level: intermediate
6669: Notes:
6670: See `MatZeroRowsColumns()` for details on how this routine operates.
6672: The grid coordinates are across the entire grid, not just the local portion
For periodic boundary conditions use negative indices for values to the left (below 0; these are
obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
`DM_BOUNDARY_PERIODIC` boundary type.
For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
a single value per point) you can skip filling those indices.
6682: Fortran Note:
6683: `idxm` and `idxn` should be declared as
6684: .vb
6685: MatStencil idxm(4, m)
6686: .ve
6687: and the values inserted using
6688: .vb
6689: idxm(MatStencil_i, 1) = i
6690: idxm(MatStencil_j, 1) = j
6691: idxm(MatStencil_k, 1) = k
6692: idxm(MatStencil_c, 1) = c
6693: etc
6694: .ve
6696: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6697: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6698: @*/
PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscInt  dim  = mat->stencil.dim;
  /* Number of stencil slots actually used: noc == PETSC_TRUE means there is no component slot */
  PetscInt  sdim = dim - (1 - (PetscInt)mat->stencil.noc);
  PetscInt *dims = mat->stencil.dims + 1;
  PetscInt *starts = mat->stencil.starts;
  /* Walk the MatStencil entries as a flat array of PetscInt (each entry is 4 ints: k, j, i, c) */
  PetscInt *dxm = (PetscInt *)rows;
  PetscInt *jdxm, i, j, tmp, numNewRows = 0;

  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3);

  PetscCall(PetscMalloc1(numRows, &jdxm));
  for (i = 0; i < numRows; ++i) {
    /* Skip unused dimensions (they are ordered k, j, i, c) */
    for (j = 0; j < 3 - sdim; ++j) dxm++;
    /* Local index in X dir */
    tmp = *dxm++ - starts[0];
    /* Loop over remaining dimensions, folding each coordinate into a single local index */
    for (j = 0; j < dim - 1; ++j) {
      /* If nonlocal, set index to be negative */
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
      /* Update local index */
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    /* Skip component slot if necessary */
    if (mat->stencil.noc) dxm++;
    /* Local row number; entries that fell outside this process's part of the grid (tmp < 0) are dropped */
    if (tmp >= 0) jdxm[numNewRows++] = tmp;
  }
  /* Zero the surviving rows AND columns using local numbering */
  PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
  PetscCall(PetscFree(jdxm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6736: /*@
6737: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6738: of a set of rows of a matrix; using local numbering of rows.
6740: Collective
6742: Input Parameters:
6743: + mat - the matrix
6744: . numRows - the number of rows to remove
6745: . rows - the local row indices
6746: . diag - value put in all diagonals of eliminated rows
6747: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6748: - b - optional vector of right-hand side, that will be adjusted by provided solution
6750: Level: intermediate
6752: Notes:
6753: Before calling `MatZeroRowsLocal()`, the user must first set the
6754: local-to-global mapping by calling MatSetLocalToGlobalMapping(), this is often already set for matrices obtained with `DMCreateMatrix()`.
6756: See `MatZeroRows()` for details on how this routine operates.
6758: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6759: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6760: @*/
PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when there is nothing to zero on this process */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  if (mat->ops->zerorowslocal) {
    /* The type provides a native local-numbering implementation */
    PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
  } else {
    /* Fallback: translate local indices to global ones, then use the global zerorows */
    IS is, newis;
    PetscInt *newRows, nl = 0;

    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
    PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
    /* Compact out indices that did not map (negative); note this writes into newis's own array */
    for (PetscInt i = 0; i < numRows; i++)
      if (newRows[i] > -1) newRows[nl++] = newRows[i];
    PetscUseTypeMethod(mat, zerorows, nl, newRows, diag, x, b);
    PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
    PetscCall(ISDestroy(&newis));
    PetscCall(ISDestroy(&is));
  }
  /* Record that the matrix values changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6792: /*@
6793: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6794: of a set of rows of a matrix; using local numbering of rows.
6796: Collective
6798: Input Parameters:
6799: + mat - the matrix
6800: . is - index set of rows to remove
6801: . diag - value put in all diagonals of eliminated rows
6802: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6803: - b - optional vector of right-hand side, that will be adjusted by provided solution
6805: Level: intermediate
6807: Notes:
6808: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6809: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6811: See `MatZeroRows()` for details on how this routine operates.
6813: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6814: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6815: @*/
6816: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6817: {
6818: PetscInt numRows;
6819: const PetscInt *rows;
6821: PetscFunctionBegin;
6825: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6826: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6827: MatCheckPreallocated(mat, 1);
6829: PetscCall(ISGetLocalSize(is, &numRows));
6830: PetscCall(ISGetIndices(is, &rows));
6831: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6832: PetscCall(ISRestoreIndices(is, &rows));
6833: PetscFunctionReturn(PETSC_SUCCESS);
6834: }
6836: /*@
6837: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6838: of a set of rows and columns of a matrix; using local numbering of rows.
6840: Collective
6842: Input Parameters:
6843: + mat - the matrix
6844: . numRows - the number of rows to remove
6845: . rows - the global row indices
6846: . diag - value put in all diagonals of eliminated rows
6847: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6848: - b - optional vector of right-hand side, that will be adjusted by provided solution
6850: Level: intermediate
6852: Notes:
6853: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6854: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6856: See `MatZeroRowsColumns()` for details on how this routine operates.
6858: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6859: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6860: @*/
PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when there is nothing to zero on this process */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  if (mat->ops->zerorowscolumnslocal) {
    /* The type provides a native local-numbering implementation */
    PetscUseTypeMethod(mat, zerorowscolumnslocal, numRows, rows, diag, x, b);
  } else {
    /* Fallback: translate local indices to global ones, then use the global zerorowscolumns */
    IS is, newis;
    PetscInt *newRows, nl = 0;

    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
    PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
    /* Compact out indices that did not map (negative); note this writes into newis's own array */
    for (PetscInt i = 0; i < numRows; i++)
      if (newRows[i] > -1) newRows[nl++] = newRows[i];
    PetscUseTypeMethod(mat, zerorowscolumns, nl, newRows, diag, x, b);
    PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
    PetscCall(ISDestroy(&newis));
    PetscCall(ISDestroy(&is));
  }
  /* Record that the matrix values changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6892: /*@
6893: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6894: of a set of rows and columns of a matrix; using local numbering of rows.
6896: Collective
6898: Input Parameters:
6899: + mat - the matrix
6900: . is - index set of rows to remove
6901: . diag - value put in all diagonals of eliminated rows
6902: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6903: - b - optional vector of right-hand side, that will be adjusted by provided solution
6905: Level: intermediate
6907: Notes:
6908: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6909: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6911: See `MatZeroRowsColumns()` for details on how this routine operates.
6913: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6914: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6915: @*/
6916: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6917: {
6918: PetscInt numRows;
6919: const PetscInt *rows;
6921: PetscFunctionBegin;
6925: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6926: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6927: MatCheckPreallocated(mat, 1);
6929: PetscCall(ISGetLocalSize(is, &numRows));
6930: PetscCall(ISGetIndices(is, &rows));
6931: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6932: PetscCall(ISRestoreIndices(is, &rows));
6933: PetscFunctionReturn(PETSC_SUCCESS);
6934: }
6936: /*@
6937: MatGetSize - Returns the numbers of rows and columns in a matrix.
6939: Not Collective
6941: Input Parameter:
6942: . mat - the matrix
6944: Output Parameters:
6945: + m - the number of global rows
6946: - n - the number of global columns
6948: Level: beginner
6950: Note:
6951: Both output parameters can be `NULL` on input.
6953: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6954: @*/
6955: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6956: {
6957: PetscFunctionBegin;
6959: if (m) *m = mat->rmap->N;
6960: if (n) *n = mat->cmap->N;
6961: PetscFunctionReturn(PETSC_SUCCESS);
6962: }
6964: /*@
6965: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, Returns the number of local rows and local columns
6966: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6968: Not Collective
6970: Input Parameter:
6971: . mat - the matrix
6973: Output Parameters:
6974: + m - the number of local rows, use `NULL` to not obtain this value
6975: - n - the number of local columns, use `NULL` to not obtain this value
6977: Level: beginner
6979: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6980: @*/
6981: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6982: {
6983: PetscFunctionBegin;
6985: if (m) PetscAssertPointer(m, 2);
6986: if (n) PetscAssertPointer(n, 3);
6987: if (m) *m = mat->rmap->n;
6988: if (n) *n = mat->cmap->n;
6989: PetscFunctionReturn(PETSC_SUCCESS);
6990: }
6992: /*@
6993: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
6994: vector one multiplies this matrix by that are owned by this processor.
6996: Not Collective, unless matrix has not been allocated, then collective
6998: Input Parameter:
6999: . mat - the matrix
7001: Output Parameters:
7002: + m - the global index of the first local column, use `NULL` to not obtain this value
7003: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
7005: Level: developer
7007: Notes:
7008: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7010: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7011: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7013: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7014: the local values in the matrix.
7016: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
7017: Layouts](sec_matlayout) for details on matrix layouts.
7019: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
7020: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
7021: @*/
7022: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
7023: {
7024: PetscFunctionBegin;
7027: if (m) PetscAssertPointer(m, 2);
7028: if (n) PetscAssertPointer(n, 3);
7029: MatCheckPreallocated(mat, 1);
7030: if (m) *m = mat->cmap->rstart;
7031: if (n) *n = mat->cmap->rend;
7032: PetscFunctionReturn(PETSC_SUCCESS);
7033: }
7035: /*@
7036: MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
7037: this MPI process.
7039: Not Collective
7041: Input Parameter:
7042: . mat - the matrix
7044: Output Parameters:
7045: + m - the global index of the first local row, use `NULL` to not obtain this value
7046: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
7048: Level: beginner
7050: Notes:
7051: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7053: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7054: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7056: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7057: the local values in the matrix.
7059: The high argument is one more than the last element stored locally.
7061: For all matrices it returns the range of matrix rows associated with rows of a vector that
7062: would contain the result of a matrix vector product with this matrix. See [Matrix
7063: Layouts](sec_matlayout) for details on matrix layouts.
7065: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
7066: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
7067: @*/
7068: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
7069: {
7070: PetscFunctionBegin;
7073: if (m) PetscAssertPointer(m, 2);
7074: if (n) PetscAssertPointer(n, 3);
7075: MatCheckPreallocated(mat, 1);
7076: if (m) *m = mat->rmap->rstart;
7077: if (n) *n = mat->rmap->rend;
7078: PetscFunctionReturn(PETSC_SUCCESS);
7079: }
/*@C
  MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
  `MATSCALAPACK`, returns the range of matrix rows owned by each process.

  Not Collective, unless matrix has not been allocated

  Input Parameter:
. mat - the matrix

  Output Parameter:
. ranges - start of each processor's portion plus one more than the total length at the end, of length `size` + 1
           where `size` is the number of MPI processes used by `mat`

  Level: beginner

  Notes:
  If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.

  If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
  If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.

  For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
  the local values in the matrix.

  For all matrices it returns the ranges of matrix rows associated with rows of a vector that
  would contain the result of a matrix vector product with this matrix. See [Matrix
  Layouts](sec_matlayout) for details on matrix layouts.

.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
          `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
          `DMDAGetGhostCorners()`, `DM`
@*/
PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* Delegate to the row PetscLayout, which holds the per-process range array */
  PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
  vector one multiplies this vector by that are owned by each processor.

  Not Collective, unless matrix has not been allocated

  Input Parameter:
. mat - the matrix

  Output Parameter:
. ranges - start of each processor's portion plus one more than the total length at the end

  Level: beginner

  Notes:
  If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.

  If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
  If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.

  For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
  the local values in the matrix.

  Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
  Layouts](sec_matlayout) for details on matrix layouts.

.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
          `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
          `DMDAGetGhostCorners()`, `DM`
@*/
PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* Delegate to the column PetscLayout, which holds the per-process range array */
  PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetOwnershipIS - Get row and column ownership of a matrices' values as index sets.

  Not Collective

  Input Parameter:
. A - matrix

  Output Parameters:
+ rows - rows in which this process owns elements, use `NULL` to not obtain this value
- cols - columns in which this process owns elements, use `NULL` to not obtain this value

  Level: intermediate

  Note:
  You should call `ISDestroy()` on the returned `IS`

  For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
  returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
  `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
  details on matrix layouts.

.seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
@*/
PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
{
  PetscErrorCode (*f)(Mat, IS *, IS *);

  PetscFunctionBegin;
  MatCheckPreallocated(A, 1);
  /* Matrix types with non-row-based layouts (per the man page, e.g. MATELEMENTAL/MATSCALAPACK)
     register "MatGetOwnershipIS_C" to report their own ownership */
  PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
  if (f) {
    PetscCall((*f)(A, rows, cols));
  } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
    if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
    if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
7205: /*@
7206: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
7207: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7208: to complete the factorization.
7210: Collective
7212: Input Parameters:
7213: + fact - the factorized matrix obtained with `MatGetFactor()`
7214: . mat - the matrix
7215: . row - row permutation
7216: . col - column permutation
7217: - info - structure containing
7218: .vb
7219: levels - number of levels of fill.
7220: expected fill - as ratio of original fill.
7221: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7222: missing diagonal entries)
7223: .ve
7225: Level: developer
7227: Notes:
7228: See [Matrix Factorization](sec_matfactor) for additional information.
7230: Most users should employ the `KSP` interface for linear solvers
7231: instead of working directly with matrix algebra routines such as this.
7232: See, e.g., `KSPCreate()`.
7234: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7236: Fortran Note:
7237: A valid (non-null) `info` argument must be provided
7239: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
7240: `MatGetOrdering()`, `MatFactorInfo`
7241: @*/
7242: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7243: {
7244: PetscFunctionBegin;
7249: PetscAssertPointer(info, 5);
7250: PetscAssertPointer(fact, 1);
7251: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7252: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7253: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7254: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7255: MatCheckPreallocated(mat, 2);
7257: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7258: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7259: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7260: PetscFunctionReturn(PETSC_SUCCESS);
7261: }
7263: /*@
7264: MatICCFactorSymbolic - Performs symbolic incomplete
7265: Cholesky factorization for a symmetric matrix. Use
7266: `MatCholeskyFactorNumeric()` to complete the factorization.
7268: Collective
7270: Input Parameters:
7271: + fact - the factorized matrix obtained with `MatGetFactor()`
7272: . mat - the matrix to be factored
7273: . perm - row and column permutation
7274: - info - structure containing
7275: .vb
7276: levels - number of levels of fill.
7277: expected fill - as ratio of original fill.
7278: .ve
7280: Level: developer
7282: Notes:
7283: Most users should employ the `KSP` interface for linear solvers
7284: instead of working directly with matrix algebra routines such as this.
7285: See, e.g., `KSPCreate()`.
7287: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7289: Fortran Note:
7290: A valid (non-null) `info` argument must be provided
7292: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7293: @*/
7294: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7295: {
7296: PetscFunctionBegin;
7300: PetscAssertPointer(info, 4);
7301: PetscAssertPointer(fact, 1);
7302: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7303: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7304: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7305: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7306: MatCheckPreallocated(mat, 2);
7308: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7309: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7310: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7311: PetscFunctionReturn(PETSC_SUCCESS);
7312: }
/*@C
  MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
  points to an array of valid matrices, they may be reused to store the new
  submatrices.

  Collective

  Input Parameters:
+ mat   - the matrix
. n     - the number of submatrices to be extracted (on this processor, may be zero)
. irow  - index set of rows to extract
. icol  - index set of columns to extract
- scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

  Output Parameter:
. submat - the array of submatrices

  Level: advanced

  Notes:
  `MatCreateSubMatrices()` can extract ONLY sequential submatrices
  (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
  to extract a parallel submatrix.

  Some matrix types place restrictions on the row and column
  indices, such as that they be sorted or that they be equal to each other.

  The index sets may not have duplicate entries.

  When extracting submatrices from a parallel matrix, each processor can
  form a different submatrix by setting the rows and columns of its
  individual index sets according to the local submatrix desired.

  When finished using the submatrices, the user should destroy
  them with `MatDestroySubMatrices()`.

  `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
  original matrix has not changed from that last call to `MatCreateSubMatrices()`.

  This routine creates the matrices in submat; you should NOT create them before
  calling it. It also allocates the array of matrix pointers submat.

  For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
  request one row/column in a block, they must request all rows/columns that are in
  that block. For example, if the block size is 2 you cannot request just row 0 and
  column 0.

  Fortran Note:
.vb
  Mat, pointer :: submat(:)
.ve

.seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
@*/
PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscInt  i;
  PetscBool eq;

  PetscFunctionBegin;
  if (n) {
    PetscAssertPointer(irow, 3);
    PetscAssertPointer(icol, 4);
  }
  PetscAssertPointer(submat, 6);
  /* On reuse, the caller-provided array must already exist */
  if (n && scall == MAT_REUSE_MATRIX) {
    PetscAssertPointer(*submat, 6);
  }
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
  PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
  /* Post-process each extracted submatrix */
  for (i = 0; i < n; i++) {
    (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
    /* A submatrix taken with identical row and column index sets inherits symmetry-related options */
    PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
    if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
    /* Propagate CPU binding to submatrices when the parent requests it */
    if (mat->boundtocpu && mat->bindingpropagates) {
      PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
      PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
    }
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
7407: /*@C
7408: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7410: Collective
7412: Input Parameters:
7413: + mat - the matrix
7414: . n - the number of submatrixes to be extracted
7415: . irow - index set of rows to extract
7416: . icol - index set of columns to extract
7417: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7419: Output Parameter:
7420: . submat - the array of submatrices
7422: Level: advanced
7424: Note:
7425: This is used by `PCGASM`
7427: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7428: @*/
7429: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7430: {
7431: PetscInt i;
7432: PetscBool eq;
7434: PetscFunctionBegin;
7437: if (n) {
7438: PetscAssertPointer(irow, 3);
7440: PetscAssertPointer(icol, 4);
7442: }
7443: PetscAssertPointer(submat, 6);
7444: if (n && scall == MAT_REUSE_MATRIX) {
7445: PetscAssertPointer(*submat, 6);
7447: }
7448: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7449: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7450: MatCheckPreallocated(mat, 1);
7452: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7453: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7454: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7455: for (i = 0; i < n; i++) {
7456: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7457: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7458: }
7459: PetscFunctionReturn(PETSC_SUCCESS);
7460: }
7462: /*@C
7463: MatDestroyMatrices - Destroys an array of matrices
7465: Collective
7467: Input Parameters:
7468: + n - the number of local matrices
7469: - mat - the matrices (this is a pointer to the array of matrices)
7471: Level: advanced
7473: Notes:
7474: Frees not only the matrices, but also the array that contains the matrices
7476: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7478: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7479: @*/
7480: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7481: {
7482: PetscInt i;
7484: PetscFunctionBegin;
7485: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7486: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7487: PetscAssertPointer(mat, 2);
7489: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7491: /* memory is allocated even if n = 0 */
7492: PetscCall(PetscFree(*mat));
7493: PetscFunctionReturn(PETSC_SUCCESS);
7494: }
/*@C
  MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.

  Collective

  Input Parameters:
+ n   - the number of local matrices
- mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)

  Level: advanced

  Note:
  Frees not only the matrices, but also the array that contains the matrices

.seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
@*/
PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
{
  Mat mat0;

  PetscFunctionBegin;
  if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
  /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
  PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
  PetscAssertPointer(mat, 2);

  /* If the matrix type registered its own destroysubmatrices op, use it
     (NOTE(review): presumably it releases type-specific data shared across the
     submatrices -- see MatCreateSubMatrices_xxx); otherwise fall back to the
     generic array destroy. */
  mat0 = (*mat)[0];
  if (mat0 && mat0->ops->destroysubmatrices) {
    PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
  } else {
    PetscCall(MatDestroyMatrices(n, mat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
7531: /*@
7532: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7534: Collective
7536: Input Parameter:
7537: . mat - the matrix
7539: Output Parameter:
7540: . matstruct - the sequential matrix with the nonzero structure of `mat`
7542: Level: developer
7544: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7545: @*/
7546: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7547: {
7548: PetscFunctionBegin;
7550: PetscAssertPointer(matstruct, 2);
7553: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7554: MatCheckPreallocated(mat, 1);
7556: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7557: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7558: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7559: PetscFunctionReturn(PETSC_SUCCESS);
7560: }
/*@C
  MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.

  Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

  Note:
  This is not needed, one can just call `MatDestroy()`

.seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
@*/
PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
{
  PetscFunctionBegin;
  PetscAssertPointer(mat, 1);
  /* Kept for API symmetry with MatGetSeqNonzeroStructure(); just a plain destroy */
  PetscCall(MatDestroy(mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7585: /*@
7586: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7587: replaces the index sets by larger ones that represent submatrices with
7588: additional overlap.
7590: Collective
7592: Input Parameters:
7593: + mat - the matrix
7594: . n - the number of index sets
7595: . is - the array of index sets (these index sets will changed during the call)
7596: - ov - the additional overlap requested
7598: Options Database Key:
7599: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7601: Level: developer
7603: Note:
7604: The computed overlap preserves the matrix block sizes when the blocks are square.
7605: That is: if a matrix nonzero for a given block would increase the overlap all columns associated with
7606: that block are included in the overlap regardless of whether each specific column would increase the overlap.
7608: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7609: @*/
7610: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7611: {
7612: PetscInt i, bs, cbs;
7614: PetscFunctionBegin;
7618: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7619: if (n) {
7620: PetscAssertPointer(is, 3);
7622: }
7623: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7624: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7625: MatCheckPreallocated(mat, 1);
7627: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7628: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7629: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7630: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7631: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7632: if (bs == cbs) {
7633: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7634: }
7635: PetscFunctionReturn(PETSC_SUCCESS);
7636: }
/* Defined elsewhere; increases the overlap of a single IS across a sub communicator */
PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);

/*@
  MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
  a sub communicator, replaces the index sets by larger ones that represent submatrices with
  additional overlap.

  Collective

  Input Parameters:
+ mat - the matrix
. n   - the number of index sets
. is  - the array of index sets (these index sets will changed during the call)
- ov  - the additional overlap requested

  Options Database Key:
. -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
@*/
PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
{
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
  if (n) {
    PetscAssertPointer(is, 3);
  }
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* No overlap requested: nothing to do */
  if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
  /* Each index set is enlarged independently, one at a time */
  PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
  for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
  PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetBlockSize - Returns the matrix block size.

  Not Collective

  Input Parameter:
. mat - the matrix

  Output Parameter:
. bs - block size

  Level: intermediate

  Notes:
  Block row formats are `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.

  If the block size has not been set yet this routine returns 1.

.seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
@*/
PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
{
  PetscFunctionBegin;
  PetscAssertPointer(bs, 2);
  /* The block size is stored on the row PetscLayout */
  *bs = mat->rmap->bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
7711: /*@
7712: MatGetBlockSizes - Returns the matrix block row and column sizes.
7714: Not Collective
7716: Input Parameter:
7717: . mat - the matrix
7719: Output Parameters:
7720: + rbs - row block size
7721: - cbs - column block size
7723: Level: intermediate
7725: Notes:
  The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7727: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7729: If a block size has not been set yet this routine returns 1.
7731: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7732: @*/
PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
{
  PetscFunctionBegin;
  /* either output may be NULL when the caller does not need that value */
  if (rbs) PetscAssertPointer(rbs, 2);
  if (cbs) PetscAssertPointer(cbs, 3);
  if (rbs) *rbs = mat->rmap->bs;
  if (cbs) *cbs = mat->cmap->bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
7744: /*@
7745: MatSetBlockSize - Sets the matrix block size.
7747: Logically Collective
7749: Input Parameters:
7750: + mat - the matrix
7751: - bs - block size
7753: Level: intermediate
7755: Notes:
  The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7757: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7759: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7760: is compatible with the matrix local sizes.
7762: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7763: @*/
PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
{
  PetscFunctionBegin;
  /* a single block size means square blocks: set both the row and column block size */
  PetscCall(MatSetBlockSizes(mat, bs, bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Cached result of MatComputeVariableBlockEnvelope(), attached to the matrix as the "EnvelopeData" composed object */
typedef struct {
  PetscInt         n;            /* number of local diagonal blocks */
  IS              *is;           /* row/column indices of each local block in the original matrix */
  Mat             *mat;          /* work array of extracted block submatrices (used by MatInvertVariableBlockEnvelope()) */
  PetscObjectState nonzerostate; /* nonzero state of the matrix when the envelope was computed */
  Mat              C;            /* preallocated matrix that receives the inverted block diagonal */
} EnvelopeData;
7781: static PetscErrorCode EnvelopeDataDestroy(PetscCtxRt ptr)
7782: {
7783: EnvelopeData *edata = *(EnvelopeData **)ptr;
7785: PetscFunctionBegin;
7786: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7787: PetscCall(PetscFree(edata->is));
7788: PetscCall(PetscFree(edata));
7789: PetscFunctionReturn(PETSC_SUCCESS);
7790: }
7792: /*@
7793: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7794: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7796: Collective
7798: Input Parameter:
7799: . mat - the matrix
7801: Level: intermediate
7803: Notes:
7804: There can be zeros within the blocks
7806: The blocks can overlap between processes, including laying on more than two processes
7808: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7809: @*/
PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
{
  PetscInt           n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
  PetscInt          *diag, *odiag, sc;
  VecScatter         scatter;
  PetscScalar       *seqv;
  const PetscScalar *parv;
  const PetscInt    *ia, *ja;
  PetscBool          set, flag, done;
  Mat                AA = mat, A;
  MPI_Comm           comm;
  PetscMPIInt        rank, size, tag;
  MPI_Status         status;
  PetscContainer     container;
  EnvelopeData      *edata;
  Vec                seq, par;
  IS                 isglobal;

  PetscFunctionBegin;
  PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
  if (!set || !flag) {
    /* work with the structurally symmetric AA = mat^T + mat so the scan below only needs the largest column index of each row */
    /* TODO: only needs nonzero structure of transpose */
    PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
    PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
  }
  PetscCall(MatAIJGetLocalMat(AA, &A));
  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
  PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");

  PetscCall(MatGetLocalSize(mat, &n, NULL));
  PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  PetscCall(PetscMalloc2(n, &sizes, n, &starts));

  /* Blocks may straddle process boundaries, so the boundary scan is pipelined across ranks:
     each rank receives the running envelope `env` (largest column index reached so far) and
     the start row `tbs` of the current block from the previous rank */
  if (rank > 0) {
    PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
    PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
  }
  PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
  for (i = 0; i < n; i++) {
    /* ja[ia[i+1]-1] is the last (largest) column index in local row i of the symmetrized structure */
    env = PetscMax(env, ja[ia[i + 1] - 1]);
    II  = rstart + i;
    if (env == II) {
      /* no nonzero reaches past global row II, so the current block ends here */
      starts[lblocks]  = tbs;
      sizes[lblocks++] = 1 + II - tbs;
      tbs              = 1 + II;
    }
  }
  if (rank < size - 1) {
    PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
    PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
  }

  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
  if (!set || !flag) PetscCall(MatDestroy(&AA));
  PetscCall(MatDestroy(&A));

  /* record the discovered blocks in an EnvelopeData to be composed on the matrix below */
  PetscCall(PetscNew(&edata));
  PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
  edata->n = lblocks;
  /* create IS needed for extracting blocks from the original matrix */
  PetscCall(PetscMalloc1(lblocks, &edata->is));
  for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));

  /* Create the resulting inverse matrix nonzero structure with preallocation information */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
  PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
  PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
  PetscCall(MatSetType(edata->C, MATAIJ));

  /* Communicate the start and end of each row, from each block to the correct rank */
  /* TODO: Use PetscSF instead of VecScatter */
  for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
  PetscCall(VecGetArrayWrite(seq, &seqv));
  for (PetscInt i = 0; i < lblocks; i++) {
    for (PetscInt j = 0; j < sizes[i]; j++) {
      /* each row of block i carries the block's [start, start+size) range */
      seqv[cnt]     = starts[i];
      seqv[cnt + 1] = starts[i] + sizes[i];
      cnt += 2;
    }
  }
  PetscCall(VecRestoreArrayWrite(seq, &seqv));
  /* exclusive prefix sum: sc becomes the global offset of this rank's entries in the parallel vector */
  PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
  sc -= cnt;
  PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
  PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
  PetscCall(ISDestroy(&isglobal));
  PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scatter));
  PetscCall(VecDestroy(&seq));

  /* for each local row, split its block's column span [start, end) into diagonal-part and
     off-diagonal-part counts to preallocate edata->C */
  PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
  PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
  PetscCall(VecGetArrayRead(par, &parv));
  cnt = 0;
  PetscCall(MatGetSize(mat, NULL, &n));
  for (PetscInt i = 0; i < mat->rmap->n; i++) {
    PetscInt start, end, d = 0, od = 0;

    start = (PetscInt)PetscRealPart(parv[cnt]);
    end   = (PetscInt)PetscRealPart(parv[cnt + 1]);
    cnt += 2;

    /* add the contribution of columns [start, n), then subtract that of [end, n), leaving [start, end) */
    if (start < cstart) {
      od += cstart - start + n - cend;
      d += cend - cstart;
    } else if (start < cend) {
      od += n - cend;
      d += cend - start;
    } else od += n - start;
    if (end <= cstart) {
      od -= cstart - end + n - cend;
      d -= cend - cstart;
    } else if (end < cend) {
      od -= n - cend;
      d -= cend - end;
    } else od -= n - end;

    odiag[i] = od;
    diag[i]  = d;
  }
  PetscCall(VecRestoreArrayRead(par, &parv));
  PetscCall(VecDestroy(&par));
  PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
  PetscCall(PetscFree2(diag, odiag));
  PetscCall(PetscFree2(sizes, starts));

  /* cache the envelope on the matrix so MatInvertVariableBlockEnvelope() can find and reuse it */
  PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
  PetscCall(PetscContainerSetPointer(container, edata));
  PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
  PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
  PetscCall(PetscObjectDereference((PetscObject)container));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7951: /*@
7952: MatInvertVariableBlockEnvelope - set matrix C to be the inverted block diagonal of matrix A
7954: Collective
7956: Input Parameters:
7957: + A - the matrix
7958: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7960: Output Parameter:
7961: . C - matrix with inverted block diagonal of `A`
7963: Level: advanced
7965: Note:
7966: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7968: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7969: @*/
PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
{
  PetscContainer   container;
  EnvelopeData    *edata;
  PetscObjectState nonzerostate;

  PetscFunctionBegin;
  /* the envelope is computed lazily and cached on A as the "EnvelopeData" composed object */
  PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
  if (!container) {
    PetscCall(MatComputeVariableBlockEnvelope(A));
    PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
  }
  PetscCall(PetscContainerGetPointer(container, &edata));
  PetscCall(MatGetNonzeroState(A, &nonzerostate));
  /* the cached block layout is only valid while A keeps the nonzero structure it had when computed */
  PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
  PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");

  /* pull out each diagonal block of A as a sequential submatrix */
  PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
  *C = edata->C;

  for (PetscInt i = 0; i < edata->n; i++) {
    Mat          D;
    PetscScalar *dvalues;

    /* invert the block in dense form; column-oriented insertion matches the dense array layout */
    PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
    PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
    PetscCall(MatSeqDenseInvert(D));
    PetscCall(MatDenseGetArray(D, &dvalues));
    PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
    /* NOTE(review): dvalues is not returned with MatDenseRestoreArray() before the destroy — confirm intentional */
    PetscCall(MatDestroy(&D));
  }
  PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
  PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8007: /*@
8008: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
8010: Not Collective
8012: Input Parameters:
8013: + mat - the matrix
8014: . nblocks - the number of blocks on this process, each block can only exist on a single process
8015: - bsizes - the block sizes
8017: Level: intermediate
8019: Notes:
8020: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
8022: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
8024: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
8025: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
8026: @*/
8027: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
8028: {
8029: PetscInt ncnt = 0, nlocal;
8031: PetscFunctionBegin;
8033: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
8034: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
8035: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
8036: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
8037: PetscCall(PetscFree(mat->bsizes));
8038: mat->nblocks = nblocks;
8039: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
8040: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
8041: PetscFunctionReturn(PETSC_SUCCESS);
8042: }
8044: /*@C
  MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix, which need not be of the same size
8047: Not Collective; No Fortran Support
8049: Input Parameter:
8050: . mat - the matrix
8052: Output Parameters:
8053: + nblocks - the number of blocks on this process
8054: - bsizes - the block sizes
8056: Level: intermediate
8058: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
8059: @*/
PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
{
  PetscFunctionBegin;
  /* either output may be NULL; bsizes aliases the matrix's internal array, so the caller must not free it */
  if (nblocks) *nblocks = mat->nblocks;
  if (bsizes) *bsizes = mat->bsizes;
  PetscFunctionReturn(PETSC_SUCCESS);
}
8069: /*@
8070: MatSelectVariableBlockSizes - When creating a submatrix, pass on the variable block sizes
8072: Not Collective
  Input Parameters:
8075: + subA - the submatrix
8076: . A - the original matrix
8077: - isrow - The `IS` of selected rows for the submatrix, must be sorted
8079: Level: developer
8081: Notes:
8082: If the index set is not sorted or contains off-process entries, this function will do nothing.
8084: .seealso: [](ch_matrices), `Mat`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
8085: @*/
PetscErrorCode MatSelectVariableBlockSizes(Mat subA, Mat A, IS isrow)
{
  const PetscInt *rows;
  PetscInt        n, rStart, rEnd, Nb = 0;
  PetscBool       flg = A->bsizes ? PETSC_TRUE : PETSC_FALSE; /* only attempt anything if A has variable block sizes set */

  PetscFunctionBegin;
  // The code for block size extraction does not support an unsorted IS
  if (flg) PetscCall(ISSorted(isrow, &flg));
  // We don't support originally off-diagonal blocks
  if (flg) {
    PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
    PetscCall(ISGetLocalSize(isrow, &n));
    PetscCall(ISGetIndices(isrow, &rows));
    for (PetscInt i = 0; i < n && flg; ++i) {
      /* any off-process row entry disqualifies the extraction */
      if (rows[i] < rStart || rows[i] >= rEnd) flg = PETSC_FALSE;
    }
    PetscCall(ISRestoreIndices(isrow, &rows));
  }
  // quiet return if we can't extract block size
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)subA)));
  if (!flg) PetscFunctionReturn(PETSC_SUCCESS);

  // extract block sizes
  // First pass: count how many of A's blocks have at least one selected row (Nb)
  PetscCall(ISGetIndices(isrow, &rows));
  for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
    /* gr is the global row where block b starts; i walks the sorted selected rows */
    PetscBool occupied = PETSC_FALSE;

    for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
      const PetscInt row = gr + br;

      if (i == n) break;
      if (rows[i] == row) {
        occupied = PETSC_TRUE;
        ++i;
      }
      while (i < n && rows[i] < row) ++i;
    }
    gr += A->bsizes[b];
    if (occupied) ++Nb;
  }
  subA->nblocks = Nb;
  PetscCall(PetscFree(subA->bsizes));
  PetscCall(PetscMalloc1(subA->nblocks, &subA->bsizes));
  // Second pass: the size of each kept block is the number of selected rows falling inside it
  PetscInt sb = 0;
  for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
    if (sb < subA->nblocks) subA->bsizes[sb] = 0;
    for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
      const PetscInt row = gr + br;

      if (i == n) break;
      if (rows[i] == row) {
        ++subA->bsizes[sb];
        ++i;
      }
      while (i < n && rows[i] < row) ++i;
    }
    gr += A->bsizes[b];
    if (sb < subA->nblocks && subA->bsizes[sb]) ++sb;
  }
  PetscCheck(sb == subA->nblocks, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of blocks %" PetscInt_FMT " != %" PetscInt_FMT, sb, subA->nblocks);
  // Sanity checks: the selected block sizes must tile the local rows of subA exactly
  PetscInt nlocal, ncnt = 0;
  PetscCall(MatGetLocalSize(subA, &nlocal, NULL));
  PetscCheck(subA->nblocks >= 0 && subA->nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", subA->nblocks, nlocal);
  for (PetscInt i = 0; i < subA->nblocks; i++) ncnt += subA->bsizes[i];
  PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
  PetscCall(ISRestoreIndices(isrow, &rows));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8156: /*@
8157: MatSetBlockSizes - Sets the matrix block row and column sizes.
8159: Logically Collective
8161: Input Parameters:
8162: + mat - the matrix
8163: . rbs - row block size
8164: - cbs - column block size
8166: Level: intermediate
8168: Notes:
8169: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
8170: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
8171: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
8173: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
8174: are compatible with the matrix local sizes.
8176: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
8178: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
8179: @*/
8180: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
8181: {
8182: PetscFunctionBegin;
8186: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
8187: if (mat->rmap->refcnt) {
8188: ISLocalToGlobalMapping l2g = NULL;
8189: PetscLayout nmap = NULL;
8191: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
8192: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
8193: PetscCall(PetscLayoutDestroy(&mat->rmap));
8194: mat->rmap = nmap;
8195: mat->rmap->mapping = l2g;
8196: }
8197: if (mat->cmap->refcnt) {
8198: ISLocalToGlobalMapping l2g = NULL;
8199: PetscLayout nmap = NULL;
8201: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
8202: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
8203: PetscCall(PetscLayoutDestroy(&mat->cmap));
8204: mat->cmap = nmap;
8205: mat->cmap->mapping = l2g;
8206: }
8207: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
8208: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
8209: PetscFunctionReturn(PETSC_SUCCESS);
8210: }
8212: /*@
8213: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8215: Logically Collective
8217: Input Parameters:
8218: + mat - the matrix
8219: . fromRow - matrix from which to copy row block size
8220: - fromCol - matrix from which to copy column block size (can be same as `fromRow`)
8222: Level: developer
8224: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8225: @*/
PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
{
  PetscFunctionBegin;
  /* let the matrix implementation react to the new block sizes (optional method),
     then copy the row block size from fromRow and the column block size from fromCol */
  PetscTryTypeMethod(mat, setblocksizes, fromRow->rmap->bs, fromCol->cmap->bs);
  PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
  PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8238: /*@
8239: MatResidual - Default routine to calculate the residual r = b - Ax
8241: Collective
8243: Input Parameters:
8244: + mat - the matrix
8245: . b - the right-hand-side
8246: - x - the approximate solution
8248: Output Parameter:
8249: . r - location to store the residual
8251: Level: developer
8253: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8254: @*/
8255: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8256: {
8257: PetscFunctionBegin;
8263: MatCheckPreallocated(mat, 1);
8264: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8265: if (!mat->ops->residual) {
8266: PetscCall(MatMult(mat, x, r));
8267: PetscCall(VecAYPX(r, -1.0, b));
8268: } else {
8269: PetscUseTypeMethod(mat, residual, b, x, r);
8270: }
8271: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8272: PetscFunctionReturn(PETSC_SUCCESS);
8273: }
8275: /*@C
8276: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8278: Collective
8280: Input Parameters:
8281: + mat - the matrix
8282: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8283: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8284: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8285: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8286: always used.
8288: Output Parameters:
8289: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8290: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8291: . ja - the column indices, use `NULL` if not needed
8292: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8293: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8295: Level: developer
8297: Notes:
8298: You CANNOT change any of the ia[] or ja[] values.
8300: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
8302: Fortran Notes:
8303: Use
8304: .vb
8305: PetscInt, pointer :: ia(:),ja(:)
8306: call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8307: ! Access the ith and jth entries via ia(i) and ja(j)
8308: .ve
8310: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8311: @*/
8312: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8313: {
8314: PetscFunctionBegin;
8317: if (n) PetscAssertPointer(n, 5);
8318: if (ia) PetscAssertPointer(ia, 6);
8319: if (ja) PetscAssertPointer(ja, 7);
8320: if (done) PetscAssertPointer(done, 8);
8321: MatCheckPreallocated(mat, 1);
8322: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8323: else {
8324: if (done) *done = PETSC_TRUE;
8325: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8326: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8327: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8328: }
8329: PetscFunctionReturn(PETSC_SUCCESS);
8330: }
8332: /*@C
8333: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8335: Collective
8337: Input Parameters:
8338: + mat - the matrix
8339: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8340: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8341: symmetrized
8342: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8343: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8344: always used.
8346: Output Parameters:
8347: + n - number of columns in the (possibly compressed) matrix
8348: . ia - the column pointers; that is ia[0] = 0, ia[col] = i[col-1] + number of elements in that col of the matrix
8349: . ja - the row indices
8350: - done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8352: Level: developer
8354: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8355: @*/
8356: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8357: {
8358: PetscFunctionBegin;
8361: PetscAssertPointer(n, 5);
8362: if (ia) PetscAssertPointer(ia, 6);
8363: if (ja) PetscAssertPointer(ja, 7);
8364: PetscAssertPointer(done, 8);
8365: MatCheckPreallocated(mat, 1);
8366: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8367: else {
8368: *done = PETSC_TRUE;
8369: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8370: }
8371: PetscFunctionReturn(PETSC_SUCCESS);
8372: }
8374: /*@C
8375: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8377: Collective
8379: Input Parameters:
8380: + mat - the matrix
8381: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8382: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8383: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8384: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8385: always used.
8386: . n - size of (possibly compressed) matrix
8387: . ia - the row pointers
8388: - ja - the column indices
8390: Output Parameter:
. done - `PETSC_TRUE` or `PETSC_FALSE` indicating that the values have been returned
8393: Level: developer
8395: Note:
8396: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
  use of the array after it has been restored. If you pass `NULL`, it will
8398: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8400: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8401: @*/
8402: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8403: {
8404: PetscFunctionBegin;
8407: if (ia) PetscAssertPointer(ia, 6);
8408: if (ja) PetscAssertPointer(ja, 7);
8409: if (done) PetscAssertPointer(done, 8);
8410: MatCheckPreallocated(mat, 1);
8412: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8413: else {
8414: if (done) *done = PETSC_TRUE;
8415: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8416: if (n) *n = 0;
8417: if (ia) *ia = NULL;
8418: if (ja) *ja = NULL;
8419: }
8420: PetscFunctionReturn(PETSC_SUCCESS);
8421: }
8423: /*@C
8424: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8426: Collective
8428: Input Parameters:
8429: + mat - the matrix
8430: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8431: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8432: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8433: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8434: always used.
8436: Output Parameters:
8437: + n - size of (possibly compressed) matrix
8438: . ia - the column pointers
8439: . ja - the row indices
- done - `PETSC_TRUE` or `PETSC_FALSE` indicating that the values have been returned
8442: Level: developer
8444: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8445: @*/
8446: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8447: {
8448: PetscFunctionBegin;
8451: if (ia) PetscAssertPointer(ia, 6);
8452: if (ja) PetscAssertPointer(ja, 7);
8453: PetscAssertPointer(done, 8);
8454: MatCheckPreallocated(mat, 1);
8456: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8457: else {
8458: *done = PETSC_TRUE;
8459: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8460: if (n) *n = 0;
8461: if (ia) *ia = NULL;
8462: if (ja) *ja = NULL;
8463: }
8464: PetscFunctionReturn(PETSC_SUCCESS);
8465: }
8467: /*@
8468: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8469: `MatGetColumnIJ()`.
8471: Collective
8473: Input Parameters:
8474: + mat - the matrix
8475: . ncolors - maximum color value
8476: . n - number of entries in colorarray
8477: - colorarray - array indicating color for each column
8479: Output Parameter:
8480: . iscoloring - coloring generated using colorarray information
8482: Level: developer
8484: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8485: @*/
8486: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8487: {
8488: PetscFunctionBegin;
8491: PetscAssertPointer(colorarray, 4);
8492: PetscAssertPointer(iscoloring, 5);
8493: MatCheckPreallocated(mat, 1);
8495: if (!mat->ops->coloringpatch) {
8496: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8497: } else {
8498: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8499: }
8500: PetscFunctionReturn(PETSC_SUCCESS);
8501: }
8503: /*@
8504: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8506: Logically Collective
8508: Input Parameter:
8509: . mat - the factored matrix to be reset
8511: Level: developer
8513: Notes:
8514: This routine should be used only with factored matrices formed by in-place
8515: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8516: format). This option can save memory, for example, when solving nonlinear
8517: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8518: ILU(0) preconditioner.
8520: One can specify in-place ILU(0) factorization by calling
8521: .vb
      PCSetType(pc,PCILU);
      PCFactorSetUseInPlace(pc);
8524: .ve
8525: or by using the options -pc_type ilu -pc_factor_in_place
8527: In-place factorization ILU(0) can also be used as a local
8528: solver for the blocks within the block Jacobi or additive Schwarz
8529: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8530: for details on setting local solver options.
8532: Most users should employ the `KSP` interface for linear solvers
8533: instead of working directly with matrix algebra routines such as this.
8534: See, e.g., `KSPCreate()`.
8536: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8537: @*/
8538: PetscErrorCode MatSetUnfactored(Mat mat)
8539: {
8540: PetscFunctionBegin;
8543: MatCheckPreallocated(mat, 1);
8544: mat->factortype = MAT_FACTOR_NONE;
8545: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8546: PetscUseTypeMethod(mat, setunfactored);
8547: PetscFunctionReturn(PETSC_SUCCESS);
8548: }
8550: /*@
8551: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8552: as the original matrix.
8554: Collective
8556: Input Parameters:
8557: + mat - the original matrix
8558: . isrow - parallel `IS` containing the rows this processor should obtain
8559: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in IT's "diagonal part" in the new matrix.
8560: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8562: Output Parameter:
8563: . newmat - the new submatrix, of the same type as the original matrix
8565: Level: advanced
8567: Notes:
8568: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8570: Some matrix types place restrictions on the row and column indices, such
8571: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8572: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8574: The index sets may not have duplicate entries.
8576: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8577: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8578: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8579: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8580: you are finished using it.
8582: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8583: the input matrix.
8585: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8587: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8588: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8590: Example usage:
8591: Consider the following 8x8 matrix with 34 non-zero values, that is
8592: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8593: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8594: as follows
8595: .vb
8596: 1 2 0 | 0 3 0 | 0 4
8597: Proc0 0 5 6 | 7 0 0 | 8 0
8598: 9 0 10 | 11 0 0 | 12 0
8599: -------------------------------------
8600: 13 0 14 | 15 16 17 | 0 0
8601: Proc1 0 18 0 | 19 20 21 | 0 0
8602: 0 0 0 | 22 23 0 | 24 0
8603: -------------------------------------
8604: Proc2 25 26 27 | 0 0 28 | 29 0
8605: 30 0 0 | 31 32 33 | 0 34
8606: .ve
8608: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8610: .vb
8611: 2 0 | 0 3 0 | 0
8612: Proc0 5 6 | 7 0 0 | 8
8613: -------------------------------
8614: Proc1 18 0 | 19 20 21 | 0
8615: -------------------------------
8616: Proc2 26 27 | 0 0 28 | 29
8617: 0 0 | 31 32 33 | 0
8618: .ve
8620: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8621: @*/
PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
{
  PetscMPIInt size;
  Mat        *local;
  IS          iscoltmp;
  PetscBool   flg;

  PetscFunctionBegin;
  PetscAssertPointer(newmat, 5);
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
  PetscCheck(cll != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_INPLACE_MATRIX");

  MatCheckPreallocated(mat, 1);
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));

  /* Fast path: if on every process isrow is a unit-stride IS covering exactly this
     process's ownership range (and iscol selects the same thing), the requested
     submatrix is the whole matrix — return a new reference instead of copying. */
  if (!iscol || isrow == iscol) {
    PetscBool   stride;
    PetscMPIInt grabentirematrix = 0, grab;
    PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
    if (stride) {
      PetscInt first, step, n, rstart, rend;
      PetscCall(ISStrideGetInfo(isrow, &first, &step));
      if (step == 1) {
        PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
        if (rstart == first) {
          PetscCall(ISGetLocalSize(isrow, &n));
          if (n == rend - rstart) grabentirematrix = 1;
        }
      }
    }
    /* MPI_MIN acts as a logical AND: every rank must qualify for the fast path */
    PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
    if (grab) {
      PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
      if (cll == MAT_INITIAL_MATRIX) {
        *newmat = mat;
        PetscCall(PetscObjectReference((PetscObject)mat));
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }

  /* NULL iscol means "all columns": substitute a stride IS over this process's
     column ownership range (destroyed again at the end) */
  if (!iscol) {
    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
  } else {
    iscoltmp = iscol;
  }

  /* if original matrix is on just one processor then use submatrix generated */
  if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
    PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
    goto setproperties;
  } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
    PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
    *newmat = *local;
    PetscCall(PetscFree(local));
    goto setproperties;
  } else if (!mat->ops->createsubmatrix) {
    /* Create a new matrix type that implements the operation using the full matrix */
    PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
    switch (cll) {
    case MAT_INITIAL_MATRIX:
      PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
      break;
    case MAT_REUSE_MATRIX:
      PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
    }
    PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
    goto setproperties;
  }

  /* General case: dispatch to the type-specific implementation */
  PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
  PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));

setproperties:
  /* If the submatrix has no symmetry information yet and was extracted with the
     same row and column index sets, it inherits the parent's symmetry flags */
  if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
    PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
    if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
  }
  if (!iscol) PetscCall(ISDestroy(&iscoltmp));
  if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
  if (!iscol || isrow == iscol) PetscCall(MatSelectVariableBlockSizes(*newmat, mat, isrow));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8716: /*@
8717: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8719: Not Collective
8721: Input Parameters:
8722: + A - the matrix we wish to propagate options from
8723: - B - the matrix we wish to propagate options to
8725: Level: beginner
8727: Note:
8728: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8730: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8731: @*/
8732: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8733: {
8734: PetscFunctionBegin;
8737: B->symmetry_eternal = A->symmetry_eternal;
8738: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8739: B->symmetric = A->symmetric;
8740: B->structurally_symmetric = A->structurally_symmetric;
8741: B->spd = A->spd;
8742: B->hermitian = A->hermitian;
8743: PetscFunctionReturn(PETSC_SUCCESS);
8744: }
8746: /*@
8747: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8748: used during the assembly process to store values that belong to
8749: other processors.
8751: Not Collective
8753: Input Parameters:
8754: + mat - the matrix
8755: . size - the initial size of the stash.
8756: - bsize - the initial size of the block-stash(if used).
8758: Options Database Keys:
8759: + -matstash_initial_size size or size0,size1,...,sizep-1 - set initial size
8760: - -matstash_block_initial_size bsize or bsize0,bsize1,...,bsizep-1 - set initial block size
8762: Level: intermediate
8764: Notes:
8765: The block-stash is used for values set with `MatSetValuesBlocked()` while
8766: the stash is used for values set with `MatSetValues()`
8768: Run with the option -info and look for output of the form
8769: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8770: to determine the appropriate value, MM, to use for size and
8771: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8772: to determine the value, BMM to use for bsize
8774: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8775: @*/
PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
{
  PetscFunctionBegin;
  /* The scalar stash buffers off-process entries from MatSetValues(),
     the block stash those from MatSetValuesBlocked() */
  PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
  PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8786: /*@
8787: MatInterpolateAdd - $w = y + A*x$ or $A^T*x$ depending on the shape of
8788: the matrix
8790: Neighbor-wise Collective
8792: Input Parameters:
8793: + A - the matrix
8794: . x - the vector to be multiplied by the interpolation operator
8795: - y - the vector to be added to the result
8797: Output Parameter:
8798: . w - the resulting vector
8800: Level: intermediate
8802: Notes:
8803: `w` may be the same vector as `y`.
8805: This allows one to use either the restriction or interpolation (its transpose)
8806: matrix to do the interpolation
8808: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8809: @*/
8810: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8811: {
8812: PetscInt M, N, Ny;
8814: PetscFunctionBegin;
8819: PetscCall(MatGetSize(A, &M, &N));
8820: PetscCall(VecGetSize(y, &Ny));
8821: if (M == Ny) PetscCall(MatMultAdd(A, x, y, w));
8822: else PetscCall(MatMultTransposeAdd(A, x, y, w));
8823: PetscFunctionReturn(PETSC_SUCCESS);
8824: }
8826: /*@
8827: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8828: the matrix
8830: Neighbor-wise Collective
8832: Input Parameters:
8833: + A - the matrix
8834: - x - the vector to be interpolated
8836: Output Parameter:
8837: . y - the resulting vector
8839: Level: intermediate
8841: Note:
8842: This allows one to use either the restriction or interpolation (its transpose)
8843: matrix to do the interpolation
8845: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8846: @*/
8847: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8848: {
8849: PetscInt M, N, Ny;
8851: PetscFunctionBegin;
8855: PetscCall(MatGetSize(A, &M, &N));
8856: PetscCall(VecGetSize(y, &Ny));
8857: if (M == Ny) PetscCall(MatMult(A, x, y));
8858: else PetscCall(MatMultTranspose(A, x, y));
8859: PetscFunctionReturn(PETSC_SUCCESS);
8860: }
8862: /*@
8863: MatRestrict - $y = A*x$ or $A^T*x$
8865: Neighbor-wise Collective
8867: Input Parameters:
8868: + A - the matrix
8869: - x - the vector to be restricted
8871: Output Parameter:
8872: . y - the resulting vector
8874: Level: intermediate
8876: Note:
8877: This allows one to use either the restriction or interpolation (its transpose)
8878: matrix to do the restriction
8880: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8881: @*/
8882: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8883: {
8884: PetscInt M, N, Nx;
8886: PetscFunctionBegin;
8890: PetscCall(MatGetSize(A, &M, &N));
8891: PetscCall(VecGetSize(x, &Nx));
8892: if (M == Nx) PetscCall(MatMultTranspose(A, x, y));
8893: else PetscCall(MatMult(A, x, y));
8894: PetscFunctionReturn(PETSC_SUCCESS);
8895: }
8897: /*@
8898: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8900: Neighbor-wise Collective
8902: Input Parameters:
8903: + A - the matrix
8904: . x - the input dense matrix to be multiplied
8905: - w - the input dense matrix to be added to the result
8907: Output Parameter:
8908: . y - the output dense matrix
8910: Level: intermediate
8912: Note:
8913: This allows one to use either the restriction or interpolation (its transpose)
8914: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8915: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8917: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8918: @*/
8919: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8920: {
8921: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8922: PetscBool trans = PETSC_TRUE;
8923: MatReuse reuse = MAT_INITIAL_MATRIX;
8925: PetscFunctionBegin;
8931: PetscCall(MatGetSize(A, &M, &N));
8932: PetscCall(MatGetSize(x, &Mx, &Nx));
8933: if (N == Mx) trans = PETSC_FALSE;
8934: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8935: Mo = trans ? N : M;
8936: if (*y) {
8937: PetscCall(MatGetSize(*y, &My, &Ny));
8938: if (Mo == My && Nx == Ny) reuse = MAT_REUSE_MATRIX;
8939: else {
8940: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8941: PetscCall(MatDestroy(y));
8942: }
8943: }
8945: if (w && *y == w) { /* this is to minimize changes in PCMG */
8946: PetscBool flg;
8948: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8949: if (w) {
8950: PetscInt My, Ny, Mw, Nw;
8952: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8953: PetscCall(MatGetSize(*y, &My, &Ny));
8954: PetscCall(MatGetSize(w, &Mw, &Nw));
8955: if (!flg || My != Mw || Ny != Nw) w = NULL;
8956: }
8957: if (!w) {
8958: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8959: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8960: PetscCall(PetscObjectDereference((PetscObject)w));
8961: } else PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8962: }
8963: if (!trans) PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8964: else PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8965: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8966: PetscFunctionReturn(PETSC_SUCCESS);
8967: }
8969: /*@
8970: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8972: Neighbor-wise Collective
8974: Input Parameters:
8975: + A - the matrix
8976: - x - the input dense matrix
8978: Output Parameter:
8979: . y - the output dense matrix
8981: Level: intermediate
8983: Note:
8984: This allows one to use either the restriction or interpolation (its transpose)
8985: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8986: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8988: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8989: @*/
PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
{
  PetscFunctionBegin;
  /* Interpolation without an additive term: delegate with w = NULL */
  PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8997: /*@
8998: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
9000: Neighbor-wise Collective
9002: Input Parameters:
9003: + A - the matrix
9004: - x - the input dense matrix
9006: Output Parameter:
9007: . y - the output dense matrix
9009: Level: intermediate
9011: Note:
9012: This allows one to use either the restriction or interpolation (its transpose)
9013: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
9014: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
9016: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
9017: @*/
PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
{
  PetscFunctionBegin;
  /* Restriction without an additive term: the orientation (A vs A^T) is chosen
     inside MatMatInterpolateAdd() from the operand shapes */
  PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9025: /*@
9026: MatGetNullSpace - retrieves the null space of a matrix.
9028: Logically Collective
9030: Input Parameters:
9031: + mat - the matrix
9032: - nullsp - the null space object
9034: Level: developer
9036: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
9037: @*/
9038: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
9039: {
9040: PetscFunctionBegin;
9042: PetscAssertPointer(nullsp, 2);
9043: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
9044: PetscFunctionReturn(PETSC_SUCCESS);
9045: }
9047: /*@C
9048: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
9050: Logically Collective
9052: Input Parameters:
9053: + n - the number of matrices
9054: - mat - the array of matrices
9056: Output Parameters:
9057: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
9059: Level: developer
9061: Note:
  Call `MatRestoreNullSpaces()` to provide these to another array of matrices
9064: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9065: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
9066: @*/
PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
{
  PetscFunctionBegin;
  PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
  PetscAssertPointer(mat, 2);
  PetscAssertPointer(nullsp, 3);

  /* Returned array layout: [0,n) null spaces, [n,2n) near null spaces,
     [2n,3n) transpose null spaces; calloc leaves NULL for matrices without one */
  PetscCall(PetscCalloc1(3 * n, nullsp));
  for (PetscInt i = 0; i < n; i++) {
    /* take a reference on each stored space; released in MatRestoreNullSpaces() */
    (*nullsp)[i] = mat[i]->nullsp;
    PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
    (*nullsp)[n + i] = mat[i]->nearnullsp;
    PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
    (*nullsp)[2 * n + i] = mat[i]->transnullsp;
    PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9087: /*@C
9088: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
9090: Logically Collective
9092: Input Parameters:
9093: + n - the number of matrices
9094: . mat - the array of matrices
9095: - nullsp - an array of null spaces
9097: Level: developer
9099: Note:
9100: Call `MatGetNullSpaces()` to create `nullsp`
9102: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9103: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9104: @*/
PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
{
  PetscFunctionBegin;
  PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
  PetscAssertPointer(mat, 2);
  PetscAssertPointer(nullsp, 3);
  PetscAssertPointer(*nullsp, 3);

  /* Re-attach each stored space using the same layout as MatGetNullSpaces():
     [0,n) null, [n,2n) near null, [2n,3n) transpose null */
  for (PetscInt i = 0; i < n; i++) {
    PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
    /* drop the reference taken by MatGetNullSpaces(); the Set call holds its own */
    PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
    PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
    PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
    PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
    PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
  }
  PetscCall(PetscFree(*nullsp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9126: /*@
9127: MatSetNullSpace - attaches a null space to a matrix.
9129: Logically Collective
9131: Input Parameters:
9132: + mat - the matrix
9133: - nullsp - the null space object
9135: Level: advanced
9137: Notes:
9138: This null space is used by the `KSP` linear solvers to solve singular systems.
9140: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with an nullsp of `NULL`
9142: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9143: to zero but the linear system will still be solved in a least squares sense.
9145: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9146: the domain of a matrix $A$ (from $R^n$ to $R^m$ ($m$ rows, $n$ columns) $R^n$ = the direct sum of the null space of $A$, $n(A)$, plus the range of $A^T$, $R(A^T)$.
9147: Similarly $R^m$ = direct sum $n(A^T) + R(A)$. Hence the linear system $A x = b$ has a solution only if $b$ in $R(A)$ (or correspondingly $b$ is orthogonal to
9148: $n(A^T))$ and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution
9149: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$ where $\hat{b}$ is $b$ orthogonalized to the $n(A^T)$.
9150: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9152: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one has called
9153: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
9154: routine also automatically calls `MatSetTransposeNullSpace()`.
9156: The user should call `MatNullSpaceDestroy()`.
9158: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9159: `KSPSetPCSide()`
9160: @*/
PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  /* Reference first, then destroy the old one, so nullsp == mat->nullsp is safe */
  PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->nullsp));
  mat->nullsp = nullsp;
  /* A symmetric matrix has the same null space as its transpose */
  if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9173: /*@
9174: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9176: Logically Collective
9178: Input Parameters:
9179: + mat - the matrix
9180: - nullsp - the null space object
9182: Level: developer
9184: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9185: @*/
9186: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9187: {
9188: PetscFunctionBegin;
9191: PetscAssertPointer(nullsp, 2);
9192: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9193: PetscFunctionReturn(PETSC_SUCCESS);
9194: }
9196: /*@
9197: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9199: Logically Collective
9201: Input Parameters:
9202: + mat - the matrix
9203: - nullsp - the null space object
9205: Level: advanced
9207: Notes:
9208: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9210: See `MatSetNullSpace()`
9212: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9213: @*/
PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  /* Reference first, then destroy the old one, so nullsp == mat->transnullsp is safe */
  PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
  mat->transnullsp = nullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9225: /*@
9226: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions
9227: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9229: Logically Collective
9231: Input Parameters:
9232: + mat - the matrix
9233: - nullsp - the null space object
9235: Level: advanced
9237: Notes:
9238: Overwrites any previous near null space that may have been attached
9240: You can remove the null space by calling this routine with an `nullsp` of `NULL`
9242: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9243: @*/
PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* Reference first, then destroy the old one, so nullsp == mat->nearnullsp is safe */
  PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
  mat->nearnullsp = nullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9257: /*@
9258: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9260: Not Collective
9262: Input Parameter:
9263: . mat - the matrix
9265: Output Parameter:
9266: . nullsp - the null space object, `NULL` if not set
9268: Level: advanced
9270: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9271: @*/
PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
{
  PetscFunctionBegin;
  PetscAssertPointer(nullsp, 2);
  MatCheckPreallocated(mat, 1);
  /* Borrowed reference; NULL when no near null space has been attached */
  *nullsp = mat->nearnullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9283: /*@
9284: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9286: Collective
9288: Input Parameters:
9289: + mat - the matrix
9290: . row - row/column permutation
9291: - info - information on desired factorization process
9293: Level: developer
9295: Notes:
9296: Probably really in-place only when level of fill is zero, otherwise allocates
9297: new space to store factored matrix and deletes previous memory.
9299: Most users should employ the `KSP` interface for linear solvers
9300: instead of working directly with matrix algebra routines such as this.
9301: See, e.g., `KSPCreate()`.
9303: Fortran Note:
9304: A valid (non-null) `info` argument must be provided
9306: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9307: @*/
PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscAssertPointer(info, 3);
  /* In-place incomplete Cholesky requires a square, assembled, not-yet-factored matrix */
  PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  PetscUseTypeMethod(mat, iccfactor, row, info);
  /* the matrix values were overwritten with the factorization */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9324: /*@
9325: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9326: ghosted ones.
9328: Not Collective
9330: Input Parameters:
9331: + mat - the matrix
9332: - diag - the diagonal values, including ghost ones
9334: Level: developer
9336: Notes:
9337: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9339: This allows one to avoid during communication to perform the scaling that must be done with `MatDiagonalScale()`
9341: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9342: @*/
PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
  PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  if (size == 1) {
    /* Sequential case: no ghost values, so this reduces to an ordinary column scaling */
    PetscInt n, m;
    PetscCall(VecGetSize(diag, &n));
    PetscCall(MatGetSize(mat, NULL, &m));
    PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
    PetscCall(MatDiagonalScale(mat, NULL, diag));
  } else PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag)); /* parallel: dispatch to the type-specific composed method; errors if the type provides none */
  PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9367: /*@
9368: MatGetInertia - Gets the inertia from a factored matrix
9370: Collective
9372: Input Parameter:
9373: . mat - the matrix
9375: Output Parameters:
9376: + nneg - number of negative eigenvalues
9377: . nzero - number of zero eigenvalues
9378: - npos - number of positive eigenvalues
9380: Level: advanced
9382: Note:
9383: Matrix must have been factored by `MatCholeskyFactor()`
9385: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9386: @*/
PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
{
  PetscFunctionBegin;
  /* Inertia is only available from an assembled, factored matrix */
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
  PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
  PetscFunctionReturn(PETSC_SUCCESS);
}
9398: /*@C
9399: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9401: Neighbor-wise Collective
9403: Input Parameters:
9404: + mat - the factored matrix obtained with `MatGetFactor()`
9405: - b - the right-hand-side vectors
9407: Output Parameter:
9408: . x - the result vectors
9410: Level: developer
9412: Note:
9413: The vectors `b` and `x` cannot be the same. I.e., one cannot
9414: call `MatSolves`(A,x,x).
9416: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9417: @*/
PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
{
  PetscFunctionBegin;
  PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
  /* A 0x0 system has nothing to solve */
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);

  MatCheckPreallocated(mat, 1);
  PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, solves, b, x);
  PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9434: /*@
9435: MatIsSymmetric - Test whether a matrix is symmetric
9437: Collective
9439: Input Parameters:
9440: + A - the matrix to test
9441: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9443: Output Parameter:
9444: . flg - the result
9446: Level: intermediate
9448: Notes:
9449: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9451: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9453: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9454: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9456: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9457: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9458: @*/
9459: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9460: {
9461: PetscFunctionBegin;
9463: PetscAssertPointer(flg, 3);
9464: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9465: else {
9466: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9467: else PetscCall(MatIsTranspose(A, A, tol, flg));
9468: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9469: }
9470: PetscFunctionReturn(PETSC_SUCCESS);
9471: }
9473: /*@
9474: MatIsHermitian - Test whether a matrix is Hermitian
9476: Collective
9478: Input Parameters:
9479: + A - the matrix to test
9480: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9482: Output Parameter:
9483: . flg - the result
9485: Level: intermediate
9487: Notes:
9488: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9490: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9492: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
  after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9495: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9496: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9497: @*/
9498: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9499: {
9500: PetscFunctionBegin;
9502: PetscAssertPointer(flg, 3);
9503: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9504: else {
9505: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9506: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9507: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9508: }
9509: PetscFunctionReturn(PETSC_SUCCESS);
9510: }
9512: /*@
9513: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9515: Not Collective
9517: Input Parameter:
9518: . A - the matrix to check
9520: Output Parameters:
9521: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9522: - flg - the result (only valid if set is `PETSC_TRUE`)
9524: Level: advanced
9526: Notes:
9527: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9528: if you want it explicitly checked
9530: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9531: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9533: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9534: @*/
9535: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9536: {
9537: PetscFunctionBegin;
9539: PetscAssertPointer(set, 2);
9540: PetscAssertPointer(flg, 3);
9541: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9542: *set = PETSC_TRUE;
9543: *flg = PetscBool3ToBool(A->symmetric);
9544: } else *set = PETSC_FALSE;
9545: PetscFunctionReturn(PETSC_SUCCESS);
9546: }
9548: /*@
9549: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9551: Not Collective
9553: Input Parameter:
9554: . A - the matrix to check
9556: Output Parameters:
9557: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9558: - flg - the result (only valid if set is `PETSC_TRUE`)
9560: Level: advanced
9562: Notes:
9563: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9565: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9566: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9568: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9569: @*/
9570: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9571: {
9572: PetscFunctionBegin;
9574: PetscAssertPointer(set, 2);
9575: PetscAssertPointer(flg, 3);
9576: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9577: *set = PETSC_TRUE;
9578: *flg = PetscBool3ToBool(A->spd);
9579: } else *set = PETSC_FALSE;
9580: PetscFunctionReturn(PETSC_SUCCESS);
9581: }
9583: /*@
9584: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9586: Not Collective
9588: Input Parameter:
9589: . A - the matrix to check
9591: Output Parameters:
9592: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9593: - flg - the result (only valid if set is `PETSC_TRUE`)
9595: Level: advanced
9597: Notes:
9598: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9599: if you want it explicitly checked
9601: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9602: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9604: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9605: @*/
9606: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9607: {
9608: PetscFunctionBegin;
9610: PetscAssertPointer(set, 2);
9611: PetscAssertPointer(flg, 3);
9612: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9613: *set = PETSC_TRUE;
9614: *flg = PetscBool3ToBool(A->hermitian);
9615: } else *set = PETSC_FALSE;
9616: PetscFunctionReturn(PETSC_SUCCESS);
9617: }
9619: /*@
9620: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9622: Collective
9624: Input Parameter:
9625: . A - the matrix to test
9627: Output Parameter:
9628: . flg - the result
9630: Level: intermediate
9632: Notes:
  If the matrix does not yet know whether it is structurally symmetric this can be an expensive operation, also available `MatIsStructurallySymmetricKnown()`
9635: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9636: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9638: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9639: @*/
9640: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9641: {
9642: PetscFunctionBegin;
9644: PetscAssertPointer(flg, 2);
9645: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->structurally_symmetric);
9646: else {
9647: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9648: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9649: }
9650: PetscFunctionReturn(PETSC_SUCCESS);
9651: }
9653: /*@
9654: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9656: Not Collective
9658: Input Parameter:
9659: . A - the matrix to check
9661: Output Parameters:
9662: + set - PETSC_TRUE if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9663: - flg - the result (only valid if set is PETSC_TRUE)
9665: Level: advanced
9667: Notes:
9668: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9669: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9671: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9673: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9674: @*/
9675: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9676: {
9677: PetscFunctionBegin;
9679: PetscAssertPointer(set, 2);
9680: PetscAssertPointer(flg, 3);
9681: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9682: *set = PETSC_TRUE;
9683: *flg = PetscBool3ToBool(A->structurally_symmetric);
9684: } else *set = PETSC_FALSE;
9685: PetscFunctionReturn(PETSC_SUCCESS);
9686: }
9688: /*@
9689: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9690: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9692: Not Collective
9694: Input Parameter:
9695: . mat - the matrix
9697: Output Parameters:
9698: + nstash - the size of the stash
9699: . reallocs - the number of additional mallocs incurred.
9700: . bnstash - the size of the block stash
- breallocs - the number of additional mallocs incurred in the block stash
9703: Level: advanced
9705: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9706: @*/
PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
{
  PetscFunctionBegin;
  /* Query the scalar (point) stash and the block stash separately */
  PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
  PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9715: /*@
9716: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9717: parallel layout, `PetscLayout` for rows and columns
9719: Collective
9721: Input Parameter:
9722: . mat - the matrix
9724: Output Parameters:
9725: + right - (optional) vector that the matrix can be multiplied against
9726: - left - (optional) vector that the matrix vector product can be stored in
9728: Options Database Key:
9729: . -mat_vec_type type - set the `VecType` of the created vectors during `MatSetFromOptions()`
9731: Level: advanced
9733: Notes:
9734: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9736: The `VecType` of the created vectors is determined by the `MatType` of `mat`. This can be overridden by using `MatSetVecType()` or the option `-mat_vec_type`.
9738: These are new vectors which are not owned by the `mat`, they should be destroyed with `VecDestroy()` when no longer needed.
9740: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`, `MatSetVecType()`
9741: @*/
PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
{
  PetscFunctionBegin;
  if (mat->ops->getvecs) {
    /* The matrix type provides its own compatible-vector constructor */
    PetscUseTypeMethod(mat, getvecs, right, left);
  } else {
    /* Default path: build each vector directly on the matrix's column/row PetscLayout
       and give it the matrix's default vector type */
    if (right) {
      PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
      PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
      PetscCall(VecSetType(*right, mat->defaultvectype));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
      /* If the matrix is pinned to the CPU and propagates that binding, mirror it on the new vector */
      if (mat->boundtocpu && mat->bindingpropagates) {
        PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
        PetscCall(VecBindToCPU(*right, PETSC_TRUE));
      }
#endif
    }
    if (left) {
      PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
      PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
      PetscCall(VecSetType(*left, mat->defaultvectype));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
      /* Same CPU-binding propagation for the left vector */
      if (mat->boundtocpu && mat->bindingpropagates) {
        PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
        PetscCall(VecBindToCPU(*left, PETSC_TRUE));
      }
#endif
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9776: /*@
9777: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9778: with default values.
9780: Not Collective
9782: Input Parameter:
9783: . info - the `MatFactorInfo` data structure
9785: Level: developer
9787: Notes:
9788: The solvers are generally used through the `KSP` and `PC` objects, for example
9789: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9791: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9793: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9794: @*/
PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* Zero the whole structure; callers then override the entries relevant to their factorization */
  PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9802: /*@
9803: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9805: Collective
9807: Input Parameters:
9808: + mat - the factored matrix
9809: - is - the index set defining the Schur indices (0-based)
9811: Level: advanced
9813: Notes:
9814: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9816: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9818: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9820: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9821: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9822: @*/
PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
{
  PetscErrorCode (*f)(Mat, IS);

  PetscFunctionBegin;
  PetscCheckSameComm(mat, 1, is, 2);
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
  /* Schur support is exposed as a function composed on the solver-specific matrix type */
  PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
  PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
  /* Drop any Schur complement left over from a previous index set before installing the new one */
  PetscCall(MatDestroy(&mat->schur));
  PetscCall((*f)(mat, is));
  /* The solver callback is required to have created mat->schur */
  PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
  PetscFunctionReturn(PETSC_SUCCESS);
}
9842: /*@
9843: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9845: Logically Collective
9847: Input Parameters:
9848: + F - the factored matrix obtained by calling `MatGetFactor()`
9849: . S - location where to return the Schur complement, can be `NULL`
9850: - status - the status of the Schur complement matrix, can be `NULL`
9852: Level: advanced
9854: Notes:
9855: You must call `MatFactorSetSchurIS()` before calling this routine.
9857: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9859: The routine provides a copy of the Schur matrix stored within the solver data structures.
9860: The caller must destroy the object when it is no longer needed.
9861: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9863: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9865: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9867: Developer Note:
9868: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9869: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9871: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9872: @*/
9873: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9874: {
9875: PetscFunctionBegin;
9877: if (S) PetscAssertPointer(S, 2);
9878: if (status) PetscAssertPointer(status, 3);
9879: if (S) {
9880: PetscErrorCode (*f)(Mat, Mat *);
9882: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9883: if (f) PetscCall((*f)(F, S));
9884: else PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9885: }
9886: if (status) *status = F->schur_status;
9887: PetscFunctionReturn(PETSC_SUCCESS);
9888: }
9890: /*@
9891: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9893: Logically Collective
9895: Input Parameters:
9896: + F - the factored matrix obtained by calling `MatGetFactor()`
9897: . S - location where to return the Schur complement, can be `NULL`
9898: - status - the status of the Schur complement matrix, can be `NULL`
9900: Level: advanced
9902: Notes:
9903: You must call `MatFactorSetSchurIS()` before calling this routine.
9905: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
  The routine returns the Schur complement stored within the data structures of the solver.
9909: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9911: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9913: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9915: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9917: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9918: @*/
PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
{
  PetscFunctionBegin;
  /* Hand back borrowed (non-owning) access to the internal Schur matrix;
     the caller must return it with MatFactorRestoreSchurComplement() */
  if (S) {
    PetscAssertPointer(S, 2);
    *S = F->schur;
  }
  if (status) {
    PetscAssertPointer(status, 3);
    *status = F->schur_status;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Reset the Schur matrix's factorization-related state to match F->schur_status */
static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
{
  Mat S = F->schur;

  PetscFunctionBegin;
  switch (F->schur_status) {
  case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
  case MAT_FACTOR_SCHUR_INVERTED:
    /* S no longer holds a usable factorization: strip the solve callbacks and
       factorization metadata that a previous factorization installed */
    if (S) {
      S->ops->solve = NULL;
      S->ops->matsolve = NULL;
      S->ops->solvetranspose = NULL;
      S->ops->matsolvetranspose = NULL;
      S->ops->solveadd = NULL;
      S->ops->solvetransposeadd = NULL;
      S->factortype = MAT_FACTOR_NONE;
      PetscCall(PetscFree(S->solvertype));
    }
  case MAT_FACTOR_SCHUR_FACTORED: // fall-through
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9960: /*@
9961: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9963: Logically Collective
9965: Input Parameters:
9966: + F - the factored matrix obtained by calling `MatGetFactor()`
9967: . S - location where the Schur complement is stored
9968: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9970: Level: advanced
9972: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9973: @*/
PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
{
  PetscFunctionBegin;
  /* Invalidate the caller's borrowed reference; ownership stays with F */
  if (S) {
    *S = NULL;
  }
  /* Record the state the caller left the Schur matrix in, then reconcile its
     internal factorization state accordingly */
  F->schur_status = status;
  PetscCall(MatFactorUpdateSchurStatus_Private(F));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9987: /*@
9988: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9990: Logically Collective
9992: Input Parameters:
9993: + F - the factored matrix obtained by calling `MatGetFactor()`
9994: . rhs - location where the right-hand side of the Schur complement system is stored
9995: - sol - location where the solution of the Schur complement system has to be returned
9997: Level: advanced
9999: Notes:
10000: The sizes of the vectors should match the size of the Schur complement
10002: Must be called after `MatFactorSetSchurIS()`
10004: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
10005: @*/
10006: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
10007: {
10008: PetscFunctionBegin;
10015: PetscCheckSameComm(F, 1, rhs, 2);
10016: PetscCheckSameComm(F, 1, sol, 3);
10017: PetscCall(MatFactorFactorizeSchurComplement(F));
10018: switch (F->schur_status) {
10019: case MAT_FACTOR_SCHUR_FACTORED:
10020: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
10021: break;
10022: case MAT_FACTOR_SCHUR_INVERTED:
10023: PetscCall(MatMultTranspose(F->schur, rhs, sol));
10024: break;
10025: default:
10026: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10027: }
10028: PetscFunctionReturn(PETSC_SUCCESS);
10029: }
10031: /*@
10032: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
10034: Logically Collective
10036: Input Parameters:
10037: + F - the factored matrix obtained by calling `MatGetFactor()`
10038: . rhs - location where the right-hand side of the Schur complement system is stored
10039: - sol - location where the solution of the Schur complement system has to be returned
10041: Level: advanced
10043: Notes:
10044: The sizes of the vectors should match the size of the Schur complement
10046: Must be called after `MatFactorSetSchurIS()`
10048: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
10049: @*/
10050: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
10051: {
10052: PetscFunctionBegin;
10059: PetscCheckSameComm(F, 1, rhs, 2);
10060: PetscCheckSameComm(F, 1, sol, 3);
10061: PetscCall(MatFactorFactorizeSchurComplement(F));
10062: switch (F->schur_status) {
10063: case MAT_FACTOR_SCHUR_FACTORED:
10064: PetscCall(MatSolve(F->schur, rhs, sol));
10065: break;
10066: case MAT_FACTOR_SCHUR_INVERTED:
10067: PetscCall(MatMult(F->schur, rhs, sol));
10068: break;
10069: default:
10070: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10071: }
10072: PetscFunctionReturn(PETSC_SUCCESS);
10073: }
10075: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
10076: #if PetscDefined(HAVE_CUDA)
10077: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
10078: #endif
10080: /* Schur status updated in the interface */
/* Form the explicit inverse of the (already factored) Schur matrix in place.
   The schur_status field is updated by the public interface, not here. */
static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
{
  Mat S = F->schur;

  PetscFunctionBegin;
  if (S) {
    PetscMPIInt size;
    PetscBool isdense, isdensecuda;

    /* Only sequential dense (CPU or CUDA) Schur matrices are supported */
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
    PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
    PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
    PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
    PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
    PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
    if (isdense) {
      PetscCall(MatSeqDenseInvertFactors_Private(S));
    } else if (isdensecuda) {
#if defined(PETSC_HAVE_CUDA)
      PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
#endif
    }
    // TODO: no inversion kernel for MATSEQDENSEHIP yet
    PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
10109: /*@
10110: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10112: Logically Collective
10114: Input Parameter:
10115: . F - the factored matrix obtained by calling `MatGetFactor()`
10117: Level: advanced
10119: Notes:
10120: Must be called after `MatFactorSetSchurIS()`.
10122: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
10124: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10125: @*/
PetscErrorCode MatFactorInvertSchurComplement(Mat F)
{
  PetscFunctionBegin;
  /* Already inverted: nothing to do */
  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
  /* A factorization is required before the explicit inverse can be formed */
  PetscCall(MatFactorFactorizeSchurComplement(F));
  PetscCall(MatFactorInvertSchurComplement_Private(F));
  F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
  PetscFunctionReturn(PETSC_SUCCESS);
}
10138: /*@
10139: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10141: Logically Collective
10143: Input Parameter:
10144: . F - the factored matrix obtained by calling `MatGetFactor()`
10146: Level: advanced
10148: Note:
10149: Must be called after `MatFactorSetSchurIS()`
10151: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10152: @*/
PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
{
  MatFactorInfo info;

  PetscFunctionBegin;
  /* Already factored (or explicitly inverted): nothing to do */
  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
  /* Use default factorization options (zeroed info structure) */
  PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
  if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
    PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
  } else {
    PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
  }
  PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
  F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
  PetscFunctionReturn(PETSC_SUCCESS);
}
10173: /*@
10174: MatPtAP - Creates the matrix product $C = P^T * A * P$
10176: Neighbor-wise Collective
10178: Input Parameters:
10179: + A - the matrix
10180: . P - the projection matrix
10181: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10182: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10183: if the result is a dense matrix this is irrelevant
10185: Output Parameter:
10186: . C - the product matrix
10188: Level: intermediate
10190: Notes:
10191: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10193: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_PtAP`
10194: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10196: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10198: Developer Note:
10199: For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.
10201: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10202: @*/
PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
  if (scall == MAT_INITIAL_MATRIX) {
    /* First call: set up the MATPRODUCT_PtAP product object and run the symbolic phase */
    PetscCall(MatProductCreate(A, P, NULL, C));
    PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
    PetscCall(MatProductSetAlgorithm(*C, "default"));
    PetscCall(MatProductSetFill(*C, fill));
    /* Mark the product as coming through the convenience API (affects option prefixes) */
    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetFromOptions(*C));
    PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
    PetscCall(MatProductSymbolic(*C));
  } else { /* scall == MAT_REUSE_MATRIX */
    /* Reuse the existing product object, only swapping in the (possibly new) operands */
    PetscCall(MatProductReplaceMats(A, P, NULL, *C));
  }
  PetscCall(MatProductNumeric(*C));
  /* P^T A P inherits symmetry (and the cached SPD flag) from a symmetric A */
  if (A->symmetric == PETSC_BOOL3_TRUE) {
    PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
    (*C)->spd = A->spd;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
10231: /*@
10232: MatRARt - Creates the matrix product $C = R * A * R^T$
10234: Neighbor-wise Collective
10236: Input Parameters:
10237: + A - the matrix
10238: . R - the projection matrix
10239: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10240: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10241: if the result is a dense matrix this is irrelevant
10243: Output Parameter:
10244: . C - the product matrix
10246: Level: intermediate
10248: Notes:
10249: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10251: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_RARt`
10252: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10254: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10255: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10256: the parallel `MatRARt()` is implemented computing the explicit transpose of `R`, which can be very expensive.
10257: We recommend using `MatPtAP()` when possible.
10259: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10261: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10262: @*/
10263: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10264: {
10265: PetscFunctionBegin;
10266: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5); /* a reused C must carry the product struct created by a prior MAT_INITIAL_MATRIX call */
10267: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10269: if (scall == MAT_INITIAL_MATRIX) {
10270: PetscCall(MatProductCreate(A, R, NULL, C)); /* set up the MatProduct pipeline: create, type, algorithm, fill, options, symbolic */
10271: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10272: PetscCall(MatProductSetAlgorithm(*C, "default"));
10273: PetscCall(MatProductSetFill(*C, fill));
10275: (*C)->product->api_user = PETSC_TRUE; /* record that the product was created through this convenience API */
10276: PetscCall(MatProductSetFromOptions(*C));
10277: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name); /* no implementation registered for this pair of matrix types */
10278: PetscCall(MatProductSymbolic(*C));
10279: } else { /* scall == MAT_REUSE_MATRIX */
10280: PetscCall(MatProductReplaceMats(A, R, NULL, *C)); /* A or R may be different objects than in the previous call */
10281: }
10283: PetscCall(MatProductNumeric(*C));
10284: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE)); /* R*A*R^T inherits symmetry from A */
10285: PetscFunctionReturn(PETSC_SUCCESS);
10286: }
/* Shared implementation behind MatMatMult(), MatMatTransposeMult() and MatTransposeMatMult():
   handles MAT_INITIAL_MATRIX vs MAT_REUSE_MATRIX, including the special case where the user
   supplies an already-created dense C with MAT_REUSE_MATRIX and no product struct attached. */
10288: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10289: {
10290: PetscBool flg = PETSC_TRUE; /* whether the symbolic phase (type/options/symbolic) must run on C below */
10292: PetscFunctionBegin;
10293: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10294: if (scall == MAT_INITIAL_MATRIX) {
10295: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10296: PetscCall(MatProductCreate(A, B, NULL, C));
10297: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10298: PetscCall(MatProductSetFill(*C, fill));
10299: } else { /* scall == MAT_REUSE_MATRIX */
10300: Mat_Product *product = (*C)->product;
10302: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, "")); /* only a dense C may be supplied by the user without a product struct */
10303: if (flg && product && product->type != ptype) {
10304: PetscCall(MatProductClear(*C)); /* dense C reused for a different product type: discard the stale product data */
10305: product = NULL;
10306: }
10307: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10308: if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10309: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10310: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10311: product = (*C)->product;
10312: product->fill = fill;
10313: product->clear = PETSC_TRUE;
10314: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10315: flg = PETSC_FALSE; /* product already fully set up: skip the symbolic phase below */
10316: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10317: }
10318: }
10319: if (flg) { /* first use of this product struct: finish the symbolic setup */
10320: (*C)->product->api_user = PETSC_TRUE;
10321: PetscCall(MatProductSetType(*C, ptype));
10322: PetscCall(MatProductSetFromOptions(*C));
10323: PetscCall(MatProductSymbolic(*C));
10324: }
10325: PetscCall(MatProductNumeric(*C));
10326: PetscFunctionReturn(PETSC_SUCCESS);
10327: }
10329: /*@
10330: MatMatMult - Performs matrix-matrix multiplication $ C=A*B $.
10332: Neighbor-wise Collective
10334: Input Parameters:
10335: + A - the left matrix
10336: . B - the right matrix
10337: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10338: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10339: if the result is a dense matrix this is irrelevant
10341: Output Parameter:
10342: . C - the product matrix
10344: Notes:
10345: Unless scall is `MAT_REUSE_MATRIX` C will be created.
10347: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10348: call to this function with `MAT_INITIAL_MATRIX`.
10350: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10352: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10353: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10355: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10357: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AB`
10358: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10360: Example of Usage:
10361: .vb
10362: MatProductCreate(A,B,NULL,&C);
10363: MatProductSetType(C,MATPRODUCT_AB);
10364: MatProductSymbolic(C);
10365: MatProductNumeric(C); // compute C=A * B
10366: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10367: MatProductNumeric(C);
10368: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10369: MatProductNumeric(C);
10370: .ve
10372: Level: intermediate
10374: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10375: @*/
10376: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10377: {
10378: PetscFunctionBegin;
10379: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C)); /* thin wrapper: all reuse/setup logic lives in MatProduct_Private() */
10380: PetscFunctionReturn(PETSC_SUCCESS);
10381: }
10383: /*@
10384: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10386: Neighbor-wise Collective
10388: Input Parameters:
10389: + A - the left matrix
10390: . B - the right matrix
10391: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10392: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10394: Output Parameter:
10395: . C - the product matrix
10397: Options Database Key:
10398: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10399: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10400: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10402: Level: intermediate
10404: Notes:
10405: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10407: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10409: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10410: actually needed.
10412: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10413: and for pairs of `MATMPIDENSE` matrices.
10415: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABt`
10416: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10418: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10420: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()`, `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10421: @*/
10422: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10423: {
10424: PetscFunctionBegin;
10425: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10426: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE)); /* A*A^T is symmetric by construction */
10427: PetscFunctionReturn(PETSC_SUCCESS);
10428: }
10430: /*@
10431: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10433: Neighbor-wise Collective
10435: Input Parameters:
10436: + A - the left matrix
10437: . B - the right matrix
10438: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10439: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10441: Output Parameter:
10442: . C - the product matrix
10444: Level: intermediate
10446: Notes:
10447: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10449: `MAT_REUSE_MATRIX` can only be used if `A` and `B` have the same nonzero pattern as in the previous call.
10451: This is a convenience routine that wraps the use of `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AtB`
10452: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10454: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10455: actually needed.
10457: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10458: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10460: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10462: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10463: @*/
10464: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10465: {
10466: PetscFunctionBegin;
10467: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C)); /* thin wrapper: all reuse/setup logic lives in MatProduct_Private() */
10468: PetscFunctionReturn(PETSC_SUCCESS);
10469: }
10471: /*@
10472: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10474: Neighbor-wise Collective
10476: Input Parameters:
10477: + A - the left matrix
10478: . B - the middle matrix
10479: . C - the right matrix
10480: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10481: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10482: if the result is a dense matrix this is irrelevant
10484: Output Parameter:
10485: . D - the product matrix
10487: Level: intermediate
10489: Notes:
10490: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10492: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10494: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABC`
10495: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10497: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10498: actually needed.
10500: If you have many matrices with the same non-zero structure to multiply, you
10501: should use `MAT_REUSE_MATRIX` in all calls but the first
10503: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10505: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10506: @*/
10507: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10508: {
10509: PetscFunctionBegin;
10510: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6); /* a reused D must carry the product struct created by a prior MAT_INITIAL_MATRIX call */
10511: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10513: if (scall == MAT_INITIAL_MATRIX) {
10514: PetscCall(MatProductCreate(A, B, C, D)); /* set up the MATPRODUCT_ABC pipeline: create, type, algorithm, fill, options, symbolic */
10515: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10516: PetscCall(MatProductSetAlgorithm(*D, "default"));
10517: PetscCall(MatProductSetFill(*D, fill));
10519: (*D)->product->api_user = PETSC_TRUE; /* record that the product was created through this convenience API */
10520: PetscCall(MatProductSetFromOptions(*D));
10521: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10522: ((PetscObject)C)->type_name);
10523: PetscCall(MatProductSymbolic(*D));
10524: } else { /* user may change input matrices when REUSE */
10525: PetscCall(MatProductReplaceMats(A, B, C, *D));
10526: }
10527: PetscCall(MatProductNumeric(*D));
10528: PetscFunctionReturn(PETSC_SUCCESS);
10529: }
10531: /*@
10532: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10534: Collective
10536: Input Parameters:
10537: + mat - the matrix
10538: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10539: . subcomm - MPI communicator split from the communicator where mat resides in (or `MPI_COMM_NULL` if nsubcomm is used)
10540: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10542: Output Parameter:
10543: . matredundant - redundant matrix
10545: Level: advanced
10547: Notes:
10548: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10549: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10551: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10552: calling it.
10554: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
10556: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10557: @*/
10558: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10559: {
10560: MPI_Comm comm;
10561: PetscMPIInt size;
10562: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10563: Mat_Redundant *redund = NULL;
10564: PetscSubcomm psubcomm = NULL;
10565: MPI_Comm subcomm_in = subcomm; /* remember user-supplied communicator; `subcomm` itself may be replaced below */
10566: Mat *matseq;
10567: IS isrow, iscol;
10568: PetscBool newsubcomm = PETSC_FALSE; /* PETSC_TRUE when this routine created (and thus owns) the subcommunicator */
10570: PetscFunctionBegin;
10572: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10573: PetscAssertPointer(*matredundant, 5);
10575: }
10577: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10578: if (size == 1 || nsubcomm == 1) { /* trivial case: each "redundant" matrix is just a copy of mat */
10579: if (reuse == MAT_INITIAL_MATRIX) {
10580: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10581: } else {
10582: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10583: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10584: }
10585: PetscFunctionReturn(PETSC_SUCCESS);
10586: }
10588: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10589: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10590: MatCheckPreallocated(mat, 1);
10592: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10593: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10594: /* create psubcomm, then get subcomm */
10595: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10596: PetscCallMPI(MPI_Comm_size(comm, &size));
10597: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10599: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10600: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10601: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10602: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10603: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL)); /* keep our own reference so psubcomm can be destroyed now */
10604: newsubcomm = PETSC_TRUE;
10605: PetscCall(PetscSubcommDestroy(&psubcomm));
10606: }
10608: /* get isrow, iscol and a local sequential matrix matseq[0] */
10609: if (reuse == MAT_INITIAL_MATRIX) {
10610: mloc_sub = PETSC_DECIDE;
10611: nloc_sub = PETSC_DECIDE;
10612: if (bs < 1) { /* no block size set: split rows/columns pointwise, otherwise respect the block size */
10613: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10614: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10615: } else {
10616: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10617: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10618: }
10619: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm)); /* prefix sum gives this rank's row range [rstart,rend) in the subcomm */
10620: rstart = rend - mloc_sub;
10621: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow)); /* this rank's rows, all columns */
10622: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10623: PetscCall(ISSetIdentity(iscol));
10624: } else { /* reuse == MAT_REUSE_MATRIX */
10625: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10626: /* retrieve subcomm */
10627: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10628: redund = (*matredundant)->redundant; /* index sets and sequential matrix were stashed on the first call */
10629: isrow = redund->isrow;
10630: iscol = redund->iscol;
10631: matseq = redund->matseq;
10632: }
10633: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10635: /* get matredundant over subcomm */
10636: if (reuse == MAT_INITIAL_MATRIX) {
10637: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10639: /* create a supporting struct and attach it to C for reuse */
10640: PetscCall(PetscNew(&redund));
10641: (*matredundant)->redundant = redund;
10642: redund->isrow = isrow;
10643: redund->iscol = iscol;
10644: redund->matseq = matseq;
10645: if (newsubcomm) {
10646: redund->subcomm = subcomm; /* transfer ownership of the duplicated communicator to the redundant matrix */
10647: } else {
10648: redund->subcomm = MPI_COMM_NULL;
10649: }
10650: } else {
10651: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10652: }
10653: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10654: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) { /* propagate CPU binding from the sequential pieces to the result */
10655: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10656: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10657: }
10658: #endif
10659: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10660: PetscFunctionReturn(PETSC_SUCCESS);
10661: }
10663: /*@C
10664: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10665: a given `Mat`. Each submatrix can span multiple procs.
10667: Collective
10669: Input Parameters:
10670: + mat - the matrix
10671: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10672: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10674: Output Parameter:
10675: . subMat - parallel sub-matrices each spanning a given `subcomm`
10677: Level: advanced
10679: Notes:
10680: The submatrix partition across processors is dictated by `subComm` a
10681: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10682: is not restricted to be grouped with consecutive original MPI processes.
10684: Due the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10685: map directly to the layout of the original matrix [wrt the local
10686: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10687: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10688: the `subMat`. However the offDiagMat loses some columns - and this is
10689: reconstructed with `MatSetValues()`
10691: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10693: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10694: @*/
10695: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10696: {
10697: PetscMPIInt commsize, subCommSize;
10699: PetscFunctionBegin;
10700: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10701: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10702: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize); /* subComm must be a subset of mat's communicator */
10704: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10705: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10706: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat); /* dispatch to the matrix type's implementation; errors if none exists */
10707: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10708: PetscFunctionReturn(PETSC_SUCCESS);
10709: }
10711: /*@
10712: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10714: Not Collective
10716: Input Parameters:
10717: + mat - matrix to extract local submatrix from
10718: . isrow - local row indices for submatrix
10719: - iscol - local column indices for submatrix
10721: Output Parameter:
10722: . submat - the submatrix
10724: Level: intermediate
10726: Notes:
10727: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10729: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10730: the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communicator of `mat`'s.
10732: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10733: `MatSetValuesBlockedLocal()` will also be implemented.
10735: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10736: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
10738: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10739: @*/
10740: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10741: {
10742: PetscFunctionBegin;
10746: PetscCheckSameComm(isrow, 2, iscol, 3);
10747: PetscAssertPointer(submat, 4);
10748: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10750: if (mat->ops->getlocalsubmatrix) {
10751: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat); /* type-specific implementation when available */
10752: } else {
10753: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat)); /* generic fallback: a lightweight reference into mat */
10754: }
10755: (*submat)->assembled = mat->assembled; /* inherit the assembled state so the submatrix is usable immediately */
10756: PetscFunctionReturn(PETSC_SUCCESS);
10757: }
10759: /*@
10760: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10762: Not Collective
10764: Input Parameters:
10765: + mat - matrix to extract local submatrix from
10766: . isrow - local row indices for submatrix
10767: . iscol - local column indices for submatrix
10768: - submat - the submatrix
10770: Level: intermediate
10772: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10773: @*/
10774: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10775: {
10776: PetscFunctionBegin;
10780: PetscCheckSameComm(isrow, 2, iscol, 3);
10781: PetscAssertPointer(submat, 4);
10784: if (mat->ops->restorelocalsubmatrix) {
10785: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat); /* type-specific restore, mirroring getlocalsubmatrix */
10786: } else {
10787: PetscCall(MatDestroy(submat)); /* the generic path in MatGetLocalSubMatrix() created a reference we must release */
10788: }
10789: *submat = NULL; /* caller's handle is invalidated either way */
10790: PetscFunctionReturn(PETSC_SUCCESS);
10791: }
10793: /*@
10794: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10796: Collective
10798: Input Parameter:
10799: . mat - the matrix
10801: Output Parameter:
10802: . is - if any rows have zero diagonals this contains the list of them
10804: Level: developer
10806: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10807: @*/
10808: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10809: {
10810: PetscFunctionBegin;
10813: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10814: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10816: if (!mat->ops->findzerodiagonals) {
10817: Vec diag;
10818: const PetscScalar *a;
10819: PetscInt *rows;
10820: PetscInt rStart, rEnd, r, nrow = 0;
10822: /* Generic fallback: extract the diagonal and scan it in two passes --
10822:    first count the zero entries, then record their global row indices. */
10822: PetscCall(MatCreateVecs(mat, &diag, NULL));
10823: PetscCall(MatGetDiagonal(mat, diag));
10824: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10825: PetscCall(VecGetArrayRead(diag, &a));
10826: for (r = 0; r < rEnd - rStart; ++r)
10827: if (a[r] == 0.0) ++nrow; /* pass 1: count so the rows array can be sized exactly */
10828: PetscCall(PetscMalloc1(nrow, &rows));
10829: nrow = 0;
10830: for (r = 0; r < rEnd - rStart; ++r)
10831: if (a[r] == 0.0) rows[nrow++] = r + rStart; /* pass 2: store global row numbers */
10832: PetscCall(VecRestoreArrayRead(diag, &a));
10833: PetscCall(VecDestroy(&diag));
10834: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is)); /* IS takes ownership of rows */
10835: } else {
10836: PetscUseTypeMethod(mat, findzerodiagonals, is);
10837: }
10838: PetscFunctionReturn(PETSC_SUCCESS);
10839: }
10841: /*@
10842: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10844: Collective
10846: Input Parameter:
10847: . mat - the matrix
10849: Output Parameter:
10850: . is - contains the list of rows with off block diagonal entries
10852: Level: developer
10854: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10855: @*/
10856: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10857: {
10858: PetscFunctionBegin;
10861: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10862: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10864: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is); /* no generic fallback: errors if the matrix type has no implementation */
10865: PetscFunctionReturn(PETSC_SUCCESS);
10866: }
10868: /*@C
10869: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10871: Collective; No Fortran Support
10873: Input Parameter:
10874: . mat - the matrix
10876: Output Parameter:
10877: . values - the block inverses in column major order (FORTRAN-like)
10879: Level: advanced
10881: Notes:
10882: The size of the blocks is determined by the block size of the matrix.
10884: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10886: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
10888: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10889: @*/
10890: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10891: {
10892: PetscFunctionBegin;
10894: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10895: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10896: PetscUseTypeMethod(mat, invertblockdiagonal, values); /* dispatch to the matrix type; errors if there is no implementation */
10897: PetscFunctionReturn(PETSC_SUCCESS);
10898: }
10900: /*@
10901: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10903: Collective; No Fortran Support
10905: Input Parameters:
10906: + mat - the matrix
10907: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10908: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10910: Output Parameter:
10911: . values - the block inverses in column major order (FORTRAN-like)
10913: Level: advanced
10915: Notes:
10916: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10918: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10920: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10921: @*/
10922: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10923: {
10924: PetscFunctionBegin;
10926: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10927: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10928: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values); /* dispatch to the matrix type; errors if there is no implementation */
10929: PetscFunctionReturn(PETSC_SUCCESS);
10930: }
10932: /*@
10933: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10935: Collective
10937: Input Parameters:
10938: + A - the matrix
10939: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10941: Level: advanced
10943: Note:
10944: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10946: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10947: @*/
10948: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10949: {
10950: const PetscScalar *vals; /* block inverses from A, in column-major order (see MatInvertBlockDiagonal()) */
10951: PetscInt *dnnz;
10952: PetscInt m, rstart, rend, bs, i, j;
10954: PetscFunctionBegin;
10955: PetscCall(MatInvertBlockDiagonal(A, &vals));
10956: PetscCall(MatGetBlockSize(A, &bs));
10957: PetscCall(MatGetLocalSize(A, &m, NULL));
10958: PetscCall(MatSetLayouts(C, A->rmap, A->cmap)); /* C mirrors A's parallel layout and block sizes */
10959: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10960: PetscCall(PetscMalloc1(m / bs, &dnnz));
10961: for (j = 0; j < m / bs; j++) dnnz[j] = 1; /* exactly one (block) nonzero per block row: the diagonal block */
10962: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10963: PetscCall(PetscFree(dnnz));
10964: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10965: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE)); /* vals is column-major, so insert in column orientation */
10966: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10967: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); /* each rank sets only its own diagonal blocks */
10968: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10969: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10970: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_FALSE)); /* restore default options for later use of C */
10971: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10972: PetscFunctionReturn(PETSC_SUCCESS);
10973: }
10975: /*@
10976: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10977: via `MatTransposeColoringCreate()`.
10979: Collective
10981: Input Parameter:
10982: . c - coloring context
10984: Level: intermediate
10986: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10987: @*/
10988: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10989: {
10990: MatTransposeColoring matcolor = *c;
10992: PetscFunctionBegin;
10993: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10994: if (--((PetscObject)matcolor)->refct > 0) { /* other references remain: just drop ours */
10995: matcolor = NULL;
10996: PetscFunctionReturn(PETSC_SUCCESS);
10997: }
10999: /* last reference: free all internal arrays, then the header itself */
10999: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
11000: PetscCall(PetscFree(matcolor->rows));
11001: PetscCall(PetscFree(matcolor->den2sp));
11002: PetscCall(PetscFree(matcolor->colorforcol));
11003: PetscCall(PetscFree(matcolor->columns));
11004: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart)); /* lstart is only allocated when brows > 0 */
11005: PetscCall(PetscHeaderDestroy(c));
11006: PetscFunctionReturn(PETSC_SUCCESS);
11007: }
11009: /*@
11010: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
11011: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
11012: `MatTransposeColoring` to sparse `B`.
11014: Collective
11016: Input Parameters:
11017: + coloring - coloring context created with `MatTransposeColoringCreate()`
11018: - B - sparse matrix
11020: Output Parameter:
11021: . Btdense - dense matrix $B^T$
11023: Level: developer
11025: Note:
11026: These are used internally for some implementations of `MatRARt()`
11028: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
11029: @*/
PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
{
  PetscFunctionBegin;
  /* Dispatch to B's type-specific implementation, which fills the dense Btdense
     from the sparse B using the transpose-coloring context */
  PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
  PetscFunctionReturn(PETSC_SUCCESS);
}
11041: /*@
11042: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
11043: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover sparse matrix
11045: $C_{sp}$ from $C_{den}$.
11047: Collective
11049: Input Parameters:
11050: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
11051: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
11053: Output Parameter:
11054: . Csp - sparse matrix
11056: Level: developer
11058: Note:
11059: These are used internally for some implementations of `MatRARt()`
11061: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
11062: @*/
PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
{
  PetscFunctionBegin;
  /* Dispatch to Csp's type-specific implementation, which scatters the dense
     product Cden back into the sparse Csp using the coloring context */
  PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
  /* Assemble so Csp is immediately usable by the caller */
  PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
11076: /*@
11077: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
11079: Collective
11081: Input Parameters:
11082: + mat - the matrix product C
11083: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
11085: Output Parameter:
11086: . color - the new coloring context
11088: Level: intermediate
11090: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
11091: `MatTransColoringApplyDenToSp()`
11092: @*/
PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
{
  MatTransposeColoring c;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscAssertPointer(color, 3);
  PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  /* Create the object header on mat's communicator; MatTransposeColoringDestroy()
     is registered as the destroy routine */
  PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
  /* Inherit the coloring type from the input ISColoring */
  c->ctype = iscoloring->ctype;
  /* The matrix type fills in the coloring data; errors if the type has no support */
  PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
  *color = c;
  PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
11111: /*@
11112: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
11113: matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.
11115: Not Collective
11117: Input Parameter:
11118: . mat - the matrix
11120: Output Parameter:
11121: . state - the current state
11123: Level: intermediate
11125: Notes:
11126: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
11127: different matrices
11129: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11131: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
11133: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11134: @*/
PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
{
  PetscFunctionBegin;
  /* Simply report the cached counter; it is updated elsewhere whenever the
     nonzero structure changes */
  *state = mat->nonzerostate;
  PetscFunctionReturn(PETSC_SUCCESS);
}
11143: /*@
11144: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11145: matrices from each processor
11147: Collective
11149: Input Parameters:
+ comm - the communicator the parallel matrix will live on
11151: . seqmat - the input sequential matrices
11152: . n - number of local columns (or `PETSC_DECIDE`)
11153: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11155: Output Parameter:
11156: . mpimat - the parallel matrix generated
11158: Level: developer
11160: Note:
11161: The number of columns of the matrix in EACH processor MUST be the same.
11163: .seealso: [](ch_matrices), `Mat`
11164: @*/
11165: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11166: {
11167: PetscMPIInt size;
11169: PetscFunctionBegin;
11170: PetscCallMPI(MPI_Comm_size(comm, &size));
11171: if (size == 1) {
11172: if (reuse == MAT_INITIAL_MATRIX) {
11173: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11174: } else {
11175: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11176: }
11177: PetscFunctionReturn(PETSC_SUCCESS);
11178: }
11180: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11182: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11183: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11184: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11185: PetscFunctionReturn(PETSC_SUCCESS);
11186: }
11188: /*@
11189: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11191: Collective
11193: Input Parameters:
11194: + A - the matrix to create subdomains from
11195: - N - requested number of subdomains
11197: Output Parameters:
11198: + n - number of subdomains resulting on this MPI process
11199: - iss - `IS` list with indices of subdomains on this MPI process
11201: Level: advanced
11203: Note:
11204: The number of subdomains must be smaller than the communicator size
11206: .seealso: [](ch_matrices), `Mat`, `IS`
11207: @*/
11208: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11209: {
11210: MPI_Comm comm, subcomm;
11211: PetscMPIInt size, rank, color;
11212: PetscInt rstart, rend, k;
11214: PetscFunctionBegin;
11215: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11216: PetscCallMPI(MPI_Comm_size(comm, &size));
11217: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11218: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11219: *n = 1;
11220: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11221: color = rank / k;
11222: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11223: PetscCall(PetscMalloc1(1, iss));
11224: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11225: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11226: PetscCallMPI(MPI_Comm_free(&subcomm));
11227: PetscFunctionReturn(PETSC_SUCCESS);
11228: }
11230: /*@
11231: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11233: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11234: If they are not the same, uses `MatMatMatMult()`.
11236: Once the coarse grid problem is constructed, correct for interpolation operators
11237: that are not of full rank, which can legitimately happen in the case of non-nested
11238: geometric multigrid.
11240: Input Parameters:
11241: + restrct - restriction operator
11242: . dA - fine grid matrix
11243: . interpolate - interpolation operator
11244: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
- fill - expected fill, use `PETSC_CURRENT` or `PETSC_DETERMINE` if you do not have a good estimate
11247: Output Parameter:
11248: . A - the Galerkin coarse matrix
11250: Options Database Key:
11251: . -pc_mg_galerkin (both|pmat|mat|none) - for what matrices the Galerkin process should be used
11253: Level: developer
11255: Note:
11256: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
11258: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11259: @*/
11260: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11261: {
11262: IS zerorows;
11263: Vec diag;
11265: PetscFunctionBegin;
11266: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11267: /* Construct the coarse grid matrix */
11268: if (interpolate == restrct) {
11269: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11270: } else {
11271: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11272: }
11274: /* If the interpolation matrix is not of full rank, A will have zero rows.
11275: This can legitimately happen in the case of non-nested geometric multigrid.
11276: In that event, we set the rows of the matrix to the rows of the identity,
11277: ignoring the equations (as the RHS will also be zero). */
11279: PetscCall(MatFindZeroRows(*A, &zerorows));
11281: if (zerorows != NULL) { /* if there are any zero rows */
11282: PetscCall(MatCreateVecs(*A, &diag, NULL));
11283: PetscCall(MatGetDiagonal(*A, diag));
11284: PetscCall(VecISSet(diag, zerorows, 1.0));
11285: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11286: PetscCall(VecDestroy(&diag));
11287: PetscCall(ISDestroy(&zerorows));
11288: }
11289: PetscFunctionReturn(PETSC_SUCCESS);
11290: }
11292: /*@C
11293: MatSetOperation - Allows user to set a matrix operation for any matrix type
11295: Logically Collective
11297: Input Parameters:
11298: + mat - the matrix
11299: . op - the name of the operation
11300: - f - the function that provides the operation
11302: Level: developer
11304: Example Usage:
11305: .vb
11306: extern PetscErrorCode usermult(Mat, Vec, Vec);
11308: PetscCall(MatCreateXXX(comm, ..., &A));
11309: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)usermult));
11310: .ve
11312: Notes:
11313: See the file `include/petscmat.h` for a complete list of matrix
11314: operations, which all have the form MATOP_<OPERATION>, where
11315: <OPERATION> is the name (in all capital letters) of the
11316: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11318: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11319: sequence as the usual matrix interface routines, since they
11320: are intended to be accessed via the usual matrix interface
11321: routines, e.g.,
11322: .vb
11323: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11324: .ve
11326: In particular each function MUST return `PETSC_SUCCESS` on success and
11327: nonzero on failure.
11329: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11331: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11332: @*/
PetscErrorCode MatSetOperation(Mat mat, MatOperation op, PetscErrorCodeFn *f)
{
  PetscFunctionBegin;
  /* When MATOP_VIEW is overridden for the first time, stash the type's native
     view routine in viewnative so it remains reachable */
  if (op == MATOP_VIEW && !mat->ops->viewnative && f != (PetscErrorCodeFn *)mat->ops->view) mat->ops->viewnative = mat->ops->view;
  /* The ops table is a struct of function pointers; index it as a flat array */
  (((PetscErrorCodeFn **)mat->ops)[op]) = f;
  PetscFunctionReturn(PETSC_SUCCESS);
}
11342: /*@C
11343: MatGetOperation - Gets a matrix operation for any matrix type.
11345: Not Collective
11347: Input Parameters:
11348: + mat - the matrix
11349: - op - the name of the operation
11351: Output Parameter:
11352: . f - the function that provides the operation
11354: Level: developer
11356: Example Usage:
11357: .vb
11358: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11360: MatGetOperation(A, MATOP_MULT, (PetscErrorCodeFn **)&usermult);
11361: .ve
11363: Notes:
11364: See the file `include/petscmat.h` for a complete list of matrix
11365: operations, which all have the form MATOP_<OPERATION>, where
11366: <OPERATION> is the name (in all capital letters) of the
11367: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11369: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11371: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11372: @*/
PetscErrorCode MatGetOperation(Mat mat, MatOperation op, PetscErrorCodeFn **f)
{
  PetscFunctionBegin;
  /* The ops table is a struct of function pointers; read slot op as from a flat array */
  *f = (((PetscErrorCodeFn **)mat->ops)[op]);
  PetscFunctionReturn(PETSC_SUCCESS);
}
11381: /*@
11382: MatHasOperation - Determines whether the given matrix supports the particular operation.
11384: Not Collective
11386: Input Parameters:
11387: + mat - the matrix
11388: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11390: Output Parameter:
11391: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11393: Level: advanced
11395: Note:
11396: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
11398: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11399: @*/
PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
{
  PetscFunctionBegin;
  PetscAssertPointer(has, 3);
  if (mat->ops->hasoperation) {
    /* The type provides its own query (it may support ops not in its table) */
    PetscUseTypeMethod(mat, hasoperation, op, has);
  } else {
    /* Default: an operation is available iff its slot in the ops table is set */
    if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
    else {
      *has = PETSC_FALSE;
      if (op == MATOP_CREATE_SUBMATRIX) {
        PetscMPIInt size;

        /* On one process, MatCreateSubMatrix() can be implemented via
           MatCreateSubMatrices(), so report that capability instead */
        PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
        if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
11422: /*@
11423: MatHasCongruentLayouts - Determines whether the rows and columns layouts of the matrix are congruent
11425: Collective
11427: Input Parameter:
11428: . mat - the matrix
11430: Output Parameter:
11431: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11433: Level: beginner
11435: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11436: @*/
PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
{
  PetscFunctionBegin;
  PetscAssertPointer(cong, 2);
  if (!mat->rmap || !mat->cmap) {
    /* With a missing layout, congruent only in the degenerate case where both
       pointers are identical (e.g. both NULL) */
    *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
    PetscCall(PetscLayoutSetUp(mat->rmap));
    PetscCall(PetscLayoutSetUp(mat->cmap));
    PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
    /* Cache the answer so subsequent calls avoid the comparison */
    if (*cong) mat->congruentlayouts = 1;
    else mat->congruentlayouts = 0;
  } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* MatSetInf - internal (no manual page): dispatches to the matrix type's
   'setinf' implementation; errors if the type does not provide one.
   NOTE(review): name suggests it fills the matrix with infinities — confirm
   against the type-specific implementations. */
PetscErrorCode MatSetInf(Mat A)
{
  PetscFunctionBegin;
  PetscUseTypeMethod(A, setinf);
  PetscFunctionReturn(PETSC_SUCCESS);
}
11464: /*@
11465: MatCreateGraph - create a scalar matrix (that is a matrix with one vertex for each block vertex in the original matrix), for use in graph algorithms
11466: and possibly removes small values from the graph structure.
11468: Collective
11470: Input Parameters:
11471: + A - the matrix
11472: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11473: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11474: . filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11475: . num_idx - size of 'index' array
11476: - index - array of block indices to use for graph strength of connection weight
11478: Output Parameter:
11479: . graph - the resulting graph
11481: Level: advanced
11483: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11484: @*/
PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
{
  PetscFunctionBegin;
  PetscAssertPointer(graph, 7);
  /* Delegate construction to the type-specific implementation, bracketed by
     event logging; errors if the type has no 'creategraph' support */
  PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
  PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
  PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
11498: /*@
11499: MatEliminateZeros - eliminate the nondiagonal zero entries in place from the nonzero structure of a sparse `Mat` in place,
11500: meaning the same memory is used for the matrix, and no new memory is allocated.
11502: Collective
11504: Input Parameters:
11505: + A - the matrix
11506: - keep - if for a given row of `A`, the diagonal coefficient is zero, indicates whether it should be left in the structure or eliminated as well
11508: Level: intermediate
11510: Developer Note:
11511: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the end
11512: of the arrays in the data structure are unneeded.
11514: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11515: @*/
PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
{
  PetscFunctionBegin;
  /* Type-specific in-place compaction of the nonzero structure; errors if the
     matrix type provides no 'eliminatezeros' implementation */
  PetscUseTypeMethod(A, eliminatezeros, keep);
  PetscFunctionReturn(PETSC_SUCCESS);
}
11524: /*@C
11525: MatGetCurrentMemType - Get the memory location of the matrix
11527: Not Collective, but the result will be the same on all MPI processes
11529: Input Parameter:
11530: . A - the matrix whose memory type we are checking
11532: Output Parameter:
11533: . m - the memory type
11535: Level: intermediate
11537: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`, `PetscMemType`
11538: @*/
PetscErrorCode MatGetCurrentMemType(Mat A, PetscMemType *m)
{
  PetscFunctionBegin;
  PetscAssertPointer(m, 2);
  /* Types without a memtype query are assumed to keep their data on the host */
  if (A->ops->getcurrentmemtype) PetscUseTypeMethod(A, getcurrentmemtype, m);
  else *m = PETSC_MEMTYPE_HOST;
  PetscFunctionReturn(PETSC_SUCCESS);
}