Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which case
59: one is created internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(&rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
73: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
107: /*@
108: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
110: Logically Collective
112: Input Parameter:
113: . A - A matrix in unassembled, hash table form
115: Output Parameter:
116: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
118: Example:
119: .vb
120: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
121: PetscCall(MatCopyHashToXAIJ(A, B));
122: .ve
124: Level: advanced
126: Notes:
127: If `B` is `A`, then the hash table data structure will be destroyed. `B` is assembled on return.
129: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
130: @*/
131: PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
132: {
133: PetscFunctionBegin;
135: PetscUseTypeMethod(A, copyhashtoxaij, B);
136: PetscFunctionReturn(PETSC_SUCCESS);
137: }
139: /*@
140: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
142: Logically Collective
144: Input Parameter:
145: . mat - the factored matrix
147: Output Parameters:
148: + pivot - the pivot value computed
149: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and to which processes
150: share the matrix
152: Level: advanced
154: Notes:
155: This routine does not work for factorizations done with external packages.
157: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
159: This can also be called on non-factored matrices, for example, matrices used in SOR.
161: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
162: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
163: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
164: @*/
165: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
166: {
167: PetscFunctionBegin;
169: PetscAssertPointer(pivot, 2);
170: PetscAssertPointer(row, 3);
171: *pivot = mat->factorerror_zeropivot_value;
172: *row = mat->factorerror_zeropivot_row;
173: PetscFunctionReturn(PETSC_SUCCESS);
174: }
176: /*@
177: MatFactorGetError - gets the error code from a factorization
179: Logically Collective
181: Input Parameter:
182: . mat - the factored matrix
184: Output Parameter:
185: . err - the error code
187: Level: advanced
189: Note:
190: This can also be called on non-factored matrices, for example, matrices used in SOR.
192: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
193: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
194: @*/
195: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
196: {
197: PetscFunctionBegin;
199: PetscAssertPointer(err, 2);
200: *err = mat->factorerrortype;
201: PetscFunctionReturn(PETSC_SUCCESS);
202: }
204: /*@
205: MatFactorClearError - clears the error code in a factorization
207: Logically Collective
209: Input Parameter:
210: . mat - the factored matrix
212: Level: developer
214: Note:
215: This can also be called on non-factored matrices, for example, matrices used in SOR.
217: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
218: `MatGetErrorCode()`, `MatFactorError`
219: @*/
220: PetscErrorCode MatFactorClearError(Mat mat)
221: {
222: PetscFunctionBegin;
224: mat->factorerrortype = MAT_FACTOR_NOERROR;
225: mat->factorerror_zeropivot_value = 0.0;
226: mat->factorerror_zeropivot_row = 0;
227: PetscFunctionReturn(PETSC_SUCCESS);
228: }
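/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): checking for a
  zero pivot after a numeric LU factorization and clearing the error afterwards. The matrices
  F (obtained with MatGetFactor() and MatLUFactorSymbolic()) and A, and the MatFactorInfo info,
  are assumed to exist, and the code is assumed to live in a function returning PetscErrorCode.

    MatFactorError err;
    PetscCall(MatLUFactorNumeric(F, A, &info));
    PetscCall(MatFactorGetError(F, &err));
    if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
      PetscReal pivot;
      PetscInt  row;
      PetscCall(MatFactorGetErrorZeroPivot(F, &pivot, &row));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "zero pivot %g in row %" PetscInt_FMT "\n", (double)pivot, row));
      PetscCall(MatFactorClearError(F));
    }
*/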
230: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
231: {
232: Vec r, l;
233: const PetscScalar *al;
234: PetscInt i, nz, gnz, N, n, st;
236: PetscFunctionBegin;
237: PetscCall(MatCreateVecs(mat, &r, &l));
238: if (!cols) { /* nonzero rows */
239: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
240: PetscCall(MatGetSize(mat, &N, NULL));
241: PetscCall(MatGetLocalSize(mat, &n, NULL));
242: PetscCall(VecSet(l, 0.0));
243: PetscCall(VecSetRandom(r, NULL));
244: PetscCall(MatMult(mat, r, l));
245: PetscCall(VecGetArrayRead(l, &al));
246: } else { /* nonzero columns */
247: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
248: PetscCall(MatGetSize(mat, NULL, &N));
249: PetscCall(MatGetLocalSize(mat, NULL, &n));
250: PetscCall(VecSet(r, 0.0));
251: PetscCall(VecSetRandom(l, NULL));
252: PetscCall(MatMultTranspose(mat, l, r));
253: PetscCall(VecGetArrayRead(r, &al));
254: }
255: if (tol <= 0.0) {
256: for (i = 0, nz = 0; i < n; i++)
257: if (al[i] != 0.0) nz++;
258: } else {
259: for (i = 0, nz = 0; i < n; i++)
260: if (PetscAbsScalar(al[i]) > tol) nz++;
261: }
262: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
263: if (gnz != N) {
264: PetscInt *nzr;
265: PetscCall(PetscMalloc1(nz, &nzr));
266: if (nz) {
267: if (tol < 0) {
268: for (i = 0, nz = 0; i < n; i++)
269: if (al[i] != 0.0) nzr[nz++] = i + st;
270: } else {
271: for (i = 0, nz = 0; i < n; i++)
272: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
273: }
274: }
275: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
276: } else *nonzero = NULL;
277: if (!cols) { /* nonzero rows */
278: PetscCall(VecRestoreArrayRead(l, &al));
279: } else {
280: PetscCall(VecRestoreArrayRead(r, &al));
281: }
282: PetscCall(VecDestroy(&l));
283: PetscCall(VecDestroy(&r));
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: /*@
288: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . keptrows - the rows that are not completely zero
296: Level: intermediate
298: Note:
299: `keptrows` is set to `NULL` if all rows are nonzero.
301: Developer Note:
302: If `keptrows` is not `NULL`, it must be sorted.
304: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
305: @*/
306: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
307: {
308: PetscFunctionBegin;
311: PetscAssertPointer(keptrows, 2);
312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
314: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
315: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
316: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
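/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): extracting the
  nonzero rows of an assembled matrix A; keptrows comes back NULL when every row contains a
  nonzero entry. Assumed to be inside a function returning PetscErrorCode.

    IS keptrows;
    PetscCall(MatFindNonzeroRows(A, &keptrows));
    if (keptrows) {
      PetscCall(ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD));
      PetscCall(ISDestroy(&keptrows));
    }
*/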
320: /*@
321: MatFindZeroRows - Locate all rows that are completely zero in the matrix
323: Input Parameter:
324: . mat - the matrix
326: Output Parameter:
327: . zerorows - the rows that are completely zero
329: Level: intermediate
331: Note:
332: `zerorows` is set to `NULL` if no rows are zero.
334: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
335: @*/
336: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
337: {
338: IS keptrows;
339: PetscInt m, n;
341: PetscFunctionBegin;
344: PetscAssertPointer(zerorows, 2);
345: PetscCall(MatFindNonzeroRows(mat, &keptrows));
346: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
347: In keeping with this convention, we set zerorows to NULL if there are no zero
348: rows. */
349: if (keptrows == NULL) {
350: *zerorows = NULL;
351: } else {
352: PetscCall(MatGetOwnershipRange(mat, &m, &n));
353: PetscCall(ISComplement(keptrows, m, n, zerorows));
354: PetscCall(ISDestroy(&keptrows));
355: }
356: PetscFunctionReturn(PETSC_SUCCESS);
357: }
359: /*@
360: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
362: Not Collective
364: Input Parameter:
365: . A - the matrix
367: Output Parameter:
368: . a - the diagonal part (which is a SEQUENTIAL matrix)
370: Level: advanced
372: Notes:
373: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
375: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
377: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
378: @*/
379: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
380: {
381: PetscFunctionBegin;
384: PetscAssertPointer(a, 2);
385: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
386: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
387: else {
388: PetscMPIInt size;
390: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
391: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
392: *a = A;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
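/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): viewing the
  on-process (diagonal) block of a parallel matrix A. The returned matrix is not
  reference-counted, so it must not be destroyed by the caller.

    Mat Adiag;
    PetscCall(MatGetDiagonalBlock(A, &Adiag));
    PetscCall(MatView(Adiag, PETSC_VIEWER_STDOUT_SELF));
*/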
397: /*@
398: MatGetTrace - Gets the trace of a matrix, that is, the sum of its diagonal entries.
400: Collective
402: Input Parameter:
403: . mat - the matrix
405: Output Parameter:
406: . trace - the sum of the diagonal entries
408: Level: advanced
410: .seealso: [](ch_matrices), `Mat`
411: @*/
412: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
413: {
414: Vec diag;
416: PetscFunctionBegin;
418: PetscAssertPointer(trace, 2);
419: PetscCall(MatCreateVecs(mat, &diag, NULL));
420: PetscCall(MatGetDiagonal(mat, diag));
421: PetscCall(VecSum(diag, trace));
422: PetscCall(VecDestroy(&diag));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
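/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): computing the
  trace of an assembled square matrix A and printing its real part.

    PetscScalar trace;
    PetscCall(MatGetTrace(A, &trace));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "trace = %g\n", (double)PetscRealPart(trace)));
*/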
426: /*@
427: MatRealPart - Zeros out the imaginary part of the matrix
429: Logically Collective
431: Input Parameter:
432: . mat - the matrix
434: Level: advanced
436: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
437: @*/
438: PetscErrorCode MatRealPart(Mat mat)
439: {
440: PetscFunctionBegin;
443: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
444: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
445: MatCheckPreallocated(mat, 1);
446: PetscUseTypeMethod(mat, realpart);
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@C
451: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
453: Collective
455: Input Parameter:
456: . mat - the matrix
458: Output Parameters:
459: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
460: - ghosts - the global indices of the ghost points
462: Level: advanced
464: Note:
465: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
467: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
468: @*/
469: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
470: {
471: PetscFunctionBegin;
474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
476: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
477: else {
478: if (nghosts) *nghosts = 0;
479: if (ghosts) *ghosts = NULL;
480: }
481: PetscFunctionReturn(PETSC_SUCCESS);
482: }
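/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): using the ghost
  indices of a parallel AIJ matrix A to create a compatible ghosted vector with VecCreateGhost().

    PetscInt        nghosts, n;
    const PetscInt *ghosts;
    Vec             x;
    PetscCall(MatGetLocalSize(A, NULL, &n));
    PetscCall(MatGetGhosts(A, &nghosts, &ghosts));
    PetscCall(VecCreateGhost(PetscObjectComm((PetscObject)A), n, PETSC_DECIDE, nghosts, ghosts, &x));
*/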
484: /*@
485: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
487: Logically Collective
489: Input Parameter:
490: . mat - the matrix
492: Level: advanced
494: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
495: @*/
496: PetscErrorCode MatImaginaryPart(Mat mat)
497: {
498: PetscFunctionBegin;
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: MatCheckPreallocated(mat, 1);
504: PetscUseTypeMethod(mat, imaginarypart);
505: PetscFunctionReturn(PETSC_SUCCESS);
506: }
508: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
509: /*@C
510: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
511: for each row that you get to ensure that your application does
512: not bleed memory.
514: Not Collective
516: Input Parameters:
517: + mat - the matrix
518: - row - the row to get
520: Output Parameters:
521: + ncols - if not `NULL`, the number of nonzeros in `row`
522: . cols - if not `NULL`, the column numbers
523: - vals - if not `NULL`, the numerical values
525: Level: advanced
527: Notes:
528: This routine is provided for people who need to have direct access
529: to the structure of a matrix. We hope that we provide enough
530: high-level matrix routines that few users will need it.
532: `MatGetRow()` always returns 0-based column indices, regardless of
533: whether the internal representation is 0-based (default) or 1-based.
535: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
536: not wish to extract these quantities.
538: The user can only examine the values extracted with `MatGetRow()`;
539: the values CANNOT be altered. To change the matrix entries, one
540: must use `MatSetValues()`.
542: You can only have one call to `MatGetRow()` outstanding for a particular
543: matrix at a time, per processor. `MatGetRow()` can only obtain rows
544: associated with the given processor, it cannot get rows from the
545: other processors; for that we suggest using `MatCreateSubMatrices()`, then
546: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
547: is in the global numbering of rows.
549: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
551: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
553: Fortran Note:
554: .vb
555: PetscInt, pointer :: cols(:)
556: PetscScalar, pointer :: vals(:)
557: .ve
559: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
560: @*/
561: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
562: {
563: PetscInt incols;
565: PetscFunctionBegin;
568: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
569: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
570: MatCheckPreallocated(mat, 1);
571: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
572: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
573: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
574: if (ncols) *ncols = incols;
575: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
576: PetscFunctionReturn(PETSC_SUCCESS);
577: }
579: /*@
580: MatConjugate - replaces the matrix values with their complex conjugates
582: Logically Collective
584: Input Parameter:
585: . mat - the matrix
587: Level: advanced
589: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
590: @*/
591: PetscErrorCode MatConjugate(Mat mat)
592: {
593: PetscFunctionBegin;
595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
596: if (PetscDefined(USE_COMPLEX) && !(mat->symmetric == PETSC_BOOL3_TRUE && mat->hermitian == PETSC_BOOL3_TRUE)) {
597: PetscUseTypeMethod(mat, conjugate);
598: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
599: }
600: PetscFunctionReturn(PETSC_SUCCESS);
601: }
603: /*@C
604: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
606: Not Collective
608: Input Parameters:
609: + mat - the matrix
610: . row - the row to get
611: . ncols - the number of nonzeros
612: . cols - the columns of the nonzeros
613: - vals - if not `NULL`, the numerical values
615: Level: advanced
617: Notes:
618: This routine should be called after you have finished examining the entries.
620: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
621: use of the array after it has been restored. If you pass `NULL`, it will
622: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
624: Fortran Note:
625: .vb
626: PetscInt, pointer :: cols(:)
627: PetscScalar, pointer :: vals(:)
628: .ve
630: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
631: @*/
632: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
633: {
634: PetscFunctionBegin;
636: if (ncols) PetscAssertPointer(ncols, 3);
637: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
638: PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
639: if (ncols) *ncols = 0;
640: if (cols) *cols = NULL;
641: if (vals) *vals = NULL;
642: PetscFunctionReturn(PETSC_SUCCESS);
643: }
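/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): iterating over
  the locally owned rows of an assembled matrix A with MatGetRow()/MatRestoreRow(); only one
  row may be held at a time.

    PetscInt           rstart, rend, ncols;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    for (PetscInt row = rstart; row < rend; row++) {
      PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
      // examine (but do not modify) cols[] and vals[] here
      PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
    }
*/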
645: /*@
646: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for a matrix in `MATSBAIJ` format.
647: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
649: Not Collective
651: Input Parameter:
652: . mat - the matrix
654: Level: advanced
656: Note:
657: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for matrices in `MATSBAIJ` format.
659: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
660: @*/
661: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
662: {
663: PetscFunctionBegin;
666: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
667: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
668: MatCheckPreallocated(mat, 1);
669: PetscTryTypeMethod(mat, getrowuppertriangular);
670: PetscFunctionReturn(PETSC_SUCCESS);
671: }
673: /*@
674: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for a matrix in `MATSBAIJ` format.
676: Not Collective
678: Input Parameter:
679: . mat - the matrix
681: Level: advanced
683: Note:
684: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
686: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
687: @*/
688: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
689: {
690: PetscFunctionBegin;
693: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
694: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
695: MatCheckPreallocated(mat, 1);
696: PetscTryTypeMethod(mat, restorerowuppertriangular);
697: PetscFunctionReturn(PETSC_SUCCESS);
698: }
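/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): accessing a
  locally owned row of a MATSBAIJ matrix A, which requires bracketing MatGetRow()/MatRestoreRow()
  with MatGetRowUpperTriangular()/MatRestoreRowUpperTriangular(). The row index row is assumed
  to be owned by this process.

    PetscInt           ncols;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscCall(MatGetRowUpperTriangular(A));
    PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
    // only the upper triangular part of the row is provided for MATSBAIJ
    PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
    PetscCall(MatRestoreRowUpperTriangular(A));
*/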
700: /*@
701: MatSetOptionsPrefix - Sets the prefix used for searching for all
702: `Mat` options in the database.
704: Logically Collective
706: Input Parameters:
707: + A - the matrix
708: - prefix - the prefix to prepend to all option names
710: Level: advanced
712: Notes:
713: A hyphen (-) must NOT be given at the beginning of the prefix name.
714: The first character of all runtime options is AUTOMATICALLY the hyphen.
716: This is NOT used for options for the factorization of the matrix. Normally the
717: prefix is automatically passed in from the PC calling the factorization. To set
718: it directly use `MatSetOptionsPrefixFactor()`
720: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
721: @*/
722: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
723: {
724: PetscFunctionBegin;
726: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
727: PetscTryMethod(A, "MatSetOptionsPrefix_C", (Mat, const char[]), (A, prefix));
728: PetscFunctionReturn(PETSC_SUCCESS);
729: }
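/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): giving a matrix
  the (hypothetical) options prefix "sys1_" so that, for example, the command line option
  -sys1_mat_view applies to it.

    PetscCall(MatSetOptionsPrefix(A, "sys1_"));
    PetscCall(MatSetFromOptions(A));
*/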
731: /*@
732: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for
733: matrices created with `MatGetFactor()`
735: Logically Collective
737: Input Parameters:
738: + A - the matrix
739: - prefix - the prefix to prepend to all option names for the factored matrix
741: Level: developer
743: Notes:
744: A hyphen (-) must NOT be given at the beginning of the prefix name.
745: The first character of all runtime options is AUTOMATICALLY the hyphen.
747: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
748: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
750: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
751: @*/
752: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
753: {
754: PetscFunctionBegin;
756: if (prefix) {
757: PetscAssertPointer(prefix, 2);
758: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
759: if (prefix != A->factorprefix) {
760: PetscCall(PetscFree(A->factorprefix));
761: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
762: }
763: } else PetscCall(PetscFree(A->factorprefix));
764: PetscFunctionReturn(PETSC_SUCCESS);
765: }
767: /*@
768: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for
769: matrices created with `MatGetFactor()`
771: Logically Collective
773: Input Parameters:
774: + A - the matrix
775: - prefix - the prefix to prepend to all option names for the factored matrix
777: Level: developer
779: Notes:
780: A hyphen (-) must NOT be given at the beginning of the prefix name.
781: The first character of all runtime options is AUTOMATICALLY the hyphen.
783: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
784: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
786: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
787: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
788: `MatSetOptionsPrefix()`
789: @*/
790: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
791: {
792: size_t len1, len2, new_len;
794: PetscFunctionBegin;
796: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
797: if (!A->factorprefix) {
798: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
799: PetscFunctionReturn(PETSC_SUCCESS);
800: }
801: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
803: PetscCall(PetscStrlen(A->factorprefix, &len1));
804: PetscCall(PetscStrlen(prefix, &len2));
805: new_len = len1 + len2 + 1;
806: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
807: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
808: PetscFunctionReturn(PETSC_SUCCESS);
809: }
811: /*@
812: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
813: matrix options in the database.
815: Logically Collective
817: Input Parameters:
818: + A - the matrix
819: - prefix - the prefix to prepend to all option names
821: Level: advanced
823: Note:
824: A hyphen (-) must NOT be given at the beginning of the prefix name.
825: The first character of all runtime options is AUTOMATICALLY the hyphen.
827: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
828: @*/
829: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
830: {
831: PetscFunctionBegin;
833: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
834: PetscTryMethod(A, "MatAppendOptionsPrefix_C", (Mat, const char[]), (A, prefix));
835: PetscFunctionReturn(PETSC_SUCCESS);
836: }
838: /*@
839: MatGetOptionsPrefix - Gets the prefix used for searching for all
840: matrix options in the database.
842: Not Collective
844: Input Parameter:
845: . A - the matrix
847: Output Parameter:
848: . prefix - pointer to the prefix string used
850: Level: advanced
852: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
853: @*/
854: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
855: {
856: PetscFunctionBegin;
858: PetscAssertPointer(prefix, 2);
859: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
860: PetscFunctionReturn(PETSC_SUCCESS);
861: }
863: /*@
864: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
866: Not Collective
868: Input Parameter:
869: . A - the matrix
871: Output Parameter:
872: . state - the object state
874: Level: advanced
876: Note:
877: Object state is an integer which gets increased every time
878: the object is changed. By saving and later querying the object state
879: one can determine whether information about the object is still current.
881: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
883: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
884: @*/
885: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
886: {
887: PetscFunctionBegin;
889: PetscAssertPointer(state, 2);
890: PetscCall(PetscObjectStateGet((PetscObject)A, state));
891: PetscFunctionReturn(PETSC_SUCCESS);
892: }
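/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): using the object
  state to detect whether a matrix has been changed since it was last inspected.

    PetscObjectState state_before, state_after;
    PetscCall(MatGetState(A, &state_before));
    // ... code that may or may not modify A ...
    PetscCall(MatGetState(A, &state_after));
    if (state_after != state_before) {
      // A was changed; recompute anything derived from it
    }
*/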
894: /*@
895: MatResetPreallocation - Resets the matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
897: Collective
899: Input Parameter:
900: . A - the matrix
902: Level: beginner
904: Notes:
905: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`, the matrix data structures represent only the nonzeros assigned to the
906: matrix. If that space is less than the preallocated space, the extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
907: makes all of the preallocated space available again.
909: Current values in the matrix are lost in this call.
911: Currently only supported for `MATAIJ` matrices.
913: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
914: @*/
915: PetscErrorCode MatResetPreallocation(Mat A)
916: {
917: PetscFunctionBegin;
920: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
921: PetscFunctionReturn(PETSC_SUCCESS);
922: }
924: /*@
925: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
927: Collective
929: Input Parameter:
930: . A - the matrix
932: Level: intermediate
934: Notes:
935: The matrix will again delete the hash table data structures after subsequent calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
937: Currently only supported for `MATAIJ` matrices.
939: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
940: @*/
941: PetscErrorCode MatResetHash(Mat A)
942: {
943: PetscFunctionBegin;
946: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
947: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
948: PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
949: /* These flags are used to determine whether certain setups occur */
950: A->was_assembled = PETSC_FALSE;
951: A->assembled = PETSC_FALSE;
952: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
953: PetscCall(PetscObjectStateIncrease((PetscObject)A));
954: PetscFunctionReturn(PETSC_SUCCESS);
955: }
957: /*@
958: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
960: Collective
962: Input Parameter:
963: . A - the matrix
965: Level: advanced
967: Notes:
968: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
969: setting values in the matrix.
971: This routine is called internally by other `Mat` functions when needed, so it rarely needs to be called directly by users
973: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
974: @*/
975: PetscErrorCode MatSetUp(Mat A)
976: {
977: PetscFunctionBegin;
979: if (!((PetscObject)A)->type_name) {
980: PetscMPIInt size;
982: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
983: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
984: }
985: if (!A->preallocated) PetscTryTypeMethod(A, setup);
986: PetscCall(PetscLayoutSetUp(A->rmap));
987: PetscCall(PetscLayoutSetUp(A->cmap));
988: A->preallocated = PETSC_TRUE;
989: PetscFunctionReturn(PETSC_SUCCESS);
990: }
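/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): a typical
  creation sequence in which MatSetUp() prepares the internal data structures before values
  are inserted; no explicit preallocation is provided here.

    Mat A;
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
    PetscCall(MatSetFromOptions(A));
    PetscCall(MatSetUp(A));
    // ... MatSetValues() calls ...
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/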
992: #if defined(PETSC_HAVE_SAWS)
993: #include <petscviewersaws.h>
994: #endif
996: /*
997: If thread safety is on, extraneous matrices may be printed.
999: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix, which is then passed into MatViewFromOptions().
1000: */
1001: #if !defined(PETSC_HAVE_THREADSAFETY)
1002: static PetscInt insidematview = 0;
1003: #endif
1005: /*@
1006: MatViewFromOptions - View properties of the matrix based on options set in the options database
1008: Collective
1010: Input Parameters:
1011: + A - the matrix
1012: . obj - optional additional object that provides the options prefix to use
1013: - name - command line option
1015: Options Database Key:
1016: . -mat_view [viewertype]:... - the viewer and its options
1018: Level: intermediate
1020: Note:
1021: .vb
1022: If no value is provided ascii:stdout is used
1023: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1024: for example ascii::ascii_info prints just the information about the object not all details
1025: unless :append is given filename opens in write mode, overwriting what was already there
1026: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1027: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1028: socket[:port] defaults to the standard output port
1029: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1030: .ve
1032: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1033: @*/
1034: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1035: {
1036: PetscFunctionBegin;
1038: #if !defined(PETSC_HAVE_THREADSAFETY)
1039: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1040: #endif
1041: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1042: PetscFunctionReturn(PETSC_SUCCESS);
1043: }
1045: /*@
1046: MatView - displays information about a matrix in a variety of ways
1048: Collective on viewer
1050: Input Parameters:
1051: + mat - the matrix
1052: - viewer - visualization context
1054: Options Database Keys:
1055: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1056: . -mat_view ::ascii_info_detail - Prints more detailed info
1057: . -mat_view - Prints matrix in ASCII format
1058: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1059: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1060: . -display <name> - Sets display name (default is host)
1061: . -draw_pause <sec> - Sets number of seconds to pause after display
1062: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1063: . -viewer_socket_machine <machine> - -
1064: . -viewer_socket_port <port> - -
1065: . -mat_view binary - save matrix to file in binary format
1066: - -viewer_binary_filename <name> - -
1068: Level: beginner
1070: Notes:
1071: The available visualization contexts include
1072: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1073: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1074: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1075: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1077: The user can open alternative visualization contexts with
1078: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1079: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1080: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1081: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1083: The user can call `PetscViewerPushFormat()` to specify the output
1084: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1085: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1086: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1087: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1088: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1089: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1090: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1091: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1092: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1094: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1095: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1097: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1099: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1100: viewer is used.
1102: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1103: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1105: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1106: and then use the following mouse functions.
1107: .vb
1108: left mouse: zoom in
1109: middle mouse: zoom out
1110: right mouse: continue with the simulation
1111: .ve
1113: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1114: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1115: @*/
1116: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1117: {
1118: PetscInt rows, cols, rbs, cbs;
1119: PetscBool isascii, isstring, issaws;
1120: PetscViewerFormat format;
1121: PetscMPIInt size;
1123: PetscFunctionBegin;
1126: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1129: PetscCall(PetscViewerGetFormat(viewer, &format));
1130: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1131: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1133: #if !defined(PETSC_HAVE_THREADSAFETY)
1134: insidematview++;
1135: #endif
1136: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1137: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1138: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1139: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1141: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1142: if (isascii) {
1143: if (!mat->preallocated) {
1144: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1145: #if !defined(PETSC_HAVE_THREADSAFETY)
1146: insidematview--;
1147: #endif
1148: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1149: PetscFunctionReturn(PETSC_SUCCESS);
1150: }
1151: if (!mat->assembled) {
1152: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1153: #if !defined(PETSC_HAVE_THREADSAFETY)
1154: insidematview--;
1155: #endif
1156: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1157: PetscFunctionReturn(PETSC_SUCCESS);
1158: }
1159: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1160: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1161: MatNullSpace nullsp, transnullsp;
1163: PetscCall(PetscViewerASCIIPushTab(viewer));
1164: PetscCall(MatGetSize(mat, &rows, &cols));
1165: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1166: if (rbs != 1 || cbs != 1) {
1167: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1168: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1169: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1170: if (mat->factortype) {
1171: MatSolverType solver;
1172: PetscCall(MatFactorGetSolverType(mat, &solver));
1173: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1174: }
1175: if (mat->ops->getinfo) {
1176: PetscBool is_constant_or_diagonal;
1178: // Don't print nonzero information for constant or diagonal matrices, it just adds noise to the output
1179: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &is_constant_or_diagonal, MATCONSTANTDIAGONAL, MATDIAGONAL, ""));
1180: if (!is_constant_or_diagonal) {
1181: MatInfo info;
1183: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1184: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1185: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1186: }
1187: }
1188: PetscCall(MatGetNullSpace(mat, &nullsp));
1189: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1190: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1191: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1192: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1193: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1194: PetscCall(PetscViewerASCIIPushTab(viewer));
1195: PetscCall(MatProductView(mat, viewer));
1196: PetscCall(PetscViewerASCIIPopTab(viewer));
1197: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1198: IS tmp;
1200: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1201: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1202: PetscCall(PetscViewerASCIIPushTab(viewer));
1203: PetscCall(ISView(tmp, viewer));
1204: PetscCall(PetscViewerASCIIPopTab(viewer));
1205: PetscCall(ISDestroy(&tmp));
1206: }
1207: }
1208: } else if (issaws) {
1209: #if defined(PETSC_HAVE_SAWS)
1210: PetscMPIInt rank;
1212: PetscCall(PetscObjectName((PetscObject)mat));
1213: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1214: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1215: #endif
1216: } else if (isstring) {
1217: const char *type;
1218: PetscCall(MatGetType(mat, &type));
1219: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1220: PetscTryTypeMethod(mat, view, viewer);
1221: }
1222: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1223: PetscCall(PetscViewerASCIIPushTab(viewer));
1224: PetscUseTypeMethod(mat, viewnative, viewer);
1225: PetscCall(PetscViewerASCIIPopTab(viewer));
1226: } else if (mat->ops->view) {
1227: PetscCall(PetscViewerASCIIPushTab(viewer));
1228: PetscUseTypeMethod(mat, view, viewer);
1229: PetscCall(PetscViewerASCIIPopTab(viewer));
1230: }
1231: if (isascii) {
1232: PetscCall(PetscViewerGetFormat(viewer, &format));
1233: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1234: }
1235: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1236: #if !defined(PETSC_HAVE_THREADSAFETY)
1237: insidematview--;
1238: #endif
1239: PetscFunctionReturn(PETSC_SUCCESS);
1240: }
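/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): printing summary
  information about an assembled matrix A and then saving it to the (hypothetical) binary file
  "A.dat", which can later be read back with MatLoad().

    PetscViewer viewer;
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO));
    PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "A.dat", FILE_MODE_WRITE, &viewer));
    PetscCall(MatView(A, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
*/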
1242: #if defined(PETSC_USE_DEBUG)
1243: #include <../src/sys/totalview/tv_data_display.h>
1244: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1245: {
1246: TV_add_row("Local rows", "int", &mat->rmap->n);
1247: TV_add_row("Local columns", "int", &mat->cmap->n);
1248: TV_add_row("Global rows", "int", &mat->rmap->N);
1249: TV_add_row("Global columns", "int", &mat->cmap->N);
1250: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1251: return TV_format_OK;
1252: }
1253: #endif
1255: /*@
1256: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1257: with `MatView()`. The matrix format is determined from the options database.
1258: Generates a parallel MPI matrix if the communicator has more than one
1259: processor. The default matrix type is `MATAIJ`.
1261: Collective
1263: Input Parameters:
1264: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1265: or some related function before a call to `MatLoad()`
1266: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1268: Options Database Key:
1269: . -matload_block_size <bs> - set block size
1271: Level: beginner
1273: Notes:
1274: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1275: `Mat` before calling this routine if you wish to set it from the options database.
1277: `MatLoad()` automatically loads into the options database any options
1278: given in the file filename.info where filename is the name of the file
1279: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1280: file will be ignored if you use the -viewer_binary_skip_info option.
1282: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1283: sets the default matrix type AIJ and sets the local and global sizes.
1284: If the type and/or size is already set, then they are used.
1286: In parallel, each processor can load a subset of rows (or the
1287: entire matrix). This routine is especially useful when a large
1288: matrix is stored on disk and only part of it is desired on each
1289: processor. For example, a parallel solver may access only some of
1290: the rows from each processor. The algorithm used here reads
1291: relatively small blocks of data rather than reading the entire
1292: matrix and then subsetting it.
1294: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1295: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1296: or the sequence like
1297: .vb
1298: `PetscViewer` v;
1299: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1300: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1301: `PetscViewerSetFromOptions`(v);
1302: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1303: `PetscViewerFileSetName`(v,"datafile");
1304: .ve
1305: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1306: .vb
1307: -viewer_type {binary, hdf5}
1308: .ve
1310: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1311: and src/mat/tutorials/ex10.c with the second approach.
1313: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1314: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1315: Multiple objects, both matrices and vectors, can be stored within the same file.
1316: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1318: Most users should not need to know the details of the binary storage
1319: format, since `MatLoad()` and `MatView()` completely hide these details.
1320: But for anyone who is interested, the standard binary matrix storage
1321: format is
1323: .vb
1324: PetscInt MAT_FILE_CLASSID
1325: PetscInt number of rows
1326: PetscInt number of columns
1327: PetscInt total number of nonzeros
1328: PetscInt *number nonzeros in each row
1329: PetscInt *column indices of all nonzeros (starting index is zero)
1330: PetscScalar *values of all nonzeros
1331: .ve
1332: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1333: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1334: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1336: PETSc automatically does the byte swapping for
1337: machines that store the bytes reversed. Thus if you write your own binary
1338: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1339: and `PetscBinaryWrite()` to see how this may be done.
1341: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1342: Each processor's chunk is loaded independently by its owning MPI process.
1343: Multiple objects, both matrices and vectors, can be stored within the same file.
1344: They are looked up by their PetscObject name.
1346: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1347: by default the same structure and naming of the AIJ arrays and column count
1348: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1349: .vb
1350: save example.mat A b -v7.3
1351: .ve
1352: can be directly read by this routine (see Reference 1 for details).
1354: Depending on your MATLAB version, this format might be a default,
1355: otherwise you can set it as default in Preferences.
1357: Unless the -nocompression flag is used to save the file in MATLAB,
1358: PETSc must be configured with the ZLIB package.
1360: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1362: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1364: Corresponding `MatView()` is not yet implemented.
1366: The loaded matrix is actually a transpose of the original one in MATLAB,
1367: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1368: With this format, the matrix is automatically transposed by PETSc,
1369: unless the matrix is marked as SPD or symmetric
1370: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1372: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1374: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1375: @*/
1376: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1377: {
1378: PetscBool flg;
1380: PetscFunctionBegin;
1384: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1386: flg = PETSC_FALSE;
1387: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1388: if (flg) {
1389: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1390: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1391: }
1392: flg = PETSC_FALSE;
1393: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1394: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1396: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1397: PetscUseTypeMethod(mat, load, viewer);
1398: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1399: PetscFunctionReturn(PETSC_SUCCESS);
1400: }
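/*
  Illustrative usage sketch (editorial addition, not part of the PETSc source): loading a matrix
  stored in PETSc binary format (here the hypothetical file "A.dat") into a matrix whose type
  and options come from the options database.

    Mat         A;
    PetscViewer viewer;
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "A.dat", FILE_MODE_READ, &viewer));
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetFromOptions(A));
    PetscCall(MatLoad(A, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
*/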
1402: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1403: {
1404: Mat_Redundant *redund = *redundant;
1406: PetscFunctionBegin;
1407: if (redund) {
1408: if (redund->matseq) { /* via MatCreateSubMatrices() */
1409: PetscCall(ISDestroy(&redund->isrow));
1410: PetscCall(ISDestroy(&redund->iscol));
1411: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1412: } else {
1413: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1414: PetscCall(PetscFree(redund->sbuf_j));
1415: PetscCall(PetscFree(redund->sbuf_a));
1416: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1417: PetscCall(PetscFree(redund->rbuf_j[i]));
1418: PetscCall(PetscFree(redund->rbuf_a[i]));
1419: }
1420: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1421: }
1423: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1424: PetscCall(PetscFree(redund));
1425: }
1426: PetscFunctionReturn(PETSC_SUCCESS);
1427: }
1429: /*@
1430: MatDestroy - Frees space taken by a matrix.
1432: Collective
1434: Input Parameter:
1435: . A - the matrix
1437: Level: beginner
1439: Developer Note:
1440: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1441: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1442: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1443: if changes are needed here.
1445: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1446: @*/
1447: PetscErrorCode MatDestroy(Mat *A)
1448: {
1449: PetscFunctionBegin;
1450: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1452: if (--((PetscObject)*A)->refct > 0) {
1453: *A = NULL;
1454: PetscFunctionReturn(PETSC_SUCCESS);
1455: }
1457: /* if memory was published with SAWs then destroy it */
1458: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1459: PetscTryTypeMethod(*A, destroy);
1461: PetscCall(PetscFree((*A)->factorprefix));
1462: PetscCall(PetscFree((*A)->defaultvectype));
1463: PetscCall(PetscFree((*A)->defaultrandtype));
1464: PetscCall(PetscFree((*A)->bsizes));
1465: PetscCall(PetscFree((*A)->solvertype));
1466: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1467: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1468: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1469: PetscCall(MatProductClear(*A));
1470: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1471: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1472: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1473: PetscCall(MatDestroy(&(*A)->schur));
1474: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1475: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1476: PetscCall(PetscHeaderDestroy(A));
1477: PetscFunctionReturn(PETSC_SUCCESS);
1478: }
1480: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1481: /*@
1482: MatSetValues - Inserts or adds a block of values into a matrix.
1483: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1484: MUST be called after all calls to `MatSetValues()` have been completed.
1486: Not Collective
1488: Input Parameters:
1489: + mat - the matrix
1490: . m - the number of rows
1491: . idxm - the global indices of the rows
1492: . n - the number of columns
1493: . idxn - the global indices of the columns
1494: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1495: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1496: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1498: Level: beginner
1500: Notes:
1501: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1502: options cannot be mixed without intervening calls to the assembly
1503: routines.
1505: `MatSetValues()` uses 0-based row and column numbers in Fortran
1506: as well as in C.
1508: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1509: simply ignored. This allows easily inserting element stiffness matrices
1510: with homogeneous Dirichlet boundary conditions that you don't want represented
1511: in the matrix.
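   A minimal C sketch (the names `A`, `rows`, `cols`, and `vals` are illustrative; `A` is assumed to be created and preallocated) is
.vb
  PetscInt    rows[2] = {0, 1};
  PetscInt    cols[2] = {0, 2};
  PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0}; // row-major: vals[i*2 + j] is placed at (rows[i], cols[j])
  PetscCall(MatSetValues(A, 2, rows, 2, cols, vals, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve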
1513: Efficiency Alert:
1514: The routine `MatSetValuesBlocked()` may offer much better efficiency
1515: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1517: Fortran Notes:
1518: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1519: .vb
1520: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1521: .ve
1523: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1525: Developer Note:
1526: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1527: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1529: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1530: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1531: @*/
1532: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1533: {
1534: PetscFunctionBeginHot;
1537: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1538: PetscAssertPointer(idxm, 3);
1539: PetscAssertPointer(idxn, 5);
1540: MatCheckPreallocated(mat, 1);
1542: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1543: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1545: if (PetscDefined(USE_DEBUG)) {
1546: PetscInt i, j;
1548: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1549: if (v) {
1550: for (i = 0; i < m; i++) {
1551: for (j = 0; j < n; j++) {
1552: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1553: #if defined(PETSC_USE_COMPLEX)
1554: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1555: #else
1556: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1557: #endif
1558: }
1559: }
1560: }
1561: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1562: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1563: }
1565: if (mat->assembled) {
1566: mat->was_assembled = PETSC_TRUE;
1567: mat->assembled = PETSC_FALSE;
1568: }
1569: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1570: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1571: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1572: PetscFunctionReturn(PETSC_SUCCESS);
1573: }
1575: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1576: /*@
1577:   MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1578: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1579: MUST be called after all calls to `MatSetValues()` have been completed.
1581: Not Collective
1583: Input Parameters:
1584: + mat - the matrix
1585: . ism - the rows to provide
1586: . isn - the columns to provide
1587: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1588: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1589: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1591: Level: beginner
1593: Notes:
1594: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1596: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1597: options cannot be mixed without intervening calls to the assembly
1598: routines.
1600: `MatSetValues()` uses 0-based row and column numbers in Fortran
1601: as well as in C.
1603: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1604: simply ignored. This allows easily inserting element stiffness matrices
1605: with homogeneous Dirichlet boundary conditions that you don't want represented
1606: in the matrix.
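   A minimal C sketch (the matrix `A` and the index values are illustrative; `A` is assumed to be created and preallocated) is
.vb
  IS          ism, isn;
  PetscInt    rows[2] = {0, 1}, cols[2] = {0, 2};
  PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
  PetscCall(MatSetValuesIS(A, ism, isn, vals, INSERT_VALUES));
  PetscCall(ISDestroy(&ism));
  PetscCall(ISDestroy(&isn));
.ve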
1608: Fortran Note:
1609: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1611: Efficiency Alert:
1612: The routine `MatSetValuesBlocked()` may offer much better efficiency
1613: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1615: This is currently not optimized for any particular `ISType`
1617: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1618: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1619: @*/
1620: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1621: {
1622: PetscInt m, n;
1623: const PetscInt *rows, *cols;
1625: PetscFunctionBeginHot;
1627: PetscCall(ISGetIndices(ism, &rows));
1628: PetscCall(ISGetIndices(isn, &cols));
1629: PetscCall(ISGetLocalSize(ism, &m));
1630: PetscCall(ISGetLocalSize(isn, &n));
1631: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1632: PetscCall(ISRestoreIndices(ism, &rows));
1633: PetscCall(ISRestoreIndices(isn, &cols));
1634: PetscFunctionReturn(PETSC_SUCCESS);
1635: }
1637: /*@
1638: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1639: values into a matrix
1641: Not Collective
1643: Input Parameters:
1644: + mat - the matrix
1645: . row - the (block) row to set
1646: - v - a one-dimensional array that contains the values. For `MATBAIJ` they are implicitly stored as a two-dimensional array, by default in row-major order.
1647: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1649: Level: intermediate
1651: Notes:
1652: The values, `v`, are column-oriented (for the block version) and sorted
1654: All the nonzero values in `row` must be provided
1656: The matrix must have previously had its column indices set, likely by having been assembled.
1658: `row` must belong to this MPI process
1660: Fortran Note:
1661: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1663: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1664: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1665: @*/
1666: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1667: {
1668: PetscInt globalrow;
1670: PetscFunctionBegin;
1673: PetscAssertPointer(v, 3);
1674: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1675: PetscCall(MatSetValuesRow(mat, globalrow, v));
1676: PetscFunctionReturn(PETSC_SUCCESS);
1677: }
1679: /*@
1680: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1681: values into a matrix
1683: Not Collective
1685: Input Parameters:
1686: + mat - the matrix
1687: . row - the (block) row to set
1688: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1690: Level: advanced
1692: Notes:
1693: The values, `v`, are column-oriented for the block version.
1695: All the nonzeros in `row` must be provided
1697:    The matrix MUST have previously had its column indices set. This routine is rarely used; usually `MatSetValues()` is used instead.
1699: `row` must belong to this process
1701: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1702: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1703: @*/
1704: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1705: {
1706: PetscFunctionBeginHot;
1709: MatCheckPreallocated(mat, 1);
1710: PetscAssertPointer(v, 3);
1711: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1712: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1713: mat->insertmode = INSERT_VALUES;
1715: if (mat->assembled) {
1716: mat->was_assembled = PETSC_TRUE;
1717: mat->assembled = PETSC_FALSE;
1718: }
1719: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1720: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1721: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1722: PetscFunctionReturn(PETSC_SUCCESS);
1723: }
1725: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1726: /*@
1727:   MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1728:   using structured grid indexing
1730: Not Collective
1732: Input Parameters:
1733: + mat - the matrix
1734: . m - number of rows being entered
1735: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1736: . n - number of columns being entered
1737: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1738: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1739: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1740: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1742: Level: beginner
1744: Notes:
1745: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1747: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1748: options cannot be mixed without intervening calls to the assembly
1749: routines.
1751: The grid coordinates are across the entire grid, not just the local portion
1753: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1754: as well as in C.
1756: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1758: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1759: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1761: The columns and rows in the stencil passed in MUST be contained within the
1762: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1763: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1764: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1765: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1767:    For periodic boundary conditions, use negative indices for values to the left of the first entry (below 0); these are
1768:    obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
1769:    and so on; these are obtained by wrapping values from the left edge. This works only for the
1770:    `DM_BOUNDARY_PERIODIC` boundary type.
1772:    For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1773:    a single value per point) you can skip filling those indices.
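   A minimal C sketch that inserts a diagonal entry for a locally owned 2d grid point (i,j) into a matrix obtained with
   `DMCreateMatrix()` (the names `A`, `i`, and `j` are illustrative) is
.vb
  MatStencil  row = {0}, col = {0}; // unused entries (k here, and c since dof == 1) stay zero
  PetscScalar value = 4.0;
  row.i = i; row.j = j;
  col.i = i; col.j = j;
  PetscCall(MatSetValuesStencil(A, 1, &row, 1, &col, &value, INSERT_VALUES));
.ve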
1775: Inspired by the structured grid interface to the HYPRE package
1776: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1778: Fortran Note:
1779:    If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1781: Efficiency Alert:
1782: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1783: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1785: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1786: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1787: @*/
1788: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1789: {
1790: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1791: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1792: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1794: PetscFunctionBegin;
1795: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1798: PetscAssertPointer(idxm, 3);
1799: PetscAssertPointer(idxn, 5);
1801: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1802: jdxm = buf;
1803: jdxn = buf + m;
1804: } else {
1805: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1806: jdxm = bufm;
1807: jdxn = bufn;
1808: }
1809: for (i = 0; i < m; i++) {
1810: for (j = 0; j < 3 - sdim; j++) dxm++;
1811: tmp = *dxm++ - starts[0];
1812: for (j = 0; j < dim - 1; j++) {
1813: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1814: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1815: }
1816: if (mat->stencil.noc) dxm++;
1817: jdxm[i] = tmp;
1818: }
1819: for (i = 0; i < n; i++) {
1820: for (j = 0; j < 3 - sdim; j++) dxn++;
1821: tmp = *dxn++ - starts[0];
1822: for (j = 0; j < dim - 1; j++) {
1823: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1824: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1825: }
1826: if (mat->stencil.noc) dxn++;
1827: jdxn[i] = tmp;
1828: }
1829: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1830: PetscCall(PetscFree2(bufm, bufn));
1831: PetscFunctionReturn(PETSC_SUCCESS);
1832: }
1834: /*@
1835:   MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1836:   using structured grid indexing
1838: Not Collective
1840: Input Parameters:
1841: + mat - the matrix
1842: . m - number of rows being entered
1843: . idxm - grid coordinates for matrix rows being entered
1844: . n - number of columns being entered
1845: . idxn - grid coordinates for matrix columns being entered
1846: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1847: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1848: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1850: Level: beginner
1852: Notes:
1853: By default the values, `v`, are row-oriented and unsorted.
1854: See `MatSetOption()` for other options.
1856: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1857: options cannot be mixed without intervening calls to the assembly
1858: routines.
1860: The grid coordinates are across the entire grid, not just the local portion
1862: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1863: as well as in C.
1865: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1867: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1868: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1870: The columns and rows in the stencil passed in MUST be contained within the
1871: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1872: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1873: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1874: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1876: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1877: simply ignored. This allows easily inserting element stiffness matrices
1878: with homogeneous Dirichlet boundary conditions that you don't want represented
1879: in the matrix.
1881: Inspired by the structured grid interface to the HYPRE package
1882: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1884: Fortran Notes:
1885: `idxm` and `idxn` should be declared as
1886: .vb
1887: MatStencil idxm(4,m),idxn(4,n)
1888: .ve
1889: and the values inserted using
1890: .vb
1891: idxm(MatStencil_i,1) = i
1892: idxm(MatStencil_j,1) = j
1893: idxm(MatStencil_k,1) = k
1894: etc
1895: .ve
1897: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1899: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1900: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1901: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1902: @*/
1903: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1904: {
1905: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1906: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1907: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1909: PetscFunctionBegin;
1910: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1913: PetscAssertPointer(idxm, 3);
1914: PetscAssertPointer(idxn, 5);
1915: PetscAssertPointer(v, 6);
1917: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1918: jdxm = buf;
1919: jdxn = buf + m;
1920: } else {
1921: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1922: jdxm = bufm;
1923: jdxn = bufn;
1924: }
1925: for (i = 0; i < m; i++) {
1926: for (j = 0; j < 3 - sdim; j++) dxm++;
1927: tmp = *dxm++ - starts[0];
1928: for (j = 0; j < sdim - 1; j++) {
1929: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1930: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1931: }
1932: dxm++;
1933: jdxm[i] = tmp;
1934: }
1935: for (i = 0; i < n; i++) {
1936: for (j = 0; j < 3 - sdim; j++) dxn++;
1937: tmp = *dxn++ - starts[0];
1938: for (j = 0; j < sdim - 1; j++) {
1939: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1940: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1941: }
1942: dxn++;
1943: jdxn[i] = tmp;
1944: }
1945: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1946: PetscCall(PetscFree2(bufm, bufn));
1947: PetscFunctionReturn(PETSC_SUCCESS);
1948: }
1950: /*@
1951: MatSetStencil - Sets the grid information for setting values into a matrix via
1952: `MatSetValuesStencil()`
1954: Not Collective
1956: Input Parameters:
1957: + mat - the matrix
1958: . dim - dimension of the grid (1, 2, or 3)
1959: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1960: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1961: - dof - number of degrees of freedom per node
1963: Level: beginner
1965: Notes:
1966: Inspired by the structured grid interface to the HYPRE package
1967:    (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1969: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1970: user.
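   A minimal C sketch for a 2d grid, for the process owning the lower-left corner of the grid (the sizes and starts are illustrative), is
.vb
  PetscInt dims[2]   = {7, 6}; // ghosted local grid points in x and y
  PetscInt starts[2] = {0, 0}; // index of the first ghost node in x and y
  PetscCall(MatSetStencil(A, 2, dims, starts, 1));
.ve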
1972: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1973: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1974: @*/
1975: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1976: {
1977: PetscFunctionBegin;
1979: PetscAssertPointer(dims, 3);
1980: PetscAssertPointer(starts, 4);
1982: mat->stencil.dim = dim + (dof > 1);
1983: for (PetscInt i = 0; i < dim; i++) {
1984: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1985: mat->stencil.starts[i] = starts[dim - i - 1];
1986: }
1987: mat->stencil.dims[dim] = dof;
1988: mat->stencil.starts[dim] = 0;
1989: mat->stencil.noc = (PetscBool)(dof == 1);
1990: PetscFunctionReturn(PETSC_SUCCESS);
1991: }
1993: /*@
1994: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1996: Not Collective
1998: Input Parameters:
1999: + mat - the matrix
2000: . m - the number of block rows
2001: . idxm - the global block indices
2002: . n - the number of block columns
2003: . idxn - the global block indices
2004: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2005: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2006: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
2008: Level: intermediate
2010: Notes:
2011: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2012: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2014: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2015: NOT the total number of rows/columns; for example, if the block size is 2 and
2016: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2017: The values in `idxm` would be 1 2; that is the first index for each block divided by
2018: the block size.
2020: You must call `MatSetBlockSize()` when constructing this matrix (before
2021: preallocating it).
2023: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2025: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2026: options cannot be mixed without intervening calls to the assembly
2027: routines.
2029: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2030: as well as in C.
2032: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
2033: simply ignored. This allows easily inserting element stiffness matrices
2034: with homogeneous Dirichlet boundary conditions that you don't want represented
2035: in the matrix.
2037: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2038: internal searching must be done to determine where to place the
2039: data in the matrix storage space. By instead inserting blocks of
2040: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2041: reduced.
2043: Example:
2044: .vb
2045:    Suppose m=n=2 and block size (bs) = 2. The array is
2047: 1 2 | 3 4
2048: 5 6 | 7 8
2049: - - - | - - -
2050: 9 10 | 11 12
2051: 13 14 | 15 16
2053: v[] should be passed in like
2054: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2056: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2057: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2058: .ve
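   In C, the insertion above might look like the following sketch (names are illustrative; `A` is assumed to have block size 2 and to be preallocated)
.vb
  PetscInt    idxm[2] = {1, 2}; // block rows 1 and 2, that is point rows 2,3 and 4,5
  PetscInt    idxn[2] = {1, 2}; // block columns 1 and 2
  PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
  PetscCall(MatSetValuesBlocked(A, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve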
2060: Fortran Notes:
2061:    If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2062: .vb
2063: call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
2064: .ve
2066: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2068: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2069: @*/
2070: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2071: {
2072: PetscFunctionBeginHot;
2075: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2076: PetscAssertPointer(idxm, 3);
2077: PetscAssertPointer(idxn, 5);
2078: MatCheckPreallocated(mat, 1);
2079: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2080: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2081: if (PetscDefined(USE_DEBUG)) {
2082: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2083: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2084: }
2085: if (PetscDefined(USE_DEBUG)) {
2086: PetscInt rbs, cbs, M, N, i;
2087: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2088: PetscCall(MatGetSize(mat, &M, &N));
2089: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2090: for (i = 0; i < n; i++)
2091: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2092: }
2093: if (mat->assembled) {
2094: mat->was_assembled = PETSC_TRUE;
2095: mat->assembled = PETSC_FALSE;
2096: }
2097: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2098: if (mat->ops->setvaluesblocked) {
2099: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2100: } else {
2101: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2102: PetscInt i, j, bs, cbs;
2104: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2105: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2106: iidxm = buf;
2107: iidxn = buf + m * bs;
2108: } else {
2109: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2110: iidxm = bufr;
2111: iidxn = bufc;
2112: }
2113: for (i = 0; i < m; i++) {
2114: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2115: }
2116: if (m != n || bs != cbs || idxm != idxn) {
2117: for (i = 0; i < n; i++) {
2118: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2119: }
2120: } else iidxn = iidxm;
2121: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2122: PetscCall(PetscFree2(bufr, bufc));
2123: }
2124: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2125: PetscFunctionReturn(PETSC_SUCCESS);
2126: }
2128: /*@
2129: MatGetValues - Gets a block of local values from a matrix.
2131:   Not Collective; can only return values that are owned by the given process
2133: Input Parameters:
2134: + mat - the matrix
2135: . v - a logically two-dimensional array for storing the values
2136: . m - the number of rows
2137: . idxm - the global indices of the rows
2138: . n - the number of columns
2139: - idxn - the global indices of the columns
2141: Level: advanced
2143: Notes:
2144: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2145: The values, `v`, are then returned in a row-oriented format,
2146: analogous to that used by default in `MatSetValues()`.
2148: `MatGetValues()` uses 0-based row and column numbers in
2149: Fortran as well as in C.
2151: `MatGetValues()` requires that the matrix has been assembled
2152: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2153: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2154: without intermediate matrix assembly.
2156: Negative row or column indices will be ignored and those locations in `v` will be
2157: left unchanged.
2159: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2160: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2161: from `MatGetOwnershipRange`(mat,&rstart,&rend).
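   A minimal C sketch that retrieves a 1 by 2 block of locally owned values from an assembled matrix `A` (names are illustrative) is
.vb
  PetscInt    rstart, rend, row[1], cols[2] = {0, 1};
  PetscScalar vals[2]; // space for 1*2 values
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  row[0] = rstart;     // a row owned by this process
  PetscCall(MatGetValues(A, 1, row, 2, cols, vals));
.ve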
2163: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2164: @*/
2165: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2166: {
2167: PetscFunctionBegin;
2170: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2171: PetscAssertPointer(idxm, 3);
2172: PetscAssertPointer(idxn, 5);
2173: PetscAssertPointer(v, 6);
2174: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2175: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2176: MatCheckPreallocated(mat, 1);
2178: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2179: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2180: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2181: PetscFunctionReturn(PETSC_SUCCESS);
2182: }
2184: /*@
2185: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2186: defined previously by `MatSetLocalToGlobalMapping()`
2188: Not Collective
2190: Input Parameters:
2191: + mat - the matrix
2192: . nrow - number of rows
2193: . irow - the row local indices
2194: . ncol - number of columns
2195: - icol - the column local indices
2197: Output Parameter:
2198: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2199: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2201: Level: advanced
2203: Notes:
2204: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2206: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2207: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2208: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2209: with `MatSetLocalToGlobalMapping()`.
2211: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2212: `MatSetValuesLocal()`, `MatGetValues()`
2213: @*/
2214: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2215: {
2216: PetscFunctionBeginHot;
2219: MatCheckPreallocated(mat, 1);
2220: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2221: PetscAssertPointer(irow, 3);
2222: PetscAssertPointer(icol, 5);
2223: if (PetscDefined(USE_DEBUG)) {
2224: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2225: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2226: }
2227: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2228: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2229: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2230: else {
2231: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2232: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2233: irowm = buf;
2234: icolm = buf + nrow;
2235: } else {
2236: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2237: irowm = bufr;
2238: icolm = bufc;
2239: }
2240: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2241: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2242: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2243: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2244: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2245: PetscCall(PetscFree2(bufr, bufc));
2246: }
2247: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2248: PetscFunctionReturn(PETSC_SUCCESS);
2249: }
2251: /*@
2252: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2253: the same size. Currently, this can only be called once and creates the given matrix.
2255: Not Collective
2257: Input Parameters:
2258: + mat - the matrix
2259: . nb - the number of blocks
2260: . bs - the number of rows (and columns) in each block
2261: . rows - a concatenation of the rows for each block
2262: - v - a concatenation of logically two-dimensional arrays of values
2264: Level: advanced
2266: Notes:
2267: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2269: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
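   A minimal C sketch that adds two 2 by 2 element blocks (the matrix `A` and the values are illustrative) is
.vb
  PetscInt    rows[4] = {0, 1, 1, 2};                 // rows of block 0 followed by rows of block 1
  PetscScalar v[8]    = {1, -1, -1, 1, 1, -1, -1, 1}; // the two 2x2 blocks, concatenated
  PetscCall(MatSetValuesBatch(A, 2, 2, rows, v));
.ve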
2271: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2272: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2273: @*/
2274: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2275: {
2276: PetscFunctionBegin;
2279: PetscAssertPointer(rows, 4);
2280: PetscAssertPointer(v, 5);
2281: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2283: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2284: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2285: else {
2286: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2287: }
2288: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2289: PetscFunctionReturn(PETSC_SUCCESS);
2290: }
2292: /*@
2293: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2294: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2295: using a local (per-processor) numbering.
2297: Not Collective
2299: Input Parameters:
2300: + x - the matrix
2301: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2302: - cmapping - column mapping
2304: Level: intermediate
2306: Note:
2307: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
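   A minimal C sketch that maps local index i to the global index rstart+i and attaches it to the matrix (names are illustrative;
   a real application's mapping usually also covers ghosted entries) is
.vb
  ISLocalToGlobalMapping map;
  PetscInt               rstart, rend, n, *idx;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  n = rend - rstart;
  PetscCall(PetscMalloc1(n, &idx));
  for (PetscInt i = 0; i < n; i++) idx[i] = rstart + i;
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, n, idx, PETSC_OWN_POINTER, &map));
  PetscCall(MatSetLocalToGlobalMapping(A, map, map)); // same mapping for rows and columns here
  PetscCall(ISLocalToGlobalMappingDestroy(&map));
.ve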
2309: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2310: @*/
2311: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2312: {
2313: PetscFunctionBegin;
2318: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2319: else {
2320: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2321: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2322: }
2323: PetscFunctionReturn(PETSC_SUCCESS);
2324: }
2326: /*@
2327: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2329: Not Collective
2331: Input Parameter:
2332: . A - the matrix
2334: Output Parameters:
2335: + rmapping - row mapping
2336: - cmapping - column mapping
2338: Level: advanced
2340: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2341: @*/
2342: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2343: {
2344: PetscFunctionBegin;
2347: if (rmapping) {
2348: PetscAssertPointer(rmapping, 2);
2349: *rmapping = A->rmap->mapping;
2350: }
2351: if (cmapping) {
2352: PetscAssertPointer(cmapping, 3);
2353: *cmapping = A->cmap->mapping;
2354: }
2355: PetscFunctionReturn(PETSC_SUCCESS);
2356: }
2358: /*@
2359: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2361: Logically Collective
2363: Input Parameters:
2364: + A - the matrix
2365: . rmap - row layout
2366: - cmap - column layout
2368: Level: advanced
2370: Note:
2371: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2373: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2374: @*/
2375: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2376: {
2377: PetscFunctionBegin;
2379: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2380: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2381: PetscFunctionReturn(PETSC_SUCCESS);
2382: }
2384: /*@
2385: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2387: Not Collective
2389: Input Parameter:
2390: . A - the matrix
2392: Output Parameters:
2393: + rmap - row layout
2394: - cmap - column layout
2396: Level: advanced
2398: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2399: @*/
2400: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2401: {
2402: PetscFunctionBegin;
2405: if (rmap) {
2406: PetscAssertPointer(rmap, 2);
2407: *rmap = A->rmap;
2408: }
2409: if (cmap) {
2410: PetscAssertPointer(cmap, 3);
2411: *cmap = A->cmap;
2412: }
2413: PetscFunctionReturn(PETSC_SUCCESS);
2414: }
2416: /*@
2417: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2418: using a local numbering of the rows and columns.
2420: Not Collective
2422: Input Parameters:
2423: + mat - the matrix
2424: . nrow - number of rows
2425: . irow - the row local indices
2426: . ncol - number of columns
2427: . icol - the column local indices
2428: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2429: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2430: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2432: Level: intermediate
2434: Notes:
2435: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2437: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2438: options cannot be mixed without intervening calls to the assembly
2439: routines.
2441: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2442: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
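   A minimal C sketch, assuming a local-to-global mapping has already been attached with `MatSetLocalToGlobalMapping()` (names are illustrative), is
.vb
  PetscInt    lrow = 0, lcol = 0; // local indices, translated through the attached mapping
  PetscScalar value = 1.0;
  PetscCall(MatSetValuesLocal(A, 1, &lrow, 1, &lcol, &value, ADD_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve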
2444: Fortran Notes:
2445: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2446: .vb
2447: call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2448: .ve
2450: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2452: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2453: `MatGetValuesLocal()`
2454: @*/
2455: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2456: {
2457: PetscFunctionBeginHot;
2460: MatCheckPreallocated(mat, 1);
2461: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2462: PetscAssertPointer(irow, 3);
2463: PetscAssertPointer(icol, 5);
2464: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2465: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2466: if (PetscDefined(USE_DEBUG)) {
2467: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2468: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2469: }
2471: if (mat->assembled) {
2472: mat->was_assembled = PETSC_TRUE;
2473: mat->assembled = PETSC_FALSE;
2474: }
2475: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2476: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2477: else {
2478: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2479: const PetscInt *irowm, *icolm;
2481: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2482: bufr = buf;
2483: bufc = buf + nrow;
2484: irowm = bufr;
2485: icolm = bufc;
2486: } else {
2487: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2488: irowm = bufr;
2489: icolm = bufc;
2490: }
2491: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2492: else irowm = irow;
2493: if (mat->cmap->mapping) {
2494: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2495: else icolm = irowm;
2496: } else icolm = icol;
2497: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2498: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2499: }
2500: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2501: PetscFunctionReturn(PETSC_SUCCESS);
2502: }
2504: /*@
2505: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2506: using a local ordering of the nodes a block at a time.
2508: Not Collective
2510: Input Parameters:
2511: + mat - the matrix
2512: . nrow - number of rows
2513: . irow - the row local indices
2514: . ncol - number of columns
2515: . icol - the column local indices
2516: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2517: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2518: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2520: Level: intermediate
2522: Notes:
2523: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2524:    before using this routine.
2526: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2527: options cannot be mixed without intervening calls to the assembly
2528: routines.
2530: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2531: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2533: Fortran Notes:
2534: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2535: .vb
2536: call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2537: .ve
2539: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2541: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2542: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2543: @*/
2544: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2545: {
2546: PetscFunctionBeginHot;
2549: MatCheckPreallocated(mat, 1);
2550: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2551: PetscAssertPointer(irow, 3);
2552: PetscAssertPointer(icol, 5);
2553: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2554: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2555: if (PetscDefined(USE_DEBUG)) {
2556: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2557: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2558: }
2560: if (mat->assembled) {
2561: mat->was_assembled = PETSC_TRUE;
2562: mat->assembled = PETSC_FALSE;
2563: }
2564: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2565: PetscInt irbs, rbs;
2566: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2567: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2568: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2569: }
2570: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2571: PetscInt icbs, cbs;
2572: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2573: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2574: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2575: }
2576: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2577: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2578: else {
2579: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2580: const PetscInt *irowm, *icolm;
2582: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2583: bufr = buf;
2584: bufc = buf + nrow;
2585: irowm = bufr;
2586: icolm = bufc;
2587: } else {
2588: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2589: irowm = bufr;
2590: icolm = bufc;
2591: }
2592: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2593: else irowm = irow;
2594: if (mat->cmap->mapping) {
2595: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2596: else icolm = irowm;
2597: } else icolm = icol;
2598: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2599: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2600: }
2601: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2602: PetscFunctionReturn(PETSC_SUCCESS);
2603: }
2605: /*@
2606:   MatMultDiagonalBlock - Computes the matrix-vector product $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2608: Collective
2610: Input Parameters:
2611: + mat - the matrix
2612: - x - the vector to be multiplied
2614: Output Parameter:
2615: . y - the result
2617: Level: developer
2619: Note:
2620: The vectors `x` and `y` cannot be the same. I.e., one cannot
2621: call `MatMultDiagonalBlock`(A,y,y).
2623: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2624: @*/
2625: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2626: {
2627: PetscFunctionBegin;
2633: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2634: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2635: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2636: MatCheckPreallocated(mat, 1);
2638: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2639: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2640: PetscFunctionReturn(PETSC_SUCCESS);
2641: }
2643: /*@
2644: MatMult - Computes the matrix-vector product, $y = Ax$.
2646: Neighbor-wise Collective
2648: Input Parameters:
2649: + mat - the matrix
2650: - x - the vector to be multiplied
2652: Output Parameter:
2653: . y - the result
2655: Level: beginner
2657: Note:
2658: The vectors `x` and `y` cannot be the same. I.e., one cannot
2659: call `MatMult`(A,y,y).
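   A minimal C sketch using vectors obtained from `MatCreateVecs()` so the layouts are compatible (names are illustrative; `A` is assumed to be assembled) is
.vb
  Vec x, y;
  PetscCall(MatCreateVecs(A, &x, &y)); // x matches the columns of A, y matches the rows
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y));         // y = A x
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
.ve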
2661: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2662: @*/
2663: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2664: {
2665: PetscFunctionBegin;
2669: VecCheckAssembled(x);
2671: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2672: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2673: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2674: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2675: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2676: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2677: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2678: PetscCall(VecSetErrorIfLocked(y, 3));
2679: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2680: MatCheckPreallocated(mat, 1);
2682: PetscCall(VecLockReadPush(x));
2683: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2684: PetscUseTypeMethod(mat, mult, x, y);
2685: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2686: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2687: PetscCall(VecLockReadPop(x));
2688: PetscFunctionReturn(PETSC_SUCCESS);
2689: }
2691: /*@
2692: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2694: Neighbor-wise Collective
2696: Input Parameters:
2697: + mat - the matrix
2698: - x - the vector to be multiplied
2700: Output Parameter:
2701: . y - the result
2703: Level: beginner
2705: Notes:
2706: The vectors `x` and `y` cannot be the same. I.e., one cannot
2707: call `MatMultTranspose`(A,y,y).
2709:    For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2710:    use `MatMultHermitianTranspose()`
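   A minimal C sketch (names are illustrative; note that the roles of the two vectors are swapped relative to `MatMult()`) is
.vb
  Vec x, y;
  PetscCall(MatCreateVecs(A, &y, &x)); // x matches the rows of A, y matches the columns
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMultTranspose(A, x, y)); // y = A^T x
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
.ve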
2712: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2713: @*/
2714: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2715: {
2716: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2718: PetscFunctionBegin;
2722: VecCheckAssembled(x);
2725: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2726: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2727: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2728: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2729: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2730: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2731: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2732: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2733: MatCheckPreallocated(mat, 1);
2735: if (!mat->ops->multtranspose) {
2736: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2737: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2738: } else op = mat->ops->multtranspose;
2739: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2740: PetscCall(VecLockReadPush(x));
2741: PetscCall((*op)(mat, x, y));
2742: PetscCall(VecLockReadPop(x));
2743: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2744: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2745: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2746: PetscFunctionReturn(PETSC_SUCCESS);
2747: }
2749: /*@
2750: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2752: Neighbor-wise Collective
2754: Input Parameters:
2755: + mat - the matrix
2756: - x - the vector to be multiplied
2758: Output Parameter:
2759: . y - the result
2761: Level: beginner
2763: Notes:
2764: The vectors `x` and `y` cannot be the same. I.e., one cannot
2765: call `MatMultHermitianTranspose`(A,y,y).
2767: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2769: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2771: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2772: @*/
2773: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2774: {
2775: PetscFunctionBegin;
2781: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2782: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2783: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2784: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2785: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2786: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2787: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2788: MatCheckPreallocated(mat, 1);
2790: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2791: #if defined(PETSC_USE_COMPLEX)
2792: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2793: PetscCall(VecLockReadPush(x));
2794: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2795: else PetscUseTypeMethod(mat, mult, x, y);
2796: PetscCall(VecLockReadPop(x));
2797: } else {
2798: Vec w;
2799: PetscCall(VecDuplicate(x, &w));
2800: PetscCall(VecCopy(x, w));
2801: PetscCall(VecConjugate(w));
2802: PetscCall(MatMultTranspose(mat, w, y));
2803: PetscCall(VecDestroy(&w));
2804: PetscCall(VecConjugate(y));
2805: }
2806: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2807: #else
2808: PetscCall(MatMultTranspose(mat, x, y));
2809: #endif
2810: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2811: PetscFunctionReturn(PETSC_SUCCESS);
2812: }
2814: /*@
2815: MatMultAdd - Computes $v3 = v2 + A * v1$.
2817: Neighbor-wise Collective
2819: Input Parameters:
2820: + mat - the matrix
2821: . v1 - the vector to be multiplied by `mat`
2822: - v2 - the vector to be added to the result
2824: Output Parameter:
2825: . v3 - the result
2827: Level: beginner
2829: Note:
2830: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2831: call `MatMultAdd`(A,v1,v2,v1).
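  Example:
  A minimal usage sketch, assuming `A` has already been created and assembled,
.vb
  Vec v1, v2, v3;

  PetscCall(MatCreateVecs(A, &v1, &v2)); // v1 is compatible with the columns of A, v2 with its rows
  PetscCall(VecDuplicate(v2, &v3));
  PetscCall(VecSet(v1, 1.0));
  PetscCall(VecSet(v2, 2.0));
  PetscCall(MatMultAdd(A, v1, v2, v3));  // v3 = v2 + A*v1
  PetscCall(VecDestroy(&v1));
  PetscCall(VecDestroy(&v2));
  PetscCall(VecDestroy(&v3));
.ve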
2833: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2834: @*/
2835: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2836: {
2837: PetscFunctionBegin;
2844: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2845: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2846: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2847: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2848: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2849: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2850: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2851: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2852: MatCheckPreallocated(mat, 1);
2854: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2855: PetscCall(VecLockReadPush(v1));
2856: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2857: PetscCall(VecLockReadPop(v1));
2858: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2859: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2860: PetscFunctionReturn(PETSC_SUCCESS);
2861: }
2863: /*@
2864: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2866: Neighbor-wise Collective
2868: Input Parameters:
2869: + mat - the matrix
2870: . v1 - the vector to be multiplied by the transpose of the matrix
2871: - v2 - the vector to be added to the result
2873: Output Parameter:
2874: . v3 - the result
2876: Level: beginner
2878: Note:
2879: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2880: call `MatMultTransposeAdd`(A,v1,v2,v1).
2882: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2883: @*/
2884: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2885: {
2886: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2888: PetscFunctionBegin;
2895: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2896: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2897: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2898: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2899: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2900: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2901: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2902: MatCheckPreallocated(mat, 1);
2904: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2905: PetscCall(VecLockReadPush(v1));
2906: PetscCall((*op)(mat, v1, v2, v3));
2907: PetscCall(VecLockReadPop(v1));
2908: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2909: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2910: PetscFunctionReturn(PETSC_SUCCESS);
2911: }
2913: /*@
2914: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2916: Neighbor-wise Collective
2918: Input Parameters:
2919: + mat - the matrix
2920: . v1 - the vector to be multiplied by the Hermitian transpose
2921: - v2 - the vector to be added to the result
2923: Output Parameter:
2924: . v3 - the result
2926: Level: beginner
2928: Note:
2929: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2930: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2932: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2933: @*/
2934: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2935: {
2936: PetscFunctionBegin;
2943: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2944: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2945: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2946: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2947: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2948: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2949: MatCheckPreallocated(mat, 1);
2951: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2952: PetscCall(VecLockReadPush(v1));
2953: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2954: else {
2955: Vec w, z;
2956: PetscCall(VecDuplicate(v1, &w));
2957: PetscCall(VecCopy(v1, w));
2958: PetscCall(VecConjugate(w));
2959: PetscCall(VecDuplicate(v3, &z));
2960: PetscCall(MatMultTranspose(mat, w, z));
2961: PetscCall(VecDestroy(&w));
2962: PetscCall(VecConjugate(z));
2963: if (v2 != v3) {
2964: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2965: } else {
2966: PetscCall(VecAXPY(v3, 1.0, z));
2967: }
2968: PetscCall(VecDestroy(&z));
2969: }
2970: PetscCall(VecLockReadPop(v1));
2971: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2972: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2973: PetscFunctionReturn(PETSC_SUCCESS);
2974: }
2976: /*@
2977: MatGetFactorType - gets the type of factorization that a matrix represents
2979: Not Collective
2981: Input Parameter:
2982: . mat - the matrix
2984: Output Parameter:
2985: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2987: Level: intermediate
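  Example:
  A minimal usage sketch, assuming `A` has already been created,
.vb
  MatFactorType ftype;

  PetscCall(MatGetFactorType(A, &ftype));
  if (ftype == MAT_FACTOR_NONE) {
    // A is not a factored matrix, so its entries may still be changed with MatSetValues()
  }
.ve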
2989: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2990: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2991: @*/
2992: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2993: {
2994: PetscFunctionBegin;
2997: PetscAssertPointer(t, 2);
2998: *t = mat->factortype;
2999: PetscFunctionReturn(PETSC_SUCCESS);
3000: }
3002: /*@
3003: MatSetFactorType - sets the type of factorization that a matrix represents
3005: Logically Collective
3007: Input Parameters:
3008: + mat - the matrix
3009: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3011: Level: intermediate
3013: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3014: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3015: @*/
3016: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3017: {
3018: PetscFunctionBegin;
3021: mat->factortype = t;
3022: PetscFunctionReturn(PETSC_SUCCESS);
3023: }
3025: /*@
3026: MatGetInfo - Returns information about matrix storage (number of
3027: nonzeros, memory, etc.).
3029: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3031: Input Parameters:
3032: + mat - the matrix
3033: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3035: Output Parameter:
3036: . info - matrix information context
3038: Options Database Key:
3039: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3041: Level: intermediate
3043: Notes:
3044: The `MatInfo` context contains a variety of matrix data, including
3045: number of nonzeros allocated and used, number of mallocs during
3046: matrix assembly, etc. Additional information for factored matrices
3047: is provided (such as the fill ratio, number of mallocs during
3048: factorization, etc.).
3050: Example:
3051: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3052: data within the `MatInfo` context. For example,
3053: .vb
3054: MatInfo info;
3055: Mat A;
3056: double mal, nz_a, nz_u;
3058: MatGetInfo(A, MAT_LOCAL, &info);
3059: mal = info.mallocs;
3060: nz_a = info.nz_allocated;
nz_u = info.nz_used;
3061: .ve
3063: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3064: @*/
3065: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3066: {
3067: PetscFunctionBegin;
3070: PetscAssertPointer(info, 3);
3071: MatCheckPreallocated(mat, 1);
3072: PetscUseTypeMethod(mat, getinfo, flag, info);
3073: PetscFunctionReturn(PETSC_SUCCESS);
3074: }
3076: /*
3077: This is used by external packages where it is not easy to get the info from the actual
3078: matrix factorization.
3079: */
3080: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3081: {
3082: PetscFunctionBegin;
3083: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3084: PetscFunctionReturn(PETSC_SUCCESS);
3085: }
3087: /*@
3088: MatLUFactor - Performs in-place LU factorization of a matrix.
3090: Collective
3092: Input Parameters:
3093: + mat - the matrix
3094: . row - row permutation
3095: . col - column permutation
3096: - info - options for factorization, includes
3097: .vb
3098: fill - expected fill as ratio of original fill.
3099: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3100: Run with the option -info to determine an optimal value to use
3101: .ve
3103: Level: developer
3105: Notes:
3106: Most users should employ the `KSP` interface for linear solvers
3107: instead of working directly with matrix algebra routines such as this.
3108: See, e.g., `KSPCreate()`.
3110: This changes the state of the matrix to a factored matrix; it cannot be used
3111: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3113: This is really in-place only for dense matrices; the preferred approach when not using `KSP` is to use `MatGetFactor()`, `MatLUFactorSymbolic()`,
3114: and `MatLUFactorNumeric()`.
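  Example:
  A minimal usage sketch of the in-place variant, assuming `A` is a square, assembled `MATSEQDENSE` matrix and `b` is a compatible vector,
.vb
  Vec x;

  PetscCall(VecDuplicate(b, &x));
  PetscCall(MatLUFactor(A, NULL, NULL, NULL)); // A is overwritten by its LU factors
  PetscCall(MatSolve(A, b, x));                // solves A x = b using the factors stored in A
  PetscCall(VecDestroy(&x));
.ve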
3116: Fortran Note:
3117: A valid (non-null) `info` argument must be provided
3119: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3120: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3121: @*/
3122: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3123: {
3124: MatFactorInfo tinfo;
3126: PetscFunctionBegin;
3130: if (info) PetscAssertPointer(info, 4);
3132: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3133: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3134: MatCheckPreallocated(mat, 1);
3135: if (!info) {
3136: PetscCall(MatFactorInfoInitialize(&tinfo));
3137: info = &tinfo;
3138: }
3140: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3141: PetscUseTypeMethod(mat, lufactor, row, col, info);
3142: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3143: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3144: PetscFunctionReturn(PETSC_SUCCESS);
3145: }
3147: /*@
3148: MatILUFactor - Performs in-place ILU factorization of a matrix.
3150: Collective
3152: Input Parameters:
3153: + mat - the matrix
3154: . row - row permutation
3155: . col - column permutation
3156: - info - structure containing
3157: .vb
3158: levels - number of levels of fill.
3159: expected fill - as ratio of original fill.
3160: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3161: missing diagonal entries)
3162: .ve
3164: Level: developer
3166: Notes:
3167: Most users should employ the `KSP` interface for linear solvers
3168: instead of working directly with matrix algebra routines such as this.
3169: See, e.g., `KSPCreate()`.
3171: This is probably really in-place only when the level of fill is zero; otherwise it allocates
3172: new space to store the factored matrix and frees the previous memory. The preferred approach when not using `KSP` is to use `MatGetFactor()`, `MatILUFactorSymbolic()`,
3173: and `MatILUFactorNumeric()`.
3175: Fortran Note:
3176: A valid (non-null) `info` argument must be provided
3178: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3179: @*/
3180: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3181: {
3182: PetscFunctionBegin;
3186: PetscAssertPointer(info, 4);
3188: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3189: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3190: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3191: MatCheckPreallocated(mat, 1);
3193: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3194: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3195: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3196: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3197: PetscFunctionReturn(PETSC_SUCCESS);
3198: }
3200: /*@
3201: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3202: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3204: Collective
3206: Input Parameters:
3207: + fact - the factor matrix obtained with `MatGetFactor()`
3208: . mat - the matrix
3209: . row - the row permutation
3210: . col - the column permutation
3211: - info - options for factorization, includes
3212: .vb
3213: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3214: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3215: .ve
3217: Level: developer
3219: Notes:
3220: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3222: Most users should employ the simplified `KSP` interface for linear solvers
3223: instead of working directly with matrix algebra routines such as this.
3224: See, e.g., `KSPCreate()`.
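  Example:
  A minimal usage sketch of the full factorization sequence, assuming `A` is a square, assembled `MATSEQAIJ` matrix and `b` is a compatible vector,
.vb
  Mat           F;
  IS            rowperm, colperm;
  Vec           x;
  MatFactorInfo info;

  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
  PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(VecDuplicate(b, &x));
  PetscCall(MatSolve(F, b, x)); // solves A x = b using the factors stored in F
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(VecDestroy(&x));
  PetscCall(MatDestroy(&F));
.ve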
3226: Fortran Note:
3227: A valid (non-null) `info` argument must be provided
3229: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3230: @*/
3231: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3232: {
3233: MatFactorInfo tinfo;
3235: PetscFunctionBegin;
3240: if (info) PetscAssertPointer(info, 5);
3243: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3244: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3245: MatCheckPreallocated(mat, 2);
3246: if (!info) {
3247: PetscCall(MatFactorInfoInitialize(&tinfo));
3248: info = &tinfo;
3249: }
3251: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3252: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3253: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3254: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3255: PetscFunctionReturn(PETSC_SUCCESS);
3256: }
3258: /*@
3259: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3260: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3262: Collective
3264: Input Parameters:
3265: + fact - the factor matrix obtained with `MatGetFactor()`
3266: . mat - the matrix
3267: - info - options for factorization
3269: Level: developer
3271: Notes:
3272: See `MatLUFactor()` for in-place factorization. See
3273: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3275: Most users should employ the `KSP` interface for linear solvers
3276: instead of working directly with matrix algebra routines such as this.
3277: See, e.g., `KSPCreate()`.
3279: Fortran Note:
3280: A valid (non-null) `info` argument must be provided
3282: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3283: @*/
3284: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3285: {
3286: MatFactorInfo tinfo;
3288: PetscFunctionBegin;
3293: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3294: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3295: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3297: MatCheckPreallocated(mat, 2);
3298: if (!info) {
3299: PetscCall(MatFactorInfoInitialize(&tinfo));
3300: info = &tinfo;
3301: }
3303: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3304: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3305: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3306: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3307: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3308: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3309: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3310: PetscFunctionReturn(PETSC_SUCCESS);
3311: }
3313: /*@
3314: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3315: symmetric matrix.
3317: Collective
3319: Input Parameters:
3320: + mat - the matrix
3321: . perm - row and column permutations
3322: - info - expected fill as ratio of original fill
3324: Level: developer
3326: Notes:
3327: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3328: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3330: Most users should employ the `KSP` interface for linear solvers
3331: instead of working directly with matrix algebra routines such as this.
3332: See, e.g., `KSPCreate()`.
3334: Fortran Note:
3335: A valid (non-null) `info` argument must be provided
3337: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3338: `MatGetOrdering()`
3339: @*/
3340: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3341: {
3342: MatFactorInfo tinfo;
3344: PetscFunctionBegin;
3347: if (info) PetscAssertPointer(info, 3);
3349: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3350: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3351: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3352: MatCheckPreallocated(mat, 1);
3353: if (!info) {
3354: PetscCall(MatFactorInfoInitialize(&tinfo));
3355: info = &tinfo;
3356: }
3358: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3359: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3360: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3361: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3362: PetscFunctionReturn(PETSC_SUCCESS);
3363: }
3365: /*@
3366: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3367: of a symmetric matrix.
3369: Collective
3371: Input Parameters:
3372: + fact - the factor matrix obtained with `MatGetFactor()`
3373: . mat - the matrix
3374: . perm - row and column permutations
3375: - info - options for factorization, includes
3376: .vb
3377: fill - expected fill as ratio of original fill.
3378: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3379: Run with the option -info to determine an optimal value to use
3380: .ve
3382: Level: developer
3384: Notes:
3385: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3386: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3388: Most users should employ the `KSP` interface for linear solvers
3389: instead of working directly with matrix algebra routines such as this.
3390: See, e.g., `KSPCreate()`.
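  Example:
  A minimal usage sketch, assuming `A` is a symmetric positive definite, assembled `MATSEQAIJ` matrix and `b` is a compatible vector,
.vb
  Mat           F;
  IS            rowperm, colperm;
  Vec           x;
  MatFactorInfo info;

  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm));
  PetscCall(MatCholeskyFactorSymbolic(F, A, rowperm, &info));
  PetscCall(MatCholeskyFactorNumeric(F, A, &info));
  PetscCall(VecDuplicate(b, &x));
  PetscCall(MatSolve(F, b, x)); // solves A x = b using the factors stored in F
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(VecDestroy(&x));
  PetscCall(MatDestroy(&F));
.ve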
3392: Fortran Note:
3393: A valid (non-null) `info` argument must be provided
3395: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3396: `MatGetOrdering()`
3397: @*/
3398: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3399: {
3400: MatFactorInfo tinfo;
3402: PetscFunctionBegin;
3406: if (info) PetscAssertPointer(info, 4);
3409: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3410: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3411: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3412: MatCheckPreallocated(mat, 2);
3413: if (!info) {
3414: PetscCall(MatFactorInfoInitialize(&tinfo));
3415: info = &tinfo;
3416: }
3418: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3419: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3420: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3421: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3422: PetscFunctionReturn(PETSC_SUCCESS);
3423: }
3425: /*@
3426: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3427: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3428: `MatCholeskyFactorSymbolic()`.
3430: Collective
3432: Input Parameters:
3433: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3434: . mat - the initial matrix that is to be factored
3435: - info - options for factorization
3437: Level: developer
3439: Note:
3440: Most users should employ the `KSP` interface for linear solvers
3441: instead of working directly with matrix algebra routines such as this.
3442: See, e.g., `KSPCreate()`.
3444: Fortran Note:
3445: A valid (non-null) `info` argument must be provided
3447: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3448: @*/
3449: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3450: {
3451: MatFactorInfo tinfo;
3453: PetscFunctionBegin;
3458: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3459: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3460: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3461: MatCheckPreallocated(mat, 2);
3462: if (!info) {
3463: PetscCall(MatFactorInfoInitialize(&tinfo));
3464: info = &tinfo;
3465: }
3467: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3468: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3469: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3470: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3471: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3472: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3473: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3474: PetscFunctionReturn(PETSC_SUCCESS);
3475: }
3477: /*@
3478: MatQRFactor - Performs in-place QR factorization of a matrix.
3480: Collective
3482: Input Parameters:
3483: + mat - the matrix
3484: . col - column permutation
3485: - info - options for factorization, includes
3486: .vb
3487: fill - expected fill as ratio of original fill.
3488: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3489: Run with the option -info to determine an optimal value to use
3490: .ve
3492: Level: developer
3494: Notes:
3495: Most users should employ the `KSP` interface for linear solvers
3496: instead of working directly with matrix algebra routines such as this.
3497: See, e.g., `KSPCreate()`.
3499: This changes the state of the matrix to a factored matrix; it cannot be used
3500: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3502: Fortran Note:
3503: A valid (non-null) `info` argument must be provided
3505: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3506: `MatSetUnfactored()`
3507: @*/
3508: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3509: {
3510: PetscFunctionBegin;
3513: if (info) PetscAssertPointer(info, 3);
3515: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3516: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3517: MatCheckPreallocated(mat, 1);
3518: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3519: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3520: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3521: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3522: PetscFunctionReturn(PETSC_SUCCESS);
3523: }
3525: /*@
3526: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3527: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3529: Collective
3531: Input Parameters:
3532: + fact - the factor matrix obtained with `MatGetFactor()`
3533: . mat - the matrix
3534: . col - column permutation
3535: - info - options for factorization, includes
3536: .vb
3537: fill - expected fill as ratio of original fill.
3538: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3539: Run with the option -info to determine an optimal value to use
3540: .ve
3542: Level: developer
3544: Note:
3545: Most users should employ the `KSP` interface for linear solvers
3546: instead of working directly with matrix algebra routines such as this.
3547: See, e.g., `KSPCreate()`.
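  Example:
  A minimal usage sketch for a least-squares solve, assuming `A` is an assembled `MATSEQDENSE` matrix with at least as many rows as columns and `b` is a vector matching its rows,
.vb
  Mat F;
  Vec x;

  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F));
  PetscCall(MatQRFactorSymbolic(F, A, NULL, NULL));
  PetscCall(MatQRFactorNumeric(F, A, NULL));
  PetscCall(MatCreateVecs(A, &x, NULL)); // x is compatible with the columns of A
  PetscCall(MatSolve(F, b, x));          // least-squares solution of A x = b
  PetscCall(VecDestroy(&x));
  PetscCall(MatDestroy(&F));
.ve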
3549: Fortran Note:
3550: A valid (non-null) `info` argument must be provided
3552: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3553: @*/
3554: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3555: {
3556: MatFactorInfo tinfo;
3558: PetscFunctionBegin;
3562: if (info) PetscAssertPointer(info, 4);
3565: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3566: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3567: MatCheckPreallocated(mat, 2);
3568: if (!info) {
3569: PetscCall(MatFactorInfoInitialize(&tinfo));
3570: info = &tinfo;
3571: }
3573: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3574: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3575: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3576: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3577: PetscFunctionReturn(PETSC_SUCCESS);
3578: }
3580: /*@
3581: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3582: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3584: Collective
3586: Input Parameters:
3587: + fact - the factor matrix obtained with `MatGetFactor()`
3588: . mat - the matrix
3589: - info - options for factorization
3591: Level: developer
3593: Notes:
3594: See `MatQRFactor()` for in-place factorization.
3596: Most users should employ the `KSP` interface for linear solvers
3597: instead of working directly with matrix algebra routines such as this.
3598: See, e.g., `KSPCreate()`.
3600: Fortran Note:
3601: A valid (non-null) `info` argument must be provided
3603: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3604: @*/
3605: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3606: {
3607: MatFactorInfo tinfo;
3609: PetscFunctionBegin;
3614: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3615: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3616: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3618: MatCheckPreallocated(mat, 2);
3619: if (!info) {
3620: PetscCall(MatFactorInfoInitialize(&tinfo));
3621: info = &tinfo;
3622: }
3624: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3625: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3626: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3627: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3628: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3629: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3630: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3631: PetscFunctionReturn(PETSC_SUCCESS);
3632: }
3634: /*@
3635: MatSolve - Solves $A x = b$, given a factored matrix.
3637: Neighbor-wise Collective
3639: Input Parameters:
3640: + mat - the factored matrix
3641: - b - the right-hand-side vector
3643: Output Parameter:
3644: . x - the result vector
3646: Level: developer
3648: Notes:
3649: The vectors `b` and `x` cannot be the same. I.e., one cannot
3650: call `MatSolve`(A,x,x).
3652: Most users should employ the `KSP` interface for linear solvers
3653: instead of working directly with matrix algebra routines such as this.
3654: See, e.g., `KSPCreate()`.
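  Example:
  A minimal usage sketch, assuming `F` holds a completed factorization (for example obtained with `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`) and `b` is a compatible vector,
.vb
  Vec x;

  PetscCall(VecDuplicate(b, &x));
  PetscCall(MatSolve(F, b, x)); // x = A^{-1} b, where A is the matrix that was factored into F
  PetscCall(VecDestroy(&x));
.ve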
3656: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3657: @*/
3658: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3659: {
3660: PetscFunctionBegin;
3665: PetscCheckSameComm(mat, 1, b, 2);
3666: PetscCheckSameComm(mat, 1, x, 3);
3667: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3668: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3669: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3670: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3671: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3672: MatCheckPreallocated(mat, 1);
3674: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3675: PetscCall(VecFlag(x, mat->factorerrortype));
3676: if (mat->factorerrortype) PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3677: else PetscUseTypeMethod(mat, solve, b, x);
3678: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3679: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3680: PetscFunctionReturn(PETSC_SUCCESS);
3681: }
3683: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3684: {
3685: Vec b, x;
3686: PetscInt N, i;
3687: PetscErrorCode (*f)(Mat, Vec, Vec);
3688: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3690: PetscFunctionBegin;
3691: if (A->factorerrortype) {
3692: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3693: PetscCall(MatSetInf(X));
3694: PetscFunctionReturn(PETSC_SUCCESS);
3695: }
3696: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3697: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3698: PetscCall(MatBoundToCPU(A, &Abound));
3699: if (!Abound) {
3700: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3701: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3702: }
3703: #if PetscDefined(HAVE_CUDA)
3704: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3705: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3706: #elif PetscDefined(HAVE_HIP)
3707: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3708: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3709: #endif
3710: PetscCall(MatGetSize(B, NULL, &N));
3711: for (i = 0; i < N; i++) {
3712: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3713: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3714: PetscCall((*f)(A, b, x));
3715: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3716: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3717: }
3718: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3719: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3720: PetscFunctionReturn(PETSC_SUCCESS);
3721: }
3723: /*@
3724: MatMatSolve - Solves $A X = B$, given a factored matrix.
3726: Neighbor-wise Collective
3728: Input Parameters:
3729: + A - the factored matrix
3730: - B - the right-hand-side matrix, of type `MATDENSE` (or sparse `MATAIJ` when using MUMPS)
3732: Output Parameter:
3733: . X - the result matrix (dense matrix)
3735: Level: developer
3737: Note:
3738: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3739: otherwise, `B` and `X` cannot be the same.
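  Example:
  A minimal usage sketch, assuming `A` holds a completed factorization of a square matrix and `B` is an assembled `MATDENSE` matrix whose rows match `A` and whose columns hold the right-hand sides,
.vb
  Mat X;

  PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
  PetscCall(MatMatSolve(A, B, X)); // column i of X solves A x = b for column i of B
  PetscCall(MatDestroy(&X));
.ve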
3741: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3742: @*/
3743: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3744: {
3745: PetscFunctionBegin;
3750: PetscCheckSameComm(A, 1, B, 2);
3751: PetscCheckSameComm(A, 1, X, 3);
3752: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3753: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3754: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3755: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3756: MatCheckPreallocated(A, 1);
3758: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3759: if (!A->ops->matsolve) {
3760: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3761: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3762: } else PetscUseTypeMethod(A, matsolve, B, X);
3763: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3764: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3765: PetscFunctionReturn(PETSC_SUCCESS);
3766: }
3768: /*@
3769: MatMatSolveTranspose - Solves $A^T X = B$, given a factored matrix.
3771: Neighbor-wise Collective
3773: Input Parameters:
3774: + A - the factored matrix
3775: - B - the right-hand-side matrix (`MATDENSE` matrix)
3777: Output Parameter:
3778: . X - the result matrix (dense matrix)
3780: Level: developer
3782: Note:
3783: The matrices `B` and `X` cannot be the same. I.e., one cannot
3784: call `MatMatSolveTranspose`(A,X,X).
3786: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3787: @*/
3788: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3789: {
3790: PetscFunctionBegin;
3795: PetscCheckSameComm(A, 1, B, 2);
3796: PetscCheckSameComm(A, 1, X, 3);
3797: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3798: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3799: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3800: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3801: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix");
3802: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3803: MatCheckPreallocated(A, 1);
3805: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3806: if (!A->ops->matsolvetranspose) {
3807: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3808: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3809: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3810: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3811: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3812: PetscFunctionReturn(PETSC_SUCCESS);
3813: }
3815: /*@
3816: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3818: Neighbor-wise Collective
3820: Input Parameters:
3821: + A - the factored matrix
3822: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3824: Output Parameter:
3825: . X - the result matrix (dense matrix)
3827: Level: developer
3829: Note:
3830: For MUMPS, only a centralized right-hand-side matrix in sparse compressed column format on the host processor is supported. The user must therefore create `Bt` in sparse compressed row
3831: format on the host processor and call `MatMatTransposeSolve()` to obtain the effect of MUMPS' `MatMatSolve()`.
3833: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3834: @*/
3835: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3836: {
3837: PetscFunctionBegin;
3842: PetscCheckSameComm(A, 1, Bt, 2);
3843: PetscCheckSameComm(A, 1, X, 3);
3845: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and Bt must be different matrices");
3846: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3847: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3848: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows");
3849: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3850: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3851: MatCheckPreallocated(A, 1);
3853: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3854: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3855: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3856: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3857: PetscFunctionReturn(PETSC_SUCCESS);
3858: }
3860: /*@
3861: MatForwardSolve - Solves $L x = b$, given a factored matrix $A = LU$, or
3862: $U^T D^{1/2} x = b$, given a factored symmetric matrix $A = U^T D U$.
3864: Neighbor-wise Collective
3866: Input Parameters:
3867: + mat - the factored matrix
3868: - b - the right-hand-side vector
3870: Output Parameter:
3871: . x - the result vector
3873: Level: developer
3875: Notes:
3876: `MatSolve()` should be used for most applications, as it performs
3877: a forward solve followed by a backward solve.
3879: The vectors `b` and `x` cannot be the same, i.e., one cannot
3880: call `MatForwardSolve`(A,x,x).
3882: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3883: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3884: `MatForwardSolve()` solves $U^T D y = b$, and
3885: `MatBackwardSolve()` solves $U x = y$.
3886: Thus they do not provide a symmetric preconditioner.
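  Example:
  A minimal usage sketch, assuming `F` holds a completed LU factorization of a type that implements the separate triangular solves (such as the PETSc `MATSEQAIJ` factorization) and `b` is a compatible vector,
.vb
  Vec x, y;

  PetscCall(VecDuplicate(b, &y));
  PetscCall(VecDuplicate(b, &x));
  PetscCall(MatForwardSolve(F, b, y));  // y = L^{-1} b
  PetscCall(MatBackwardSolve(F, y, x)); // x = U^{-1} y, the same result as MatSolve(F, b, x)
  PetscCall(VecDestroy(&y));
  PetscCall(VecDestroy(&x));
.ve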
3888: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3889: @*/
3890: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3891: {
3892: PetscFunctionBegin;
3897: PetscCheckSameComm(mat, 1, b, 2);
3898: PetscCheckSameComm(mat, 1, x, 3);
3899: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3900: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3901: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3902: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3903: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3904: MatCheckPreallocated(mat, 1);
3906: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3907: PetscUseTypeMethod(mat, forwardsolve, b, x);
3908: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3909: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3910: PetscFunctionReturn(PETSC_SUCCESS);
3911: }
3913: /*@
3914: MatBackwardSolve - Solves $U x = b$, given a factored matrix $A = LU$, or
3915: $D^{1/2} U x = b$, given a factored symmetric matrix $A = U^T D U$.
3917: Neighbor-wise Collective
3919: Input Parameters:
3920: + mat - the factored matrix
3921: - b - the right-hand-side vector
3923: Output Parameter:
3924: . x - the result vector
3926: Level: developer
3928: Notes:
3929: `MatSolve()` should be used for most applications, as it performs
3930: a forward solve followed by a backward solve.
3932: The vectors `b` and `x` cannot be the same. I.e., one cannot
3933: call `MatBackwardSolve`(A,x,x).
3935: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3936: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3937: `MatForwardSolve()` solves $U^T D y = b$, and
3938: `MatBackwardSolve()` solves $U x = y$.
3939: Thus they do not provide a symmetric preconditioner.
3941: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3942: @*/
3943: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3944: {
3945: PetscFunctionBegin;
3950: PetscCheckSameComm(mat, 1, b, 2);
3951: PetscCheckSameComm(mat, 1, x, 3);
3952: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3953: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3954: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3955: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3956: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3957: MatCheckPreallocated(mat, 1);
3959: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3960: PetscUseTypeMethod(mat, backwardsolve, b, x);
3961: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3962: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3963: PetscFunctionReturn(PETSC_SUCCESS);
3964: }
3966: /*@
3967: MatSolveAdd - Computes $x = y + A^{-1} b$, given a factored matrix.
3969: Neighbor-wise Collective
3971: Input Parameters:
3972: + mat - the factored matrix
3973: . b - the right-hand-side vector
3974: - y - the vector to be added to
3976: Output Parameter:
3977: . x - the result vector
3979: Level: developer
3981: Note:
3982: The vectors `b` and `x` cannot be the same. I.e., one cannot
3983: call `MatSolveAdd`(A,x,y,x).
3985: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3986: @*/
3987: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3988: {
3989: PetscScalar one = 1.0;
3990: Vec tmp;
3992: PetscFunctionBegin;
3998: PetscCheckSameComm(mat, 1, b, 2);
3999: PetscCheckSameComm(mat, 1, y, 3);
4000: PetscCheckSameComm(mat, 1, x, 4);
4001: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4002: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4003: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4004: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4005: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4006: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4007: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4008: MatCheckPreallocated(mat, 1);
4010: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4011: PetscCall(VecFlag(x, mat->factorerrortype));
4012: if (mat->factorerrortype) {
4013: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4014: } else if (mat->ops->solveadd) {
4015: PetscUseTypeMethod(mat, solveadd, b, y, x);
4016: } else {
4017: /* do the solve then the add manually */
4018: if (x != y) {
4019: PetscCall(MatSolve(mat, b, x));
4020: PetscCall(VecAXPY(x, one, y));
4021: } else {
4022: PetscCall(VecDuplicate(x, &tmp));
4023: PetscCall(VecCopy(x, tmp));
4024: PetscCall(MatSolve(mat, b, x));
4025: PetscCall(VecAXPY(x, one, tmp));
4026: PetscCall(VecDestroy(&tmp));
4027: }
4028: }
4029: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4030: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4031: PetscFunctionReturn(PETSC_SUCCESS);
4032: }
4034: /*@
4035: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4037: Neighbor-wise Collective
4039: Input Parameters:
4040: + mat - the factored matrix
4041: - b - the right-hand-side vector
4043: Output Parameter:
4044: . x - the result vector
4046: Level: developer
4048: Notes:
4049: The vectors `b` and `x` cannot be the same. I.e., one cannot
4050: call `MatSolveTranspose`(A,x,x).
4052: Most users should employ the `KSP` interface for linear solvers
4053: instead of working directly with matrix algebra routines such as this.
4054: See, e.g., `KSPCreate()`.
4056: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4057: @*/
4058: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4059: {
4060: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4062: PetscFunctionBegin;
4067: PetscCheckSameComm(mat, 1, b, 2);
4068: PetscCheckSameComm(mat, 1, x, 3);
4069: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4070: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4071: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4072: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4073: MatCheckPreallocated(mat, 1);
4074: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4075: PetscCall(VecFlag(x, mat->factorerrortype));
4076: if (mat->factorerrortype) {
4077: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4078: } else {
4079: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4080: PetscCall((*f)(mat, b, x));
4081: }
4082: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4083: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4084: PetscFunctionReturn(PETSC_SUCCESS);
4085: }
4087: /*@
4088: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4089: factored matrix.
4091: Neighbor-wise Collective
4093: Input Parameters:
4094: + mat - the factored matrix
4095: . b - the right-hand-side vector
4096: - y - the vector to be added to
4098: Output Parameter:
4099: . x - the result vector
4101: Level: developer
4103: Note:
4104: The vectors `b` and `x` cannot be the same. I.e., one cannot
4105: call `MatSolveTransposeAdd`(A,x,y,x).
4107: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4108: @*/
4109: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4110: {
4111: PetscScalar one = 1.0;
4112: Vec tmp;
4113: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4115: PetscFunctionBegin;
4121: PetscCheckSameComm(mat, 1, b, 2);
4122: PetscCheckSameComm(mat, 1, y, 3);
4123: PetscCheckSameComm(mat, 1, x, 4);
4124: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4125: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4126: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4127: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4128: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4129: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4130: MatCheckPreallocated(mat, 1);
4132: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4133: PetscCall(VecFlag(x, mat->factorerrortype));
4134: if (mat->factorerrortype) {
4135: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4136: } else if (f) {
4137: PetscCall((*f)(mat, b, y, x));
4138: } else {
4139: /* do the solve then the add manually */
4140: if (x != y) {
4141: PetscCall(MatSolveTranspose(mat, b, x));
4142: PetscCall(VecAXPY(x, one, y));
4143: } else {
4144: PetscCall(VecDuplicate(x, &tmp));
4145: PetscCall(VecCopy(x, tmp));
4146: PetscCall(MatSolveTranspose(mat, b, x));
4147: PetscCall(VecAXPY(x, one, tmp));
4148: PetscCall(VecDestroy(&tmp));
4149: }
4150: }
4151: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4152: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4153: PetscFunctionReturn(PETSC_SUCCESS);
4154: }
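/*
   A minimal usage sketch for MatSolveTransposeAdd() (illustrative helper, not part of the PETSc API):
   factor A with LU, then compute x = y + A^{-T} b. Assumes an assembled square MATSEQAIJ matrix A and
   conforming vectors b, y, x created elsewhere, e.g. with MatCreateVecs().
*/
static PetscErrorCode SolveTransposeAddSketch(Mat A, Vec b, Vec y, Vec x)
{
  Mat           F;
  IS            rowperm, colperm;
  MatFactorInfo info;

  PetscFunctionBegin;
  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatSolveTransposeAdd(F, b, y, x)); /* x = y + A^{-T} b */
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(MatDestroy(&F));
  PetscFunctionReturn(PETSC_SUCCESS);
}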
4156: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4157: /*@
4158: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4160: Neighbor-wise Collective
4162: Input Parameters:
4163: + mat - the matrix
4164: . b - the right-hand side
4165: . omega - the relaxation factor
4166: . flag - flag indicating the type of SOR (see below)
4167: . shift - diagonal shift
4168: . its - the number of iterations
4169: - lits - the number of local iterations
4171: Output Parameter:
4172: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4174: SOR Flags:
4175: + `SOR_FORWARD_SWEEP` - forward SOR
4176: . `SOR_BACKWARD_SWEEP` - backward SOR
4177: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4178: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4179: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4180: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4181: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4182: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies upper/lower triangular part of matrix to vector (with `omega`)
4183: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4185: Level: developer
4187: Notes:
4188: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4189: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4190: on each processor.
4192: Application programmers will not generally use `MatSOR()` directly,
4193: but instead will employ `PCSOR` or `PCEISENSTAT`.
4195: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with inodes, this does a block SOR smoothing, otherwise it does a pointwise smoothing.
4196: For `MATAIJ` matrices with inodes, the block sizes are determined by the inode sizes, not the block size set with `MatSetBlockSize()`
4198: Vectors `x` and `b` CANNOT be the same
4200: The flags are implemented as bitwise inclusive or operations.
4201: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4202: to specify a zero initial guess for SSOR.
4204: Developer Note:
4205: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4207: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4208: @*/
4209: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4210: {
4211: PetscFunctionBegin;
4216: PetscCheckSameComm(mat, 1, b, 2);
4217: PetscCheckSameComm(mat, 1, x, 8);
4218: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4219: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4220: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4221: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4222: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4223: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4224: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4225: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4227: MatCheckPreallocated(mat, 1);
4228: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4229: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4230: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4231: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4232: PetscFunctionReturn(PETSC_SUCCESS);
4233: }
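/*
   A minimal usage sketch for MatSOR() (illustrative helper, not part of the PETSc API):
   one symmetric SOR sweep with a zero initial guess, combining the flags with a bitwise OR
   as described above. Assumes an assembled matrix A and conforming vectors b and x.
*/
static PetscErrorCode SORSweepSketch(Mat A, Vec b, Vec x)
{
  PetscFunctionBegin;
  /* omega = 1.0 (Gauss-Seidel), no diagonal shift, one global and one local iteration */
  PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}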
4235: /*
4236: Default matrix copy routine.
4237: */
4238: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4239: {
4240: PetscInt i, rstart = 0, rend = 0, nz;
4241: const PetscInt *cwork;
4242: const PetscScalar *vwork;
4244: PetscFunctionBegin;
4245: if (B->assembled) PetscCall(MatZeroEntries(B));
4246: if (str == SAME_NONZERO_PATTERN) {
4247: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4248: for (i = rstart; i < rend; i++) {
4249: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4250: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4251: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4252: }
4253: } else {
4254: PetscCall(MatAYPX(B, 0.0, A, str));
4255: }
4256: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4257: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4258: PetscFunctionReturn(PETSC_SUCCESS);
4259: }
4261: /*@
4262: MatCopy - Copies a matrix to another matrix.
4264: Collective
4266: Input Parameters:
4267: + A - the matrix
4268: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4270: Output Parameter:
4271: . B - where the copy is put
4273: Level: intermediate
4275: Notes:
4276: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4278: `MatCopy()` copies the matrix entries of a matrix to another existing
4279: matrix (after first zeroing the second matrix). A related routine is
4280: `MatConvert()`, which first creates a new matrix and then copies the data.
4282: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4283: @*/
4284: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4285: {
4286: PetscInt i;
4288: PetscFunctionBegin;
4293: PetscCheckSameComm(A, 1, B, 2);
4294: MatCheckPreallocated(B, 2);
4295: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4296: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4297: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4298: A->cmap->N, B->cmap->N);
4299: MatCheckPreallocated(A, 1);
4300: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4302: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4303: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4304: else PetscCall(MatCopy_Basic(A, B, str));
4306: B->stencil.dim = A->stencil.dim;
4307: B->stencil.noc = A->stencil.noc;
4308: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4309: B->stencil.dims[i] = A->stencil.dims[i];
4310: B->stencil.starts[i] = A->stencil.starts[i];
4311: }
4313: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4314: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4315: PetscFunctionReturn(PETSC_SUCCESS);
4316: }
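/*
   A minimal usage sketch for MatCopy() (illustrative helper, not part of the PETSc API):
   create B with the nonzero structure of A via MatDuplicate(), then copy the entries.
   Because B is built from A, SAME_NONZERO_PATTERN is safe here.
*/
static PetscErrorCode CopySketch(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, B));
  PetscCall(MatCopy(A, *B, SAME_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}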
4318: /*@
4319: MatConvert - Converts a matrix to another matrix, either of the same
4320: or different type.
4322: Collective
4324: Input Parameters:
4325: + mat - the matrix
4326: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4327: same type as the original matrix.
4328: - reuse - denotes if the destination matrix is to be created or reused.
4329: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4330: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4332: Output Parameter:
4333: . M - pointer to place new matrix
4335: Level: intermediate
4337: Notes:
4338: `MatConvert()` first creates a new matrix and then copies the data from
4339: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4340: entries of one matrix to another already existing matrix context.
4342: Cannot be used to convert a sequential matrix to parallel or parallel to sequential;
4343: the MPI communicator of the generated matrix is always the same as the communicator
4344: of the input matrix.
4346: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4347: @*/
4348: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4349: {
4350: PetscBool sametype, issame, flg;
4351: PetscBool3 issymmetric, ishermitian, isspd;
4352: char convname[256], mtype[256];
4353: Mat B;
4355: PetscFunctionBegin;
4358: PetscAssertPointer(M, 4);
4359: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4360: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4361: MatCheckPreallocated(mat, 1);
4363: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4364: if (flg) newtype = mtype;
4366: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4367: PetscCall(PetscStrcmp(newtype, "same", &issame));
4368: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4369: if (reuse == MAT_REUSE_MATRIX) {
4371: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4372: }
4374: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4375: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4376: PetscFunctionReturn(PETSC_SUCCESS);
4377: }
4379: /* Cache Mat options because some converters use MatHeaderReplace() */
4380: issymmetric = mat->symmetric;
4381: ishermitian = mat->hermitian;
4382: isspd = mat->spd;
4384: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4385: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4386: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4387: } else {
4388: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4389: const char *prefix[3] = {"seq", "mpi", ""};
4390: PetscInt i;
4391: /*
4392: Order of precedence:
4393: 0) See if newtype is a superclass of the current matrix.
4394: 1) See if a specialized converter is known to the current matrix.
4395: 2) See if a specialized converter is known to the desired matrix class.
4396: 3) See if a good general converter is registered for the desired class
4397: (as of 6/27/03 only MATMPIADJ falls into this category).
4398: 4) See if a good general converter is known for the current matrix.
4399: 5) Use a really basic converter.
4400: */
4402: /* 0) See if newtype is a superclass of the current matrix.
4403: i.e mat is mpiaij and newtype is aij */
4404: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4405: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4406: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4407: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4408: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4409: if (flg) {
4410: if (reuse == MAT_INPLACE_MATRIX) {
4411: PetscCall(PetscInfo(mat, "Early return\n"));
4412: PetscFunctionReturn(PETSC_SUCCESS);
4413: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4414: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4415: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4416: PetscFunctionReturn(PETSC_SUCCESS);
4417: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4418: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4419: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4420: PetscFunctionReturn(PETSC_SUCCESS);
4421: }
4422: }
4423: }
4424: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4425: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4426: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4427: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4428: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4429: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4430: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4431: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4432: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4433: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4434: if (conv) goto foundconv;
4435: }
4437: /* 2) See if a specialized converter is known to the desired matrix class. */
4438: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4439: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4440: PetscCall(MatSetType(B, newtype));
4441: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4442: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4443: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4444: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4445: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4446: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4447: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4448: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4449: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4450: if (conv) {
4451: PetscCall(MatDestroy(&B));
4452: goto foundconv;
4453: }
4454: }
4456: /* 3) See if a good general converter is registered for the desired class */
4457: conv = B->ops->convertfrom;
4458: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4459: PetscCall(MatDestroy(&B));
4460: if (conv) goto foundconv;
4462: /* 4) See if a good general converter is known for the current matrix */
4463: if (mat->ops->convert) conv = mat->ops->convert;
4464: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4465: if (conv) goto foundconv;
4467: /* 5) Use a really basic converter. */
4468: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4469: conv = MatConvert_Basic;
4471: foundconv:
4472: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4473: PetscCall((*conv)(mat, newtype, reuse, M));
4474: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4475: /* the block sizes must be same if the mappings are copied over */
4476: (*M)->rmap->bs = mat->rmap->bs;
4477: (*M)->cmap->bs = mat->cmap->bs;
4478: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4479: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4480: (*M)->rmap->mapping = mat->rmap->mapping;
4481: (*M)->cmap->mapping = mat->cmap->mapping;
4482: }
4483: (*M)->stencil.dim = mat->stencil.dim;
4484: (*M)->stencil.noc = mat->stencil.noc;
4485: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4486: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4487: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4488: }
4489: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4490: }
4491: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4493: /* Reset Mat options */
4494: if (issymmetric != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PetscBool3ToBool(issymmetric)));
4495: if (ishermitian != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PetscBool3ToBool(ishermitian)));
4496: if (isspd != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SPD, PetscBool3ToBool(isspd)));
4497: PetscFunctionReturn(PETSC_SUCCESS);
4498: }
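/*
   A minimal usage sketch for MatConvert() (illustrative helper, not part of the PETSc API):
   convert an assembled matrix to dense storage out of place, then refresh the conversion
   after the entries of A change, reusing the previously created dense matrix.
*/
static PetscErrorCode ConvertSketch(Mat A)
{
  Mat Adense;

  PetscFunctionBegin;
  PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
  /* ... modify the entries of A and reassemble it ... */
  PetscCall(MatConvert(A, MATDENSE, MAT_REUSE_MATRIX, &Adense));
  PetscCall(MatDestroy(&Adense));
  PetscFunctionReturn(PETSC_SUCCESS);
}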
4500: /*@
4501: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4503: Not Collective
4505: Input Parameter:
4506: . mat - the matrix, must be a factored matrix
4508: Output Parameter:
4509: . type - the string name of the package (do not free this string)
4511: Level: intermediate
4513: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4514: @*/
4515: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4516: {
4517: PetscErrorCode (*conv)(Mat, MatSolverType *);
4519: PetscFunctionBegin;
4522: PetscAssertPointer(type, 2);
4523: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4524: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4525: if (conv) PetscCall((*conv)(mat, type));
4526: else *type = MATSOLVERPETSC;
4527: PetscFunctionReturn(PETSC_SUCCESS);
4528: }
4530: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4531: struct _MatSolverTypeForSpecifcType {
4532: MatType mtype;
4533: /* no entry for MAT_FACTOR_NONE */
4534: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4535: MatSolverTypeForSpecifcType next;
4536: };
4538: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4539: struct _MatSolverTypeHolder {
4540: char *name;
4541: MatSolverTypeForSpecifcType handlers;
4542: MatSolverTypeHolder next;
4543: };
4545: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4547: /*@C
4548: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4550: Logically Collective, No Fortran Support
4552: Input Parameters:
4553: + package - name of the package, for example `petsc` or `superlu`
4554: . mtype - the matrix type that works with this package
4555: . ftype - the type of factorization supported by the package
4556: - createfactor - routine that will create the factored matrix ready to be used
4558: Level: developer
4560: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4561: `MatGetFactor()`
4562: @*/
4563: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4564: {
4565: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4566: PetscBool flg;
4567: MatSolverTypeForSpecifcType inext, iprev = NULL;
4569: PetscFunctionBegin;
4570: PetscCall(MatInitializePackage());
4571: if (!next) {
4572: PetscCall(PetscNew(&MatSolverTypeHolders));
4573: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4574: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4575: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4576: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4577: PetscFunctionReturn(PETSC_SUCCESS);
4578: }
4579: while (next) {
4580: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4581: if (flg) {
4582: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4583: inext = next->handlers;
4584: while (inext) {
4585: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4586: if (flg) {
4587: inext->createfactor[(int)ftype - 1] = createfactor;
4588: PetscFunctionReturn(PETSC_SUCCESS);
4589: }
4590: iprev = inext;
4591: inext = inext->next;
4592: }
4593: PetscCall(PetscNew(&iprev->next));
4594: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4595: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4596: PetscFunctionReturn(PETSC_SUCCESS);
4597: }
4598: prev = next;
4599: next = next->next;
4600: }
4601: PetscCall(PetscNew(&prev->next));
4602: PetscCall(PetscStrallocpy(package, &prev->next->name));
4603: PetscCall(PetscNew(&prev->next->handlers));
4604: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4605: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4606: PetscFunctionReturn(PETSC_SUCCESS);
4607: }
4609: /*@C
4610: MatSolverTypeGet - Gets the function that creates the factor matrix, if it exists
4612: Input Parameters:
4613: + type - name of the package, for example `petsc` or `superlu`; if this is `NULL`, then the first result that satisfies the other criteria is returned
4614: . ftype - the type of factorization supported by the type
4615: - mtype - the matrix type that works with this type
4617: Output Parameters:
4618: + foundtype - `PETSC_TRUE` if the type was registered
4619: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4620: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4622: Calling sequence of `createfactor`:
4623: + A - the matrix providing the factor matrix
4624: . ftype - the `MatFactorType` of the factor requested
4625: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4627: Level: developer
4629: Note:
4630: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4631: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4632: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4634: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4635: `MatInitializePackage()`
4636: @*/
4637: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4638: {
4639: MatSolverTypeHolder next = MatSolverTypeHolders;
4640: PetscBool flg;
4641: MatSolverTypeForSpecifcType inext;
4643: PetscFunctionBegin;
4644: if (foundtype) *foundtype = PETSC_FALSE;
4645: if (foundmtype) *foundmtype = PETSC_FALSE;
4646: if (createfactor) *createfactor = NULL;
4648: if (type) {
4649: while (next) {
4650: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4651: if (flg) {
4652: if (foundtype) *foundtype = PETSC_TRUE;
4653: inext = next->handlers;
4654: while (inext) {
4655: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4656: if (flg) {
4657: if (foundmtype) *foundmtype = PETSC_TRUE;
4658: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4659: PetscFunctionReturn(PETSC_SUCCESS);
4660: }
4661: inext = inext->next;
4662: }
4663: }
4664: next = next->next;
4665: }
4666: } else {
4667: while (next) {
4668: inext = next->handlers;
4669: while (inext) {
4670: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4671: if (flg && inext->createfactor[(int)ftype - 1]) {
4672: if (foundtype) *foundtype = PETSC_TRUE;
4673: if (foundmtype) *foundmtype = PETSC_TRUE;
4674: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4675: PetscFunctionReturn(PETSC_SUCCESS);
4676: }
4677: inext = inext->next;
4678: }
4679: next = next->next;
4680: }
4681: /* try with base classes inext->mtype */
4682: next = MatSolverTypeHolders;
4683: while (next) {
4684: inext = next->handlers;
4685: while (inext) {
4686: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4687: if (flg && inext->createfactor[(int)ftype - 1]) {
4688: if (foundtype) *foundtype = PETSC_TRUE;
4689: if (foundmtype) *foundmtype = PETSC_TRUE;
4690: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4691: PetscFunctionReturn(PETSC_SUCCESS);
4692: }
4693: inext = inext->next;
4694: }
4695: next = next->next;
4696: }
4697: }
4698: PetscFunctionReturn(PETSC_SUCCESS);
4699: }
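/*
   A minimal usage sketch for MatSolverTypeGet() (illustrative helper, not part of the PETSc API):
   with type = NULL, the first registered package that provides an LU factorization for MATSEQAIJ
   is returned, following the registration order in MatInitializePackage().
*/
static PetscErrorCode SolverTypeQuerySketch(void)
{
  PetscBool foundtype, foundmtype;
  PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *);

  PetscFunctionBegin;
  PetscCall(MatSolverTypeGet(NULL, MATSEQAIJ, MAT_FACTOR_LU, &foundtype, &foundmtype, &createfactor));
  if (createfactor) PetscCall(PetscPrintf(PETSC_COMM_SELF, "An LU factorization for MATSEQAIJ is registered\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}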
4701: PetscErrorCode MatSolverTypeDestroy(void)
4702: {
4703: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4704: MatSolverTypeForSpecifcType inext, iprev;
4706: PetscFunctionBegin;
4707: while (next) {
4708: PetscCall(PetscFree(next->name));
4709: inext = next->handlers;
4710: while (inext) {
4711: PetscCall(PetscFree(inext->mtype));
4712: iprev = inext;
4713: inext = inext->next;
4714: PetscCall(PetscFree(iprev));
4715: }
4716: prev = next;
4717: next = next->next;
4718: PetscCall(PetscFree(prev));
4719: }
4720: MatSolverTypeHolders = NULL;
4721: PetscFunctionReturn(PETSC_SUCCESS);
4722: }
4724: /*@
4725: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4727: Logically Collective
4729: Input Parameter:
4730: . mat - the matrix
4732: Output Parameter:
4733: . flg - `PETSC_TRUE` if uses the ordering
4735: Level: developer
4737: Note:
4738: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4739: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4741: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4742: @*/
4743: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4744: {
4745: PetscFunctionBegin;
4746: *flg = mat->canuseordering;
4747: PetscFunctionReturn(PETSC_SUCCESS);
4748: }
4750: /*@
4751: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4753: Logically Collective
4755: Input Parameters:
4756: + mat - the matrix obtained with `MatGetFactor()`
4757: - ftype - the factorization type to be used
4759: Output Parameter:
4760: . otype - the preferred ordering type
4762: Level: developer
4764: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4765: @*/
4766: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4767: {
4768: PetscFunctionBegin;
4769: *otype = mat->preferredordering[ftype];
4770: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4771: PetscFunctionReturn(PETSC_SUCCESS);
4772: }
4774: /*@
4775: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4777: Collective
4779: Input Parameters:
4780: + mat - the matrix
4781: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's solver if it is available); if this is `NULL`, then the first result that satisfies
4782: the other criteria is returned
4783: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4785: Output Parameter:
4786: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4788: Options Database Keys:
4789: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4790: . -pc_factor_mat_factor_on_host <bool> - do mat factorization on host (with device matrices). Default is doing it on device
4791: - -pc_factor_mat_solve_on_host <bool> - do mat solve on host (with device matrices). Default is doing it on device
4793: Level: intermediate
4795: Notes:
4796: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4797: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4799: Users usually access the factorization solvers via `KSP`
4801: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4802: such as pastix, superlu, mumps, etc. PETSc must have been configured (./configure) to use the external solver, using the option --download-package or --with-package-dir
4804: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4805: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4806: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4808: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption
4809: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can
4810: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4812: Developer Note:
4813: This should actually be called `MatCreateFactor()` since it creates a new factor object
4815: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4816: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4817: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4818: @*/
4819: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4820: {
4821: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4822: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4824: PetscFunctionBegin;
4828: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4829: MatCheckPreallocated(mat, 1);
4831: PetscCall(MatIsShell(mat, &shell));
4832: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4833: if (hasop) {
4834: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4835: PetscFunctionReturn(PETSC_SUCCESS);
4836: }
4838: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4839: if (!foundtype) {
4840: if (type) {
4841: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4842: ((PetscObject)mat)->type_name, type);
4843: } else {
4844: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4845: }
4846: }
4847: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4848: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4850: PetscCall((*conv)(mat, ftype, f));
4851: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4852: PetscFunctionReturn(PETSC_SUCCESS);
4853: }
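/*
   A minimal usage sketch for MatGetFactor() (illustrative helper, not part of the PETSc API):
   the usual three-phase direct-solve workflow, here with PETSc's own Cholesky. Assumes A is
   an assembled, symmetric positive definite MATSEQAIJ matrix and b, x conform to it.
*/
static PetscErrorCode FactorAndSolveSketch(Mat A, Vec b, Vec x)
{
  Mat           F;
  IS            rperm, cperm;
  MatFactorInfo info;

  PetscFunctionBegin;
  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
  PetscCall(MatCholeskyFactorSymbolic(F, A, rperm, &info));
  PetscCall(MatCholeskyFactorNumeric(F, A, &info));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
  PetscCall(MatDestroy(&F));
  PetscFunctionReturn(PETSC_SUCCESS);
}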
4855: /*@
4856: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4858: Not Collective
4860: Input Parameters:
4861: + mat - the matrix
4862: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's default)
4863: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4865: Output Parameter:
4866: . flg - `PETSC_TRUE` if the factorization is available
4868: Level: intermediate
4870: Notes:
4871: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4872: such as pastix, superlu, mumps etc.
4874: PETSc must have been configured (./configure) to use the external solver, using the option --download-package
4876: Developer Note:
4877: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4879: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4880: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4881: @*/
4882: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4883: {
4884: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4886: PetscFunctionBegin;
4888: PetscAssertPointer(flg, 4);
4890: *flg = PETSC_FALSE;
4891: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4893: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4894: MatCheckPreallocated(mat, 1);
4896: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4897: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4898: PetscFunctionReturn(PETSC_SUCCESS);
4899: }
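/*
   A minimal usage sketch for MatGetFactorAvailable() (illustrative helper, not part of the PETSc API):
   test whether an optional external LU solver was built into this configuration and fall back to
   PETSc's own factorization when it was not.
*/
static PetscErrorCode ChooseFactorSketch(Mat A, Mat *F)
{
  PetscBool have_superlu;

  PetscFunctionBegin;
  PetscCall(MatGetFactorAvailable(A, MATSOLVERSUPERLU, MAT_FACTOR_LU, &have_superlu));
  PetscCall(MatGetFactor(A, have_superlu ? MATSOLVERSUPERLU : MATSOLVERPETSC, MAT_FACTOR_LU, F));
  PetscFunctionReturn(PETSC_SUCCESS);
}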
4901: /*@
4902: MatDuplicate - Duplicates a matrix including the non-zero structure.
4904: Collective
4906: Input Parameters:
4907: + mat - the matrix
4908: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4909: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4911: Output Parameter:
4912: . M - pointer to place new matrix
4914: Level: intermediate
4916: Notes:
4917: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4919: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4921: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4923: When the original `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4924: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
4925: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
4927: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4928: @*/
4929: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4930: {
4931: Mat B;
4932: VecType vtype;
4933: PetscInt i;
4934: PetscObject dm, container_h, container_d;
4935: PetscErrorCodeFn *viewf;
4937: PetscFunctionBegin;
4940: PetscAssertPointer(M, 3);
4941: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4942: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4943: MatCheckPreallocated(mat, 1);
4945: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4946: PetscUseTypeMethod(mat, duplicate, op, M);
4947: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4948: B = *M;
4950: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4951: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4952: PetscCall(MatGetVecType(mat, &vtype));
4953: PetscCall(MatSetVecType(B, vtype));
4955: B->stencil.dim = mat->stencil.dim;
4956: B->stencil.noc = mat->stencil.noc;
4957: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4958: B->stencil.dims[i] = mat->stencil.dims[i];
4959: B->stencil.starts[i] = mat->stencil.starts[i];
4960: }
4962: B->nooffproczerorows = mat->nooffproczerorows;
4963: B->nooffprocentries = mat->nooffprocentries;
4965: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4966: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4967: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4968: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4969: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4970: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4971: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
4972: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4973: PetscFunctionReturn(PETSC_SUCCESS);
4974: }
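/*
   A minimal usage sketch for MatDuplicate() (illustrative helper, not part of the PETSc API):
   duplicate a matrix with and without its numerical values. With MAT_DO_NOT_COPY_VALUES the
   copy has the same nonzero structure but all entries are zero.
*/
static PetscErrorCode DuplicateSketch(Mat A)
{
  Mat Bvals, Bstruct;

  PetscFunctionBegin;
  PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &Bvals));          /* structure and values */
  PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &Bstruct)); /* structure only, zero entries */
  PetscCall(MatDestroy(&Bvals));
  PetscCall(MatDestroy(&Bstruct));
  PetscFunctionReturn(PETSC_SUCCESS);
}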
4976: /*@
4977: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4979: Logically Collective
4981: Input Parameter:
4982: . mat - the matrix
4984: Output Parameter:
4985: . v - the diagonal of the matrix
4987: Level: intermediate
4989: Note:
4990: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
4991: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
4992: is larger than `ndiag`, the values of the remaining entries are unspecified.
4994: Currently only correct in parallel for square matrices.
4996: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4997: @*/
4998: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
4999: {
5000: PetscFunctionBegin;
5004: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5005: MatCheckPreallocated(mat, 1);
5006: if (PetscDefined(USE_DEBUG)) {
5007: PetscInt nv, row, col, ndiag;
5009: PetscCall(VecGetLocalSize(v, &nv));
5010: PetscCall(MatGetLocalSize(mat, &row, &col));
5011: ndiag = PetscMin(row, col);
5012: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5013: }
5015: PetscUseTypeMethod(mat, getdiagonal, v);
5016: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5017: PetscFunctionReturn(PETSC_SUCCESS);
5018: }
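/*
   A minimal usage sketch for MatGetDiagonal() (illustrative helper, not part of the PETSc API):
   extract the diagonal of a square matrix into a vector whose layout is obtained from the
   matrix itself.
*/
static PetscErrorCode DiagonalSketch(Mat A)
{
  Vec d;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(A, NULL, &d)); /* left vector, conforming to the rows of A */
  PetscCall(MatGetDiagonal(A, d));
  PetscCall(VecDestroy(&d));
  PetscFunctionReturn(PETSC_SUCCESS);
}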
5020: /*@
5021: MatGetRowMin - Gets the minimum value (of the real part) of each
5022: row of the matrix
5024: Logically Collective
5026: Input Parameter:
5027: . mat - the matrix
5029: Output Parameters:
5030: + v - the vector for storing the minimums
5031: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5033: Level: intermediate
5035: Note:
5036: The result of this call is the same as if one converted the matrix to dense format
5037: and found the minimum value in each row (i.e., the implicit zeros are counted as zeros).
5039: This code is only implemented for a couple of matrix formats.
5041: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5042: `MatGetRowMax()`
5043: @*/
5044: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5045: {
5046: PetscFunctionBegin;
5050: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5052: if (!mat->cmap->N) {
5053: PetscCall(VecSet(v, PETSC_MAX_REAL));
5054: if (idx) {
5055: PetscInt i, m = mat->rmap->n;
5056: for (i = 0; i < m; i++) idx[i] = -1;
5057: }
5058: } else {
5059: MatCheckPreallocated(mat, 1);
5060: }
5061: PetscUseTypeMethod(mat, getrowmin, v, idx);
5062: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5063: PetscFunctionReturn(PETSC_SUCCESS);
5064: }
5066: /*@
5067: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5068: row of the matrix
5070: Logically Collective
5072: Input Parameter:
5073: . mat - the matrix
5075: Output Parameters:
5076: + v - the vector for storing the minimums
5077: - idx - the indices of the column found for each row (or `NULL` if not needed)
5079: Level: intermediate
5081: Notes:
5082: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5083: row is 0 (the first column).
5085: This code is only implemented for a couple of matrix formats.
5087: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5088: @*/
5089: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5090: {
5091: PetscFunctionBegin;
5095: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5096: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5098: if (!mat->cmap->N) {
5099: PetscCall(VecSet(v, 0.0));
5100: if (idx) {
5101: PetscInt i, m = mat->rmap->n;
5102: for (i = 0; i < m; i++) idx[i] = -1;
5103: }
5104: } else {
5105: MatCheckPreallocated(mat, 1);
5106: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5107: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5108: }
5109: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5110: PetscFunctionReturn(PETSC_SUCCESS);
5111: }
5113: /*@
5114: MatGetRowMax - Gets the maximum value (of the real part) of each
5115: row of the matrix
5117: Logically Collective
5119: Input Parameter:
5120: . mat - the matrix
5122: Output Parameters:
5123: + v - the vector for storing the maximums
5124: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5126: Level: intermediate
5128: Notes:
5129: The result of this call is the same as if one converted the matrix to dense format
5130: and found the maximum value in each row (i.e., the implicit zeros are counted as zeros).
5132: This code is only implemented for a couple of matrix formats.
5134: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5135: @*/
5136: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5137: {
5138: PetscFunctionBegin;
5142: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5144: if (!mat->cmap->N) {
5145: PetscCall(VecSet(v, PETSC_MIN_REAL));
5146: if (idx) {
5147: PetscInt i, m = mat->rmap->n;
5148: for (i = 0; i < m; i++) idx[i] = -1;
5149: }
5150: } else {
5151: MatCheckPreallocated(mat, 1);
5152: PetscUseTypeMethod(mat, getrowmax, v, idx);
5153: }
5154: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5155: PetscFunctionReturn(PETSC_SUCCESS);
5156: }
5158: /*@
5159: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5160: row of the matrix
5162: Logically Collective
5164: Input Parameter:
5165: . mat - the matrix
5167: Output Parameters:
5168: + v - the vector for storing the maximums
5169: - idx - the indices of the column found for each row (or `NULL` if not needed)
5171: Level: intermediate
5173: Notes:
5174: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5175: row is 0 (the first column).
5177: This code is only implemented for a couple of matrix formats.
5179: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5180: @*/
5181: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5182: {
5183: PetscFunctionBegin;
5187: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5189: if (!mat->cmap->N) {
5190: PetscCall(VecSet(v, 0.0));
5191: if (idx) {
5192: PetscInt i, m = mat->rmap->n;
5193: for (i = 0; i < m; i++) idx[i] = -1;
5194: }
5195: } else {
5196: MatCheckPreallocated(mat, 1);
5197: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5198: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5199: }
5200: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5201: PetscFunctionReturn(PETSC_SUCCESS);
5202: }
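/*
   A minimal usage sketch for MatGetRowMaxAbs() (illustrative helper, not part of the PETSc API):
   find the largest entry in absolute value of each local row and the column where it occurs.
   Assumes A is assembled.
*/
static PetscErrorCode RowMaxAbsSketch(Mat A)
{
  Vec       v;
  PetscInt *idx, m;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(A, NULL, &v)); /* one entry per local row */
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(PetscMalloc1(m, &idx));
  PetscCall(MatGetRowMaxAbs(A, v, idx));
  /* ... use v and idx ... */
  PetscCall(PetscFree(idx));
  PetscCall(VecDestroy(&v));
  PetscFunctionReturn(PETSC_SUCCESS);
}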
5204: /*@
5205: MatGetRowSumAbs - Gets the sum of the absolute values of the entries in each row of the matrix
5207: Logically Collective
5209: Input Parameter:
5210: . mat - the matrix
5212: Output Parameter:
5213: . v - the vector for storing the sum
5215: Level: intermediate
5217: Note: This code is only implemented for a couple of matrix formats.
5219: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5220: @*/
5221: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5222: {
5223: PetscFunctionBegin;
5227: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5229: if (!mat->cmap->N) {
5230: PetscCall(VecSet(v, 0.0));
5231: } else {
5232: MatCheckPreallocated(mat, 1);
5233: PetscUseTypeMethod(mat, getrowsumabs, v);
5234: }
5235: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5236: PetscFunctionReturn(PETSC_SUCCESS);
5237: }
5239: /*@
5240: MatGetRowSum - Gets the sum of each row of the matrix
5242: Logically or Neighborhood Collective
5244: Input Parameter:
5245: . mat - the matrix
5247: Output Parameter:
5248: . v - the vector for storing the sum of rows
5250: Level: intermediate
5252: Note:
5253: This code is slow since it is not currently specialized for different formats
5255: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5256: @*/
5257: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5258: {
5259: Vec ones;
5261: PetscFunctionBegin;
5265: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5266: MatCheckPreallocated(mat, 1);
5267: PetscCall(MatCreateVecs(mat, &ones, NULL));
5268: PetscCall(VecSet(ones, 1.));
5269: PetscCall(MatMult(mat, ones, v));
5270: PetscCall(VecDestroy(&ones));
5271: PetscFunctionReturn(PETSC_SUCCESS);
5272: }
5274: /*@
5275: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5276: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5278: Collective
5280: Input Parameter:
5281: . mat - the matrix to provide the transpose
5283: Output Parameter:
5284: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5286: Level: advanced
5288: Note:
5289: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5290: routine allows bypassing that call.
5292: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5293: @*/
5294: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5295: {
5296: MatParentState *rb = NULL;
5298: PetscFunctionBegin;
5299: PetscCall(PetscNew(&rb));
5300: rb->id = ((PetscObject)mat)->id;
5301: rb->state = 0;
5302: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5303: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5304: PetscFunctionReturn(PETSC_SUCCESS);
5305: }
5307: static PetscErrorCode MatTranspose_Private(Mat mat, MatReuse reuse, Mat *B, PetscBool conjugate)
5308: {
5309: PetscContainer rB = NULL;
5310: MatParentState *rb = NULL;
5311: PetscErrorCode (*f)(Mat, MatReuse, Mat *) = NULL;
5313: PetscFunctionBegin;
5316: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5317: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5318: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5319: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5320: MatCheckPreallocated(mat, 1);
5321: if (reuse == MAT_REUSE_MATRIX) {
5322: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5323: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5324: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5325: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5326: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5327: }
5329: if (conjugate) {
5330: f = mat->ops->hermitiantranspose;
5331: if (f) PetscCall((*f)(mat, reuse, B));
5332: }
5333: if (!f && !(reuse == MAT_INPLACE_MATRIX && mat->hermitian == PETSC_BOOL3_TRUE && conjugate)) {
5334: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5335: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5336: PetscUseTypeMethod(mat, transpose, reuse, B);
5337: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5338: }
5339: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5340: if (conjugate) PetscCall(MatConjugate(*B));
5341: }
5343: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5344: if (reuse != MAT_INPLACE_MATRIX) {
5345: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5346: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5347: rb->state = ((PetscObject)mat)->state;
5348: rb->nonzerostate = mat->nonzerostate;
5349: }
5350: PetscFunctionReturn(PETSC_SUCCESS);
5351: }
5353: /*@
5354: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5356: Collective
5358: Input Parameters:
5359: + mat - the matrix to transpose
5360: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5362: Output Parameter:
5363: . B - the transpose of the matrix
5365: Level: intermediate
5367: Notes:
5368: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5370: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5371: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5373: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5375: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5376: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5378: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5380: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5382: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5383: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5384: @*/
5385: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5386: {
5387: PetscFunctionBegin;
5388: PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_FALSE));
5389: PetscFunctionReturn(PETSC_SUCCESS);
5390: }
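/*
   A minimal usage sketch for MatTranspose() (illustrative helper, not part of the PETSc API):
   compute the transpose once, then refresh only its numerical values after the entries of A
   change while its nonzero structure stays the same, reusing B's storage.
*/
static PetscErrorCode TransposeSketch(Mat A)
{
  Mat B;

  PetscFunctionBegin;
  PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &B));
  /* ... change numerical values of A without altering its nonzero structure ... */
  PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &B));
  PetscCall(MatDestroy(&B));
  PetscFunctionReturn(PETSC_SUCCESS);
}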
5392: /*@
5393: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5395: Collective
5397: Input Parameter:
5398: . A - the matrix to transpose
5400: Output Parameter:
5401: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5402: numerical portion.
5404: Level: intermediate
5406: Note:
5407: This is not supported for many matrix types, use `MatTranspose()` in those cases
5409: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5410: @*/
5411: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5412: {
5413: PetscFunctionBegin;
5416: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5417: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5418: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5419: PetscUseTypeMethod(A, transposesymbolic, B);
5420: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5422: PetscCall(MatTransposeSetPrecursor(A, *B));
5423: PetscFunctionReturn(PETSC_SUCCESS);
5424: }
5426: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5427: {
5428: PetscContainer rB;
5429: MatParentState *rb;
5431: PetscFunctionBegin;
5434: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5435: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5436: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5437: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5438: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5439: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5440: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5441: PetscFunctionReturn(PETSC_SUCCESS);
5442: }
5444: /*@
5445: MatIsTranspose - Test whether a matrix is another one's transpose,
5446: or its own, in which case it tests symmetry.
5448: Collective
5450: Input Parameters:
5451: + A - the matrix to test
5452: . B - the matrix to test against, this can equal the first parameter
5453: - tol - tolerance, differences between entries smaller than this are counted as zero
5455: Output Parameter:
5456: . flg - the result
5458: Level: intermediate
5460: Notes:
5461: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5462: test involves parallel copies of the block off-diagonal parts of the matrix.
5464: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5465: @*/
5466: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5467: {
5468: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5470: PetscFunctionBegin;
5473: PetscAssertPointer(flg, 4);
5474: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5475: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5476: *flg = PETSC_FALSE;
5477: if (f && g) {
5478: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5479: PetscCall((*f)(A, B, tol, flg));
5480: } else {
5481: MatType mattype;
5483: PetscCall(MatGetType(f ? B : A, &mattype));
5484: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5485: }
5486: PetscFunctionReturn(PETSC_SUCCESS);
5487: }
5489: /*@
5490: MatHermitianTranspose - Computes the Hermitian (conjugate) transpose of a matrix, either in-place or out-of-place.
5492: Collective
5494: Input Parameters:
5495: + mat - the matrix to transpose and complex conjugate
5496: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5498: Output Parameter:
5499: . B - the Hermitian transpose
5501: Level: intermediate
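Example Usage:
A minimal sketch, assuming `mat` is an assembled (possibly complex-valued) matrix:
.vb
Mat Bh;
PetscCall(MatHermitianTranspose(mat, MAT_INITIAL_MATRIX, &Bh)); // Bh = conjugate transpose of mat
// ... use Bh ...
PetscCall(MatDestroy(&Bh));
.ve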
5503: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5504: @*/
5505: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5506: {
5507: PetscFunctionBegin;
5508: PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_TRUE));
5509: PetscFunctionReturn(PETSC_SUCCESS);
5510: }
5512: /*@
5513: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5515: Collective
5517: Input Parameters:
5518: + A - the matrix to test
5519: . B - the matrix to test against, this can equal the first parameter
5520: - tol - tolerance, differences between entries smaller than this are counted as zero
5522: Output Parameter:
5523: . flg - the result
5525: Level: intermediate
5527: Notes:
5528: Only available for `MATAIJ` matrices.
5530: The sequential algorithm
5531: has a running time of the order of the number of nonzeros; the parallel
5532: test involves parallel copies of the block off-diagonal parts of the matrix.
5534: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5535: @*/
5536: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5537: {
5538: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5540: PetscFunctionBegin;
5543: PetscAssertPointer(flg, 4);
5544: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5545: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5546: if (f && g) {
5547: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5548: PetscCall((*f)(A, B, tol, flg));
5549: } else {
5550: MatType mattype;
5552: PetscCall(MatGetType(f ? B : A, &mattype));
5553: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for Hermitian transpose", mattype);
5554: }
5555: PetscFunctionReturn(PETSC_SUCCESS);
5556: }
5558: /*@
5559: MatPermute - Creates a new matrix with rows and columns permuted from the
5560: original.
5562: Collective
5564: Input Parameters:
5565: + mat - the matrix to permute
5566: . row - row permutation, each processor supplies only the permutation for its rows
5567: - col - column permutation, each processor supplies only the permutation for its columns
5569: Output Parameter:
5570: . B - the permuted matrix
5572: Level: advanced
5574: Note:
5575: The index sets map from row/col of permuted matrix to row/col of original matrix.
5576: The index sets should be on the same communicator as mat and have the same local sizes.
5578: Developer Note:
5579: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5580: exploit the fact that row and col are permutations, consider implementing the
5581: more general `MatCreateSubMatrix()` instead.
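Example Usage:
A minimal sketch that permutes `mat` with a reverse Cuthill-McKee ordering (any `MatOrderingType` could be used):
.vb
IS  rperm, cperm;
Mat B;
PetscCall(MatGetOrdering(mat, MATORDERINGRCM, &rperm, &cperm));
PetscCall(MatPermute(mat, rperm, cperm, &B));
PetscCall(ISDestroy(&rperm));
PetscCall(ISDestroy(&cperm));
.ve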
5583: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5584: @*/
5585: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5586: {
5587: PetscFunctionBegin;
5592: PetscAssertPointer(B, 4);
5593: PetscCheckSameComm(mat, 1, row, 2);
5594: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5596: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5597: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5598: MatCheckPreallocated(mat, 1);
5600: if (mat->ops->permute) {
5601: PetscUseTypeMethod(mat, permute, row, col, B);
5602: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5603: } else {
5604: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5605: }
5606: PetscFunctionReturn(PETSC_SUCCESS);
5607: }
5609: /*@
5610: MatEqual - Compares two matrices.
5612: Collective
5614: Input Parameters:
5615: + A - the first matrix
5616: - B - the second matrix
5618: Output Parameter:
5619: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5621: Level: intermediate
5623: Note:
5624: If either of the matrices is "matrix-free", meaning the matrix entries are not stored explicitly, then equality is determined by comparing
5625: the results of several matrix-vector products using randomly created vectors; see `MatMultEqual()`.
5627: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5628: @*/
5629: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5630: {
5631: PetscFunctionBegin;
5636: PetscAssertPointer(flg, 3);
5637: PetscCheckSameComm(A, 1, B, 2);
5638: MatCheckPreallocated(A, 1);
5639: MatCheckPreallocated(B, 2);
5640: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5641: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5642: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5643: B->cmap->N);
5644: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5645: PetscUseTypeMethod(A, equal, B, flg);
5646: } else {
5647: PetscCall(MatMultEqual(A, B, 10, flg));
5648: }
5649: PetscFunctionReturn(PETSC_SUCCESS);
5650: }
5652: /*@
5653: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5654: matrices that are stored as vectors. Either of the two scaling
5655: matrices can be `NULL`.
5657: Collective
5659: Input Parameters:
5660: + mat - the matrix to be scaled
5661: . l - the left scaling vector (or `NULL`)
5662: - r - the right scaling vector (or `NULL`)
5664: Level: intermediate
5666: Note:
5667: `MatDiagonalScale()` computes $A = LAR$, where
5668: $L$ is a diagonal matrix (stored as a vector) that scales the rows of the matrix and
5669: $R$ is a diagonal matrix (stored as a vector) that scales the columns of the matrix.
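Example Usage:
A minimal sketch, assuming `mat` is assembled; the scaling values 2.0 and 0.5 are placeholders:
.vb
Vec l, r;
PetscCall(MatCreateVecs(mat, &r, &l)); // r conforms to the columns, l to the rows
PetscCall(VecSet(l, 2.0));
PetscCall(VecSet(r, 0.5));
PetscCall(MatDiagonalScale(mat, l, r)); // mat <- diag(l) * mat * diag(r)
PetscCall(VecDestroy(&l));
PetscCall(VecDestroy(&r));
.ve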
5671: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5672: @*/
5673: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5674: {
5675: PetscBool flg = PETSC_FALSE;
5677: PetscFunctionBegin;
5680: if (l) {
5682: PetscCheckSameComm(mat, 1, l, 2);
5683: }
5684: if (r) {
5686: PetscCheckSameComm(mat, 1, r, 3);
5687: }
5688: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5689: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5690: MatCheckPreallocated(mat, 1);
5691: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5693: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5694: PetscUseTypeMethod(mat, diagonalscale, l, r);
5695: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5696: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5697: if (l != r && (PetscBool3ToBool(mat->symmetric) || PetscBool3ToBool(mat->hermitian))) {
5698: if (!PetscDefined(USE_COMPLEX) || PetscBool3ToBool(mat->symmetric)) {
5699: if (l && r) PetscCall(VecEqual(l, r, &flg));
5700: if (!flg) {
5701: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
5702: PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be the same");
5703: mat->symmetric = mat->spd = PETSC_BOOL3_FALSE;
5704: if (!PetscDefined(USE_COMPLEX)) mat->hermitian = PETSC_BOOL3_FALSE;
5705: else mat->hermitian = PETSC_BOOL3_UNKNOWN;
5706: }
5707: }
5708: if (PetscDefined(USE_COMPLEX) && PetscBool3ToBool(mat->hermitian)) {
5709: flg = PETSC_FALSE;
5710: if (l && r) {
5711: Vec conjugate;
5713: PetscCall(VecDuplicate(l, &conjugate));
5714: PetscCall(VecCopy(l, conjugate));
5715: PetscCall(VecConjugate(conjugate));
5716: PetscCall(VecEqual(conjugate, r, &flg));
5717: PetscCall(VecDestroy(&conjugate));
5718: }
5719: if (!flg) {
5720: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
5721: PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format and Hermitian matrix, left and right scaling vectors must be complex conjugates of each other");
5722: mat->hermitian = PETSC_BOOL3_FALSE;
5723: mat->symmetric = mat->spd = PETSC_BOOL3_UNKNOWN;
5724: }
5725: }
5726: }
5727: PetscFunctionReturn(PETSC_SUCCESS);
5728: }
5730: /*@
5731: MatScale - Scales all elements of a matrix by a given number.
5733: Logically Collective
5735: Input Parameters:
5736: + mat - the matrix to be scaled
5737: - a - the scaling value
5739: Level: intermediate
5741: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5742: @*/
5743: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5744: {
5745: PetscFunctionBegin;
5748: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5749: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5751: MatCheckPreallocated(mat, 1);
5753: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5754: if (a != (PetscScalar)1.0) {
5755: PetscUseTypeMethod(mat, scale, a);
5756: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5757: }
5758: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5759: PetscFunctionReturn(PETSC_SUCCESS);
5760: }
5762: /*@
5763: MatNorm - Calculates various norms of a matrix.
5765: Collective
5767: Input Parameters:
5768: + mat - the matrix
5769: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5771: Output Parameter:
5772: . nrm - the resulting norm
5774: Level: intermediate
5776: .seealso: [](ch_matrices), `Mat`
5777: @*/
5778: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5779: {
5780: PetscFunctionBegin;
5783: PetscAssertPointer(nrm, 3);
5785: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5786: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5787: MatCheckPreallocated(mat, 1);
5789: PetscUseTypeMethod(mat, norm, type, nrm);
5790: PetscFunctionReturn(PETSC_SUCCESS);
5791: }
5793: /*
5794: This variable is used to prevent counting of MatAssemblyBegin() that
5795: are called from within a MatAssemblyEnd().
5796: */
5797: static PetscInt MatAssemblyEnd_InUse = 0;
5798: /*@
5799: MatAssemblyBegin - Begins assembling the matrix. This routine should
5800: be called after completing all calls to `MatSetValues()`.
5802: Collective
5804: Input Parameters:
5805: + mat - the matrix
5806: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5808: Level: beginner
5810: Notes:
5811: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5812: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5814: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5815: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5816: using the matrix.
5818: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5819: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5820: a global collective operation requiring all processes that share the matrix.
5822: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5823: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5824: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
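Example Usage:
A minimal sketch of switching insert modes; `row`, `col`, `v`, and `w` are placeholders set by the caller:
.vb
PetscCall(MatSetValues(mat, 1, &row, 1, &col, &v, INSERT_VALUES));
PetscCall(MatAssemblyBegin(mat, MAT_FLUSH_ASSEMBLY)); // flush before switching insert modes
PetscCall(MatAssemblyEnd(mat, MAT_FLUSH_ASSEMBLY));
PetscCall(MatSetValues(mat, 1, &row, 1, &col, &w, ADD_VALUES));
PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY)); // final assembly; the matrix is now ready for use
PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve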
5826: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5827: @*/
5828: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5829: {
5830: PetscFunctionBegin;
5833: MatCheckPreallocated(mat, 1);
5834: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5835: if (mat->assembled) {
5836: mat->was_assembled = PETSC_TRUE;
5837: mat->assembled = PETSC_FALSE;
5838: }
5840: if (!MatAssemblyEnd_InUse) {
5841: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5842: PetscTryTypeMethod(mat, assemblybegin, type);
5843: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5844: } else PetscTryTypeMethod(mat, assemblybegin, type);
5845: PetscFunctionReturn(PETSC_SUCCESS);
5846: }
5848: /*@
5849: MatAssembled - Indicates if a matrix has been assembled and is ready for
5850: use; for example, in matrix-vector product.
5852: Not Collective
5854: Input Parameter:
5855: . mat - the matrix
5857: Output Parameter:
5858: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5860: Level: advanced
5862: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5863: @*/
5864: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5865: {
5866: PetscFunctionBegin;
5868: PetscAssertPointer(assembled, 2);
5869: *assembled = mat->assembled;
5870: PetscFunctionReturn(PETSC_SUCCESS);
5871: }
5873: /*@
5874: MatAssemblyEnd - Completes assembling the matrix. This routine should
5875: be called after `MatAssemblyBegin()`.
5877: Collective
5879: Input Parameters:
5880: + mat - the matrix
5881: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5883: Options Database Keys:
5884: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5885: . -mat_view ::ascii_info_detail - Prints more detailed info
5886: . -mat_view - Prints matrix in ASCII format
5887: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5888: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5889: . -display <name> - Sets display name (default is host)
5890: . -draw_pause <sec> - Sets number of seconds to pause after display
5891: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5892: . -viewer_socket_machine <machine> - Machine to use for socket
5893: . -viewer_socket_port <port> - Port number to use for socket
5894: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5896: Level: beginner
5898: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5899: @*/
5900: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5901: {
5902: static PetscInt inassm = 0;
5903: PetscBool flg = PETSC_FALSE;
5905: PetscFunctionBegin;
5909: inassm++;
5910: MatAssemblyEnd_InUse++;
5911: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5912: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5913: PetscTryTypeMethod(mat, assemblyend, type);
5914: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5915: } else PetscTryTypeMethod(mat, assemblyend, type);
5917: /* Flush assembly is not a true assembly */
5918: if (type != MAT_FLUSH_ASSEMBLY) {
5919: if (mat->num_ass) {
5920: if (!mat->symmetry_eternal) {
5921: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5922: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5923: }
5924: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5925: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5926: }
5927: mat->num_ass++;
5928: mat->assembled = PETSC_TRUE;
5929: mat->ass_nonzerostate = mat->nonzerostate;
5930: }
5932: mat->insertmode = NOT_SET_VALUES;
5933: MatAssemblyEnd_InUse--;
5934: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5935: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5936: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5938: if (mat->checksymmetryonassembly) {
5939: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5940: if (flg) {
5941: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5942: } else {
5943: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5944: }
5945: }
5946: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5947: }
5948: inassm--;
5949: PetscFunctionReturn(PETSC_SUCCESS);
5950: }
5952: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5953: /*@
5954: MatSetOption - Sets a parameter option for a matrix. Some options
5955: may be specific to certain storage formats. Some options
5956: determine how values will be inserted (or added). Sorted,
5957: row-oriented input will generally assemble the fastest. The default
5958: is row-oriented.
5960: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5962: Input Parameters:
5963: + mat - the matrix
5964: . op - the option, one of those listed below (and possibly others),
5965: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5967: Options Describing Matrix Structure:
5968: + `MAT_SPD` - symmetric positive definite
5969: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5970: . `MAT_HERMITIAN` - the matrix is equal to its complex conjugate transpose
5971: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5972: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5973: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5974: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5976: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that it
5977: does not need to be computed (usually at a high cost)
5979: Options For Use with `MatSetValues()`:
5980: Insert a logically dense subblock, which can be
5981: . `MAT_ROW_ORIENTED` - row-oriented (default)
5983: These options reflect the data you pass in with `MatSetValues()`; it has
5984: nothing to do with how the data is stored internally in the matrix
5985: data structure.
5987: When (re)assembling a matrix, we can restrict the input for
5988: efficiency/debugging purposes. These options include
5989: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5990: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5991: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5992: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5993: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5994: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5995: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5996: performance for very large process counts.
5997: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5998: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5999: functions, instead sending only neighbor messages.
6001: Level: intermediate
6003: Notes:
6004: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
6006: Some options are relevant only for particular matrix types and
6007: are thus ignored by others. Other options are not supported by
6008: certain matrix types and will generate an error message if set.
6010: If using Fortran to compute a matrix, one may need to
6011: use the column-oriented option (or convert to the row-oriented
6012: format).
6014: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
6015: that would generate a new entry in the nonzero structure is instead
6016: ignored. Thus, if memory has not already been allocated for this particular
6017: data, then the insertion is ignored. For dense matrices, in which
6018: the entire array is allocated, no entries are ever ignored.
6019: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction
6021: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6022: that would generate a new entry in the nonzero structure instead produces
6023: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction
6025: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6026: that would generate a new entry that has not been preallocated will
6027: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6028: only.) This is a useful flag when debugging matrix memory preallocation.
6029: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one less global reduction
6031: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6032: other processors should be dropped, rather than stashed.
6033: This is useful if you know that the "owning" processor is also
6034: always generating the correct matrix entries, so that PETSc need
6035: not transfer duplicate entries generated on another processor.
6037: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6038: searches during matrix assembly. When this flag is set, the hash table
6039: is created during the first matrix assembly. This hash table is
6040: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6041: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6042: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6043: supported by `MATMPIBAIJ` format only.
6045: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6046: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6048: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6049: a zero location in the matrix
6051: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6053: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6054: zero row routines and thus improves performance for very large process counts.
6056: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6057: part of the matrix (since they should match the upper triangular part).
6059: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6060: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6061: with finite difference schemes with non-periodic boundary conditions.
6063: Developer Note:
6064: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6065: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6066: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6067: not changed.
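Example Usage:
A minimal sketch of providing structural knowledge and tightening assembly checks; which options make sense depends on the application:
.vb
PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));                  // user asserts the matrix is symmetric
PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));           // and that this survives future changes to the values
PetscCall(MatSetOption(mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); // error on insertions outside the preallocation
.ve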
6069: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6070: @*/
6071: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6072: {
6073: PetscFunctionBegin;
6075: if (op > 0) {
6078: }
6080: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
6082: switch (op) {
6083: case MAT_FORCE_DIAGONAL_ENTRIES:
6084: mat->force_diagonals = flg;
6085: PetscFunctionReturn(PETSC_SUCCESS);
6086: case MAT_NO_OFF_PROC_ENTRIES:
6087: mat->nooffprocentries = flg;
6088: PetscFunctionReturn(PETSC_SUCCESS);
6089: case MAT_SUBSET_OFF_PROC_ENTRIES:
6090: mat->assembly_subset = flg;
6091: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6092: #if !defined(PETSC_HAVE_MPIUNI)
6093: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6094: #endif
6095: mat->stash.first_assembly_done = PETSC_FALSE;
6096: }
6097: PetscFunctionReturn(PETSC_SUCCESS);
6098: case MAT_NO_OFF_PROC_ZERO_ROWS:
6099: mat->nooffproczerorows = flg;
6100: PetscFunctionReturn(PETSC_SUCCESS);
6101: case MAT_SPD:
6102: if (flg) {
6103: mat->spd = PETSC_BOOL3_TRUE;
6104: mat->symmetric = PETSC_BOOL3_TRUE;
6105: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6106: #if !defined(PETSC_USE_COMPLEX)
6107: mat->hermitian = PETSC_BOOL3_TRUE;
6108: #endif
6109: } else {
6110: mat->spd = PETSC_BOOL3_FALSE;
6111: }
6112: break;
6113: case MAT_SYMMETRIC:
6114: mat->symmetric = PetscBoolToBool3(flg);
6115: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6116: #if !defined(PETSC_USE_COMPLEX)
6117: mat->hermitian = PetscBoolToBool3(flg);
6118: #endif
6119: break;
6120: case MAT_HERMITIAN:
6121: mat->hermitian = PetscBoolToBool3(flg);
6122: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6123: #if !defined(PETSC_USE_COMPLEX)
6124: mat->symmetric = PetscBoolToBool3(flg);
6125: #endif
6126: break;
6127: case MAT_STRUCTURALLY_SYMMETRIC:
6128: mat->structurally_symmetric = PetscBoolToBool3(flg);
6129: break;
6130: case MAT_SYMMETRY_ETERNAL:
6131: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6132: mat->symmetry_eternal = flg;
6133: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6134: break;
6135: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6136: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6137: mat->structural_symmetry_eternal = flg;
6138: break;
6139: case MAT_SPD_ETERNAL:
6140: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6141: mat->spd_eternal = flg;
6142: if (flg) {
6143: mat->structural_symmetry_eternal = PETSC_TRUE;
6144: mat->symmetry_eternal = PETSC_TRUE;
6145: }
6146: break;
6147: case MAT_STRUCTURE_ONLY:
6148: mat->structure_only = flg;
6149: break;
6150: case MAT_SORTED_FULL:
6151: mat->sortedfull = flg;
6152: break;
6153: default:
6154: break;
6155: }
6156: PetscTryTypeMethod(mat, setoption, op, flg);
6157: PetscFunctionReturn(PETSC_SUCCESS);
6158: }
6160: /*@
6161: MatGetOption - Gets a parameter option that has been set for a matrix.
6163: Logically Collective
6165: Input Parameters:
6166: + mat - the matrix
6167: - op - the option; this only responds to certain options, check the code for which ones
6169: Output Parameter:
6170: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6172: Level: intermediate
6174: Notes:
6175: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6177: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6178: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6180: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6181: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6182: @*/
6183: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6184: {
6185: PetscFunctionBegin;
6189: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Option %d is out of range", (int)op);
6190: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6192: switch (op) {
6193: case MAT_NO_OFF_PROC_ENTRIES:
6194: *flg = mat->nooffprocentries;
6195: break;
6196: case MAT_NO_OFF_PROC_ZERO_ROWS:
6197: *flg = mat->nooffproczerorows;
6198: break;
6199: case MAT_SYMMETRIC:
6200: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6201: break;
6202: case MAT_HERMITIAN:
6203: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6204: break;
6205: case MAT_STRUCTURALLY_SYMMETRIC:
6206: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6207: break;
6208: case MAT_SPD:
6209: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6210: break;
6211: case MAT_SYMMETRY_ETERNAL:
6212: *flg = mat->symmetry_eternal;
6213: break;
6214: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6215: *flg = mat->structural_symmetry_eternal;
6216: break;
6217: default:
6218: break;
6219: }
6220: PetscFunctionReturn(PETSC_SUCCESS);
6221: }
6223: /*@
6224: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6225: this routine retains the old nonzero structure.
6227: Logically Collective
6229: Input Parameter:
6230: . mat - the matrix
6232: Level: intermediate
6234: Note:
6235: If the matrix was not preallocated then a default, likely poor, preallocation will be set in the matrix, so this should be called after the preallocation phase.
6236: See the Performance chapter of the users manual for information on preallocating matrices.
6238: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6239: @*/
6240: PetscErrorCode MatZeroEntries(Mat mat)
6241: {
6242: PetscFunctionBegin;
6245: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6246: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6247: MatCheckPreallocated(mat, 1);
6249: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6250: PetscUseTypeMethod(mat, zeroentries);
6251: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6252: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6253: PetscFunctionReturn(PETSC_SUCCESS);
6254: }
6256: /*@
6257: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6258: of a set of rows and columns of a matrix.
6260: Collective
6262: Input Parameters:
6263: + mat - the matrix
6264: . numRows - the number of rows/columns to zero
6265: . rows - the global row indices
6266: . diag - value put in the diagonal of the eliminated rows
6267: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6268: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6270: Level: intermediate
6272: Notes:
6273: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6275: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6276: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated.
6278: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6279: Krylov method to take advantage of the known solution on the zeroed rows.
6281: For the parallel case, all processes that share the matrix (i.e.,
6282: those in the communicator used for matrix creation) MUST call this
6283: routine, regardless of whether any rows being zeroed are owned by
6284: them.
6286: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6287: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6288: missing.
6290: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6291: list only rows local to itself).
6293: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
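Example Usage:
A minimal sketch of eliminating Dirichlet rows/columns; the row indices are placeholders, `x` holds the known boundary values and `b` is the right-hand side:
.vb
PetscInt rows[] = {0, 5}; // global indices of the boundary rows
PetscCall(MatZeroRowsColumns(mat, 2, rows, 1.0, x, b));
.ve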
6295: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6296: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6297: @*/
6298: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6299: {
6300: PetscFunctionBegin;
6303: if (numRows) PetscAssertPointer(rows, 3);
6304: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6305: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6306: MatCheckPreallocated(mat, 1);
6308: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6309: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6310: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6311: PetscFunctionReturn(PETSC_SUCCESS);
6312: }
6314: /*@
6315: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6316: of a set of rows and columns of a matrix.
6318: Collective
6320: Input Parameters:
6321: + mat - the matrix
6322: . is - the rows to zero
6323: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6324: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6325: - b - optional vector of right-hand side, that will be adjusted by provided solution
6327: Level: intermediate
6329: Note:
6330: See `MatZeroRowsColumns()` for details on how this routine operates.
6332: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6333: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6334: @*/
6335: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6336: {
6337: PetscInt numRows;
6338: const PetscInt *rows;
6340: PetscFunctionBegin;
6345: PetscCall(ISGetLocalSize(is, &numRows));
6346: PetscCall(ISGetIndices(is, &rows));
6347: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6348: PetscCall(ISRestoreIndices(is, &rows));
6349: PetscFunctionReturn(PETSC_SUCCESS);
6350: }
6352: /*@
6353: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6354: of a set of rows of a matrix.
6356: Collective
6358: Input Parameters:
6359: + mat - the matrix
6360: . numRows - the number of rows to zero
6361: . rows - the global row indices
6362: . diag - value put in the diagonal of the zeroed rows
6363: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6364: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6366: Level: intermediate
6368: Notes:
6369: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6371: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6373: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6374: Krylov method to take advantage of the known solution on the zeroed rows.
6376: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6377: from the matrix).
6379: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6380: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6381: formats this does not alter the nonzero structure.
6383: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6384: of the matrix is not changed; the values are
6385: merely zeroed.
6387: The user can set a value in the diagonal entry (or for the `MATAIJ`
6388: format can optionally remove the main diagonal entry from the
6389: nonzero structure as well, by passing 0.0 as the final argument).
6391: For the parallel case, all processes that share the matrix (i.e.,
6392: those in the communicator used for matrix creation) MUST call this
6393: routine, regardless of whether any rows being zeroed are owned by
6394: them.
6396: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6397: list only rows local to itself).
6399: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6400: owns that are to be zeroed. This saves a global synchronization in the implementation.
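Example Usage:
A minimal sketch of zeroing Dirichlet rows while keeping the nonzero pattern; the row indices are placeholders:
.vb
PetscInt rows[] = {0, 5};
PetscCall(MatSetOption(mat, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE)); // optional: keep zeroed entries in the structure
PetscCall(MatZeroRows(mat, 2, rows, 1.0, NULL, NULL));              // place 1.0 on the zeroed diagonals
.ve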
6402: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6403: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6404: @*/
6405: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6406: {
6407: PetscFunctionBegin;
6410: if (numRows) PetscAssertPointer(rows, 3);
6411: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6412: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6413: MatCheckPreallocated(mat, 1);
6415: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6416: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6417: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6418: PetscFunctionReturn(PETSC_SUCCESS);
6419: }
6421: /*@
6422: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6423: of a set of rows of a matrix indicated by an `IS`
6425: Collective
6427: Input Parameters:
6428: + mat - the matrix
6429: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6430: . diag - value put in all diagonals of eliminated rows
6431: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6432: - b - optional vector of right-hand side, that will be adjusted by provided solution
6434: Level: intermediate
6436: Note:
6437: See `MatZeroRows()` for details on how this routine operates.
6439: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6440: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6441: @*/
6442: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6443: {
6444: PetscInt numRows = 0;
6445: const PetscInt *rows = NULL;
6447: PetscFunctionBegin;
6450: if (is) {
6452: PetscCall(ISGetLocalSize(is, &numRows));
6453: PetscCall(ISGetIndices(is, &rows));
6454: }
6455: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6456: if (is) PetscCall(ISRestoreIndices(is, &rows));
6457: PetscFunctionReturn(PETSC_SUCCESS);
6458: }
6460: /*@
6461: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6462: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6464: Collective
6466: Input Parameters:
6467: + mat - the matrix
6468: . numRows - the number of rows to remove
6469: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6470: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6471: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6472: - b - optional vector of right-hand side, that will be adjusted by provided solution
6474: Level: intermediate
6476: Notes:
6477: See `MatZeroRows()` for details on how this routine operates.
6479: The grid coordinates are across the entire grid, not just the local portion
6481: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6482: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6483: etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
6484: `DM_BOUNDARY_PERIODIC` boundary type.
6486: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6487: a single value per point) you can skip filling those indices.
6489: Fortran Note:
6490: `idxm` and `idxn` should be declared as
6491: .vb
6492: MatStencil idxm(4, m)
6493: .ve
6494: and the values inserted using
6495: .vb
6496: idxm(MatStencil_i, 1) = i
6497: idxm(MatStencil_j, 1) = j
6498: idxm(MatStencil_k, 1) = k
6499: idxm(MatStencil_c, 1) = c
6500: etc
6501: .ve
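Example Usage:
A minimal C sketch for a 2d grid; `i` and `j` are placeholder global grid indices:
.vb
MatStencil row;
row.i = i; row.j = j; row.k = 0; row.c = 0;
PetscCall(MatZeroRowsStencil(mat, 1, &row, 1.0, NULL, NULL));
.ve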
6503: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6504: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6505: @*/
6506: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6507: {
6508: PetscInt dim = mat->stencil.dim;
6509: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6510: PetscInt *dims = mat->stencil.dims + 1;
6511: PetscInt *starts = mat->stencil.starts;
6512: PetscInt *dxm = (PetscInt *)rows;
6513: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6515: PetscFunctionBegin;
6518: if (numRows) PetscAssertPointer(rows, 3);
6520: PetscCall(PetscMalloc1(numRows, &jdxm));
6521: for (i = 0; i < numRows; ++i) {
6522: /* Skip unused dimensions (they are ordered k, j, i, c) */
6523: for (j = 0; j < 3 - sdim; ++j) dxm++;
6524: /* Local index in X dir */
6525: tmp = *dxm++ - starts[0];
6526: /* Loop over remaining dimensions */
6527: for (j = 0; j < dim - 1; ++j) {
6528: /* If nonlocal, set index to be negative */
6529: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6530: /* Update local index */
6531: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6532: }
6533: /* Skip component slot if necessary */
6534: if (mat->stencil.noc) dxm++;
6535: /* Local row number */
6536: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6537: }
6538: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6539: PetscCall(PetscFree(jdxm));
6540: PetscFunctionReturn(PETSC_SUCCESS);
6541: }
6543: /*@
6544: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6545: of a set of rows and columns of a matrix.
6547: Collective
6549: Input Parameters:
6550: + mat - the matrix
6551: . numRows - the number of rows/columns to remove
6552: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6553: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6554: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6555: - b - optional vector of right-hand side, that will be adjusted by provided solution
6557: Level: intermediate
6559: Notes:
6560: See `MatZeroRowsColumns()` for details on how this routine operates.
6562: The grid coordinates are across the entire grid, not just the local portion
6564: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6565: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6566: etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
6567: `DM_BOUNDARY_PERIODIC` boundary type.
6569: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6570: a single value per point) you can skip filling those indices.
6572: Fortran Note:
6573: `idxm` and `idxn` should be declared as
6574: .vb
6575: MatStencil idxm(4, m)
6576: .ve
6577: and the values inserted using
6578: .vb
6579: idxm(MatStencil_i, 1) = i
6580: idxm(MatStencil_j, 1) = j
6581: idxm(MatStencil_k, 1) = k
6582: idxm(MatStencil_c, 1) = c
6583: etc
6584: .ve
6586: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6587: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6588: @*/
6589: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6590: {
6591: PetscInt dim = mat->stencil.dim;
6592: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6593: PetscInt *dims = mat->stencil.dims + 1;
6594: PetscInt *starts = mat->stencil.starts;
6595: PetscInt *dxm = (PetscInt *)rows;
6596: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6598: PetscFunctionBegin;
6601: if (numRows) PetscAssertPointer(rows, 3);
6603: PetscCall(PetscMalloc1(numRows, &jdxm));
6604: for (i = 0; i < numRows; ++i) {
6605: /* Skip unused dimensions (they are ordered k, j, i, c) */
6606: for (j = 0; j < 3 - sdim; ++j) dxm++;
6607: /* Local index in X dir */
6608: tmp = *dxm++ - starts[0];
6609: /* Loop over remaining dimensions */
6610: for (j = 0; j < dim - 1; ++j) {
6611: /* If nonlocal, set index to be negative */
6612: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6613: /* Update local index */
6614: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6615: }
6616: /* Skip component slot if necessary */
6617: if (mat->stencil.noc) dxm++;
6618: /* Local row number */
6619: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6620: }
6621: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6622: PetscCall(PetscFree(jdxm));
6623: PetscFunctionReturn(PETSC_SUCCESS);
6624: }
6626: /*@
6627: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6628: of a set of rows of a matrix; using local numbering of rows.
6630: Collective
6632: Input Parameters:
6633: + mat - the matrix
6634: . numRows - the number of rows to remove
6635: . rows - the local row indices
6636: . diag - value put in all diagonals of eliminated rows
6637: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6638: - b - optional vector of right-hand side, that will be adjusted by provided solution
6640: Level: intermediate
6642: Notes:
6643: Before calling `MatZeroRowsLocal()`, the user must first set the
6644: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6646: See `MatZeroRows()` for details on how this routine operates.
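Example Usage:
A minimal sketch using local row numbering; the indices are placeholders and the local-to-global mapping is assumed to be already set (for example by `DMCreateMatrix()`):
.vb
PetscInt lrows[] = {0, 1}; // local row indices on this process
PetscCall(MatZeroRowsLocal(mat, 2, lrows, 1.0, NULL, NULL));
.ve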
6648: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6649: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6650: @*/
6651: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6652: {
6653: PetscFunctionBegin;
6656: if (numRows) PetscAssertPointer(rows, 3);
6657: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6658: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6659: MatCheckPreallocated(mat, 1);
6661: if (mat->ops->zerorowslocal) {
6662: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6663: } else {
6664: IS is, newis;
6665: PetscInt *newRows, nl = 0;
6667: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6668: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
6669: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6670: PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
6671: for (PetscInt i = 0; i < numRows; i++)
6672: if (newRows[i] > -1) newRows[nl++] = newRows[i];
6673: PetscUseTypeMethod(mat, zerorows, nl, newRows, diag, x, b);
6674: PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
6675: PetscCall(ISDestroy(&newis));
6676: PetscCall(ISDestroy(&is));
6677: }
6678: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6679: PetscFunctionReturn(PETSC_SUCCESS);
6680: }
6682: /*@
6683: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6684: of a set of rows of a matrix; using local numbering of rows.
6686: Collective
6688: Input Parameters:
6689: + mat - the matrix
6690: . is - index set of rows to remove
6691: . diag - value put in all diagonals of eliminated rows
6692: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6693: - b - optional vector of right-hand side, that will be adjusted by provided solution
6695: Level: intermediate
6697: Notes:
6698: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6699: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6701: See `MatZeroRows()` for details on how this routine operates.
6703: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6704: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6705: @*/
6706: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6707: {
6708: PetscInt numRows;
6709: const PetscInt *rows;
6711: PetscFunctionBegin;
6715: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6716: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6717: MatCheckPreallocated(mat, 1);
6719: PetscCall(ISGetLocalSize(is, &numRows));
6720: PetscCall(ISGetIndices(is, &rows));
6721: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6722: PetscCall(ISRestoreIndices(is, &rows));
6723: PetscFunctionReturn(PETSC_SUCCESS);
6724: }
6726: /*@
6727: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6728: of a set of rows and columns of a matrix; using local numbering of rows.
6730: Collective
6732: Input Parameters:
6733: + mat - the matrix
6734: . numRows - the number of rows to remove
6735: . rows - the global row indices
6736: . diag - value put in all diagonals of eliminated rows
6737: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6738: - b - optional vector of right-hand side, that will be adjusted by provided solution
6740: Level: intermediate
6742: Notes:
6743: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6744: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6746: See `MatZeroRowsColumns()` for details on how this routine operates.
6748: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6749: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6750: @*/
6751: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6752: {
6753: PetscFunctionBegin;
6756: if (numRows) PetscAssertPointer(rows, 3);
6757: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6758: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6759: MatCheckPreallocated(mat, 1);
6761: if (mat->ops->zerorowscolumnslocal) {
6762: PetscUseTypeMethod(mat, zerorowscolumnslocal, numRows, rows, diag, x, b);
6763: } else {
6764: IS is, newis;
6765: PetscInt *newRows, nl = 0;
6767: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6768: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
6769: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6770: PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
6771: for (PetscInt i = 0; i < numRows; i++)
6772: if (newRows[i] > -1) newRows[nl++] = newRows[i];
6773: PetscUseTypeMethod(mat, zerorowscolumns, nl, newRows, diag, x, b);
6774: PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
6775: PetscCall(ISDestroy(&newis));
6776: PetscCall(ISDestroy(&is));
6777: }
6778: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6779: PetscFunctionReturn(PETSC_SUCCESS);
6780: }
6782: /*@
6783: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6784: of a set of rows and columns of a matrix; using local numbering of rows.
6786: Collective
6788: Input Parameters:
6789: + mat - the matrix
6790: . is - index set of rows to remove
6791: . diag - value put in all diagonals of eliminated rows
6792: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6793: - b - optional vector of right-hand side, that will be adjusted by provided solution
6795: Level: intermediate
6797: Notes:
6798: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6799: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6801: See `MatZeroRowsColumns()` for details on how this routine operates.
6803: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6804: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6805: @*/
6806: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6807: {
6808: PetscInt numRows;
6809: const PetscInt *rows;
6811: PetscFunctionBegin;
6815: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6816: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6817: MatCheckPreallocated(mat, 1);
6819: PetscCall(ISGetLocalSize(is, &numRows));
6820: PetscCall(ISGetIndices(is, &rows));
6821: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6822: PetscCall(ISRestoreIndices(is, &rows));
6823: PetscFunctionReturn(PETSC_SUCCESS);
6824: }
6826: /*@
6827: MatGetSize - Returns the numbers of rows and columns in a matrix.
6829: Not Collective
6831: Input Parameter:
6832: . mat - the matrix
6834: Output Parameters:
6835: + m - the number of global rows
6836: - n - the number of global columns
6838: Level: beginner
6840: Note:
6841: Both output parameters can be `NULL` on input.
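
  Example Usage:
  A minimal sketch, assuming `A` is an already created matrix:
.vb
  PetscInt M, N, m, n;

  PetscCall(MatGetSize(A, &M, &N));      // global number of rows and columns
  PetscCall(MatGetLocalSize(A, &m, &n)); // local sizes, matching the vectors from MatCreateVecs()
.ve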
6843: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6844: @*/
6845: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6846: {
6847: PetscFunctionBegin;
6849: if (m) *m = mat->rmap->N;
6850: if (n) *n = mat->cmap->N;
6851: PetscFunctionReturn(PETSC_SUCCESS);
6852: }
6854: /*@
6855:   MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6856:   of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6858: Not Collective
6860: Input Parameter:
6861: . mat - the matrix
6863: Output Parameters:
6864: + m - the number of local rows, use `NULL` to not obtain this value
6865: - n - the number of local columns, use `NULL` to not obtain this value
6867: Level: beginner
6869: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6870: @*/
6871: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6872: {
6873: PetscFunctionBegin;
6875: if (m) PetscAssertPointer(m, 2);
6876: if (n) PetscAssertPointer(n, 3);
6877: if (m) *m = mat->rmap->n;
6878: if (n) *n = mat->cmap->n;
6879: PetscFunctionReturn(PETSC_SUCCESS);
6880: }
6882: /*@
6883:   MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the locally owned rows of a
6884:   vector that one multiplies this matrix by; that is, the columns owned by this MPI process.
6886:   Not Collective, unless the matrix has not been allocated, in which case it is Collective
6888: Input Parameter:
6889: . mat - the matrix
6891: Output Parameters:
6892: + m - the global index of the first local column, use `NULL` to not obtain this value
6893: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6895: Level: developer
6897: Notes:
6898: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6900:   If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6901:   If `PETSC_DECIDE` was passed as the local size, then PETSc determines default values for the range using `PetscSplitOwnership()`.
6903: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6904: the local values in the matrix.
6906: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6907: Layouts](sec_matlayout) for details on matrix layouts.
6909: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6910: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6911: @*/
6912: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6913: {
6914: PetscFunctionBegin;
6917: if (m) PetscAssertPointer(m, 2);
6918: if (n) PetscAssertPointer(n, 3);
6919: MatCheckPreallocated(mat, 1);
6920: if (m) *m = mat->cmap->rstart;
6921: if (n) *n = mat->cmap->rend;
6922: PetscFunctionReturn(PETSC_SUCCESS);
6923: }
6925: /*@
6926:   MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6927:   this MPI process.
6929: Not Collective
6931: Input Parameter:
6932: . mat - the matrix
6934: Output Parameters:
6935: + m - the global index of the first local row, use `NULL` to not obtain this value
6936: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6938: Level: beginner
6940: Notes:
6941: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6943:   If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6944:   If `PETSC_DECIDE` was passed as the local size, then PETSc determines default values for the range using `PetscSplitOwnership()`.
6946: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6947: the local values in the matrix.
6949:   The value returned in `n` is one more than the global index of the last row stored locally.
6951: For all matrices it returns the range of matrix rows associated with rows of a vector that
6952: would contain the result of a matrix vector product with this matrix. See [Matrix
6953: Layouts](sec_matlayout) for details on matrix layouts.
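
  Example Usage:
  A minimal sketch that sets the diagonal entries of the locally owned rows, assuming `A` is a
  preallocated square matrix:
.vb
  PetscInt rstart, rend;

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (PetscInt i = rstart; i < rend; i++) PetscCall(MatSetValue(A, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve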
6955: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6956: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6957: @*/
6958: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6959: {
6960: PetscFunctionBegin;
6963: if (m) PetscAssertPointer(m, 2);
6964: if (n) PetscAssertPointer(n, 3);
6965: MatCheckPreallocated(mat, 1);
6966: if (m) *m = mat->rmap->rstart;
6967: if (n) *n = mat->rmap->rend;
6968: PetscFunctionReturn(PETSC_SUCCESS);
6969: }
6971: /*@C
6972:   MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6973:   `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6975: Not Collective, unless matrix has not been allocated
6977: Input Parameter:
6978: . mat - the matrix
6980: Output Parameter:
6981: . ranges - start of each process's portion, plus one more than the total length at the end; the array has length `size` + 1,
6982:            where `size` is the number of MPI processes used by `mat`
6984: Level: beginner
6986: Notes:
6987: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6989:   If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6990:   If `PETSC_DECIDE` was passed as the local size, then PETSc determines default values for the range using `PetscSplitOwnership()`.
6992: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6993: the local values in the matrix.
6995: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6996: would contain the result of a matrix vector product with this matrix. See [Matrix
6997: Layouts](sec_matlayout) for details on matrix layouts.
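
  Example Usage:
  A minimal sketch that prints the row range owned by each MPI process, assuming `A` is an already
  created matrix:
.vb
  const PetscInt *ranges;
  PetscMPIInt     size;

  PetscCall(MatGetOwnershipRanges(A, &ranges));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  for (PetscMPIInt r = 0; r < size; r++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "rank %d owns rows [%" PetscInt_FMT ", %" PetscInt_FMT ")\n", (int)r, ranges[r], ranges[r + 1]));
.ve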
6999: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
7000: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
7001: `DMDAGetGhostCorners()`, `DM`
7002: @*/
7003: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
7004: {
7005: PetscFunctionBegin;
7008: MatCheckPreallocated(mat, 1);
7009: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
7010: PetscFunctionReturn(PETSC_SUCCESS);
7011: }
7013: /*@C
7014:   MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with the rows of a
7015:   vector one multiplies this matrix by that are owned by each MPI process.
7017: Not Collective, unless matrix has not been allocated
7019: Input Parameter:
7020: . mat - the matrix
7022: Output Parameter:
7023: . ranges - start of each process's portion, plus one more than the total length at the end
7025: Level: beginner
7027: Notes:
7028: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7030:   If the `Mat` was created directly, the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7031:   If `PETSC_DECIDE` was passed as the local size, then PETSc determines default values for the range using `PetscSplitOwnership()`.
7033: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7034: the local values in the matrix.
7036: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
7037: Layouts](sec_matlayout) for details on matrix layouts.
7039: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
7040: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
7041: `DMDAGetGhostCorners()`, `DM`
7042: @*/
7043: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
7044: {
7045: PetscFunctionBegin;
7048: MatCheckPreallocated(mat, 1);
7049: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7050: PetscFunctionReturn(PETSC_SUCCESS);
7051: }
7053: /*@
7054:   MatGetOwnershipIS - Gets the row and column ownership of a matrix's values as index sets.
7056: Not Collective
7058: Input Parameter:
7059: . A - matrix
7061: Output Parameters:
7062: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7063: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7065: Level: intermediate
7067:   Notes:
7068:   You should call `ISDestroy()` on the returned `IS`.
7070: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7071: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7072: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7073: details on matrix layouts.
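
  Example Usage:
  A minimal sketch, assuming `A` is an already created matrix:
.vb
  IS rows, cols;

  PetscCall(MatGetOwnershipIS(A, &rows, &cols));
  // ... use the index sets, for example with MatCreateSubMatrices() ...
  PetscCall(ISDestroy(&rows));
  PetscCall(ISDestroy(&cols));
.ve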
7075: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7076: @*/
7077: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7078: {
7079: PetscErrorCode (*f)(Mat, IS *, IS *);
7081: PetscFunctionBegin;
7084: MatCheckPreallocated(A, 1);
7085: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7086: if (f) {
7087: PetscCall((*f)(A, rows, cols));
7088: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7089: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7090: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7091: }
7092: PetscFunctionReturn(PETSC_SUCCESS);
7093: }
7095: /*@
7096:   MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
7097: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7098: to complete the factorization.
7100: Collective
7102: Input Parameters:
7103: + fact - the factorized matrix obtained with `MatGetFactor()`
7104: . mat - the matrix
7105: . row - row permutation
7106: . col - column permutation
7107: - info - structure containing
7108: .vb
7109: levels - number of levels of fill.
7110: expected fill - as ratio of original fill.
7111: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7112: missing diagonal entries)
7113: .ve
7115: Level: developer
7117: Notes:
7118: See [Matrix Factorization](sec_matfactor) for additional information.
7120: Most users should employ the `KSP` interface for linear solvers
7121: instead of working directly with matrix algebra routines such as this.
7122: See, e.g., `KSPCreate()`.
7124: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
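
  Example Usage:
  A minimal sketch of the full ILU(k) factorization sequence, assuming `A` is an assembled `MATAIJ`
  matrix; the ordering, solver package, and `info` values chosen here are illustrative only:
.vb
  Mat           F;
  IS            row, col;
  MatFactorInfo info;

  PetscCall(MatFactorInfoInitialize(&info));
  info.levels = 1;   // ILU(1)
  info.fill   = 2.0; // expected fill as a ratio of the original fill

  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
  PetscCall(MatILUFactorSymbolic(F, A, row, col, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info)); // F can now be used with MatSolve()
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
  PetscCall(MatDestroy(&F));
.ve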
7126: Fortran Note:
7127: A valid (non-null) `info` argument must be provided
7129: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
7130: `MatGetOrdering()`, `MatFactorInfo`
7131: @*/
7132: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7133: {
7134: PetscFunctionBegin;
7139: PetscAssertPointer(info, 5);
7140: PetscAssertPointer(fact, 1);
7141: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7142: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7143: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7144: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7145: MatCheckPreallocated(mat, 2);
7147: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7148: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7149: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7150: PetscFunctionReturn(PETSC_SUCCESS);
7151: }
7153: /*@
7154: MatICCFactorSymbolic - Performs symbolic incomplete
7155: Cholesky factorization for a symmetric matrix. Use
7156: `MatCholeskyFactorNumeric()` to complete the factorization.
7158: Collective
7160: Input Parameters:
7161: + fact - the factorized matrix obtained with `MatGetFactor()`
7162: . mat - the matrix to be factored
7163: . perm - row and column permutation
7164: - info - structure containing
7165: .vb
7166: levels - number of levels of fill.
7167: expected fill - as ratio of original fill.
7168: .ve
7170: Level: developer
7172: Notes:
7173: Most users should employ the `KSP` interface for linear solvers
7174: instead of working directly with matrix algebra routines such as this.
7175: See, e.g., `KSPCreate()`.
7177: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7179: Fortran Note:
7180: A valid (non-null) `info` argument must be provided
7182: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7183: @*/
7184: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7185: {
7186: PetscFunctionBegin;
7190: PetscAssertPointer(info, 4);
7191: PetscAssertPointer(fact, 1);
7192: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7193: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7194: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7195: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7196: MatCheckPreallocated(mat, 2);
7198: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7199: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7200: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7201: PetscFunctionReturn(PETSC_SUCCESS);
7202: }
7204: /*@C
7205: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7206: points to an array of valid matrices, they may be reused to store the new
7207: submatrices.
7209: Collective
7211: Input Parameters:
7212: + mat - the matrix
7213: . n - the number of submatrices to be extracted (on this MPI process, may be zero)
7214: . irow - index set of rows to extract
7215: . icol - index set of columns to extract
7216: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7218: Output Parameter:
7219: . submat - the array of submatrices
7221: Level: advanced
7223: Notes:
7224: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7225: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7226: to extract a parallel submatrix.
7228: Some matrix types place restrictions on the row and column
7229: indices, such as that they be sorted or that they be equal to each other.
7231: The index sets may not have duplicate entries.
7233: When extracting submatrices from a parallel matrix, each processor can
7234: form a different submatrix by setting the rows and columns of its
7235: individual index sets according to the local submatrix desired.
7237: When finished using the submatrices, the user should destroy
7238: them with `MatDestroySubMatrices()`.
7240: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7241: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7243: This routine creates the matrices in submat; you should NOT create them before
7244: calling it. It also allocates the array of matrix pointers submat.
7246: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7247: request one row/column in a block, they must request all rows/columns that are in
7248: that block. For example, if the block size is 2 you cannot request just row 0 and
7249: column 0.
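
  Example Usage:
  A minimal sketch in which each MPI process extracts one sequential submatrix consisting of the first
  five rows and columns of `A` in global numbering; the sizes are illustrative and `A` is assumed to be
  assembled:
.vb
  IS  rowis, colis;
  Mat *submats;

  PetscCall(ISCreateStride(PETSC_COMM_SELF, 5, 0, 1, &rowis));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, 5, 0, 1, &colis));
  PetscCall(MatCreateSubMatrices(A, 1, &rowis, &colis, MAT_INITIAL_MATRIX, &submats));
  // ... use submats[0] ...
  PetscCall(MatDestroySubMatrices(1, &submats));
  PetscCall(ISDestroy(&rowis));
  PetscCall(ISDestroy(&colis));
.ve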
7251: Fortran Note:
7252: .vb
7253: Mat, pointer :: submat(:)
7254: .ve
7256: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7257: @*/
7258: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7259: {
7260: PetscInt i;
7261: PetscBool eq;
7263: PetscFunctionBegin;
7266: if (n) {
7267: PetscAssertPointer(irow, 3);
7269: PetscAssertPointer(icol, 4);
7271: }
7272: PetscAssertPointer(submat, 6);
7273: if (n && scall == MAT_REUSE_MATRIX) {
7274: PetscAssertPointer(*submat, 6);
7276: }
7277: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7278: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7279: MatCheckPreallocated(mat, 1);
7280: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7281: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7282: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7283: for (i = 0; i < n; i++) {
7284: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7285: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7286: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7287: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7288: if (mat->boundtocpu && mat->bindingpropagates) {
7289: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7290: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7291: }
7292: #endif
7293: }
7294: PetscFunctionReturn(PETSC_SUCCESS);
7295: }
7297: /*@C
7298: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7300: Collective
7302: Input Parameters:
7303: + mat - the matrix
7304: . n - the number of submatrices to be extracted
7305: . irow - index set of rows to extract
7306: . icol - index set of columns to extract
7307: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7309: Output Parameter:
7310: . submat - the array of submatrices
7312: Level: advanced
7314: Note:
7315: This is used by `PCGASM`
7317: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7318: @*/
7319: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7320: {
7321: PetscInt i;
7322: PetscBool eq;
7324: PetscFunctionBegin;
7327: if (n) {
7328: PetscAssertPointer(irow, 3);
7330: PetscAssertPointer(icol, 4);
7332: }
7333: PetscAssertPointer(submat, 6);
7334: if (n && scall == MAT_REUSE_MATRIX) {
7335: PetscAssertPointer(*submat, 6);
7337: }
7338: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7339: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7340: MatCheckPreallocated(mat, 1);
7342: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7343: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7344: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7345: for (i = 0; i < n; i++) {
7346: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7347: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7348: }
7349: PetscFunctionReturn(PETSC_SUCCESS);
7350: }
7352: /*@C
7353: MatDestroyMatrices - Destroys an array of matrices
7355: Collective
7357: Input Parameters:
7358: + n - the number of local matrices
7359: - mat - the matrices (this is a pointer to the array of matrices)
7361: Level: advanced
7363: Notes:
7364: Frees not only the matrices, but also the array that contains the matrices
7366: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7368: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7369: @*/
7370: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7371: {
7372: PetscInt i;
7374: PetscFunctionBegin;
7375: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7376: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7377: PetscAssertPointer(mat, 2);
7379: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7381: /* memory is allocated even if n = 0 */
7382: PetscCall(PetscFree(*mat));
7383: PetscFunctionReturn(PETSC_SUCCESS);
7384: }
7386: /*@C
7387: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7389: Collective
7391: Input Parameters:
7392: + n - the number of local matrices
7393: - mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)
7395: Level: advanced
7397: Note:
7398: Frees not only the matrices, but also the array that contains the matrices
7400: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7401: @*/
7402: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7403: {
7404: Mat mat0;
7406: PetscFunctionBegin;
7407: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7408: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7409: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7410: PetscAssertPointer(mat, 2);
7412: mat0 = (*mat)[0];
7413: if (mat0 && mat0->ops->destroysubmatrices) {
7414: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7415: } else {
7416: PetscCall(MatDestroyMatrices(n, mat));
7417: }
7418: PetscFunctionReturn(PETSC_SUCCESS);
7419: }
7421: /*@
7422: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7424: Collective
7426: Input Parameter:
7427: . mat - the matrix
7429: Output Parameter:
7430: . matstruct - the sequential matrix with the nonzero structure of `mat`
7432: Level: developer
7434: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7435: @*/
7436: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7437: {
7438: PetscFunctionBegin;
7440: PetscAssertPointer(matstruct, 2);
7443: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7444: MatCheckPreallocated(mat, 1);
7446: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7447: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7448: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7449: PetscFunctionReturn(PETSC_SUCCESS);
7450: }
7452: /*@C
7453: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7455: Collective
7457: Input Parameter:
7458: . mat - the matrix
7460: Level: advanced
7462: Note:
7463:   This is not needed; one can just call `MatDestroy()`
7465: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7466: @*/
7467: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7468: {
7469: PetscFunctionBegin;
7470: PetscAssertPointer(mat, 1);
7471: PetscCall(MatDestroy(mat));
7472: PetscFunctionReturn(PETSC_SUCCESS);
7473: }
7475: /*@
7476: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7477: replaces the index sets by larger ones that represent submatrices with
7478: additional overlap.
7480: Collective
7482: Input Parameters:
7483: + mat - the matrix
7484: . n - the number of index sets
7485: . is - the array of index sets (these index sets will be changed during the call)
7486: - ov - the additional overlap requested
7488: Options Database Key:
7489: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7491: Level: developer
7493: Note:
7494:   The computed overlap preserves the matrix block sizes when the blocks are square.
7495:   That is, if a matrix nonzero for a given block would increase the overlap, all columns associated with
7496:   that block are included in the overlap, regardless of whether each specific column would increase the overlap.
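
  Example Usage:
  A minimal sketch that grows a subdomain, initially the locally owned rows, by one level of overlap;
  `A` is assumed to be an assembled parallel matrix:
.vb
  IS       is;
  PetscInt rstart, rend;

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is));
  PetscCall(MatIncreaseOverlap(A, 1, &is, 1)); // is now describes the overlapping subdomain
  // ... use is, for example with MatCreateSubMatrices() ...
  PetscCall(ISDestroy(&is));
.ve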
7498: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7499: @*/
7500: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7501: {
7502: PetscInt i, bs, cbs;
7504: PetscFunctionBegin;
7508: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7509: if (n) {
7510: PetscAssertPointer(is, 3);
7512: }
7513: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7514: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7515: MatCheckPreallocated(mat, 1);
7517: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7518: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7519: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7520: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7521: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7522: if (bs == cbs) {
7523: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7524: }
7525: PetscFunctionReturn(PETSC_SUCCESS);
7526: }
7528: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7530: /*@
7531: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7532: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7533: additional overlap.
7535: Collective
7537: Input Parameters:
7538: + mat - the matrix
7539: . n - the number of index sets
7540: . is - the array of index sets (these index sets will be changed during the call)
7541: - ov - the additional overlap requested
7543:   Options Database Key:
7544: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7546: Level: developer
7548: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7549: @*/
7550: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7551: {
7552: PetscInt i;
7554: PetscFunctionBegin;
7557: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7558: if (n) {
7559: PetscAssertPointer(is, 3);
7561: }
7562: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7563: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7564: MatCheckPreallocated(mat, 1);
7565: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7566: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7567: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7568: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7569: PetscFunctionReturn(PETSC_SUCCESS);
7570: }
7572: /*@
7573: MatGetBlockSize - Returns the matrix block size.
7575: Not Collective
7577: Input Parameter:
7578: . mat - the matrix
7580: Output Parameter:
7581: . bs - block size
7583: Level: intermediate
7585: Notes:
7586:   The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7588:   If the block size has not been set yet, this routine returns 1.
7590: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7591: @*/
7592: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7593: {
7594: PetscFunctionBegin;
7596: PetscAssertPointer(bs, 2);
7597: *bs = mat->rmap->bs;
7598: PetscFunctionReturn(PETSC_SUCCESS);
7599: }
7601: /*@
7602: MatGetBlockSizes - Returns the matrix block row and column sizes.
7604: Not Collective
7606: Input Parameter:
7607: . mat - the matrix
7609: Output Parameters:
7610: + rbs - row block size
7611: - cbs - column block size
7613: Level: intermediate
7615: Notes:
7616:   The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7617:   If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7619:   If a block size has not been set yet, this routine returns 1.
7621: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7622: @*/
7623: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7624: {
7625: PetscFunctionBegin;
7627: if (rbs) PetscAssertPointer(rbs, 2);
7628: if (cbs) PetscAssertPointer(cbs, 3);
7629: if (rbs) *rbs = mat->rmap->bs;
7630: if (cbs) *cbs = mat->cmap->bs;
7631: PetscFunctionReturn(PETSC_SUCCESS);
7632: }
7634: /*@
7635: MatSetBlockSize - Sets the matrix block size.
7637: Logically Collective
7639: Input Parameters:
7640: + mat - the matrix
7641: - bs - block size
7643: Level: intermediate
7645: Notes:
7646:   The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7647:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or the block size will default to 1), and the block size cannot be changed later.
7649: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7650: is compatible with the matrix local sizes.
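
  Example Usage:
  A minimal sketch that sets the block size before preallocation; the sizes and type are illustrative:
.vb
  Mat A;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
  PetscCall(MatSetType(A, MATBAIJ));
  PetscCall(MatSetBlockSize(A, 2)); // must precede MatSetUp()/preallocation for MATBAIJ
  PetscCall(MatSetUp(A));
.ve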
7652: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7653: @*/
7654: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7655: {
7656: PetscFunctionBegin;
7659: PetscCall(MatSetBlockSizes(mat, bs, bs));
7660: PetscFunctionReturn(PETSC_SUCCESS);
7661: }
7663: typedef struct {
7664: PetscInt n;
7665: IS *is;
7666: Mat *mat;
7667: PetscObjectState nonzerostate;
7668: Mat C;
7669: } EnvelopeData;
7671: static PetscErrorCode EnvelopeDataDestroy(void **ptr)
7672: {
7673: EnvelopeData *edata = (EnvelopeData *)*ptr;
7675: PetscFunctionBegin;
7676: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7677: PetscCall(PetscFree(edata->is));
7678: PetscCall(PetscFree(edata));
7679: PetscFunctionReturn(PETSC_SUCCESS);
7680: }
7682: /*@
7683:   MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7684:   the sizes of these blocks in the matrix. An individual block may lie over several processes.
7686: Collective
7688: Input Parameter:
7689: . mat - the matrix
7691: Level: intermediate
7693: Notes:
7694:   There can be zeros within the blocks.
7696:   The blocks can extend across process boundaries, including lying over more than two processes.
7698: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7699: @*/
7700: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7701: {
7702: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7703: PetscInt *diag, *odiag, sc;
7704: VecScatter scatter;
7705: PetscScalar *seqv;
7706: const PetscScalar *parv;
7707: const PetscInt *ia, *ja;
7708: PetscBool set, flag, done;
7709: Mat AA = mat, A;
7710: MPI_Comm comm;
7711: PetscMPIInt rank, size, tag;
7712: MPI_Status status;
7713: PetscContainer container;
7714: EnvelopeData *edata;
7715: Vec seq, par;
7716: IS isglobal;
7718: PetscFunctionBegin;
7720: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7721: if (!set || !flag) {
7722: /* TODO: only needs nonzero structure of transpose */
7723: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7724: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7725: }
7726: PetscCall(MatAIJGetLocalMat(AA, &A));
7727: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7728: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7730: PetscCall(MatGetLocalSize(mat, &n, NULL));
7731: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7732: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7733: PetscCallMPI(MPI_Comm_size(comm, &size));
7734: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7736: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7738: if (rank > 0) {
7739: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7740: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7741: }
7742: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7743: for (i = 0; i < n; i++) {
7744: env = PetscMax(env, ja[ia[i + 1] - 1]);
7745: II = rstart + i;
7746: if (env == II) {
7747: starts[lblocks] = tbs;
7748: sizes[lblocks++] = 1 + II - tbs;
7749: tbs = 1 + II;
7750: }
7751: }
7752: if (rank < size - 1) {
7753: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7754: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7755: }
7757: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7758: if (!set || !flag) PetscCall(MatDestroy(&AA));
7759: PetscCall(MatDestroy(&A));
7761: PetscCall(PetscNew(&edata));
7762: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7763: edata->n = lblocks;
7764: /* create IS needed for extracting blocks from the original matrix */
7765: PetscCall(PetscMalloc1(lblocks, &edata->is));
7766: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7768: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7769: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7770: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7771: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7772: PetscCall(MatSetType(edata->C, MATAIJ));
7774: /* Communicate the start and end of each row, from each block to the correct rank */
7775: /* TODO: Use PetscSF instead of VecScatter */
7776: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7777: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7778: PetscCall(VecGetArrayWrite(seq, &seqv));
7779: for (PetscInt i = 0; i < lblocks; i++) {
7780: for (PetscInt j = 0; j < sizes[i]; j++) {
7781: seqv[cnt] = starts[i];
7782: seqv[cnt + 1] = starts[i] + sizes[i];
7783: cnt += 2;
7784: }
7785: }
7786: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7787: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7788: sc -= cnt;
7789: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7790: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7791: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7792: PetscCall(ISDestroy(&isglobal));
7793: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7794: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7795: PetscCall(VecScatterDestroy(&scatter));
7796: PetscCall(VecDestroy(&seq));
7797: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7798: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7799: PetscCall(VecGetArrayRead(par, &parv));
7800: cnt = 0;
7801: PetscCall(MatGetSize(mat, NULL, &n));
7802: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7803: PetscInt start, end, d = 0, od = 0;
7805: start = (PetscInt)PetscRealPart(parv[cnt]);
7806: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7807: cnt += 2;
7809: if (start < cstart) {
7810: od += cstart - start + n - cend;
7811: d += cend - cstart;
7812: } else if (start < cend) {
7813: od += n - cend;
7814: d += cend - start;
7815: } else od += n - start;
7816: if (end <= cstart) {
7817: od -= cstart - end + n - cend;
7818: d -= cend - cstart;
7819: } else if (end < cend) {
7820: od -= n - cend;
7821: d -= cend - end;
7822: } else od -= n - end;
7824: odiag[i] = od;
7825: diag[i] = d;
7826: }
7827: PetscCall(VecRestoreArrayRead(par, &parv));
7828: PetscCall(VecDestroy(&par));
7829: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7830: PetscCall(PetscFree2(diag, odiag));
7831: PetscCall(PetscFree2(sizes, starts));
7833: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7834: PetscCall(PetscContainerSetPointer(container, edata));
7835: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
7836: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7837: PetscCall(PetscObjectDereference((PetscObject)container));
7838: PetscFunctionReturn(PETSC_SUCCESS);
7839: }
7841: /*@
7842:   MatInvertVariableBlockEnvelope - Sets the matrix `C` to be the inverted block diagonal of the matrix `A`
7844: Collective
7846: Input Parameters:
7847: + A - the matrix
7848: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7850: Output Parameter:
7851: . C - matrix with inverted block diagonal of `A`
7853: Level: advanced
7855: Note:
7856:   For efficiency, the matrix `A` should have all its nonzero entries clustered in small blocks along the diagonal.
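
  Example Usage:
  A minimal sketch, assuming `A` is an assembled matrix whose nonzeros are clustered in small blocks
  along the diagonal:
.vb
  Mat C = NULL;

  PetscCall(MatInvertVariableBlockEnvelope(A, MAT_INITIAL_MATRIX, &C));
  // ... later, after the numerical values of A change but not its nonzero structure ...
  PetscCall(MatInvertVariableBlockEnvelope(A, MAT_REUSE_MATRIX, &C));
.ve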
7858: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7859: @*/
7860: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7861: {
7862: PetscContainer container;
7863: EnvelopeData *edata;
7864: PetscObjectState nonzerostate;
7866: PetscFunctionBegin;
7867: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7868: if (!container) {
7869: PetscCall(MatComputeVariableBlockEnvelope(A));
7870: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7871: }
7872: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7873: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7874: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7875: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7877: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7878: *C = edata->C;
7880: for (PetscInt i = 0; i < edata->n; i++) {
7881: Mat D;
7882: PetscScalar *dvalues;
7884: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7885: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7886: PetscCall(MatSeqDenseInvert(D));
7887: PetscCall(MatDenseGetArray(D, &dvalues));
7888: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7889: PetscCall(MatDestroy(&D));
7890: }
7891: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7892: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7893: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7894: PetscFunctionReturn(PETSC_SUCCESS);
7895: }
7897: /*@
7898: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7900: Not Collective
7902: Input Parameters:
7903: + mat - the matrix
7904: . nblocks - the number of blocks on this process; each block can only exist on a single process
7905: - bsizes - the block sizes
7907: Level: intermediate
7909: Notes:
7910: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7912:   Each variable point-block set of degrees of freedom must live on a single MPI process. That is, a point block cannot straddle two MPI processes.
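
  Example Usage:
  A minimal sketch for a process whose local portion of the matrix has 6 rows, split into diagonal
  point-blocks of sizes 2, 3, and 1; the sizes are illustrative:
.vb
  PetscInt bsizes[] = {2, 3, 1};

  PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
.ve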
7914: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7915: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7916: @*/
7917: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7918: {
7919: PetscInt ncnt = 0, nlocal;
7921: PetscFunctionBegin;
7923: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7924: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7925: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7926: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7927: PetscCall(PetscFree(mat->bsizes));
7928: mat->nblocks = nblocks;
7929: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7930: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7931: PetscFunctionReturn(PETSC_SUCCESS);
7932: }
7934: /*@C
7935:   MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix, which need not be of the same size
7937: Not Collective; No Fortran Support
7939: Input Parameter:
7940: . mat - the matrix
7942: Output Parameters:
7943: + nblocks - the number of blocks on this process
7944: - bsizes - the block sizes
7946: Level: intermediate
7948: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7949: @*/
7950: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7951: {
7952: PetscFunctionBegin;
7954: if (nblocks) *nblocks = mat->nblocks;
7955: if (bsizes) *bsizes = mat->bsizes;
7956: PetscFunctionReturn(PETSC_SUCCESS);
7957: }
7959: /*@
7960:   MatSelectVariableBlockSizes - When creating a submatrix, passes on the variable block sizes from the original matrix
7962: Not Collective
7964:   Input Parameters:
7965: + subA - the submatrix
7966: . A - the original matrix
7967: - isrow - The `IS` of selected rows for the submatrix, must be sorted
7969: Level: developer
7971: Notes:
7972: If the index set is not sorted or contains off-process entries, this function will do nothing.
7974: .seealso: [](ch_matrices), `Mat`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7975: @*/
7976: PetscErrorCode MatSelectVariableBlockSizes(Mat subA, Mat A, IS isrow)
7977: {
7978: const PetscInt *rows;
7979: PetscInt n, rStart, rEnd, Nb = 0;
7980: PetscBool flg = A->bsizes ? PETSC_TRUE : PETSC_FALSE;
7982: PetscFunctionBegin;
7983: // The code for block size extraction does not support an unsorted IS
7984: if (flg) PetscCall(ISSorted(isrow, &flg));
7985: // We don't support originally off-diagonal blocks
7986: if (flg) {
7987: PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
7988: PetscCall(ISGetLocalSize(isrow, &n));
7989: PetscCall(ISGetIndices(isrow, &rows));
7990: for (PetscInt i = 0; i < n && flg; ++i) {
7991: if (rows[i] < rStart || rows[i] >= rEnd) flg = PETSC_FALSE;
7992: }
7993: PetscCall(ISRestoreIndices(isrow, &rows));
7994: }
7995: // quiet return if we can't extract block size
7996: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)subA)));
7997: if (!flg) PetscFunctionReturn(PETSC_SUCCESS);
7999: // extract block sizes
8000: PetscCall(ISGetIndices(isrow, &rows));
8001: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
8002: PetscBool occupied = PETSC_FALSE;
8004: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
8005: const PetscInt row = gr + br;
8007: if (i == n) break;
8008: if (rows[i] == row) {
8009: occupied = PETSC_TRUE;
8010: ++i;
8011: }
8012: while (i < n && rows[i] < row) ++i;
8013: }
8014: gr += A->bsizes[b];
8015: if (occupied) ++Nb;
8016: }
8017: subA->nblocks = Nb;
8018: PetscCall(PetscFree(subA->bsizes));
8019: PetscCall(PetscMalloc1(subA->nblocks, &subA->bsizes));
8020: PetscInt sb = 0;
8021: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
8022: if (sb < subA->nblocks) subA->bsizes[sb] = 0;
8023: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
8024: const PetscInt row = gr + br;
8026: if (i == n) break;
8027: if (rows[i] == row) {
8028: ++subA->bsizes[sb];
8029: ++i;
8030: }
8031: while (i < n && rows[i] < row) ++i;
8032: }
8033: gr += A->bsizes[b];
8034: if (sb < subA->nblocks && subA->bsizes[sb]) ++sb;
8035: }
8036: PetscCheck(sb == subA->nblocks, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of blocks %" PetscInt_FMT " != %" PetscInt_FMT, sb, subA->nblocks);
8037: PetscInt nlocal, ncnt = 0;
8038: PetscCall(MatGetLocalSize(subA, &nlocal, NULL));
8039: PetscCheck(subA->nblocks >= 0 && subA->nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", subA->nblocks, nlocal);
8040: for (PetscInt i = 0; i < subA->nblocks; i++) ncnt += subA->bsizes[i];
8041: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
8042: PetscCall(ISRestoreIndices(isrow, &rows));
8043: PetscFunctionReturn(PETSC_SUCCESS);
8044: }
8046: /*@
8047: MatSetBlockSizes - Sets the matrix block row and column sizes.
8049: Logically Collective
8051: Input Parameters:
8052: + mat - the matrix
8053: . rbs - row block size
8054: - cbs - column block size
8056: Level: intermediate
8058: Notes:
8059: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
8060: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
8061:   This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or the block sizes will default to 1), and the block sizes cannot be changed later.
8063: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
8064: are compatible with the matrix local sizes.
8066:   The row and column block sizes determine the block sizes of the "row" and "column" vectors returned by `MatCreateVecs()`.
8068: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
8069: @*/
8070: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
8071: {
8072: PetscFunctionBegin;
8076: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
8077: if (mat->rmap->refcnt) {
8078: ISLocalToGlobalMapping l2g = NULL;
8079: PetscLayout nmap = NULL;
8081: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
8082: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
8083: PetscCall(PetscLayoutDestroy(&mat->rmap));
8084: mat->rmap = nmap;
8085: mat->rmap->mapping = l2g;
8086: }
8087: if (mat->cmap->refcnt) {
8088: ISLocalToGlobalMapping l2g = NULL;
8089: PetscLayout nmap = NULL;
8091: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
8092: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
8093: PetscCall(PetscLayoutDestroy(&mat->cmap));
8094: mat->cmap = nmap;
8095: mat->cmap->mapping = l2g;
8096: }
8097: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
8098: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
8099: PetscFunctionReturn(PETSC_SUCCESS);
8100: }
8102: /*@
8103: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8105: Logically Collective
8107: Input Parameters:
8108: + mat - the matrix
8109: . fromRow - matrix from which to copy row block size
8110: - fromCol - matrix from which to copy column block size (can be same as `fromRow`)
8112: Level: developer
8114: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8115: @*/
8116: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8117: {
8118: PetscFunctionBegin;
8122: PetscTryTypeMethod(mat, setblocksizes, fromRow->rmap->bs, fromCol->cmap->bs);
8123: PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8124: PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8125: PetscFunctionReturn(PETSC_SUCCESS);
8126: }
8128: /*@
8129: MatResidual - Default routine to calculate the residual r = b - Ax
8131: Collective
8133: Input Parameters:
8134: + mat - the matrix
8135: . b - the right-hand-side
8136: - x - the approximate solution
8138: Output Parameter:
8139: . r - location to store the residual
8141: Level: developer
8143: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8144: @*/
8145: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8146: {
8147: PetscFunctionBegin;
8153: MatCheckPreallocated(mat, 1);
8154: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8155: if (!mat->ops->residual) {
8156: PetscCall(MatMult(mat, x, r));
8157: PetscCall(VecAYPX(r, -1.0, b));
8158: } else {
8159: PetscUseTypeMethod(mat, residual, b, x, r);
8160: }
8161: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8162: PetscFunctionReturn(PETSC_SUCCESS);
8163: }
8165: /*@C
8166: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8168: Collective
8170: Input Parameters:
8171: + mat - the matrix
8172: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8173: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8174: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8175: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8176: always used.
8178: Output Parameters:
8179: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8180: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8181: . ja - the column indices, use `NULL` if not needed
8182: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8183: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8185: Level: developer
8187: Notes:
8188: You CANNOT change any of the ia[] or ja[] values.
8190: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
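
  Example Usage:
  A minimal sketch that computes the maximum number of nonzeros in any local row, assuming `A` is an
  assembled `MATSEQAIJ` matrix:
.vb
  PetscInt        nrows, maxnnz = 0;
  const PetscInt *ia, *ja;
  PetscBool       done;

  PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
  if (done) {
    // ia[i + 1] - ia[i] is the number of nonzeros in row i; ja[ia[i]] up to ja[ia[i + 1] - 1] are its column indices
    for (PetscInt i = 0; i < nrows; i++) maxnnz = PetscMax(maxnnz, ia[i + 1] - ia[i]);
  }
  PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
.ve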
8192: Fortran Notes:
8193: Use
8194: .vb
8195: PetscInt, pointer :: ia(:),ja(:)
8196: call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8197: ! Access the ith and jth entries via ia(i) and ja(j)
8198: .ve
8200: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8201: @*/
8202: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8203: {
8204: PetscFunctionBegin;
8207: if (n) PetscAssertPointer(n, 5);
8208: if (ia) PetscAssertPointer(ia, 6);
8209: if (ja) PetscAssertPointer(ja, 7);
8210: if (done) PetscAssertPointer(done, 8);
8211: MatCheckPreallocated(mat, 1);
8212: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8213: else {
8214: if (done) *done = PETSC_TRUE;
8215: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8216: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8217: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8218: }
8219: PetscFunctionReturn(PETSC_SUCCESS);
8220: }
8222: /*@C
8223: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8225: Collective
8227: Input Parameters:
8228: + mat - the matrix
8229: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8230: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8231: symmetrized
8232: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8233: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8234: always used.
8236: Output Parameters:
8237: + n - number of columns in the (possibly compressed) matrix
8238: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that column of the matrix
8239: . ja - the row indices
8240: - done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8242: Level: developer
8244: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8245: @*/
8246: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8247: {
8248: PetscFunctionBegin;
8251: PetscAssertPointer(n, 5);
8252: if (ia) PetscAssertPointer(ia, 6);
8253: if (ja) PetscAssertPointer(ja, 7);
8254: PetscAssertPointer(done, 8);
8255: MatCheckPreallocated(mat, 1);
8256: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8257: else {
8258: *done = PETSC_TRUE;
8259: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8260: }
8261: PetscFunctionReturn(PETSC_SUCCESS);
8262: }
8264: /*@C
8265: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8267: Collective
8269: Input Parameters:
8270: + mat - the matrix
8271: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8272: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8273: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8274: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8275: always used.
8276: . n - size of (possibly compressed) matrix
8277: . ia - the row pointers
8278: - ja - the column indices
8280: Output Parameter:
8281: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8283: Level: developer
8285: Note:
8286: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8287:   use of the arrays after they have been restored. If you pass `NULL`, it will
8288: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8290: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8291: @*/
8292: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8293: {
8294: PetscFunctionBegin;
8297: if (ia) PetscAssertPointer(ia, 6);
8298: if (ja) PetscAssertPointer(ja, 7);
8299: if (done) PetscAssertPointer(done, 8);
8300: MatCheckPreallocated(mat, 1);
8302: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8303: else {
8304: if (done) *done = PETSC_TRUE;
8305: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8306: if (n) *n = 0;
8307: if (ia) *ia = NULL;
8308: if (ja) *ja = NULL;
8309: }
8310: PetscFunctionReturn(PETSC_SUCCESS);
8311: }
8313: /*@C
8314: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8316: Collective
8318: Input Parameters:
8319: + mat - the matrix
8320: . shift - 0 or 1, indicating whether the indices start at 0 or at 1
8321: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the matrix data structure should be symmetrized
8322: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8323: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8324: always used.
8326: Output Parameters:
8327: + n - size of (possibly compressed) matrix
8328: . ia - the column pointers
8329: . ja - the row indices
8330: - done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8332: Level: developer
8334: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8335: @*/
8336: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8337: {
8338: PetscFunctionBegin;
8341: if (ia) PetscAssertPointer(ia, 6);
8342: if (ja) PetscAssertPointer(ja, 7);
8343: PetscAssertPointer(done, 8);
8344: MatCheckPreallocated(mat, 1);
8346: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8347: else {
8348: *done = PETSC_TRUE;
8349: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8350: if (n) *n = 0;
8351: if (ia) *ia = NULL;
8352: if (ja) *ja = NULL;
8353: }
8354: PetscFunctionReturn(PETSC_SUCCESS);
8355: }
8357: /*@
8358: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8359: `MatGetColumnIJ()`.
8361: Collective
8363: Input Parameters:
8364: + mat - the matrix
8365: . ncolors - maximum color value
8366: . n - number of entries in colorarray
8367: - colorarray - array indicating color for each column
8369: Output Parameter:
8370: . iscoloring - coloring generated using colorarray information
8372: Level: developer
8374: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8375: @*/
8376: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8377: {
8378: PetscFunctionBegin;
8381: PetscAssertPointer(colorarray, 4);
8382: PetscAssertPointer(iscoloring, 5);
8383: MatCheckPreallocated(mat, 1);
8385: if (!mat->ops->coloringpatch) {
8386: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8387: } else {
8388: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8389: }
8390: PetscFunctionReturn(PETSC_SUCCESS);
8391: }
8393: /*@
8394: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8396: Logically Collective
8398: Input Parameter:
8399: . mat - the factored matrix to be reset
8401: Level: developer
8403: Notes:
8404: This routine should be used only with factored matrices formed by in-place
8405: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8406: format). This option can save memory, for example, when solving nonlinear
8407: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8408: ILU(0) preconditioner.
8410: One can specify in-place ILU(0) factorization by calling
8411: .vb
8412:       PCSetType(pc, PCILU);
8413:       PCFactorSetUseInPlace(pc, PETSC_TRUE);
8414: .ve
8415: or by using the options -pc_type ilu -pc_factor_in_place
8417: In-place factorization ILU(0) can also be used as a local
8418: solver for the blocks within the block Jacobi or additive Schwarz
8419: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8420: for details on setting local solver options.
8422: Most users should employ the `KSP` interface for linear solvers
8423: instead of working directly with matrix algebra routines such as this.
8424: See, e.g., `KSPCreate()`.
8426: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8427: @*/
8428: PetscErrorCode MatSetUnfactored(Mat mat)
8429: {
8430: PetscFunctionBegin;
8433: MatCheckPreallocated(mat, 1);
8434: mat->factortype = MAT_FACTOR_NONE;
8435: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8436: PetscUseTypeMethod(mat, setunfactored);
8437: PetscFunctionReturn(PETSC_SUCCESS);
8438: }
8440: /*@
8441: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8442: as the original matrix.
8444: Collective
8446: Input Parameters:
8447: + mat - the original matrix
8448: . isrow - parallel `IS` containing the rows this processor should obtain
8449: . isrow - parallel `IS` containing the rows this processor should obtain
8449: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" of the new matrix.
8450: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8452: Output Parameter:
8453: . newmat - the new submatrix, of the same type as the original matrix
8455: Level: advanced
8457: Notes:
8458: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8460: Some matrix types place restrictions on the row and column indices, such
8461: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8462: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8464: The index sets may not have duplicate entries.
8466:    The first time this is called you should use a `cll` of `MAT_INITIAL_MATRIX`;
8467:    the `MatCreateSubMatrix()` routine will create `newmat` for you. Any additional calls
8468:    to this routine with a `mat` of the same nonzero structure and with a `cll` of `MAT_REUSE_MATRIX`
8469:    will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8470:    you are finished using it.
8472: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8473: the input matrix.
8475: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8477: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8478: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8480: Example usage:
8481: Consider the following 8x8 matrix with 34 non-zero values, that is
8482: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8483: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8484: as follows
8485: .vb
8486: 1 2 0 | 0 3 0 | 0 4
8487: Proc0 0 5 6 | 7 0 0 | 8 0
8488: 9 0 10 | 11 0 0 | 12 0
8489: -------------------------------------
8490: 13 0 14 | 15 16 17 | 0 0
8491: Proc1 0 18 0 | 19 20 21 | 0 0
8492: 0 0 0 | 22 23 0 | 24 0
8493: -------------------------------------
8494: Proc2 25 26 27 | 0 0 28 | 29 0
8495: 30 0 0 | 31 32 33 | 0 34
8496: .ve
8498: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8500: .vb
8501: 2 0 | 0 3 0 | 0
8502: Proc0 5 6 | 7 0 0 | 8
8503: -------------------------------
8504: Proc1 18 0 | 19 20 21 | 0
8505: -------------------------------
8506: Proc2 26 27 | 0 0 28 | 29
8507: 0 0 | 31 32 33 | 0
8508: .ve
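   A corresponding code sketch (illustrative only; it assumes `A` lives on `PETSC_COMM_WORLD` and that `nlocalrows`, `rowindices`, `nlocalcols`, and `colindices` hold each process's desired global rows and "diagonal part" columns):
.vb
   IS  isrow, iscol;
   Mat B;

   PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, nlocalrows, rowindices, PETSC_COPY_VALUES, &isrow));
   PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, nlocalcols, colindices, PETSC_COPY_VALUES, &iscol));
   PetscCall(MatCreateSubMatrix(A, isrow, iscol, MAT_INITIAL_MATRIX, &B));
   // ... later, after the values (but not the nonzero structure) of A change ...
   PetscCall(MatCreateSubMatrix(A, isrow, iscol, MAT_REUSE_MATRIX, &B));
   PetscCall(MatDestroy(&B));
   PetscCall(ISDestroy(&isrow));
   PetscCall(ISDestroy(&iscol));
.ve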
8510: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8511: @*/
8512: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8513: {
8514: PetscMPIInt size;
8515: Mat *local;
8516: IS iscoltmp;
8517: PetscBool flg;
8519: PetscFunctionBegin;
8523: PetscAssertPointer(newmat, 5);
8526: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8527: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8528: PetscCheck(cll != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_INPLACE_MATRIX");
8530: MatCheckPreallocated(mat, 1);
8531: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8533: if (!iscol || isrow == iscol) {
8534: PetscBool stride;
8535: PetscMPIInt grabentirematrix = 0, grab;
8536: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8537: if (stride) {
8538: PetscInt first, step, n, rstart, rend;
8539: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8540: if (step == 1) {
8541: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8542: if (rstart == first) {
8543: PetscCall(ISGetLocalSize(isrow, &n));
8544: if (n == rend - rstart) grabentirematrix = 1;
8545: }
8546: }
8547: }
8548: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8549: if (grab) {
8550: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8551: if (cll == MAT_INITIAL_MATRIX) {
8552: *newmat = mat;
8553: PetscCall(PetscObjectReference((PetscObject)mat));
8554: }
8555: PetscFunctionReturn(PETSC_SUCCESS);
8556: }
8557: }
8559: if (!iscol) {
8560: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8561: } else {
8562: iscoltmp = iscol;
8563: }
8565: /* if original matrix is on just one processor then use submatrix generated */
8566: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8567: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8568: goto setproperties;
8569: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8570: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8571: *newmat = *local;
8572: PetscCall(PetscFree(local));
8573: goto setproperties;
8574: } else if (!mat->ops->createsubmatrix) {
8575: /* Create a new matrix type that implements the operation using the full matrix */
8576: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8577: switch (cll) {
8578: case MAT_INITIAL_MATRIX:
8579: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8580: break;
8581: case MAT_REUSE_MATRIX:
8582: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8583: break;
8584: default:
8585: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8586: }
8587: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8588: goto setproperties;
8589: }
8591: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8592: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8593: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8595: setproperties:
8596: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8597: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8598: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8599: }
8600: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8601: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8602: if (!iscol || isrow == iscol) PetscCall(MatSelectVariableBlockSizes(*newmat, mat, isrow));
8603: PetscFunctionReturn(PETSC_SUCCESS);
8604: }
8606: /*@
8607: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8609: Not Collective
8611: Input Parameters:
8612: + A - the matrix we wish to propagate options from
8613: - B - the matrix we wish to propagate options to
8615: Level: beginner
8617: Note:
8618: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8620: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8621: @*/
8622: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8623: {
8624: PetscFunctionBegin;
8627: B->symmetry_eternal = A->symmetry_eternal;
8628: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8629: B->symmetric = A->symmetric;
8630: B->structurally_symmetric = A->structurally_symmetric;
8631: B->spd = A->spd;
8632: B->hermitian = A->hermitian;
8633: PetscFunctionReturn(PETSC_SUCCESS);
8634: }
8636: /*@
8637: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8638: used during the assembly process to store values that belong to
8639: other processors.
8641: Not Collective
8643: Input Parameters:
8644: + mat - the matrix
8645: . size - the initial size of the stash
8646: - bsize - the initial size of the block stash (if used)
8648: Options Database Keys:
8649: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8650: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8652: Level: intermediate
8654: Notes:
8655: The block-stash is used for values set with `MatSetValuesBlocked()` while
8656: the stash is used for values set with `MatSetValues()`
8658:    Run with the option -info and look for output of the form
8659:      MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8660:    to determine the appropriate value, MM, to use for `size`, and
8661:      MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8662:    to determine the value, BMM, to use for `bsize`
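   For example, one might preallocate the stash before assembly; the values below are purely illustrative, not recommendations:
.vb
   PetscCall(MatStashSetInitialSize(A, 10000, 1000)); // expect roughly 10000 off-process entries, 1000 blocked entries
   // ... MatSetValues()/MatSetValuesBlocked() calls ...
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve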
8664: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8665: @*/
8666: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8667: {
8668: PetscFunctionBegin;
8671: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8672: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8673: PetscFunctionReturn(PETSC_SUCCESS);
8674: }
8676: /*@
8677:   MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8678:   the matrix
8680: Neighbor-wise Collective
8682: Input Parameters:
8683: + A - the matrix
8684: . x - the vector to be multiplied by the interpolation operator
8685: - y - the vector to be added to the result
8687: Output Parameter:
8688: . w - the resulting vector
8690: Level: intermediate
8692: Notes:
8693: `w` may be the same vector as `y`.
8695: This allows one to use either the restriction or interpolation (its transpose)
8696: matrix to do the interpolation
8698: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8699: @*/
8700: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8701: {
8702: PetscInt M, N, Ny;
8704: PetscFunctionBegin;
8709: PetscCall(MatGetSize(A, &M, &N));
8710: PetscCall(VecGetSize(y, &Ny));
8711: if (M == Ny) {
8712: PetscCall(MatMultAdd(A, x, y, w));
8713: } else {
8714: PetscCall(MatMultTransposeAdd(A, x, y, w));
8715: }
8716: PetscFunctionReturn(PETSC_SUCCESS);
8717: }
8719: /*@
8720: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8721: the matrix
8723: Neighbor-wise Collective
8725: Input Parameters:
8726: + A - the matrix
8727: - x - the vector to be interpolated
8729: Output Parameter:
8730: . y - the resulting vector
8732: Level: intermediate
8734: Note:
8735: This allows one to use either the restriction or interpolation (its transpose)
8736: matrix to do the interpolation
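   For example, with an interpolation matrix `P` of size fine-by-coarse, a coarse vector `xc`, and a fine vector `xf` (a sketch, assuming compatible layouts):
.vb
   PetscCall(MatInterpolate(P, xc, xf)); // xf = P * xc
   PetscCall(MatRestrict(P, xf, xc));    // xc = P^T * xf
.ve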
8738: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8739: @*/
8740: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8741: {
8742: PetscInt M, N, Ny;
8744: PetscFunctionBegin;
8748: PetscCall(MatGetSize(A, &M, &N));
8749: PetscCall(VecGetSize(y, &Ny));
8750: if (M == Ny) {
8751: PetscCall(MatMult(A, x, y));
8752: } else {
8753: PetscCall(MatMultTranspose(A, x, y));
8754: }
8755: PetscFunctionReturn(PETSC_SUCCESS);
8756: }
8758: /*@
8759: MatRestrict - $y = A*x$ or $A^T*x$
8761: Neighbor-wise Collective
8763: Input Parameters:
8764: + A - the matrix
8765: - x - the vector to be restricted
8767: Output Parameter:
8768: . y - the resulting vector
8770: Level: intermediate
8772: Note:
8773: This allows one to use either the restriction or interpolation (its transpose)
8774: matrix to do the restriction
8776: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8777: @*/
8778: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8779: {
8780: PetscInt M, N, Nx;
8782: PetscFunctionBegin;
8786: PetscCall(MatGetSize(A, &M, &N));
8787: PetscCall(VecGetSize(x, &Nx));
8788: if (M == Nx) {
8789: PetscCall(MatMultTranspose(A, x, y));
8790: } else {
8791: PetscCall(MatMult(A, x, y));
8792: }
8793: PetscFunctionReturn(PETSC_SUCCESS);
8794: }
8796: /*@
8797: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8799: Neighbor-wise Collective
8801: Input Parameters:
8802: + A - the matrix
8803: . x - the input dense matrix to be multiplied
8804: - w - the input dense matrix to be added to the result
8806: Output Parameter:
8807: . y - the output dense matrix
8809: Level: intermediate
8811: Note:
8812: This allows one to use either the restriction or interpolation (its transpose)
8813: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8814: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8816: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8817: @*/
8818: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8819: {
8820: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8821: PetscBool trans = PETSC_TRUE;
8822: MatReuse reuse = MAT_INITIAL_MATRIX;
8824: PetscFunctionBegin;
8830: PetscCall(MatGetSize(A, &M, &N));
8831: PetscCall(MatGetSize(x, &Mx, &Nx));
8832: if (N == Mx) trans = PETSC_FALSE;
8833: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8834: Mo = trans ? N : M;
8835: if (*y) {
8836: PetscCall(MatGetSize(*y, &My, &Ny));
8837: if (Mo == My && Nx == Ny) {
8838: reuse = MAT_REUSE_MATRIX;
8839: } else {
8840: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8841: PetscCall(MatDestroy(y));
8842: }
8843: }
8845: if (w && *y == w) { /* this is to minimize changes in PCMG */
8846: PetscBool flg;
8848: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8849: if (w) {
8850: PetscInt My, Ny, Mw, Nw;
8852: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8853: PetscCall(MatGetSize(*y, &My, &Ny));
8854: PetscCall(MatGetSize(w, &Mw, &Nw));
8855: if (!flg || My != Mw || Ny != Nw) w = NULL;
8856: }
8857: if (!w) {
8858: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8859: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8860: PetscCall(PetscObjectDereference((PetscObject)w));
8861: } else {
8862: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8863: }
8864: }
8865: if (!trans) {
8866: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8867: } else {
8868: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8869: }
8870: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8871: PetscFunctionReturn(PETSC_SUCCESS);
8872: }
8874: /*@
8875: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8877: Neighbor-wise Collective
8879: Input Parameters:
8880: + A - the matrix
8881: - x - the input dense matrix
8883: Output Parameter:
8884: . y - the output dense matrix
8886: Level: intermediate
8888: Note:
8889: This allows one to use either the restriction or interpolation (its transpose)
8890: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8891: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8893: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8894: @*/
8895: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8896: {
8897: PetscFunctionBegin;
8898: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8899: PetscFunctionReturn(PETSC_SUCCESS);
8900: }
8902: /*@
8903: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8905: Neighbor-wise Collective
8907: Input Parameters:
8908: + A - the matrix
8909: - x - the input dense matrix
8911: Output Parameter:
8912: . y - the output dense matrix
8914: Level: intermediate
8916: Note:
8917: This allows one to use either the restriction or interpolation (its transpose)
8918: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8919: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8921: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8922: @*/
8923: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8924: {
8925: PetscFunctionBegin;
8926: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8927: PetscFunctionReturn(PETSC_SUCCESS);
8928: }
8930: /*@
8931: MatGetNullSpace - retrieves the null space of a matrix.
8933: Logically Collective
8935:   Input Parameter:
8936: . mat - the matrix
   Output Parameter:
8937: . nullsp - the null space object, `NULL` if not set
8939: Level: developer
8941: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8942: @*/
8943: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8944: {
8945: PetscFunctionBegin;
8947: PetscAssertPointer(nullsp, 2);
8948: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8949: PetscFunctionReturn(PETSC_SUCCESS);
8950: }
8952: /*@C
8953: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
8955: Logically Collective
8957: Input Parameters:
8958: + n - the number of matrices
8959: - mat - the array of matrices
8961: Output Parameters:
8962: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
8964: Level: developer
8966: Note:
8967:    Call `MatRestoreNullSpaces()` to provide these to another array of matrices
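   A possible usage sketch (`n` and `mats` are assumed to be an existing array of matrices that is about to be rebuilt):
.vb
   MatNullSpace *nullsps;

   PetscCall(MatGetNullSpaces(n, mats, &nullsps));
   // ... destroy and recreate, or convert, the matrices in mats[] ...
   PetscCall(MatRestoreNullSpaces(n, mats, &nullsps));
.ve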
8969: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
8970: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
8971: @*/
8972: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
8973: {
8974: PetscFunctionBegin;
8975: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
8976: PetscAssertPointer(mat, 2);
8977: PetscAssertPointer(nullsp, 3);
8979: PetscCall(PetscCalloc1(3 * n, nullsp));
8980: for (PetscInt i = 0; i < n; i++) {
8982: (*nullsp)[i] = mat[i]->nullsp;
8983: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
8984: (*nullsp)[n + i] = mat[i]->nearnullsp;
8985: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
8986: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
8987: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
8988: }
8989: PetscFunctionReturn(PETSC_SUCCESS);
8990: }
8992: /*@C
8993: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
8995: Logically Collective
8997: Input Parameters:
8998: + n - the number of matrices
8999: . mat - the array of matrices
9000: - nullsp - an array of null spaces
9002: Level: developer
9004: Note:
9005: Call `MatGetNullSpaces()` to create `nullsp`
9007: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9008: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9009: @*/
9010: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9011: {
9012: PetscFunctionBegin;
9013: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9014: PetscAssertPointer(mat, 2);
9015: PetscAssertPointer(nullsp, 3);
9016: PetscAssertPointer(*nullsp, 3);
9018: for (PetscInt i = 0; i < n; i++) {
9020: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9021: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9022: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9023: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9024: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9025: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9026: }
9027: PetscCall(PetscFree(*nullsp));
9028: PetscFunctionReturn(PETSC_SUCCESS);
9029: }
9031: /*@
9032: MatSetNullSpace - attaches a null space to a matrix.
9034: Logically Collective
9036: Input Parameters:
9037: + mat - the matrix
9038: - nullsp - the null space object
9040: Level: advanced
9042: Notes:
9043: This null space is used by the `KSP` linear solvers to solve singular systems.
9045:    Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
9047: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9048: to zero but the linear system will still be solved in a least squares sense.
9050:    The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9051:    for a matrix $A$ from $R^n$ to $R^m$ ($m$ rows, $n$ columns), $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
9052:    Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$. Hence the linear system $A x = b$ has a solution only if $b \in R(A)$ (equivalently, $b$ is orthogonal to
9053:    $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution,
9054:    the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$, where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
9055:    This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9057:    If the matrix is known to be symmetric, because it is a `MATSBAIJ` matrix or because one has called
9058:    `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`), this
9059:    routine also automatically calls `MatSetTransposeNullSpace()`.
9061: The user should call `MatNullSpaceDestroy()`.
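   For example, a sketch for a matrix whose null space is the constant vector (such as a pure Neumann Laplacian):
.vb
   MatNullSpace nullsp;

   PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp));
   PetscCall(MatSetNullSpace(A, nullsp));
   PetscCall(MatNullSpaceDestroy(&nullsp));
.ve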
9063: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9064: `KSPSetPCSide()`
9065: @*/
9066: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9067: {
9068: PetscFunctionBegin;
9071: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9072: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9073: mat->nullsp = nullsp;
9074: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9075: PetscFunctionReturn(PETSC_SUCCESS);
9076: }
9078: /*@
9079: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9081: Logically Collective
9083:   Input Parameter:
9084: . mat - the matrix
   Output Parameter:
9085: . nullsp - the null space object of the transpose, `NULL` if not set
9087: Level: developer
9089: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9090: @*/
9091: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9092: {
9093: PetscFunctionBegin;
9096: PetscAssertPointer(nullsp, 2);
9097: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9098: PetscFunctionReturn(PETSC_SUCCESS);
9099: }
9101: /*@
9102: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9104: Logically Collective
9106: Input Parameters:
9107: + mat - the matrix
9108: - nullsp - the null space object
9110: Level: advanced
9112: Notes:
9113: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9115: See `MatSetNullSpace()`
9117: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9118: @*/
9119: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9120: {
9121: PetscFunctionBegin;
9124: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9125: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9126: mat->transnullsp = nullsp;
9127: PetscFunctionReturn(PETSC_SUCCESS);
9128: }
9130: /*@
9131:   MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9132:   This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9134: Logically Collective
9136: Input Parameters:
9137: + mat - the matrix
9138: - nullsp - the null space object
9140: Level: advanced
9142: Notes:
9143: Overwrites any previous near null space that may have been attached
9145:    You can remove the near null space by calling this routine with a `nullsp` of `NULL`
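   For example, a sketch for elasticity with an algebraic multigrid preconditioner, where `coords` is assumed to be a vector of nodal coordinates with the appropriate block size:
.vb
   MatNullSpace nearnull;

   PetscCall(MatNullSpaceCreateRigidBody(coords, &nearnull));
   PetscCall(MatSetNearNullSpace(A, nearnull));
   PetscCall(MatNullSpaceDestroy(&nearnull));
.ve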
9147: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9148: @*/
9149: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9150: {
9151: PetscFunctionBegin;
9155: MatCheckPreallocated(mat, 1);
9156: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9157: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9158: mat->nearnullsp = nullsp;
9159: PetscFunctionReturn(PETSC_SUCCESS);
9160: }
9162: /*@
9163: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9165: Not Collective
9167: Input Parameter:
9168: . mat - the matrix
9170: Output Parameter:
9171: . nullsp - the null space object, `NULL` if not set
9173: Level: advanced
9175: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9176: @*/
9177: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9178: {
9179: PetscFunctionBegin;
9182: PetscAssertPointer(nullsp, 2);
9183: MatCheckPreallocated(mat, 1);
9184: *nullsp = mat->nearnullsp;
9185: PetscFunctionReturn(PETSC_SUCCESS);
9186: }
9188: /*@
9189: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9191: Collective
9193: Input Parameters:
9194: + mat - the matrix
9195: . row - row/column permutation
9196: - info - information on desired factorization process
9198: Level: developer
9200: Notes:
9201:    This is likely truly in-place only when the level of fill is zero; otherwise it allocates
9202:    new space to store the factored matrix and frees the previous memory.
9204: Most users should employ the `KSP` interface for linear solvers
9205: instead of working directly with matrix algebra routines such as this.
9206: See, e.g., `KSPCreate()`.
9208: Fortran Note:
9209: A valid (non-null) `info` argument must be provided
9211: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9212: @*/
9213: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9214: {
9215: PetscFunctionBegin;
9219: PetscAssertPointer(info, 3);
9220: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9221: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9222: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9223: MatCheckPreallocated(mat, 1);
9224: PetscUseTypeMethod(mat, iccfactor, row, info);
9225: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9226: PetscFunctionReturn(PETSC_SUCCESS);
9227: }
9229: /*@
9230: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9231: ghosted ones.
9233: Not Collective
9235: Input Parameters:
9236: + mat - the matrix
9237: - diag - the diagonal values, including ghost ones
9239: Level: developer
9241: Notes:
9242: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9244:    Supplying the ghosted scaling values directly allows one to avoid the communication that `MatDiagonalScale()` would otherwise need to perform the scaling
9246: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9247: @*/
9248: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9249: {
9250: PetscMPIInt size;
9252: PetscFunctionBegin;
9257: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9258: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9259: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9260: if (size == 1) {
9261: PetscInt n, m;
9262: PetscCall(VecGetSize(diag, &n));
9263: PetscCall(MatGetSize(mat, NULL, &m));
9264: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9265: PetscCall(MatDiagonalScale(mat, NULL, diag));
9266: } else {
9267: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9268: }
9269: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9270: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9271: PetscFunctionReturn(PETSC_SUCCESS);
9272: }
9274: /*@
9275: MatGetInertia - Gets the inertia from a factored matrix
9277: Collective
9279: Input Parameter:
9280: . mat - the matrix
9282: Output Parameters:
9283: + nneg - number of negative eigenvalues
9284: . nzero - number of zero eigenvalues
9285: - npos - number of positive eigenvalues
9287: Level: advanced
9289: Note:
9290: Matrix must have been factored by `MatCholeskyFactor()`
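   A possible usage sketch with a Cholesky factorization through an external package (assumes PETSc was configured with MUMPS; a factorization that supports inertia computation is required):
.vb
   Mat           F;
   MatFactorInfo info;
   PetscInt      nneg, nzero, npos;

   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_CHOLESKY, &F));
   PetscCall(MatCholeskyFactorSymbolic(F, A, NULL, &info));
   PetscCall(MatCholeskyFactorNumeric(F, A, &info));
   PetscCall(MatGetInertia(F, &nneg, &nzero, &npos));
   PetscCall(MatDestroy(&F));
.ve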
9292: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9293: @*/
9294: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9295: {
9296: PetscFunctionBegin;
9299: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9300: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9301: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9302: PetscFunctionReturn(PETSC_SUCCESS);
9303: }
9305: /*@C
9306: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9308: Neighbor-wise Collective
9310: Input Parameters:
9311: + mat - the factored matrix obtained with `MatGetFactor()`
9312: - b - the right-hand-side vectors
9314: Output Parameter:
9315: . x - the result vectors
9317: Level: developer
9319: Note:
9320: The vectors `b` and `x` cannot be the same. I.e., one cannot
9321: call `MatSolves`(A,x,x).
9323: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9324: @*/
9325: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9326: {
9327: PetscFunctionBegin;
9330: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9331: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9332: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9334: MatCheckPreallocated(mat, 1);
9335: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9336: PetscUseTypeMethod(mat, solves, b, x);
9337: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9338: PetscFunctionReturn(PETSC_SUCCESS);
9339: }
9341: /*@
9342: MatIsSymmetric - Test whether a matrix is symmetric
9344: Collective
9346: Input Parameters:
9347: + A - the matrix to test
9348: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9350: Output Parameter:
9351: . flg - the result
9353: Level: intermediate
9355: Notes:
9356: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9358: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9360: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9361: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
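   For example (the tolerance below is purely illustrative):
.vb
   PetscBool symm;

   PetscCall(MatIsSymmetric(A, 1.e-10, &symm)); // entries differing from their transpose by <= 1e-10 count as equal
.ve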
9363: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9364: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9365: @*/
9366: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9367: {
9368: PetscFunctionBegin;
9370: PetscAssertPointer(flg, 3);
9371: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9372: else {
9373: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9374: else PetscCall(MatIsTranspose(A, A, tol, flg));
9375: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9376: }
9377: PetscFunctionReturn(PETSC_SUCCESS);
9378: }
9380: /*@
9381: MatIsHermitian - Test whether a matrix is Hermitian
9383: Collective
9385: Input Parameters:
9386: + A - the matrix to test
9387: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9389: Output Parameter:
9390: . flg - the result
9392: Level: intermediate
9394: Notes:
9395: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9397: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9399: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9400:    after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9402: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9403: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9404: @*/
9405: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9406: {
9407: PetscFunctionBegin;
9409: PetscAssertPointer(flg, 3);
9410: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9411: else {
9412: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9413: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9414: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9415: }
9416: PetscFunctionReturn(PETSC_SUCCESS);
9417: }
9419: /*@
9420: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9422: Not Collective
9424: Input Parameter:
9425: . A - the matrix to check
9427: Output Parameters:
9428: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9429: - flg - the result (only valid if set is `PETSC_TRUE`)
9431: Level: advanced
9433: Notes:
9434: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9435: if you want it explicitly checked
9437: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9438: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
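   A typical pattern (a sketch) is to use this cheap query first and fall back to the explicit check only when needed:
.vb
   PetscBool set, issym;

   PetscCall(MatIsSymmetricKnown(A, &set, &issym));
   if (!set) PetscCall(MatIsSymmetric(A, 0.0, &issym)); // possibly expensive explicit check
.ve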
9440: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9441: @*/
9442: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9443: {
9444: PetscFunctionBegin;
9446: PetscAssertPointer(set, 2);
9447: PetscAssertPointer(flg, 3);
9448: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9449: *set = PETSC_TRUE;
9450: *flg = PetscBool3ToBool(A->symmetric);
9451: } else {
9452: *set = PETSC_FALSE;
9453: }
9454: PetscFunctionReturn(PETSC_SUCCESS);
9455: }
9457: /*@
9458: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9460: Not Collective
9462: Input Parameter:
9463: . A - the matrix to check
9465: Output Parameters:
9466: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9467: - flg - the result (only valid if set is `PETSC_TRUE`)
9469: Level: advanced
9471: Notes:
9472: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9474: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9475: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9477: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9478: @*/
9479: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9480: {
9481: PetscFunctionBegin;
9483: PetscAssertPointer(set, 2);
9484: PetscAssertPointer(flg, 3);
9485: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9486: *set = PETSC_TRUE;
9487: *flg = PetscBool3ToBool(A->spd);
9488: } else {
9489: *set = PETSC_FALSE;
9490: }
9491: PetscFunctionReturn(PETSC_SUCCESS);
9492: }
9494: /*@
9495: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9497: Not Collective
9499: Input Parameter:
9500: . A - the matrix to check
9502: Output Parameters:
9503: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9504: - flg - the result (only valid if set is `PETSC_TRUE`)
9506: Level: advanced
9508: Notes:
9509: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9510: if you want it explicitly checked
9512: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9513: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9515: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9516: @*/
9517: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9518: {
9519: PetscFunctionBegin;
9521: PetscAssertPointer(set, 2);
9522: PetscAssertPointer(flg, 3);
9523: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9524: *set = PETSC_TRUE;
9525: *flg = PetscBool3ToBool(A->hermitian);
9526: } else {
9527: *set = PETSC_FALSE;
9528: }
9529: PetscFunctionReturn(PETSC_SUCCESS);
9530: }
9532: /*@
9533: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9535: Collective
9537: Input Parameter:
9538: . A - the matrix to test
9540: Output Parameter:
9541: . flg - the result
9543: Level: intermediate
9545: Notes:
9546:    If the matrix does not yet know whether it is structurally symmetric this can be an expensive operation; also available is `MatIsStructurallySymmetricKnown()`
9548: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9549: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9551: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9552: @*/
9553: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9554: {
9555: PetscFunctionBegin;
9557: PetscAssertPointer(flg, 2);
9558: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9559: *flg = PetscBool3ToBool(A->structurally_symmetric);
9560: } else {
9561: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9562: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9563: }
9564: PetscFunctionReturn(PETSC_SUCCESS);
9565: }
9567: /*@
9568: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9570: Not Collective
9572: Input Parameter:
9573: . A - the matrix to check
9575: Output Parameters:
9576: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9577: - flg - the result (only valid if set is `PETSC_TRUE`)
9579: Level: advanced
9581: Notes:
9582: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9583: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9585: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9587: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9588: @*/
9589: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9590: {
9591: PetscFunctionBegin;
9593: PetscAssertPointer(set, 2);
9594: PetscAssertPointer(flg, 3);
9595: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9596: *set = PETSC_TRUE;
9597: *flg = PetscBool3ToBool(A->structurally_symmetric);
9598: } else {
9599: *set = PETSC_FALSE;
9600: }
9601: PetscFunctionReturn(PETSC_SUCCESS);
9602: }
9604: /*@
9605: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9606: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9608: Not Collective
9610: Input Parameter:
9611: . mat - the matrix
9613: Output Parameters:
9614: + nstash - the size of the stash
9615: . reallocs - the number of additional mallocs incurred
9616: . bnstash - the size of the block stash
9617: - breallocs - the number of additional mallocs incurred in the block stash
9619: Level: advanced
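   Example usage (a sketch; printing the sizes can help choose values for `MatStashSetInitialSize()`):
.vb
   PetscInt nstash, reallocs, bnstash, breallocs;

   PetscCall(MatStashGetInfo(A, &nstash, &reallocs, &bnstash, &breallocs));
   PetscCall(PetscPrintf(PETSC_COMM_SELF, "stash: %" PetscInt_FMT " entries, %" PetscInt_FMT " mallocs\n", nstash, reallocs));
.ve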
9621: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9622: @*/
9623: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9624: {
9625: PetscFunctionBegin;
9626: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9627: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9628: PetscFunctionReturn(PETSC_SUCCESS);
9629: }
9631: /*@
9632: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9633: parallel layout, `PetscLayout` for rows and columns
9635: Collective
9637: Input Parameter:
9638: . mat - the matrix
9640: Output Parameters:
9641: + right - (optional) vector that the matrix can be multiplied against
9642: - left - (optional) vector that the matrix vector product can be stored in
9644: Level: advanced
9646: Notes:
9647: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9649:    These are new vectors that are not owned by `mat`; they should be destroyed with `VecDestroy()` when no longer needed
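   Example usage (a sketch):
.vb
   Vec right, left;

   PetscCall(MatCreateVecs(A, &right, &left));
   PetscCall(MatMult(A, right, left)); // right has the column layout of A, left the row layout
   PetscCall(VecDestroy(&right));
   PetscCall(VecDestroy(&left));
.ve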
9651: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9652: @*/
9653: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9654: {
9655: PetscFunctionBegin;
9658: if (mat->ops->getvecs) {
9659: PetscUseTypeMethod(mat, getvecs, right, left);
9660: } else {
9661: if (right) {
9662: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9663: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9664: PetscCall(VecSetType(*right, mat->defaultvectype));
9665: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9666: if (mat->boundtocpu && mat->bindingpropagates) {
9667: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9668: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9669: }
9670: #endif
9671: }
9672: if (left) {
9673: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9674: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9675: PetscCall(VecSetType(*left, mat->defaultvectype));
9676: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9677: if (mat->boundtocpu && mat->bindingpropagates) {
9678: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9679: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9680: }
9681: #endif
9682: }
9683: }
9684: PetscFunctionReturn(PETSC_SUCCESS);
9685: }
9687: /*@
9688: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9689: with default values.
9691: Not Collective
9693: Input Parameter:
9694: . info - the `MatFactorInfo` data structure
9696: Level: developer
9698: Notes:
9699: The solvers are generally used through the `KSP` and `PC` objects, for example
9700: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9702: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9704: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9705: @*/
9706: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9707: {
9708: PetscFunctionBegin;
9709: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9710: PetscFunctionReturn(PETSC_SUCCESS);
9711: }
9713: /*@
9714: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9716: Collective
9718: Input Parameters:
9719: + mat - the factored matrix
9720: - is - the index set defining the Schur indices (0-based)
9722: Level: advanced
9724: Notes:
9725: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9727: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9729: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
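   A possible call sequence (a sketch; assumes PETSc was configured with MUMPS and that `schur_is`, `rhs`, and `sol` have been created elsewhere with sizes matching the Schur block):
.vb
   Mat           F;
   MatFactorInfo info;

   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F));
   PetscCall(MatFactorSetSchurIS(F, schur_is)); // must be set before the symbolic factorization
   PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
   PetscCall(MatLUFactorNumeric(F, A, &info));
   PetscCall(MatFactorSolveSchurComplement(F, rhs, sol));
   PetscCall(MatDestroy(&F));
.ve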
9731: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9732: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9733: @*/
9734: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9735: {
9736: PetscErrorCode (*f)(Mat, IS);
9738: PetscFunctionBegin;
9743: PetscCheckSameComm(mat, 1, is, 2);
9744: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9745: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9746: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9747: PetscCall(MatDestroy(&mat->schur));
9748: PetscCall((*f)(mat, is));
9749: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9750: PetscFunctionReturn(PETSC_SUCCESS);
9751: }
9753: /*@
9754: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9756: Logically Collective
9758: Input Parameters:
9759: + F - the factored matrix obtained by calling `MatGetFactor()`
9760: . S - location where to return the Schur complement, can be `NULL`
9761: - status - the status of the Schur complement matrix, can be `NULL`
9763: Level: advanced
9765: Notes:
9766: You must call `MatFactorSetSchurIS()` before calling this routine.
9768: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9770: The routine provides a copy of the Schur matrix stored within the solver data structures.
9771: The caller must destroy the object when it is no longer needed.
9772: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9774: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9776: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9778: Developer Note:
9779: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9780: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9782: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9783: @*/
9784: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9785: {
9786: PetscFunctionBegin;
9788: if (S) PetscAssertPointer(S, 2);
9789: if (status) PetscAssertPointer(status, 3);
9790: if (S) {
9791: PetscErrorCode (*f)(Mat, Mat *);
9793: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9794: if (f) {
9795: PetscCall((*f)(F, S));
9796: } else {
9797: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9798: }
9799: }
9800: if (status) *status = F->schur_status;
9801: PetscFunctionReturn(PETSC_SUCCESS);
9802: }
9804: /*@
9805: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9807: Logically Collective
9809: Input Parameters:
9810: + F - the factored matrix obtained by calling `MatGetFactor()`
9811: . S - location where to return the Schur complement, can be `NULL`
9812: - status - the status of the Schur complement matrix, can be `NULL`
9814: Level: advanced
9816: Notes:
9817: You must call `MatFactorSetSchurIS()` before calling this routine.
9819: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9821:    The routine returns the Schur complement stored within the data structures of the solver.
9823: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9825: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9827: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9829: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
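  Example Usage:
  A hedged sketch of the borrow/return pattern, assuming `F` was factored after calling `MatFactorSetSchurIS()` (see `MatFactorCreateSchurComplement()` for the full setup):
.vb
  Mat                  S;
  MatFactorSchurStatus status;

  PetscCall(MatFactorGetSchurComplement(F, &S, &status));    // borrow the internal Schur matrix, no copy is made
  // ... read-only use of S ...
  PetscCall(MatFactorRestoreSchurComplement(F, &S, status)); // hand it back; do not MatDestroy() it
.ve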
9831: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9832: @*/
9833: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9834: {
9835: PetscFunctionBegin;
9837: if (S) {
9838: PetscAssertPointer(S, 2);
9839: *S = F->schur;
9840: }
9841: if (status) {
9842: PetscAssertPointer(status, 3);
9843: *status = F->schur_status;
9844: }
9845: PetscFunctionReturn(PETSC_SUCCESS);
9846: }
9848: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9849: {
9850: Mat S = F->schur;
9852: PetscFunctionBegin;
9853: switch (F->schur_status) {
9854: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9855: case MAT_FACTOR_SCHUR_INVERTED:
9856: if (S) {
9857: S->ops->solve = NULL;
9858: S->ops->matsolve = NULL;
9859: S->ops->solvetranspose = NULL;
9860: S->ops->matsolvetranspose = NULL;
9861: S->ops->solveadd = NULL;
9862: S->ops->solvetransposeadd = NULL;
9863: S->factortype = MAT_FACTOR_NONE;
9864: PetscCall(PetscFree(S->solvertype));
9865: }
9866: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9867: break;
9868: default:
9869: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9870: }
9871: PetscFunctionReturn(PETSC_SUCCESS);
9872: }
9874: /*@
9875: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9877: Logically Collective
9879: Input Parameters:
9880: + F - the factored matrix obtained by calling `MatGetFactor()`
9881: . S - location where the Schur complement is stored
9882: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9884: Level: advanced
9886: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9887: @*/
9888: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9889: {
9890: PetscFunctionBegin;
9892: if (S) {
9894: *S = NULL;
9895: }
9896: F->schur_status = status;
9897: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9898: PetscFunctionReturn(PETSC_SUCCESS);
9899: }
9901: /*@
9902: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9904: Logically Collective
9906: Input Parameters:
9907: + F - the factored matrix obtained by calling `MatGetFactor()`
9908: . rhs - location where the right-hand side of the Schur complement system is stored
9909: - sol - location where the solution of the Schur complement system has to be returned
9911: Level: advanced
9913: Notes:
9914: The sizes of the vectors should match the size of the Schur complement
9916: Must be called after `MatFactorSetSchurIS()`
9918: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9919: @*/
9920: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9921: {
9922: PetscFunctionBegin;
9929: PetscCheckSameComm(F, 1, rhs, 2);
9930: PetscCheckSameComm(F, 1, sol, 3);
9931: PetscCall(MatFactorFactorizeSchurComplement(F));
9932: switch (F->schur_status) {
9933: case MAT_FACTOR_SCHUR_FACTORED:
9934: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9935: break;
9936: case MAT_FACTOR_SCHUR_INVERTED:
9937: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9938: break;
9939: default:
9940: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9941: }
9942: PetscFunctionReturn(PETSC_SUCCESS);
9943: }
9945: /*@
9946: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9948: Logically Collective
9950: Input Parameters:
9951: + F - the factored matrix obtained by calling `MatGetFactor()`
9952: . rhs - location where the right-hand side of the Schur complement system is stored
9953: - sol - location where the solution of the Schur complement system has to be returned
9955: Level: advanced
9957: Notes:
9958: The sizes of the vectors should match the size of the Schur complement
9960: Must be called after `MatFactorSetSchurIS()`
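  Example Usage:
  A minimal sketch, assuming `F` was factored after `MatFactorSetSchurIS()` and that `rhs` and `sol` are placeholder vectors sized like the Schur block:
.vb
  PetscCall(MatFactorSolveSchurComplement(F, rhs, sol)); // factors the Schur block if needed, then solves S * sol = rhs
.ve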
9962: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9963: @*/
9964: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9965: {
9966: PetscFunctionBegin;
9973: PetscCheckSameComm(F, 1, rhs, 2);
9974: PetscCheckSameComm(F, 1, sol, 3);
9975: PetscCall(MatFactorFactorizeSchurComplement(F));
9976: switch (F->schur_status) {
9977: case MAT_FACTOR_SCHUR_FACTORED:
9978: PetscCall(MatSolve(F->schur, rhs, sol));
9979: break;
9980: case MAT_FACTOR_SCHUR_INVERTED:
9981: PetscCall(MatMult(F->schur, rhs, sol));
9982: break;
9983: default:
9984: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9985: }
9986: PetscFunctionReturn(PETSC_SUCCESS);
9987: }
9989: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9990: #if PetscDefined(HAVE_CUDA)
9991: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9992: #endif
9994: /* Schur status updated in the interface */
9995: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9996: {
9997: Mat S = F->schur;
9999: PetscFunctionBegin;
10000: if (S) {
10001: PetscMPIInt size;
10002: PetscBool isdense, isdensecuda;
10004: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
10005: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
10006: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
10007: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
10008: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
10009: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
10010: if (isdense) {
10011: PetscCall(MatSeqDenseInvertFactors_Private(S));
10012: } else if (isdensecuda) {
10013: #if defined(PETSC_HAVE_CUDA)
10014: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
10015: #endif
10016: }
10017: // TODO: add a MATSEQDENSEHIP code path here?
10018: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
10019: }
10020: PetscFunctionReturn(PETSC_SUCCESS);
10021: }
10023: /*@
10024: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10026: Logically Collective
10028: Input Parameter:
10029: . F - the factored matrix obtained by calling `MatGetFactor()`
10031: Level: advanced
10033: Notes:
10034: Must be called after `MatFactorSetSchurIS()`.
10036: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
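  Example Usage:
  A hedged sketch, assuming `F` was factored after `MatFactorSetSchurIS()`:
.vb
  Mat Sinv;

  PetscCall(MatFactorInvertSchurComplement(F));              // the inverse is stored inside F
  PetscCall(MatFactorCreateSchurComplement(F, &Sinv, NULL)); // the copy now holds the inverse of the Schur complement
  // ... use Sinv ...
  PetscCall(MatDestroy(&Sinv));
.ve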
10038: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10039: @*/
10040: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10041: {
10042: PetscFunctionBegin;
10045: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
10046: PetscCall(MatFactorFactorizeSchurComplement(F));
10047: PetscCall(MatFactorInvertSchurComplement_Private(F));
10048: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10049: PetscFunctionReturn(PETSC_SUCCESS);
10050: }
10052: /*@
10053: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10055: Logically Collective
10057: Input Parameter:
10058: . F - the factored matrix obtained by calling `MatGetFactor()`
10060: Level: advanced
10062: Note:
10063: Must be called after `MatFactorSetSchurIS()`
10065: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10066: @*/
10067: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10068: {
10069: MatFactorInfo info;
10071: PetscFunctionBegin;
10074: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10075: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10076: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10077: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10078: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10079: } else {
10080: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10081: }
10082: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10083: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10084: PetscFunctionReturn(PETSC_SUCCESS);
10085: }
10087: /*@
10088: MatPtAP - Creates the matrix product $C = P^T * A * P$
10090: Neighbor-wise Collective
10092: Input Parameters:
10093: + A - the matrix
10094: . P - the projection matrix
10095: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10096: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate;
10097: if the result is a dense matrix this is irrelevant
10099: Output Parameter:
10100: . C - the product matrix
10102: Level: intermediate
10104: Notes:
10105: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10107: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_PtAP`
10108: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10110: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
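  Example Usage:
  A minimal sketch with placeholder matrices, assuming assembled `A` and `P` of compatible sizes:
.vb
  Mat C;

  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // symbolic + numeric C = P^T A P
  // ... change the numerical values of A, keeping its nonzero pattern ...
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));     // recompute the numeric phase only
  PetscCall(MatDestroy(&C));
.ve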
10112: Developer Note:
10113: For matrix types without a special implementation the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
10115: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10116: @*/
10117: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10118: {
10119: PetscFunctionBegin;
10120: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10121: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10123: if (scall == MAT_INITIAL_MATRIX) {
10124: PetscCall(MatProductCreate(A, P, NULL, C));
10125: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10126: PetscCall(MatProductSetAlgorithm(*C, "default"));
10127: PetscCall(MatProductSetFill(*C, fill));
10129: (*C)->product->api_user = PETSC_TRUE;
10130: PetscCall(MatProductSetFromOptions(*C));
10131: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10132: PetscCall(MatProductSymbolic(*C));
10133: } else { /* scall == MAT_REUSE_MATRIX */
10134: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10135: }
10137: PetscCall(MatProductNumeric(*C));
10138: if (A->symmetric == PETSC_BOOL3_TRUE) {
10139: PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10140: (*C)->spd = A->spd;
10141: }
10142: PetscFunctionReturn(PETSC_SUCCESS);
10143: }
10145: /*@
10146: MatRARt - Creates the matrix product $C = R * A * R^T$
10148: Neighbor-wise Collective
10150: Input Parameters:
10151: + A - the matrix
10152: . R - the projection matrix
10153: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10154: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate;
10155: if the result is a dense matrix this is irrelevant
10157: Output Parameter:
10158: . C - the product matrix
10160: Level: intermediate
10162: Notes:
10163: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10165: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_RARt`
10166: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10168: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10169: which inherit from `MATAIJ`. Due to PETSc's block-row distribution of sparse matrices among processes,
10170: the parallel `MatRARt()` is implemented by computing the explicit transpose of `R`, which can be very expensive.
10171: We recommend using `MatPtAP()` when possible.
10173: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10175: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10176: @*/
10177: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10178: {
10179: PetscFunctionBegin;
10180: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10181: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10183: if (scall == MAT_INITIAL_MATRIX) {
10184: PetscCall(MatProductCreate(A, R, NULL, C));
10185: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10186: PetscCall(MatProductSetAlgorithm(*C, "default"));
10187: PetscCall(MatProductSetFill(*C, fill));
10189: (*C)->product->api_user = PETSC_TRUE;
10190: PetscCall(MatProductSetFromOptions(*C));
10191: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10192: PetscCall(MatProductSymbolic(*C));
10193: } else { /* scall == MAT_REUSE_MATRIX */
10194: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10195: }
10197: PetscCall(MatProductNumeric(*C));
10198: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10199: PetscFunctionReturn(PETSC_SUCCESS);
10200: }
10202: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10203: {
10204: PetscBool flg = PETSC_TRUE;
10206: PetscFunctionBegin;
10207: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10208: if (scall == MAT_INITIAL_MATRIX) {
10209: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10210: PetscCall(MatProductCreate(A, B, NULL, C));
10211: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10212: PetscCall(MatProductSetFill(*C, fill));
10213: } else { /* scall == MAT_REUSE_MATRIX */
10214: Mat_Product *product = (*C)->product;
10216: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10217: if (flg && product && product->type != ptype) {
10218: PetscCall(MatProductClear(*C));
10219: product = NULL;
10220: }
10221: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10222: if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10223: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10224: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10225: product = (*C)->product;
10226: product->fill = fill;
10227: product->clear = PETSC_TRUE;
10228: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10229: flg = PETSC_FALSE;
10230: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10231: }
10232: }
10233: if (flg) {
10234: (*C)->product->api_user = PETSC_TRUE;
10235: PetscCall(MatProductSetType(*C, ptype));
10236: PetscCall(MatProductSetFromOptions(*C));
10237: PetscCall(MatProductSymbolic(*C));
10238: }
10239: PetscCall(MatProductNumeric(*C));
10240: PetscFunctionReturn(PETSC_SUCCESS);
10241: }
10243: /*@
10244: MatMatMult - Performs matrix-matrix multiplication $ C=A*B $.
10246: Neighbor-wise Collective
10248: Input Parameters:
10249: + A - the left matrix
10250: . B - the right matrix
10251: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10252: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate;
10253: if the result is a dense matrix this is irrelevant
10255: Output Parameter:
10256: . C - the product matrix
10258: Notes:
10259: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10261: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10262: call to this function with `MAT_INITIAL_MATRIX`.
10264: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10266: In the special case where the matrices `B` (and hence `C`) are dense, you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10267: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10269: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10271: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AB`
10272: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10274: Example of Usage:
10275: .vb
10276: MatProductCreate(A,B,NULL,&C);
10277: MatProductSetType(C,MATPRODUCT_AB);
10278: MatProductSymbolic(C);
10279: MatProductNumeric(C); // compute C=A * B
10280: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10281: MatProductNumeric(C);
10282: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10283: MatProductNumeric(C);
10284: .ve
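  The convenience routine itself collapses the sequence above into a single call; a minimal sketch with placeholder matrices `A` and `B`:
.vb
  Mat C;

  PetscCall(MatMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C));
  // ... change the values in A or B, keeping their nonzero patterns ...
  PetscCall(MatMatMult(A, B, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));
  PetscCall(MatDestroy(&C));
.ve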
10286: Level: intermediate
10288: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10289: @*/
10290: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10291: {
10292: PetscFunctionBegin;
10293: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10294: PetscFunctionReturn(PETSC_SUCCESS);
10295: }
10297: /*@
10298: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10300: Neighbor-wise Collective
10302: Input Parameters:
10303: + A - the left matrix
10304: . B - the right matrix
10305: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10306: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10308: Output Parameter:
10309: . C - the product matrix
10311: Options Database Key:
10312: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10313: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10314: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10316: Level: intermediate
10318: Notes:
10319: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10321: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10323: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10324: actually needed.
10326: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10327: and for pairs of `MATMPIDENSE` matrices.
10329: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABt`
10330: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10332: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10334: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10335: @*/
10336: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10337: {
10338: PetscFunctionBegin;
10339: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10340: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10341: PetscFunctionReturn(PETSC_SUCCESS);
10342: }
10344: /*@
10345: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10347: Neighbor-wise Collective
10349: Input Parameters:
10350: + A - the left matrix
10351: . B - the right matrix
10352: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10353: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10355: Output Parameter:
10356: . C - the product matrix
10358: Level: intermediate
10360: Notes:
10361: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10363: `MAT_REUSE_MATRIX` can only be used if `A` and `B` have the same nonzero pattern as in the previous call.
10365: This is a convenience routine that wraps the use of `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AtB`
10366: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10368: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10369: actually needed.
10371: This routine is currently implemented for pairs of `MATAIJ` matrices, for pairs of `MATSEQDENSE` matrices, and for classes
10372: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10374: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10376: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10377: @*/
10378: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10379: {
10380: PetscFunctionBegin;
10381: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10382: PetscFunctionReturn(PETSC_SUCCESS);
10383: }
10385: /*@
10386: MatMatMatMult - Performs matrix-matrix-matrix multiplication $D = A*B*C$.
10388: Neighbor-wise Collective
10390: Input Parameters:
10391: + A - the left matrix
10392: . B - the middle matrix
10393: . C - the right matrix
10394: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10395: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B) + nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate;
10396: if the result is a dense matrix this is irrelevant
10398: Output Parameter:
10399: . D - the product matrix
10401: Level: intermediate
10403: Notes:
10404: Unless `scall` is `MAT_REUSE_MATRIX`, `D` will be created.
10406: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10408: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABC`
10409: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10411: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10412: actually needed.
10414: If you have many matrices with the same non-zero structure to multiply, you
10415: should use `MAT_REUSE_MATRIX` in all calls but the first
10417: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
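  Example Usage:
  A minimal sketch with placeholder matrices `A`, `B`, and `C` of compatible sizes:
.vb
  Mat D;

  PetscCall(MatMatMatMult(A, B, C, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &D));
  // ... update values of A, B, or C without changing their nonzero structures ...
  PetscCall(MatMatMatMult(A, B, C, MAT_REUSE_MATRIX, PETSC_CURRENT, &D));
  PetscCall(MatDestroy(&D));
.ve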
10419: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10420: @*/
10421: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10422: {
10423: PetscFunctionBegin;
10424: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10425: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10427: if (scall == MAT_INITIAL_MATRIX) {
10428: PetscCall(MatProductCreate(A, B, C, D));
10429: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10430: PetscCall(MatProductSetAlgorithm(*D, "default"));
10431: PetscCall(MatProductSetFill(*D, fill));
10433: (*D)->product->api_user = PETSC_TRUE;
10434: PetscCall(MatProductSetFromOptions(*D));
10435: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10436: ((PetscObject)C)->type_name);
10437: PetscCall(MatProductSymbolic(*D));
10438: } else { /* user may change input matrices when REUSE */
10439: PetscCall(MatProductReplaceMats(A, B, C, *D));
10440: }
10441: PetscCall(MatProductNumeric(*D));
10442: PetscFunctionReturn(PETSC_SUCCESS);
10443: }
10445: /*@
10446: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10448: Collective
10450: Input Parameters:
10451: + mat - the matrix
10452: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10453: . subcomm - MPI communicator split from the communicator in which `mat` resides (or `MPI_COMM_NULL` if `nsubcomm` is used)
10454: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10456: Output Parameter:
10457: . matredundant - redundant matrix
10459: Level: advanced
10461: Notes:
10462: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10463: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10465: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10466: calling it.
10468: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
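  Example Usage:
  A hedged sketch that lets the routine build the subcommunicators itself; `A` is a placeholder for an assembled parallel matrix living on at least two MPI processes:
.vb
  Mat Ared;

  PetscCall(MatCreateRedundantMatrix(A, 2, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &Ared)); // two redundant copies
  // ... the values of A change, its nonzero structure does not ...
  PetscCall(MatCreateRedundantMatrix(A, 2, MPI_COMM_NULL, MAT_REUSE_MATRIX, &Ared));
  PetscCall(MatDestroy(&Ared));
.ve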
10470: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10471: @*/
10472: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10473: {
10474: MPI_Comm comm;
10475: PetscMPIInt size;
10476: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10477: Mat_Redundant *redund = NULL;
10478: PetscSubcomm psubcomm = NULL;
10479: MPI_Comm subcomm_in = subcomm;
10480: Mat *matseq;
10481: IS isrow, iscol;
10482: PetscBool newsubcomm = PETSC_FALSE;
10484: PetscFunctionBegin;
10486: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10487: PetscAssertPointer(*matredundant, 5);
10489: }
10491: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10492: if (size == 1 || nsubcomm == 1) {
10493: if (reuse == MAT_INITIAL_MATRIX) {
10494: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10495: } else {
10496: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10497: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10498: }
10499: PetscFunctionReturn(PETSC_SUCCESS);
10500: }
10502: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10503: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10504: MatCheckPreallocated(mat, 1);
10506: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10507: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10508: /* create psubcomm, then get subcomm */
10509: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10510: PetscCallMPI(MPI_Comm_size(comm, &size));
10511: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10513: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10514: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10515: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10516: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10517: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10518: newsubcomm = PETSC_TRUE;
10519: PetscCall(PetscSubcommDestroy(&psubcomm));
10520: }
10522: /* get isrow, iscol and a local sequential matrix matseq[0] */
10523: if (reuse == MAT_INITIAL_MATRIX) {
10524: mloc_sub = PETSC_DECIDE;
10525: nloc_sub = PETSC_DECIDE;
10526: if (bs < 1) {
10527: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10528: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10529: } else {
10530: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10531: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10532: }
10533: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10534: rstart = rend - mloc_sub;
10535: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10536: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10537: PetscCall(ISSetIdentity(iscol));
10538: } else { /* reuse == MAT_REUSE_MATRIX */
10539: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10540: /* retrieve subcomm */
10541: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10542: redund = (*matredundant)->redundant;
10543: isrow = redund->isrow;
10544: iscol = redund->iscol;
10545: matseq = redund->matseq;
10546: }
10547: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10549: /* get matredundant over subcomm */
10550: if (reuse == MAT_INITIAL_MATRIX) {
10551: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10553: /* create a supporting struct and attach it to C for reuse */
10554: PetscCall(PetscNew(&redund));
10555: (*matredundant)->redundant = redund;
10556: redund->isrow = isrow;
10557: redund->iscol = iscol;
10558: redund->matseq = matseq;
10559: if (newsubcomm) {
10560: redund->subcomm = subcomm;
10561: } else {
10562: redund->subcomm = MPI_COMM_NULL;
10563: }
10564: } else {
10565: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10566: }
10567: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10568: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10569: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10570: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10571: }
10572: #endif
10573: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10574: PetscFunctionReturn(PETSC_SUCCESS);
10575: }
10577: /*@C
10578: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10579: a given `Mat`. Each submatrix can span multiple procs.
10581: Collective
10583: Input Parameters:
10584: + mat - the matrix
10585: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10586: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10588: Output Parameter:
10589: . subMat - parallel sub-matrices each spanning a given `subcomm`
10591: Level: advanced
10593: Notes:
10594: The submatrix partition across processes is dictated by `subComm`, a
10595: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10596: is not restricted to be grouped with consecutive original MPI processes.
10598: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10599: maps directly to the layout of the original matrix [wrt the local
10600: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10601: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10602: the `subMat`. However, the offDiagMat loses some columns - and this is
10603: reconstructed with `MatSetValues()`.
10605: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10607: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10608: @*/
10609: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10610: {
10611: PetscMPIInt commsize, subCommSize;
10613: PetscFunctionBegin;
10614: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10615: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10616: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10618: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10619: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10620: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10621: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10622: PetscFunctionReturn(PETSC_SUCCESS);
10623: }
10625: /*@
10626: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10628: Not Collective
10630: Input Parameters:
10631: + mat - matrix to extract local submatrix from
10632: . isrow - local row indices for submatrix
10633: - iscol - local column indices for submatrix
10635: Output Parameter:
10636: . submat - the submatrix
10638: Level: intermediate
10640: Notes:
10641: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10643: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10644: the same as `mat`'s, it may be `PETSC_COMM_SELF`, or it may be some other subcommunicator of `mat`'s.
10646: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10647: `MatSetValuesBlockedLocal()` will also be implemented.
10649: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10650: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
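  Example Usage:
  A hedged sketch of assembling into a local block, assuming `mat` has a local-to-global mapping and `isrow`/`iscol` are local index sets (for example, obtained from `DMCompositeGetLocalISs()`); the indices and value inserted are placeholders:
.vb
  Mat         sub;
  PetscInt    r = 0, c = 0; // placeholder indices in the submatrix's local numbering
  PetscScalar v = 1.0;      // placeholder value

  PetscCall(MatGetLocalSubMatrix(mat, isrow, iscol, &sub));
  PetscCall(MatSetValuesLocal(sub, 1, &r, 1, &c, &v, ADD_VALUES));
  PetscCall(MatRestoreLocalSubMatrix(mat, isrow, iscol, &sub));
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve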
10652: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10653: @*/
10654: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10655: {
10656: PetscFunctionBegin;
10660: PetscCheckSameComm(isrow, 2, iscol, 3);
10661: PetscAssertPointer(submat, 4);
10662: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10664: if (mat->ops->getlocalsubmatrix) {
10665: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10666: } else {
10667: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10668: }
10669: (*submat)->assembled = mat->assembled;
10670: PetscFunctionReturn(PETSC_SUCCESS);
10671: }
10673: /*@
10674: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10676: Not Collective
10678: Input Parameters:
10679: + mat - matrix to extract local submatrix from
10680: . isrow - local row indices for submatrix
10681: . iscol - local column indices for submatrix
10682: - submat - the submatrix
10684: Level: intermediate
10686: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10687: @*/
10688: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10689: {
10690: PetscFunctionBegin;
10694: PetscCheckSameComm(isrow, 2, iscol, 3);
10695: PetscAssertPointer(submat, 4);
10698: if (mat->ops->restorelocalsubmatrix) {
10699: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10700: } else {
10701: PetscCall(MatDestroy(submat));
10702: }
10703: *submat = NULL;
10704: PetscFunctionReturn(PETSC_SUCCESS);
10705: }
10707: /*@
10708: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10710: Collective
10712: Input Parameter:
10713: . mat - the matrix
10715: Output Parameter:
10716: . is - the list of rows that have a zero or missing diagonal entry
10718: Level: developer
10720: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10721: @*/
10722: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10723: {
10724: PetscFunctionBegin;
10727: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10728: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10730: if (!mat->ops->findzerodiagonals) {
10731: Vec diag;
10732: const PetscScalar *a;
10733: PetscInt *rows;
10734: PetscInt rStart, rEnd, r, nrow = 0;
10736: PetscCall(MatCreateVecs(mat, &diag, NULL));
10737: PetscCall(MatGetDiagonal(mat, diag));
10738: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10739: PetscCall(VecGetArrayRead(diag, &a));
10740: for (r = 0; r < rEnd - rStart; ++r)
10741: if (a[r] == 0.0) ++nrow;
10742: PetscCall(PetscMalloc1(nrow, &rows));
10743: nrow = 0;
10744: for (r = 0; r < rEnd - rStart; ++r)
10745: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10746: PetscCall(VecRestoreArrayRead(diag, &a));
10747: PetscCall(VecDestroy(&diag));
10748: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10749: } else {
10750: PetscUseTypeMethod(mat, findzerodiagonals, is);
10751: }
10752: PetscFunctionReturn(PETSC_SUCCESS);
10753: }
10755: /*@
10756: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10758: Collective
10760: Input Parameter:
10761: . mat - the matrix
10763: Output Parameter:
10764: . is - contains the list of rows with off block diagonal entries
10766: Level: developer
10768: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10769: @*/
10770: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10771: {
10772: PetscFunctionBegin;
10775: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10776: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10778: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10779: PetscFunctionReturn(PETSC_SUCCESS);
10780: }
10782: /*@C
10783: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10785: Collective; No Fortran Support
10787: Input Parameter:
10788: . mat - the matrix
10790: Output Parameter:
10791: . values - the block inverses in column major order (FORTRAN-like)
10793: Level: advanced
10795: Notes:
10796: The size of the blocks is determined by the block size of the matrix.
10798: The blocks never overlap between two MPI processes; use `MatInvertVariableBlockEnvelope()` for that case
10800: The blocks all have the same size; use `MatInvertVariableBlockDiagonal()` for variable block sizes
10802: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10803: @*/
10804: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10805: {
10806: PetscFunctionBegin;
10808: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10809: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10810: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10811: PetscFunctionReturn(PETSC_SUCCESS);
10812: }
10814: /*@
10815: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10817: Collective; No Fortran Support
10819: Input Parameters:
10820: + mat - the matrix
10821: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10822: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10824: Output Parameter:
10825: . values - the block inverses in column major order (FORTRAN-like)
10827: Level: advanced
10829: Notes:
10830: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10832: The blocks never overlap between two MPI processes; use `MatInvertVariableBlockEnvelope()` for that case
10834: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10835: @*/
10836: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10837: {
10838: PetscFunctionBegin;
10840: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10841: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10842: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10843: PetscFunctionReturn(PETSC_SUCCESS);
10844: }
10846: /*@
10847: MatInvertBlockDiagonalMat - sets the values of matrix `C` to the inverted block diagonal of matrix `A`
10849: Collective
10851: Input Parameters:
10852: + A - the matrix
10853: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10855: Level: advanced
10857: Note:
10858: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10860: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10861: @*/
10862: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10863: {
10864: const PetscScalar *vals;
10865: PetscInt *dnnz;
10866: PetscInt m, rstart, rend, bs, i, j;
10868: PetscFunctionBegin;
10869: PetscCall(MatInvertBlockDiagonal(A, &vals));
10870: PetscCall(MatGetBlockSize(A, &bs));
10871: PetscCall(MatGetLocalSize(A, &m, NULL));
10872: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10873: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10874: PetscCall(PetscMalloc1(m / bs, &dnnz));
10875: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10876: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10877: PetscCall(PetscFree(dnnz));
10878: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10879: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10880: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10881: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
10882: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10883: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10884: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_FALSE));
10885: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10886: PetscFunctionReturn(PETSC_SUCCESS);
10887: }
10889: /*@
10890: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10891: via `MatTransposeColoringCreate()`.
10893: Collective
10895: Input Parameter:
10896: . c - coloring context
10898: Level: intermediate
10900: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10901: @*/
10902: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10903: {
10904: MatTransposeColoring matcolor = *c;
10906: PetscFunctionBegin;
10907: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10908: if (--((PetscObject)matcolor)->refct > 0) {
10909: matcolor = NULL;
10910: PetscFunctionReturn(PETSC_SUCCESS);
10911: }
10913: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10914: PetscCall(PetscFree(matcolor->rows));
10915: PetscCall(PetscFree(matcolor->den2sp));
10916: PetscCall(PetscFree(matcolor->colorforcol));
10917: PetscCall(PetscFree(matcolor->columns));
10918: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10919: PetscCall(PetscHeaderDestroy(c));
10920: PetscFunctionReturn(PETSC_SUCCESS);
10921: }
10923: /*@
10924: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10925: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10926: `MatTransposeColoring` to sparse `B`.
10928: Collective
10930: Input Parameters:
10931: + coloring - coloring context created with `MatTransposeColoringCreate()`
10932: - B - sparse matrix
10934: Output Parameter:
10935: . Btdense - dense matrix $B^T$
10937: Level: developer
10939: Note:
10940: These are used internally for some implementations of `MatRARt()`
10942: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10943: @*/
10944: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10945: {
10946: PetscFunctionBegin;
10951: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10952: PetscFunctionReturn(PETSC_SUCCESS);
10953: }
10955: /*@
10956: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10957: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10958: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover the sparse matrix
10959: $C_{sp}$ from $C_{den}$.
10961: Collective
10963: Input Parameters:
10964: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10965: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10967: Output Parameter:
10968: . Csp - sparse matrix
10970: Level: developer
10972: Note:
10973: These are used internally for some implementations of `MatRARt()`
10975: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10976: @*/
10977: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10978: {
10979: PetscFunctionBegin;
10984: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10985: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10986: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10987: PetscFunctionReturn(PETSC_SUCCESS);
10988: }
10990: /*@
10991: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
10993: Collective
10995: Input Parameters:
10996: + mat - the matrix product C
10997: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10999: Output Parameter:
11000: . color - the new coloring context
11002: Level: intermediate
11004: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
11005: `MatTransColoringApplyDenToSp()`
11006: @*/
11007: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
11008: {
11009: MatTransposeColoring c;
11010: MPI_Comm comm;
11012: PetscFunctionBegin;
11013: PetscAssertPointer(color, 3);
11015: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11016: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
11017: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
11018: c->ctype = iscoloring->ctype;
11019: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
11020: *color = c;
11021: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11022: PetscFunctionReturn(PETSC_SUCCESS);
11023: }
11025: /*@
11026: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
11027: matrix has had new nonzero locations added to (or removed from) it since the previous call, the value will be larger.
11029: Not Collective
11031: Input Parameter:
11032: . mat - the matrix
11034: Output Parameter:
11035: . state - the current state
11037: Level: intermediate
11039: Notes:
11040: You can only compare states from two different calls to the SAME matrix; you cannot compare calls between
11041: different matrices
11043: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11045: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
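  Example Usage:
  A minimal sketch of detecting a structural change between two points in a program:
.vb
  PetscObjectState s0, s1;

  PetscCall(MatGetNonzeroState(mat, &s0));
  // ... code that may insert new nonzero locations into mat ...
  PetscCall(MatGetNonzeroState(mat, &s1));
  if (s1 > s0) { /* the nonzero structure changed; for example, redo a symbolic factorization */ }
.ve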
11047: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11048: @*/
11049: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11050: {
11051: PetscFunctionBegin;
11053: *state = mat->nonzerostate;
11054: PetscFunctionReturn(PETSC_SUCCESS);
11055: }
11057: /*@
11058: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11059: matrices from each processor
11061: Collective
11063: Input Parameters:
11064: + comm - the communicator the parallel matrix will live on
11065: . seqmat - the input sequential matrix on each process
11066: . n - number of local columns (or `PETSC_DECIDE`)
11067: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11069: Output Parameter:
11070: . mpimat - the parallel matrix generated
11072: Level: developer
11074: Note:
11075: The number of columns of the matrix in EACH processor MUST be the same.
11077: .seealso: [](ch_matrices), `Mat`
11078: @*/
11079: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11080: {
11081: PetscMPIInt size;
11083: PetscFunctionBegin;
11084: PetscCallMPI(MPI_Comm_size(comm, &size));
11085: if (size == 1) {
11086: if (reuse == MAT_INITIAL_MATRIX) {
11087: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11088: } else {
11089: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11090: }
11091: PetscFunctionReturn(PETSC_SUCCESS);
11092: }
11094: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11096: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11097: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11098: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11099: PetscFunctionReturn(PETSC_SUCCESS);
11100: }
11102: /*@
11103: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11105: Collective
11107: Input Parameters:
11108: + A - the matrix to create subdomains from
11109: - N - requested number of subdomains
11111: Output Parameters:
11112: + n - number of subdomains resulting on this MPI process
11113: - iss - `IS` list with indices of subdomains on this MPI process
11115: Level: advanced
11117: Note:
11118: The number of subdomains must be smaller than the communicator size
11120: .seealso: [](ch_matrices), `Mat`, `IS`
11121: @*/
11122: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11123: {
11124: MPI_Comm comm, subcomm;
11125: PetscMPIInt size, rank, color;
11126: PetscInt rstart, rend, k;
11128: PetscFunctionBegin;
11129: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11130: PetscCallMPI(MPI_Comm_size(comm, &size));
11131: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11132: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11133: *n = 1;
11134: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11135: color = rank / k;
11136: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11137: PetscCall(PetscMalloc1(1, iss));
11138: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11139: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11140: PetscCallMPI(MPI_Comm_free(&subcomm));
11141: PetscFunctionReturn(PETSC_SUCCESS);
11142: }
11144: /*@
11145: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11147: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11148: If they are not the same, uses `MatMatMatMult()`.
11150: Once the coarse grid problem is constructed, corrects for interpolation operators
11151: that are not of full rank, which can legitimately happen in the case of non-nested
11152: geometric multigrid.
11154: Input Parameters:
11155: + restrct - restriction operator
11156: . dA - fine grid matrix
11157: . interpolate - interpolation operator
11158: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11159: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11161: Output Parameter:
11162: . A - the Galerkin coarse matrix
11164: Options Database Key:
11165: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11167: Level: developer
11169: Note:
11170: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
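  Example Usage:
  A hedged sketch; `R`, `Afine`, and `P` are placeholders for the restriction, fine-grid, and interpolation matrices:
.vb
  Mat Acoarse;

  PetscCall(MatGalerkin(R, Afine, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Acoarse));
  // ... later, after the fine-grid values change but its structure does not ...
  PetscCall(MatGalerkin(R, Afine, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &Acoarse));
  PetscCall(MatDestroy(&Acoarse));
.ve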
11172: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11173: @*/
11174: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11175: {
11176: IS zerorows;
11177: Vec diag;
11179: PetscFunctionBegin;
11180: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11181: /* Construct the coarse grid matrix */
11182: if (interpolate == restrct) {
11183: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11184: } else {
11185: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11186: }
11188: /* If the interpolation matrix is not of full rank, A will have zero rows.
11189: This can legitimately happen in the case of non-nested geometric multigrid.
11190: In that event, we set the rows of the matrix to the rows of the identity,
11191: ignoring the equations (as the RHS will also be zero). */
11193: PetscCall(MatFindZeroRows(*A, &zerorows));
11195: if (zerorows != NULL) { /* if there are any zero rows */
11196: PetscCall(MatCreateVecs(*A, &diag, NULL));
11197: PetscCall(MatGetDiagonal(*A, diag));
11198: PetscCall(VecISSet(diag, zerorows, 1.0));
11199: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11200: PetscCall(VecDestroy(&diag));
11201: PetscCall(ISDestroy(&zerorows));
11202: }
11203: PetscFunctionReturn(PETSC_SUCCESS);
11204: }
11206: /*@C
11207: MatSetOperation - Allows user to set a matrix operation for any matrix type
11209: Logically Collective
11211: Input Parameters:
11212: + mat - the matrix
11213: . op - the name of the operation
11214: - f - the function that provides the operation
11216: Level: developer
11218: Example Usage:
11219: .vb
11220: extern PetscErrorCode usermult(Mat, Vec, Vec);
11222: PetscCall(MatCreateXXX(comm, ..., &A));
11223: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)usermult));
11224: .ve
11226: Notes:
11227: See the file `include/petscmat.h` for a complete list of matrix
11228: operations, which all have the form MATOP_<OPERATION>, where
11229: <OPERATION> is the name (in all capital letters) of the
11230: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11232: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11233: sequence as the usual matrix interface routines, since they
11234: are intended to be accessed via the usual matrix interface
11235: routines, e.g.,
11236: .vb
11237: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11238: .ve
11240: In particular each function MUST return `PETSC_SUCCESS` on success and
11241: nonzero on failure.
11243: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11245: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11246: @*/
11247: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, PetscErrorCodeFn *f)
11248: {
11249: PetscFunctionBegin;
11251: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (PetscErrorCodeFn *)mat->ops->view) mat->ops->viewnative = mat->ops->view;
11252: (((PetscErrorCodeFn **)mat->ops)[op]) = f;
11253: PetscFunctionReturn(PETSC_SUCCESS);
11254: }
11256: /*@C
11257: MatGetOperation - Gets a matrix operation for any matrix type.
11259: Not Collective
11261: Input Parameters:
11262: + mat - the matrix
11263: - op - the name of the operation
11265: Output Parameter:
11266: . f - the function that provides the operation
11268: Level: developer
11270: Example Usage:
11271: .vb
11272: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11274: MatGetOperation(A, MATOP_MULT, (PetscErrorCodeFn **)&usermult);
11275: .ve
11277: Notes:
11278: See the file `include/petscmat.h` for a complete list of matrix
11279: operations, which all have the form MATOP_<OPERATION>, where
11280: <OPERATION> is the name (in all capital letters) of the
11281: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11283: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11285: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11286: @*/
11287: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, PetscErrorCodeFn **f)
11288: {
11289: PetscFunctionBegin;
11291: *f = (((PetscErrorCodeFn **)mat->ops)[op]);
11292: PetscFunctionReturn(PETSC_SUCCESS);
11293: }
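/*
  A sketch of combining MatGetOperation() and MatSetOperation() to wrap the multiply of an
  existing matrix of any type; the names `origmult` and `LoggedMult` are illustrative only.

    static PetscErrorCode (*origmult)(Mat, Vec, Vec);

    static PetscErrorCode LoggedMult(Mat A, Vec x, Vec y)
    {
      PetscFunctionBegin;
      PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "wrapped MatMult() called\n"));
      PetscCall((*origmult)(A, x, y)); // delegate to the saved native multiply
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    // after the matrix A has been created (any type) and assembled:
    PetscCall(MatGetOperation(A, MATOP_MULT, (PetscErrorCodeFn **)&origmult));
    PetscCall(MatSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)LoggedMult));
    // every subsequent MatMult(A, x, y) now goes through LoggedMult()
*/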
11295: /*@
11296: MatHasOperation - Determines whether the given matrix supports the particular operation.
11298: Not Collective
11300: Input Parameters:
11301: + mat - the matrix
11302: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11304: Output Parameter:
11305: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11307: Level: advanced
11309: Note:
11310: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
11312: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11313: @*/
11314: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11315: {
11316: PetscFunctionBegin;
11318: PetscAssertPointer(has, 3);
11319: if (mat->ops->hasoperation) {
11320: PetscUseTypeMethod(mat, hasoperation, op, has);
11321: } else {
11322: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11323: else {
11324: *has = PETSC_FALSE;
11325: if (op == MATOP_CREATE_SUBMATRIX) {
11326: PetscMPIInt size;
11328: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11329: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11330: }
11331: }
11332: }
11333: PetscFunctionReturn(PETSC_SUCCESS);
11334: }
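/*
  A sketch of guarding an optional call with MatHasOperation(): MATOP_GET_DIAGONAL is
  queried before MatGetDiagonal() so matrix types that do not provide the operation are
  simply skipped instead of erroring.

    PetscBool has;
    Vec       diag;

    PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL, &has));
    if (has) {
      PetscCall(MatCreateVecs(A, &diag, NULL));
      PetscCall(MatGetDiagonal(A, diag));
      PetscCall(VecDestroy(&diag));
    }
*/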
11336: /*@
11337: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11339: Collective
11341: Input Parameter:
11342: . mat - the matrix
11344: Output Parameter:
11345: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11347: Level: beginner
11349: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11350: @*/
11351: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11352: {
11353: PetscFunctionBegin;
11356: PetscAssertPointer(cong, 2);
11357: if (!mat->rmap || !mat->cmap) {
11358: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11359: PetscFunctionReturn(PETSC_SUCCESS);
11360: }
11361: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11362: PetscCall(PetscLayoutSetUp(mat->rmap));
11363: PetscCall(PetscLayoutSetUp(mat->cmap));
11364: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11365: if (*cong) mat->congruentlayouts = 1;
11366: else mat->congruentlayouts = 0;
11367: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11368: PetscFunctionReturn(PETSC_SUCCESS);
11369: }
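/*
  A sketch of querying MatHasCongruentLayouts(); what is done with the answer is up to the
  caller (here it is only reported).

    PetscBool cong;

    PetscCall(MatHasCongruentLayouts(A, &cong));
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "row/column layouts congruent? %s\n", cong ? "yes" : "no"));
*/

/* Developer-level routine with no manual page: dispatches to the matrix type's setinf() method */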
11371: PetscErrorCode MatSetInf(Mat A)
11372: {
11373: PetscFunctionBegin;
11374: PetscUseTypeMethod(A, setinf);
11375: PetscFunctionReturn(PETSC_SUCCESS);
11376: }
11378: /*@
11379: MatCreateGraph - create a scalar matrix (that is, a matrix with one vertex for each block vertex in the original matrix) for use in graph algorithms,
11380: possibly removing small values from the graph structure.
11382: Collective
11384: Input Parameters:
11385: + A - the matrix
11386: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11387: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11388: . filter - filter value; < 0: no filtering; == 0: remove only exactly-zero entries; > 0: remove entries with abs(entry) <= filter
11389: . num_idx - size of 'index' array
11390: - index - array of block indices to use for graph strength of connection weight
11392: Output Parameter:
11393: . graph - the resulting graph
11395: Level: advanced
11397: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11398: @*/
11399: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11400: {
11401: PetscFunctionBegin;
11405: PetscAssertPointer(graph, 7);
11406: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11407: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11408: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11409: PetscFunctionReturn(PETSC_SUCCESS);
11410: }
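/*
  A sketch of building a scalar strength-of-connection graph from a block (bs > 1) matrix,
  as is done for aggregation-style coarsening. The particular argument choices (symmetrize,
  scale, drop exact zeros, and a NULL index array standing for "use all block components")
  are assumptions of this sketch, not requirements of the interface.

    Mat graph;

    PetscCall(MatCreateGraph(A, PETSC_TRUE, PETSC_TRUE, 0.0, 0, NULL, &graph));
    // ... run a coarsening, coloring, or partitioning algorithm on graph ...
    PetscCall(MatDestroy(&graph));
*/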
11412: /*@
11413: MatEliminateZeros - eliminate the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11414: meaning the same memory is used for the matrix, and no new memory is allocated.
11416: Collective
11418: Input Parameters:
11419: + A - the matrix
11420: - keep - if the diagonal coefficient of a row of `A` is zero, indicates whether it should be kept in the structure or eliminated as well
11422: Level: intermediate
11424: Developer Note:
11425: The entries in the sparse matrix data structure are shifted to fill in the locations vacated by the eliminated entries, so the tail ends
11426: of the internal arrays become unused (the memory itself is not released).
11428: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11429: @*/
11430: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11431: {
11432: PetscFunctionBegin;
11434: PetscUseTypeMethod(A, eliminatezeros, keep);
11435: PetscFunctionReturn(PETSC_SUCCESS);
11436: }
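/*
  A sketch of compressing the nonzero structure of an assembled matrix that carries
  explicitly stored zeros (for example, from a code that preallocates a full stencil and
  only fills part of it). With keep == PETSC_TRUE a zero diagonal entry is retained in the
  structure.

    PetscCall(MatEliminateZeros(A, PETSC_TRUE));
*/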
11438: /*@C
11439: MatGetCurrentMemType - Get the memory location of the matrix
11441: Not Collective, but the result will be the same on all MPI processes
11443: Input Parameter:
11444: . A - the matrix whose memory type we are checking
11446: Output Parameter:
11447: . m - the memory type
11449: Level: intermediate
11451: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`, `PetscMemType`
11452: @*/
11453: PetscErrorCode MatGetCurrentMemType(Mat A, PetscMemType *m)
11454: {
11455: PetscFunctionBegin;
11457: PetscAssertPointer(m, 2);
11458: if (A->ops->getcurrentmemtype) PetscUseTypeMethod(A, getcurrentmemtype, m);
11459: else *m = PETSC_MEMTYPE_HOST;
11460: PetscFunctionReturn(PETSC_SUCCESS);
11461: }
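/*
  A sketch of branching on the current memory location of a matrix, assuming the
  PetscMemType query macros PetscMemTypeHost()/PetscMemTypeDevice() from petscdevicetypes.h.

    PetscMemType mtype;

    PetscCall(MatGetCurrentMemType(A, &mtype));
    if (PetscMemTypeDevice(mtype)) {
      // matrix data currently lives on the device
    } else {
      // matrix data currently lives in host memory
    }
*/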