/* matrix.c */
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
/* Logging support: class ids and event ids registered for Mat profiling */
PetscClassId MAT_CLASSID;
PetscClassId MAT_COLORING_CLASSID;
PetscClassId MAT_FDCOLORING_CLASSID;
PetscClassId MAT_TRANSPOSECOLORING_CLASSID;

/* Events for core operations: multiply, solve, factorization, assembly, ... */
PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
PetscLogEvent MAT_ADot, MAT_ANorm;
PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
PetscLogEvent MAT_TransposeColoringCreate;
/* Events for matrix-matrix products and their symbolic/numeric phases */
PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
PetscLogEvent MAT_GetMultiProcBlock;
/* Events for GPU back ends (CUSPARSE/HIPSPARSE/ViennaCL/dense) host<->device traffic */
PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
PetscLogEvent MAT_CreateGraph;
PetscLogEvent MAT_SetValuesBatch;
PetscLogEvent MAT_ViennaCLCopyToGPU;
PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
/* Printable names for MatFactorType values, terminated per the PetscEnum string-array convention */
const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
52: /*@
53: MatSetRandom - Sets all components of a matrix to random numbers.
55: Logically Collective
57: Input Parameters:
58: + x - the matrix
59: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL` and
60: it will create one internally.
62: Example:
63: .vb
64: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
65: MatSetRandom(x,rctx);
66: PetscRandomDestroy(rctx);
67: .ve
69: Level: intermediate
71: Notes:
72: For sparse matrices that have been preallocated but not been assembled, it randomly selects appropriate locations,
74: for sparse matrices that already have nonzero locations, it fills the locations with random numbers.
76: It generates an error if used on unassembled sparse matrices that have not been preallocated.
78: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
79: @*/
80: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
81: {
82: PetscRandom randObj = NULL;
84: PetscFunctionBegin;
88: MatCheckPreallocated(x, 1);
90: if (!rctx) {
91: MPI_Comm comm;
92: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
93: PetscCall(PetscRandomCreate(comm, &randObj));
94: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
95: PetscCall(PetscRandomSetFromOptions(randObj));
96: rctx = randObj;
97: }
98: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
99: PetscUseTypeMethod(x, setrandom, rctx);
100: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
102: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
104: PetscCall(PetscRandomDestroy(&randObj));
105: PetscFunctionReturn(PETSC_SUCCESS);
106: }
108: /*@
109: MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type
111: Logically Collective
113: Input Parameter:
114: . A - A matrix in unassembled, hash table form
116: Output Parameter:
117: . B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`
119: Example:
120: .vb
121: PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
122: PetscCall(MatCopyHashToXAIJ(A, B));
123: .ve
125: Level: advanced
127: Notes:
128: If `B` is `A`, then the hash table data structure will be destroyed. `B` is assembled
130: .seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
131: @*/
132: PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
133: {
134: PetscFunctionBegin;
136: PetscUseTypeMethod(A, copyhashtoxaij, B);
137: PetscFunctionReturn(PETSC_SUCCESS);
138: }
140: /*@
141: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
143: Logically Collective
145: Input Parameter:
146: . mat - the factored matrix
148: Output Parameters:
149: + pivot - the pivot value computed
150: - row - the row that the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
151: the share the matrix
153: Level: advanced
155: Notes:
156: This routine does not work for factorizations done with external packages.
158: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
160: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
162: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
163: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
164: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
165: @*/
166: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
167: {
168: PetscFunctionBegin;
170: PetscAssertPointer(pivot, 2);
171: PetscAssertPointer(row, 3);
172: *pivot = mat->factorerror_zeropivot_value;
173: *row = mat->factorerror_zeropivot_row;
174: PetscFunctionReturn(PETSC_SUCCESS);
175: }
177: /*@
178: MatFactorGetError - gets the error code from a factorization
180: Logically Collective
182: Input Parameter:
183: . mat - the factored matrix
185: Output Parameter:
186: . err - the error code
188: Level: advanced
190: Note:
191: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
193: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
194: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
195: @*/
196: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
197: {
198: PetscFunctionBegin;
200: PetscAssertPointer(err, 2);
201: *err = mat->factorerrortype;
202: PetscFunctionReturn(PETSC_SUCCESS);
203: }
205: /*@
206: MatFactorClearError - clears the error code in a factorization
208: Logically Collective
210: Input Parameter:
211: . mat - the factored matrix
213: Level: developer
215: Note:
216: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
218: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
219: `MatGetErrorCode()`, `MatFactorError`
220: @*/
221: PetscErrorCode MatFactorClearError(Mat mat)
222: {
223: PetscFunctionBegin;
225: mat->factorerrortype = MAT_FACTOR_NOERROR;
226: mat->factorerror_zeropivot_value = 0.0;
227: mat->factorerror_zeropivot_row = 0;
228: PetscFunctionReturn(PETSC_SUCCESS);
229: }
/*
  MatFindNonzeroRowsOrCols_Basic - generic detector for nonzero rows (cols == PETSC_FALSE)
  or nonzero columns (cols == PETSC_TRUE) of mat.

  The matrix (or its transpose) is applied to a random vector: a zero row/column gives a
  zero entry in the result, while a nonzero one gives (with probability 1) a nonzero entry.
  Entries with magnitude above tol (or simply nonzero when tol <= 0) are kept.  On output
  *nonzero is an IS of the global indices found, or NULL when every row/column is nonzero.
*/
PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
{
  Vec                r, l;
  const PetscScalar *al;
  PetscInt           i, nz, gnz, N, n, st;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(mat, &r, &l));
  if (!cols) { /* nonzero rows: probe with A*r, examine the result l */
    PetscCall(MatGetOwnershipRange(mat, &st, NULL));
    PetscCall(MatGetSize(mat, &N, NULL));
    PetscCall(MatGetLocalSize(mat, &n, NULL));
    PetscCall(VecSetRandom(r, NULL));
    PetscCall(MatMult(mat, r, l));
    PetscCall(VecGetArrayRead(l, &al));
  } else { /* nonzero columns: probe with A^T*l, examine the result r */
    PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
    PetscCall(MatGetSize(mat, NULL, &N));
    PetscCall(MatGetLocalSize(mat, NULL, &n));
    PetscCall(VecSet(r, 0.0));
    PetscCall(VecSetRandom(l, NULL));
    PetscCall(MatMultTranspose(mat, l, r));
    PetscCall(VecGetArrayRead(r, &al));
  }
  /* first pass: count the local entries considered nonzero */
  if (tol <= 0.0) {
    for (i = 0, nz = 0; i < n; i++)
      if (al[i] != 0.0) nz++;
  } else {
    for (i = 0, nz = 0; i < n; i++)
      if (PetscAbsScalar(al[i]) > tol) nz++;
  }
  PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
  if (gnz != N) { /* at least one zero row/column exists somewhere: build the index set */
    PetscInt *nzr;
    PetscCall(PetscMalloc1(nz, &nzr));
    if (nz) {
      /* NOTE(review): the counting pass above branches on tol <= 0 while this fill pass
         branches on tol < 0; at tol == 0 both tests agree since |a| > 0 iff a != 0 */
      if (tol < 0) {
        for (i = 0, nz = 0; i < n; i++)
          if (al[i] != 0.0) nzr[nz++] = i + st;
      } else {
        for (i = 0, nz = 0; i < n; i++)
          if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
      }
    }
    /* ownership of nzr transfers to the IS (PETSC_OWN_POINTER) */
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
  } else *nonzero = NULL;
  if (!cols) { /* nonzero rows */
    PetscCall(VecRestoreArrayRead(l, &al));
  } else {
    PetscCall(VecRestoreArrayRead(r, &al));
  }
  PetscCall(VecDestroy(&l));
  PetscCall(VecDestroy(&r));
  PetscFunctionReturn(PETSC_SUCCESS);
}
287: /*@
288: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
290: Input Parameter:
291: . mat - the matrix
293: Output Parameter:
294: . keptrows - the rows that are not completely zero
296: Level: intermediate
298: Note:
299: `keptrows` is set to `NULL` if all rows are nonzero.
301: Developer Note:
302: If `keptrows` is not `NULL`, it must be sorted.
304: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
305: @*/
306: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
307: {
308: PetscFunctionBegin;
311: PetscAssertPointer(keptrows, 2);
312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
314: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
315: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
316: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: /*@
321: MatFindZeroRows - Locate all rows that are completely zero in the matrix
323: Input Parameter:
324: . mat - the matrix
326: Output Parameter:
327: . zerorows - the rows that are completely zero
329: Level: intermediate
331: Note:
332: `zerorows` is set to `NULL` if no rows are zero.
334: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
335: @*/
336: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
337: {
338: IS keptrows;
339: PetscInt m, n;
341: PetscFunctionBegin;
344: PetscAssertPointer(zerorows, 2);
345: PetscCall(MatFindNonzeroRows(mat, &keptrows));
346: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
347: In keeping with this convention, we set zerorows to NULL if there are no zero
348: rows. */
349: if (keptrows == NULL) {
350: *zerorows = NULL;
351: } else {
352: PetscCall(MatGetOwnershipRange(mat, &m, &n));
353: PetscCall(ISComplement(keptrows, m, n, zerorows));
354: PetscCall(ISDestroy(&keptrows));
355: }
356: PetscFunctionReturn(PETSC_SUCCESS);
357: }
359: /*@
360: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
362: Not Collective
364: Input Parameter:
365: . A - the matrix
367: Output Parameter:
368: . a - the diagonal part (which is a SEQUENTIAL matrix)
370: Level: advanced
372: Notes:
373: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
375: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
377: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
378: @*/
379: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
380: {
381: PetscFunctionBegin;
384: PetscAssertPointer(a, 2);
385: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
386: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
387: else {
388: PetscMPIInt size;
390: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
391: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
392: *a = A;
393: }
394: PetscFunctionReturn(PETSC_SUCCESS);
395: }
397: /*@
398: MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
400: Collective
402: Input Parameter:
403: . mat - the matrix
405: Output Parameter:
406: . trace - the sum of the diagonal entries
408: Level: advanced
410: .seealso: [](ch_matrices), `Mat`
411: @*/
412: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
413: {
414: Vec diag;
416: PetscFunctionBegin;
418: PetscAssertPointer(trace, 2);
419: PetscCall(MatCreateVecs(mat, &diag, NULL));
420: PetscCall(MatGetDiagonal(mat, diag));
421: PetscCall(VecSum(diag, trace));
422: PetscCall(VecDestroy(&diag));
423: PetscFunctionReturn(PETSC_SUCCESS);
424: }
426: /*@
427: MatRealPart - Zeros out the imaginary part of the matrix
429: Logically Collective
431: Input Parameter:
432: . mat - the matrix
434: Level: advanced
436: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
437: @*/
438: PetscErrorCode MatRealPart(Mat mat)
439: {
440: PetscFunctionBegin;
443: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
444: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
445: MatCheckPreallocated(mat, 1);
446: PetscUseTypeMethod(mat, realpart);
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@C
451: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
453: Collective
455: Input Parameter:
456: . mat - the matrix
458: Output Parameters:
459: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
460: - ghosts - the global indices of the ghost points
462: Level: advanced
464: Note:
465: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
467: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
468: @*/
469: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
470: {
471: PetscFunctionBegin;
474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
476: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
477: else {
478: if (nghosts) *nghosts = 0;
479: if (ghosts) *ghosts = NULL;
480: }
481: PetscFunctionReturn(PETSC_SUCCESS);
482: }
484: /*@
485: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
487: Logically Collective
489: Input Parameter:
490: . mat - the matrix
492: Level: advanced
494: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
495: @*/
496: PetscErrorCode MatImaginaryPart(Mat mat)
497: {
498: PetscFunctionBegin;
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: MatCheckPreallocated(mat, 1);
504: PetscUseTypeMethod(mat, imaginarypart);
505: PetscFunctionReturn(PETSC_SUCCESS);
506: }
// PetscClangLinter pragma disable: -fdoc-section-header-unknown
/*@C
  MatGetRow - Gets a row of a matrix.  You MUST call `MatRestoreRow()`
  for each row that you get to ensure that your application does
  not bleed memory.

  Not Collective

  Input Parameters:
+ mat - the matrix
- row - the row to get

  Output Parameters:
+ ncols - if not `NULL`, the number of nonzeros in `row`
. cols  - if not `NULL`, the column numbers
- vals  - if not `NULL`, the numerical values

  Level: advanced

  Notes:
  This routine is provided for people who need to have direct access
  to the structure of a matrix.  We hope that we provide enough
  high-level matrix routines that few users will need it.

  `MatGetRow()` always returns 0-based column indices, regardless of
  whether the internal representation is 0-based (default) or 1-based.

  For better efficiency, set `cols` and/or `vals` to `NULL` if you do
  not wish to extract these quantities.

  The user can only examine the values extracted with `MatGetRow()`;
  the values CANNOT be altered.  To change the matrix entries, one
  must use `MatSetValues()`.

  You can only have one call to `MatGetRow()` outstanding for a particular
  matrix at a time, per processor. `MatGetRow()` can only obtain rows
  associated with the given processor, it cannot get rows from the
  other processors; for that we suggest using `MatCreateSubMatrices()`, then
  `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
  is in the global number of rows.

  Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.

  Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.

  Fortran Note:
.vb
  PetscInt, pointer :: cols(:)
  PetscScalar, pointer :: vals(:)
.ve

.seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
@*/
PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscInt incols; /* count reported by the implementation; copied to *ncols only if requested */

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* only locally owned rows may be requested (see Notes above) */
  PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
  PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
  /* the casts discard const because the type-method signature is non-const; callers must not modify the arrays */
  PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
  if (ncols) *ncols = incols;
  PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
579: /*@
580: MatConjugate - replaces the matrix values with their complex conjugates
582: Logically Collective
584: Input Parameter:
585: . mat - the matrix
587: Level: advanced
589: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
590: @*/
591: PetscErrorCode MatConjugate(Mat mat)
592: {
593: PetscFunctionBegin;
595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
596: if (PetscDefined(USE_COMPLEX) && !(mat->symmetric == PETSC_BOOL3_TRUE && mat->hermitian == PETSC_BOOL3_TRUE)) {
597: PetscUseTypeMethod(mat, conjugate);
598: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
599: }
600: PetscFunctionReturn(PETSC_SUCCESS);
601: }
/*@C
  MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.

  Not Collective

  Input Parameters:
+ mat   - the matrix
. row   - the row to get
. ncols - the number of nonzeros
. cols  - the columns of the nonzeros
- vals  - if nonzero the column values

  Level: advanced

  Notes:
  This routine should be called after you have finished examining the entries.

  This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
  use of the array after it has been restored. If you pass `NULL`, it will
  not zero the pointers.  Use of `cols` or `vals` after `MatRestoreRow()` is invalid.

  Fortran Note:
.vb
  PetscInt, pointer :: cols(:)
  PetscScalar, pointer :: vals(:)
.ve

.seealso: [](ch_matrices), `Mat`, `MatGetRow()`
@*/
PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscFunctionBegin;
  if (ncols) PetscAssertPointer(ncols, 3);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  /* the restore is optional for types that allocate nothing in MatGetRow() */
  PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
  /* zero the outputs so stale pointers cannot be used accidentally (see Notes) */
  if (ncols) *ncols = 0;
  if (cols) *cols = NULL;
  if (vals) *vals = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
645: /*@
646: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
647: You should call `MatRestoreRowUpperTriangular()` after calling` MatGetRow()` and `MatRestoreRow()` to disable the flag.
649: Not Collective
651: Input Parameter:
652: . mat - the matrix
654: Level: advanced
656: Note:
657: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
659: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
660: @*/
661: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
662: {
663: PetscFunctionBegin;
666: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
667: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
668: MatCheckPreallocated(mat, 1);
669: PetscTryTypeMethod(mat, getrowuppertriangular);
670: PetscFunctionReturn(PETSC_SUCCESS);
671: }
673: /*@
674: MatRestoreRowUpperTriangular - Disable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
676: Not Collective
678: Input Parameter:
679: . mat - the matrix
681: Level: advanced
683: Note:
684: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
686: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
687: @*/
688: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
689: {
690: PetscFunctionBegin;
693: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
694: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
695: MatCheckPreallocated(mat, 1);
696: PetscTryTypeMethod(mat, restorerowuppertriangular);
697: PetscFunctionReturn(PETSC_SUCCESS);
698: }
700: /*@
701: MatSetOptionsPrefix - Sets the prefix used for searching for all
702: `Mat` options in the database.
704: Logically Collective
706: Input Parameters:
707: + A - the matrix
708: - prefix - the prefix to prepend to all option names
710: Level: advanced
712: Notes:
713: A hyphen (-) must NOT be given at the beginning of the prefix name.
714: The first character of all runtime options is AUTOMATICALLY the hyphen.
716: This is NOT used for options for the factorization of the matrix. Normally the
717: prefix is automatically passed in from the PC calling the factorization. To set
718: it directly use `MatSetOptionsPrefixFactor()`
720: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
721: @*/
722: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
723: {
724: PetscFunctionBegin;
726: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
727: PetscTryMethod(A, "MatSetOptionsPrefix_C", (Mat, const char[]), (A, prefix));
728: PetscFunctionReturn(PETSC_SUCCESS);
729: }
731: /*@
732: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for
733: for matrices created with `MatGetFactor()`
735: Logically Collective
737: Input Parameters:
738: + A - the matrix
739: - prefix - the prefix to prepend to all option names for the factored matrix
741: Level: developer
743: Notes:
744: A hyphen (-) must NOT be given at the beginning of the prefix name.
745: The first character of all runtime options is AUTOMATICALLY the hyphen.
747: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
748: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
750: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
751: @*/
752: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
753: {
754: PetscFunctionBegin;
756: if (prefix) {
757: PetscAssertPointer(prefix, 2);
758: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
759: if (prefix != A->factorprefix) {
760: PetscCall(PetscFree(A->factorprefix));
761: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
762: }
763: } else PetscCall(PetscFree(A->factorprefix));
764: PetscFunctionReturn(PETSC_SUCCESS);
765: }
767: /*@
768: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for
769: for matrices created with `MatGetFactor()`
771: Logically Collective
773: Input Parameters:
774: + A - the matrix
775: - prefix - the prefix to prepend to all option names for the factored matrix
777: Level: developer
779: Notes:
780: A hyphen (-) must NOT be given at the beginning of the prefix name.
781: The first character of all runtime options is AUTOMATICALLY the hyphen.
783: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
784: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
786: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
787: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
788: `MatSetOptionsPrefix()`
789: @*/
790: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
791: {
792: size_t len1, len2, new_len;
794: PetscFunctionBegin;
796: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
797: if (!A->factorprefix) {
798: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
799: PetscFunctionReturn(PETSC_SUCCESS);
800: }
801: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
803: PetscCall(PetscStrlen(A->factorprefix, &len1));
804: PetscCall(PetscStrlen(prefix, &len2));
805: new_len = len1 + len2 + 1;
806: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
807: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
808: PetscFunctionReturn(PETSC_SUCCESS);
809: }
811: /*@
812: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
813: matrix options in the database.
815: Logically Collective
817: Input Parameters:
818: + A - the matrix
819: - prefix - the prefix to prepend to all option names
821: Level: advanced
823: Note:
824: A hyphen (-) must NOT be given at the beginning of the prefix name.
825: The first character of all runtime options is AUTOMATICALLY the hyphen.
827: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
828: @*/
829: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
830: {
831: PetscFunctionBegin;
833: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
834: PetscTryMethod(A, "MatAppendOptionsPrefix_C", (Mat, const char[]), (A, prefix));
835: PetscFunctionReturn(PETSC_SUCCESS);
836: }
838: /*@
839: MatGetOptionsPrefix - Gets the prefix used for searching for all
840: matrix options in the database.
842: Not Collective
844: Input Parameter:
845: . A - the matrix
847: Output Parameter:
848: . prefix - pointer to the prefix string used
850: Level: advanced
852: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
853: @*/
854: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
855: {
856: PetscFunctionBegin;
858: PetscAssertPointer(prefix, 2);
859: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
860: PetscFunctionReturn(PETSC_SUCCESS);
861: }
863: /*@
864: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
866: Not Collective
868: Input Parameter:
869: . A - the matrix
871: Output Parameter:
872: . state - the object state
874: Level: advanced
876: Note:
877: Object state is an integer which gets increased every time
878: the object is changed. By saving and later querying the object state
879: one can determine whether information about the object is still current.
881: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
883: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
884: @*/
885: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
886: {
887: PetscFunctionBegin;
889: PetscAssertPointer(state, 2);
890: PetscCall(PetscObjectStateGet((PetscObject)A, state));
891: PetscFunctionReturn(PETSC_SUCCESS);
892: }
894: /*@
895: MatResetPreallocation - Reset matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
897: Collective
899: Input Parameter:
900: . A - the matrix
902: Level: beginner
904: Notes:
905: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY` the matrix data structures represent the nonzeros assigned to the
906: matrix. If that space is less than the preallocated space that extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
907: makes all of the preallocation space available
909: Current values in the matrix are lost in this call
911: Currently only supported for `MATAIJ` matrices.
913: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
914: @*/
PetscErrorCode MatResetPreallocation(Mat A)
{
  PetscFunctionBegin;
  /* Dispatch to the type-specific implementation registered as "MatResetPreallocation_C";
     PetscUseMethod() errors if the matrix type does not provide one (only AIJ types do) */
  PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
924: /*@
925: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
927: Collective
929: Input Parameter:
930: . A - the matrix
932: Level: intermediate
934: Notes:
935: The matrix will again delete the hash table data structures after following calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
937: Currently only supported for `MATAIJ` matrices.
939: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
940: @*/
941: PetscErrorCode MatResetHash(Mat A)
942: {
943: PetscFunctionBegin;
946: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
947: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
948: PetscUseMethod(A, "MatResetHash_C", (Mat), (A));
949: /* These flags are used to determine whether certain setups occur */
950: A->was_assembled = PETSC_FALSE;
951: A->assembled = PETSC_FALSE;
952: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
953: PetscCall(PetscObjectStateIncrease((PetscObject)A));
954: PetscFunctionReturn(PETSC_SUCCESS);
955: }
957: /*@
958: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
960: Collective
962: Input Parameter:
963: . A - the matrix
965: Level: advanced
967: Notes:
968: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
969: setting values in the matrix.
971: This routine is called internally by other `Mat` functions when needed so rarely needs to be called by users
973: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
974: @*/
975: PetscErrorCode MatSetUp(Mat A)
976: {
977: PetscFunctionBegin;
979: if (!((PetscObject)A)->type_name) {
980: PetscMPIInt size;
982: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
983: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
984: }
985: if (!A->preallocated) PetscTryTypeMethod(A, setup);
986: PetscCall(PetscLayoutSetUp(A->rmap));
987: PetscCall(PetscLayoutSetUp(A->cmap));
988: A->preallocated = PETSC_TRUE;
989: PetscFunctionReturn(PETSC_SUCCESS);
990: }
992: #if defined(PETSC_HAVE_SAWS)
993: #include <petscviewersaws.h>
994: #endif
996: /*
997: If threadsafety is on extraneous matrices may be printed
999: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
1000: */
1001: #if !defined(PETSC_HAVE_THREADSAFETY)
1002: static PetscInt insidematview = 0;
1003: #endif
1005: /*@
1006: MatViewFromOptions - View properties of the matrix based on options set in the options database
1008: Collective
1010: Input Parameters:
1011: + A - the matrix
1012: . obj - optional additional object that provides the options prefix to use
1013: - name - command line option
1015: Options Database Key:
1016: . -name [viewertype][:...] - option name and values. See `PetscObjectViewFromOptions()` for the possible arguments
1018: Level: intermediate
1020: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1021: @*/
PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
#if !defined(PETSC_HAVE_THREADSAFETY)
  /* Skip when called from within MatView(): some MatView() implementations assemble a
     temporary matrix whose own view-from-options call would produce extraneous output */
  if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
#endif
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1033: /*@
1034: MatView - display information about a matrix in a variety ways
1036: Collective on viewer
1038: Input Parameters:
1039: + mat - the matrix
1040: - viewer - visualization context
1042: Options Database Keys:
1043: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1044: . -mat_view ::ascii_info_detail - Prints more detailed info
1045: . -mat_view - Prints matrix in ASCII format
1046: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1047: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1048: . -display name - Sets display name (default is host)
1049: . -draw_pause sec - Sets number of seconds to pause after display
1050: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1051: . -viewer_socket_machine machine - -
1052: . -viewer_socket_port port - -
1053: . -mat_view binary - save matrix to file in binary format
1054: - -viewer_binary_filename name - -
1056: Level: beginner
1058: Notes:
1059: The available visualization contexts include
1060: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1061: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1062: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1063: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1065: The user can open alternative visualization contexts with
1066: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1067: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1068: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1069: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1071: The user can call `PetscViewerPushFormat()` to specify the output
1072: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1073: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1074: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1075: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1076: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1077: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1078: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1079: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1080: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1082: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes,
1083: the program will seemingly hang and take hours for larger matrices, for larger matrices one should use the binary format.
1085: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1087: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1088: viewer is used.
1090: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1091: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1093: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1094: and then use the following mouse functions.
1095: .vb
1096: left mouse: zoom in
1097: middle mouse: zoom out
1098: right mouse: continue with the simulation
1099: .ve
1101: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1102: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1103: @*/
PetscErrorCode MatView(Mat mat, PetscViewer viewer)
{
  PetscInt          rows, cols, rbs, cbs;
  PetscBool         isascii, isstring, issaws;
  PetscViewerFormat format;
  PetscMPIInt       size;

  PetscFunctionBegin;
  /* NULL viewer means stdout on the matrix's communicator */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
  /* load-balance output is meaningless on one process */
  if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);

#if !defined(PETSC_HAVE_THREADSAFETY)
  /* counter read by MatViewFromOptions() to suppress views of temporary matrices;
     must be decremented on EVERY exit path below */
  insidematview++;
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
  PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");

  PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
  if (isascii) {
    /* early exits: nothing meaningful to print before preallocation/assembly */
    if (!mat->preallocated) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
#if !defined(PETSC_HAVE_THREADSAFETY)
      insidematview--;
#endif
      PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    if (!mat->assembled) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
#if !defined(PETSC_HAVE_THREADSAFETY)
      insidematview--;
#endif
      PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatNullSpace nullsp, transnullsp;

      /* this push is balanced by the pop in the trailing isascii block below */
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(MatGetSize(mat, &rows, &cols));
      PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
      if (rbs != 1 || cbs != 1) {
        if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
        else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
      } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
      if (mat->factortype) {
        MatSolverType solver;
        PetscCall(MatFactorGetSolverType(mat, &solver));
        PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
      }
      if (mat->ops->getinfo) {
        PetscBool is_constant_or_diagonal;

        // Don't print nonzero information for constant or diagonal matrices, it just adds noise to the output
        PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &is_constant_or_diagonal, MATCONSTANTDIAGONAL, MATDIAGONAL, ""));
        if (!is_constant_or_diagonal) {
          MatInfo info;

          PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
          PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
          if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
        }
      }
      /* report attached (near) null spaces, if any */
      PetscCall(MatGetNullSpace(mat, &nullsp));
      PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
      if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, "  has attached null space\n"));
      if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, "  has attached transposed null space\n"));
      PetscCall(MatGetNearNullSpace(mat, &nullsp));
      if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, "  has attached near null space\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(MatProductView(mat, viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
        IS tmp;

        /* variable block sizes are shown as an IS; PETSC_USE_POINTER avoids copying mat->bsizes */
        PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
        PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(ISView(tmp, viewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
        PetscCall(ISDestroy(&tmp));
      }
    }
  } else if (issaws) {
#if defined(PETSC_HAVE_SAWS)
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)mat));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
#endif
  } else if (isstring) {
    const char *type;
    PetscCall(MatGetType(mat, &type));
    PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
    PetscTryTypeMethod(mat, view, viewer);
  }
  /* hand off to the implementation's viewer (native variant when requested and available) */
  if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscUseTypeMethod(mat, viewnative, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
  } else if (mat->ops->view) {
    PetscCall(PetscViewerASCIIPushTab(viewer));
    PetscUseTypeMethod(mat, view, viewer);
    PetscCall(PetscViewerASCIIPopTab(viewer));
  }
  if (isascii) {
    /* pop the tab pushed in the INFO/INFO_DETAIL branch above */
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
  }
  PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
#if !defined(PETSC_HAVE_THREADSAFETY)
  insidematview--;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1230: #if defined(PETSC_USE_DEBUG)
1231: #include <../src/sys/totalview/tv_data_display.h>
/* Callback used by the TotalView debugger to display a Mat as a small table of
   its sizes and type name instead of its raw struct contents */
PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
{
  TV_add_row("Local rows", "int", &mat->rmap->n);
  TV_add_row("Local columns", "int", &mat->cmap->n);
  TV_add_row("Global rows", "int", &mat->rmap->N);
  TV_add_row("Global columns", "int", &mat->cmap->N);
  TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
  return TV_format_OK;
}
1241: #endif
1243: /*@
1244: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1245: with `MatView()`. The matrix format is determined from the options database.
1246: Generates a parallel MPI matrix if the communicator has more than one
1247: processor. The default matrix type is `MATAIJ`.
1249: Collective
1251: Input Parameters:
1252: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1253: or some related function before a call to `MatLoad()`
1254: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1256: Options Database Key:
1257: . -matload_block_size bs - set block size
1259: Level: beginner
1261: Notes:
1262: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1263: `Mat` before calling this routine if you wish to set it from the options database.
1265: `MatLoad()` automatically loads into the options database any options
1266: given in the file filename.info where filename is the name of the file
1267: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1268: file will be ignored if you use the -viewer_binary_skip_info option.
1270: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1271: sets the default matrix type AIJ and sets the local and global sizes.
1272: If type and/or size is already set, then the same are used.
1274: In parallel, each processor can load a subset of rows (or the
1275: entire matrix). This routine is especially useful when a large
1276: matrix is stored on disk and only part of it is desired on each
1277: processor. For example, a parallel solver may access only some of
1278: the rows from each processor. The algorithm used here reads
1279: relatively small blocks of data rather than reading the entire
1280: matrix and then subsetting it.
1282: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1283: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1284: or the sequence like
1285: .vb
1286: `PetscViewer` v;
1287: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1288: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1289: `PetscViewerSetFromOptions`(v);
1290: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1291: `PetscViewerFileSetName`(v,"datafile");
1292: .ve
1293: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1294: .vb
1295: -viewer_type {binary, hdf5}
1296: .ve
1298: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1299: and src/mat/tutorials/ex10.c with the second approach.
1301: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1302: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1303: Multiple objects, both matrices and vectors, can be stored within the same file.
1304: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1306: Most users should not need to know the details of the binary storage
1307: format, since `MatLoad()` and `MatView()` completely hide these details.
1308: But for anyone who is interested, the standard binary matrix storage
1309: format is
1311: .vb
1312: PetscInt MAT_FILE_CLASSID
1313: PetscInt number of rows
1314: PetscInt number of columns
1315: PetscInt total number of nonzeros
1316: PetscInt *number nonzeros in each row
1317: PetscInt *column indices of all nonzeros (starting index is zero)
1318: PetscScalar *values of all nonzeros
1319: .ve
1320: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1321: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1322: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1324: PETSc automatically does the byte swapping for
1325: machines that store the bytes reversed. Thus if you write your own binary
1326: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1327: and `PetscBinaryWrite()` to see how this may be done.
1329: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1330: Each processor's chunk is loaded independently by its owning MPI process.
1331: Multiple objects, both matrices and vectors, can be stored within the same file.
1332: They are looked up by their PetscObject name.
1334: As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1335: by default the same structure and naming of the AIJ arrays and column count
1336: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1337: .vb
1338: save example.mat A b -v7.3
1339: .ve
1340: can be directly read by this routine (see Reference 1 for details).
1342: Depending on your MATLAB version, this format might be a default,
1343: otherwise you can set it as default in Preferences.
1345: Unless -nocompression flag is used to save the file in MATLAB,
1346: PETSc must be configured with ZLIB package.
1348: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1350: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1352: Corresponding `MatView()` is not yet implemented.
1354: The loaded matrix is actually a transpose of the original one in MATLAB,
1355: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1356: With this format, matrix is automatically transposed by PETSc,
1357: unless the matrix is marked as SPD or symmetric
1358: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1360: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1362: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1363: @*/
1364: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1365: {
1366: PetscBool flg;
1368: PetscFunctionBegin;
1372: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1374: flg = PETSC_FALSE;
1375: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1376: if (flg) {
1377: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1378: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1379: }
1380: flg = PETSC_FALSE;
1381: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1382: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1384: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1385: PetscUseTypeMethod(mat, load, viewer);
1386: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1387: PetscFunctionReturn(PETSC_SUCCESS);
1388: }
1390: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1391: {
1392: Mat_Redundant *redund = *redundant;
1394: PetscFunctionBegin;
1395: if (redund) {
1396: if (redund->matseq) { /* via MatCreateSubMatrices() */
1397: PetscCall(ISDestroy(&redund->isrow));
1398: PetscCall(ISDestroy(&redund->iscol));
1399: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1400: } else {
1401: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1402: PetscCall(PetscFree(redund->sbuf_j));
1403: PetscCall(PetscFree(redund->sbuf_a));
1404: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1405: PetscCall(PetscFree(redund->rbuf_j[i]));
1406: PetscCall(PetscFree(redund->rbuf_a[i]));
1407: }
1408: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1409: }
1411: PetscCall(PetscCommDestroy(&redund->subcomm));
1412: PetscCall(PetscFree(redund));
1413: }
1414: PetscFunctionReturn(PETSC_SUCCESS);
1415: }
1417: /*@
1418: MatDestroy - Frees space taken by a matrix.
1420: Collective
1422: Input Parameter:
1423: . A - the matrix
1425: Level: beginner
1427: Developer Note:
1428: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1429: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1430: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1431: if changes are needed here.
1433: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1434: @*/
PetscErrorCode MatDestroy(Mat *A)
{
  PetscFunctionBegin;
  if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
  /* only the last reference actually frees the object */
  if (--((PetscObject)*A)->refct > 0) {
    *A = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
  /* let the implementation free its own data first */
  PetscTryTypeMethod(*A, destroy);

  PetscCall(PetscFree((*A)->factorprefix));
  PetscCall(PetscFree((*A)->defaultvectype));
  PetscCall(PetscFree((*A)->defaultrandtype));
  PetscCall(PetscFree((*A)->bsizes));
  PetscCall(PetscFree((*A)->solvertype));
  for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
  /* break the self-reference so MatDestroy_Redundant() does not try to destroy this matrix again */
  if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
  PetscCall(MatDestroy_Redundant(&(*A)->redundant));
  PetscCall(MatProductClear(*A));
  PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
  PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
  PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
  PetscCall(MatDestroy(&(*A)->schur));
  PetscCall(VecDestroy(&(*A)->dot_vec));
  PetscCall(PetscLayoutDestroy(&(*A)->rmap));
  PetscCall(PetscLayoutDestroy(&(*A)->cmap));
  /* frees the header itself and sets *A = NULL */
  PetscCall(PetscHeaderDestroy(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1469: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1470: /*@
1471: MatSetValues - Inserts or adds a block of values into a matrix.
1472: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1473: MUST be called after all calls to `MatSetValues()` have been completed.
1475: Not Collective
1477: Input Parameters:
1478: + mat - the matrix
1479: . m - the number of rows
1480: . idxm - the global indices of the rows
1481: . n - the number of columns
1482: . idxn - the global indices of the columns
1483: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1484: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1485: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1487: Level: beginner
1489: Notes:
1490: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1491: options cannot be mixed without intervening calls to the assembly
1492: routines.
1494: `MatSetValues()` uses 0-based row and column numbers in Fortran
1495: as well as in C.
1497: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1498: simply ignored. This allows easily inserting element stiffness matrices
1499: with homogeneous Dirichlet boundary conditions that you don't want represented
1500: in the matrix.
1502: Efficiency Alert:
1503: The routine `MatSetValuesBlocked()` may offer much better efficiency
1504: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1506: Fortran Notes:
1507: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1508: .vb
1509: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1510: .ve
1512: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1514: Developer Note:
1515: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1516: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1518: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1519: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1520: @*/
PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscFunctionBeginHot;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  MatCheckPreallocated(mat, 1);

  /* the first call fixes the insert mode; mixing ADD_VALUES and INSERT_VALUES without an assembly in between is an error */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");

  if (PetscDefined(USE_DEBUG)) {
    PetscInt i, j;

    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    if (v) {
      /* v is row-major m x n; optionally reject Inf/NaN entries */
      for (i = 0; i < m; i++) {
        for (j = 0; j < n; j++) {
          if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
#if defined(PETSC_USE_COMPLEX)
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
#else
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
#endif
        }
      }
    }
    /* only the upper bound is checked: negative indices are legal and mean "ignore this row/column" */
    for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
    for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
  }

  /* setting values on an assembled matrix puts it back in the unassembled state */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1564: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1565: /*@
1566: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1567: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1568: MUST be called after all calls to `MatSetValues()` have been completed.
1570: Not Collective
1572: Input Parameters:
1573: + mat - the matrix
1574: . ism - the rows to provide
1575: . isn - the columns to provide
1576: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1577: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1578: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1580: Level: beginner
1582: Notes:
1583: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1585: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1586: options cannot be mixed without intervening calls to the assembly
1587: routines.
1589: `MatSetValues()` uses 0-based row and column numbers in Fortran
1590: as well as in C.
1592: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1593: simply ignored. This allows easily inserting element stiffness matrices
1594: with homogeneous Dirichlet boundary conditions that you don't want represented
1595: in the matrix.
1597: Fortran Note:
1598: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1600: Efficiency Alert:
1601: The routine `MatSetValuesBlocked()` may offer much better efficiency
1602: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1604: This is currently not optimized for any particular `ISType`
1606: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1607: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1608: @*/
1609: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1610: {
1611: PetscInt m, n;
1612: const PetscInt *rows, *cols;
1614: PetscFunctionBeginHot;
1616: PetscCall(ISGetIndices(ism, &rows));
1617: PetscCall(ISGetIndices(isn, &cols));
1618: PetscCall(ISGetLocalSize(ism, &m));
1619: PetscCall(ISGetLocalSize(isn, &n));
1620: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1621: PetscCall(ISRestoreIndices(ism, &rows));
1622: PetscCall(ISRestoreIndices(isn, &cols));
1623: PetscFunctionReturn(PETSC_SUCCESS);
1624: }
1626: /*@
1627: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1628: values into a matrix
1630: Not Collective
1632: Input Parameters:
1633: + mat - the matrix
1634: . row - the (block) row to set
1635: - v - a one-dimensional array that contains the values. For `MATBAIJ` they are implicitly stored as a two-dimensional array, by default in row-major order.
1636: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1638: Level: intermediate
1640: Notes:
1641: The values, `v`, are column-oriented (for the block version) and sorted
1643: All the nonzero values in `row` must be provided
1645: The matrix must have previously had its column indices set, likely by having been assembled.
1647: `row` must belong to this MPI process
1649: Fortran Note:
1650: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1652: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1653: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1654: @*/
1655: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1656: {
1657: PetscInt globalrow;
1659: PetscFunctionBegin;
1662: PetscAssertPointer(v, 3);
1663: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1664: PetscCall(MatSetValuesRow(mat, globalrow, v));
1665: PetscFunctionReturn(PETSC_SUCCESS);
1666: }
1668: /*@
1669: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1670: values into a matrix
1672: Not Collective
1674: Input Parameters:
1675: + mat - the matrix
1676: . row - the (block) row to set
1677: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1679: Level: advanced
1681: Notes:
1682: The values, `v`, are column-oriented for the block version.
1684: All the nonzeros in `row` must be provided
1686: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.
1688: `row` must belong to this process
1690: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1691: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1692: @*/
PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  PetscAssertPointer(v, 3);
  /* This routine always inserts; it may not be interleaved with ADD_VALUES calls without an assembly in between */
  PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  mat->insertmode = INSERT_VALUES;

  if (mat->assembled) {
    /* inserting into an assembled matrix puts it back into the unassembled state */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  /* dispatch to the implementation's setvaluesrow method; errors if the type does not provide one */
  PetscUseTypeMethod(mat, setvaluesrow, row, v);
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1714: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1715: /*@
1716: MatSetValuesStencil - Inserts or adds a block of values into a matrix.
1717: Using structured grid indexing
1719: Not Collective
1721: Input Parameters:
1722: + mat - the matrix
1723: . m - number of rows being entered
1724: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1725: . n - number of columns being entered
1726: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1727: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1728: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1729: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1731: Level: beginner
1733: Notes:
1734: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1736: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1737: options cannot be mixed without intervening calls to the assembly
1738: routines.
1740: The grid coordinates are across the entire grid, not just the local portion
1742: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1743: as well as in C.
1745: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1747: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1748: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1750: The columns and rows in the stencil passed in MUST be contained within the
1751: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1752: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1753: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1754: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1756: For periodic boundary conditions use negative indices for values to the left (below 0), which are
1757: obtained by wrapping values from the right edge. For values to the right of the last entry use that index plus one,
1758: etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
1759: `DM_BOUNDARY_PERIODIC` boundary type.
1761: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1762: a single value per point) you can skip filling those indices.
1764: Inspired by the structured grid interface to the HYPRE package
1765: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1767: Fortran Note:
1768: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1770: Efficiency Alert:
1771: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1772: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1774: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1775: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1776: @*/
PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);

  /* use the stack buffer when the translated indices fit, otherwise allocate */
  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* Flatten each MatStencil row coordinate into a single local index; any
     negative grid coordinate yields -1 so MatSetValuesLocal() ignores that row */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++; /* skip the MatStencil entries unused for this grid dimension */
    tmp = *dxm++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    if (mat->stencil.noc) dxm++; /* one dof per point: the c entry is unused, skip it */
    jdxm[i] = tmp;
  }
  /* same translation for the column coordinates */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    if (mat->stencil.noc) dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn)); /* no-ops when the stack buffer was used */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1823: /*@
1824: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
1825: Using structured grid indexing
1827: Not Collective
1829: Input Parameters:
1830: + mat - the matrix
1831: . m - number of rows being entered
1832: . idxm - grid coordinates for matrix rows being entered
1833: . n - number of columns being entered
1834: . idxn - grid coordinates for matrix columns being entered
1835: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1836: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1837: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1839: Level: beginner
1841: Notes:
1842: By default the values, `v`, are row-oriented and unsorted.
1843: See `MatSetOption()` for other options.
1845: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1846: options cannot be mixed without intervening calls to the assembly
1847: routines.
1849: The grid coordinates are across the entire grid, not just the local portion
1851: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1852: as well as in C.
1854: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1856: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1857: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1859: The columns and rows in the stencil passed in MUST be contained within the
1860: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1861: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1862: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1863: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1865: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1866: simply ignored. This allows easily inserting element stiffness matrices
1867: with homogeneous Dirichlet boundary conditions that you don't want represented
1868: in the matrix.
1870: Inspired by the structured grid interface to the HYPRE package
1871: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1873: Fortran Notes:
1874: `idxm` and `idxn` should be declared as
1875: .vb
1876: MatStencil idxm(4,m),idxn(4,n)
1877: .ve
1878: and the values inserted using
1879: .vb
1880: idxm(MatStencil_i,1) = i
1881: idxm(MatStencil_j,1) = j
1882: idxm(MatStencil_k,1) = k
1883: etc
1884: .ve
1886: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1888: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1889: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1890: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1891: @*/
PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);

  /* use the stack buffer when the translated block indices fit, otherwise allocate */
  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* Flatten each MatStencil row coordinate into a local block index; negative
     coordinates yield -1 so MatSetValuesBlockedLocal() ignores that row.
     Unlike MatSetValuesStencil() only the sdim spatial dimensions participate,
     and the component (c) entry is skipped unconditionally below */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++; /* skip the MatStencil entries unused for this grid dimension */
    tmp = *dxm++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    dxm++; /* skip the c entry */
    jdxm[i] = tmp;
  }
  /* same translation for the column coordinates */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn)); /* no-ops when the stack buffer was used */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1939: /*@
1940: MatSetStencil - Sets the grid information for setting values into a matrix via
1941: `MatSetValuesStencil()`
1943: Not Collective
1945: Input Parameters:
1946: + mat - the matrix
1947: . dim - dimension of the grid 1, 2, or 3
1948: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1949: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1950: - dof - number of degrees of freedom per node
1952: Level: beginner
1954: Notes:
1955: Inspired by the structured grid interface to the HYPRE package
1956: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1958: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1959: user.
1961: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1962: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1963: @*/
1964: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1965: {
1966: PetscFunctionBegin;
1968: PetscAssertPointer(dims, 3);
1969: PetscAssertPointer(starts, 4);
1971: mat->stencil.dim = dim + (dof > 1);
1972: for (PetscInt i = 0; i < dim; i++) {
1973: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1974: mat->stencil.starts[i] = starts[dim - i - 1];
1975: }
1976: mat->stencil.dims[dim] = dof;
1977: mat->stencil.starts[dim] = 0;
1978: mat->stencil.noc = (PetscBool)(dof == 1);
1979: PetscFunctionReturn(PETSC_SUCCESS);
1980: }
1982: /*@
1983: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1985: Not Collective
1987: Input Parameters:
1988: + mat - the matrix
1989: . m - the number of block rows
1990: . idxm - the global block indices
1991: . n - the number of block columns
1992: . idxn - the global block indices
1993: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1994: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1995: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
1997: Level: intermediate
1999: Notes:
2000: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
2001: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
2003: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
2004: NOT the total number of rows/columns; for example, if the block size is 2 and
2005: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
2006: The values in `idxm` would be 1 2; that is the first index for each block divided by
2007: the block size.
2009: You must call `MatSetBlockSize()` when constructing this matrix (before
2010: preallocating it).
2012: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2014: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
2015: options cannot be mixed without intervening calls to the assembly
2016: routines.
2018: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2019: as well as in C.
2021: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
2022: simply ignored. This allows easily inserting element stiffness matrices
2023: with homogeneous Dirichlet boundary conditions that you don't want represented
2024: in the matrix.
2026: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2027: internal searching must be done to determine where to place the
2028: data in the matrix storage space. By instead inserting blocks of
2029: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2030: reduced.
2032: Example:
2033: .vb
2034: Suppose m=n=2 and block size(bs) = 2 The array is
2036: 1 2 | 3 4
2037: 5 6 | 7 8
2038: - - - | - - -
2039: 9 10 | 11 12
2040: 13 14 | 15 16
2042: v[] should be passed in like
2043: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2045: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2046: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2047: .ve
2049: Fortran Notes:
2050: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2051: .vb
2052: call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
2053: .ve
2055: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2057: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2058: @*/
PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscFunctionBeginHot;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  MatCheckPreallocated(mat, 1);
  /* the first call fixes the insert mode; subsequent calls must use the same mode until assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  if (PetscDefined(USE_DEBUG)) {
    /* debug-only bounds check: each block index scaled by the block size must lie inside the matrix */
    PetscInt rbs, cbs, M, N, i;
    PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
    PetscCall(MatGetSize(mat, &M, &N));
    for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
    for (i = 0; i < n; i++)
      PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
  }
  if (mat->assembled) {
    /* inserting into an assembled matrix puts it back into the unassembled state */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvaluesblocked) PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
  else {
    /* fallback: expand each block index into its bs (resp. cbs) point indices and use MatSetValues() */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
    PetscInt i, j, bs, cbs;

    PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
    /* use the stack buffer when the expanded indices fit, otherwise allocate */
    if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      iidxm = buf;
      iidxn = buf + m * bs;
    } else {
      PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
      iidxm = bufr;
      iidxn = bufc;
    }
    for (i = 0; i < m; i++) {
      for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
    }
    if (m != n || bs != cbs || idxm != idxn) {
      for (i = 0; i < n; i++) {
        for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
      }
    } else iidxn = iidxm; /* rows and columns coincide; reuse the expanded row indices */
    PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
    PetscCall(PetscFree2(bufr, bufc)); /* no-ops when the stack buffer was used */
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2116: /*@
2117: MatGetValues - Gets a block of local values from a matrix.
2119: Not Collective; can only return values that are owned by the given process
2121: Input Parameters:
2122: + mat - the matrix
2123: . v - a logically two-dimensional array for storing the values
2124: . m - the number of rows
2125: . idxm - the global indices of the rows
2126: . n - the number of columns
2127: - idxn - the global indices of the columns
2129: Level: advanced
2131: Notes:
2132: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2133: The values, `v`, are then returned in a row-oriented format,
2134: analogous to that used by default in `MatSetValues()`.
2136: `MatGetValues()` uses 0-based row and column numbers in
2137: Fortran as well as in C.
2139: `MatGetValues()` requires that the matrix has been assembled
2140: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2141: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2142: without intermediate matrix assembly.
2144: Negative row or column indices will be ignored and those locations in `v` will be
2145: left unchanged.
2147: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2148: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2149: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2151: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2152: @*/
PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* nothing requested */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);
  /* values can only be read from an assembled, unfactored matrix */
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
  /* dispatch to the implementation's getvalues method; errors if the type does not provide one */
  PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
  PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2172: /*@
2173: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2174: defined previously by `MatSetLocalToGlobalMapping()`
2176: Not Collective
2178: Input Parameters:
2179: + mat - the matrix
2180: . nrow - number of rows
2181: . irow - the row local indices
2182: . ncol - number of columns
2183: - icol - the column local indices
2185: Output Parameter:
2186: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2187: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2189: Level: advanced
2191: Notes:
2192: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2194: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2195: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2196: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2197: with `MatSetLocalToGlobalMapping()`.
2199: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2200: `MatSetValuesLocal()`, `MatGetValues()`
2201: @*/
PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
  PetscAssertPointer(irow, 3);
  PetscAssertPointer(icol, 5);
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
  if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
  else {
    /* fallback: translate the local indices to global ones and use MatGetValues() */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
    /* use the stack buffer when the translated indices fit, otherwise allocate */
    if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      irowm = buf;
      icolm = buf + nrow;
    } else {
      PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
      irowm = bufr;
      icolm = bufc;
    }
    /* both mappings must have been set with MatSetLocalToGlobalMapping() */
    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
    PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
    PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
    PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
    PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
    PetscCall(PetscFree2(bufr, bufc)); /* no-ops when the stack buffer was used */
  }
  PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2239: /*@
2240: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2241: the same size. Currently, this can only be called once and creates the given matrix.
2243: Not Collective
2245: Input Parameters:
2246: + mat - the matrix
2247: . nb - the number of blocks
2248: . bs - the number of rows (and columns) in each block
2249: . rows - a concatenation of the rows for each block
2250: - v - a concatenation of logically two-dimensional arrays of values
2252: Level: advanced
2254: Notes:
2255: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2257: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2259: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2260: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2261: @*/
2262: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2263: {
2264: PetscFunctionBegin;
2267: PetscAssertPointer(rows, 4);
2268: PetscAssertPointer(v, 5);
2269: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2271: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2272: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2273: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2274: PetscFunctionReturn(PETSC_SUCCESS);
2275: }
2277: /*@
2278: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2279: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2280: using a local (per-processor) numbering.
2282: Not Collective
2284: Input Parameters:
2285: + x - the matrix
2286: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2287: - cmapping - column mapping
2289: Level: intermediate
2291: Note:
2292: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2294: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2295: @*/
2296: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2297: {
2298: PetscFunctionBegin;
2303: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2304: else {
2305: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2306: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2307: }
2308: PetscFunctionReturn(PETSC_SUCCESS);
2309: }
2311: /*@
2312: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2314: Not Collective
2316: Input Parameter:
2317: . A - the matrix
2319: Output Parameters:
2320: + rmapping - row mapping
2321: - cmapping - column mapping
2323: Level: advanced
2325: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2326: @*/
2327: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2328: {
2329: PetscFunctionBegin;
2332: if (rmapping) {
2333: PetscAssertPointer(rmapping, 2);
2334: *rmapping = A->rmap->mapping;
2335: }
2336: if (cmapping) {
2337: PetscAssertPointer(cmapping, 3);
2338: *cmapping = A->cmap->mapping;
2339: }
2340: PetscFunctionReturn(PETSC_SUCCESS);
2341: }
2343: /*@
2344: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2346: Logically Collective
2348: Input Parameters:
2349: + A - the matrix
2350: . rmap - row layout
2351: - cmap - column layout
2353: Level: advanced
2355: Note:
2356: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2358: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2359: @*/
PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
{
  PetscFunctionBegin;
  /* install the given layouts as the matrix's row and column layouts via PetscLayoutReference() */
  PetscCall(PetscLayoutReference(rmap, &A->rmap));
  PetscCall(PetscLayoutReference(cmap, &A->cmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2369: /*@
2370: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2372: Not Collective
2374: Input Parameter:
2375: . A - the matrix
2377: Output Parameters:
2378: + rmap - row layout
2379: - cmap - column layout
2381: Level: advanced
2383: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2384: @*/
2385: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2386: {
2387: PetscFunctionBegin;
/* Either output may be passed as NULL when not wanted; the matrix's layout
   pointers are handed back directly (borrowed, not referenced) */
2390: if (rmap) {
2391: PetscAssertPointer(rmap, 2);
2392: *rmap = A->rmap;
2393: }
2394: if (cmap) {
2395: PetscAssertPointer(cmap, 3);
2396: *cmap = A->cmap;
2397: }
2398: PetscFunctionReturn(PETSC_SUCCESS);
2399: }
2401: /*@
2402: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2403: using a local numbering of the rows and columns.
2405: Not Collective
2407: Input Parameters:
2408: + mat - the matrix
2409: . nrow - number of rows
2410: . irow - the row local indices
2411: . ncol - number of columns
2412: . icol - the column local indices
2413: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2414: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2415: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2417: Level: intermediate
2419: Notes:
2420: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2422: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2423: options cannot be mixed without intervening calls to the assembly
2424: routines.
2426: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2427: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2429: Fortran Notes:
2430: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2431: .vb
2432: call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2433: .ve
2435: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2437: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2438: `MatGetValuesLocal()`
2439: @*/
2440: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2441: {
2442: PetscFunctionBeginHot;
2445: MatCheckPreallocated(mat, 1);
2446: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2447: PetscAssertPointer(irow, 3);
2448: PetscAssertPointer(icol, 5);
/* The first call fixes the insert mode; all later calls must use the same
   mode until the matrix is assembled */
2449: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2450: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
/* Extra sanity checks compiled in only for debug builds */
2451: if (PetscDefined(USE_DEBUG)) {
2452: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2453: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2454: }
/* Inserting into an assembled matrix puts it back in the unassembled state */
2456: if (mat->assembled) {
2457: mat->was_assembled = PETSC_TRUE;
2458: mat->assembled = PETSC_FALSE;
2459: }
2460: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2461: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2462: else {
/* No type-specific implementation: translate the local indices to global
   ones and defer to MatSetValues(). A fixed stack buffer is used for
   moderately sized inserts to avoid heap allocation. */
2463: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2464: const PetscInt *irowm, *icolm;
/* NOTE(review): when neither mapping is set the stack buffer is selected
   regardless of size, but it is never written — irowm/icolm alias the
   caller's arrays below */
2466: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2467: bufr = buf;
2468: bufc = buf + nrow;
2469: irowm = bufr;
2470: icolm = bufc;
2471: } else {
2472: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2473: irowm = bufr;
2474: icolm = bufc;
2475: }
2476: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2477: else irowm = irow;
2478: if (mat->cmap->mapping) {
/* Reuse the already-translated row indices when the column mapping and
   index array coincide with the rows */
2479: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2480: else icolm = irowm;
2481: } else icolm = icol;
2482: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2483: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2484: }
2485: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2486: PetscFunctionReturn(PETSC_SUCCESS);
2487: }
2489: /*@
2490: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2491: using a local ordering of the nodes a block at a time.
2493: Not Collective
2495: Input Parameters:
2496: + mat - the matrix
2497: . nrow - number of rows
2498: . irow - the row local indices
2499: . ncol - number of columns
2500: . icol - the column local indices
2501: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2502: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2503: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2505: Level: intermediate
2507: Notes:
2508: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2509: before using this routine.
2511: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2512: options cannot be mixed without intervening calls to the assembly
2513: routines.
2515: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2516: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2518: Fortran Notes:
2519: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2520: .vb
2521: call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
2522: .ve
2524: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2526: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2527: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2528: @*/
2529: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2530: {
2531: PetscFunctionBeginHot;
2534: MatCheckPreallocated(mat, 1);
2535: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2536: PetscAssertPointer(irow, 3);
2537: PetscAssertPointer(icol, 5);
/* The first call fixes the insert mode; all later calls must use the same
   mode until the matrix is assembled */
2538: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2539: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
/* Extra sanity checks compiled in only for debug builds */
2540: if (PetscDefined(USE_DEBUG)) {
2541: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2542: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2543: }
/* Inserting into an assembled matrix puts it back in the unassembled state */
2545: if (mat->assembled) {
2546: mat->was_assembled = PETSC_TRUE;
2547: mat->assembled = PETSC_FALSE;
2548: }
/* Debug-only consistency checks: the matrix block sizes must agree with the
   block sizes of the local-to-global mappings when those mappings exist */
2549: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2550: PetscInt irbs, rbs;
2551: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2552: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2553: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2554: }
2555: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2556: PetscInt icbs, cbs;
2557: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2558: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2559: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2560: }
2561: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2562: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2563: else {
/* No type-specific implementation: translate local BLOCK indices to global
   block indices and defer to MatSetValuesBlocked(). A fixed stack buffer is
   used for moderately sized inserts to avoid heap allocation. */
2564: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2565: const PetscInt *irowm, *icolm;
/* NOTE(review): when neither mapping is set the stack buffer is selected
   regardless of size, but it is never written — irowm/icolm alias the
   caller's arrays below */
2567: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2568: bufr = buf;
2569: bufc = buf + nrow;
2570: irowm = bufr;
2571: icolm = bufc;
2572: } else {
2573: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2574: irowm = bufr;
2575: icolm = bufc;
2576: }
2577: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2578: else irowm = irow;
2579: if (mat->cmap->mapping) {
/* Reuse the already-translated row indices when the column mapping and
   index array coincide with the rows */
2580: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2581: else icolm = irowm;
2582: } else icolm = icol;
2583: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2584: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2585: }
2586: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2587: PetscFunctionReturn(PETSC_SUCCESS);
2588: }
2590: /*@
2591: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$. Where `D` is defined by the inode or block structure of the diagonal
2593: Collective
2595: Input Parameters:
2596: + mat - the matrix
2597: - x - the vector to be multiplied
2599: Output Parameter:
2600: . y - the result
2602: Level: developer
2604: Note:
2605: The vectors `x` and `y` cannot be the same. I.e., one cannot
2606: call `MatMultDiagonalBlock`(A,y,y).
2608: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2609: @*/
2610: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2611: {
2612: PetscFunctionBegin;
2618: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2619: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2620: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2621: MatCheckPreallocated(mat, 1);
/* No generic fallback exists: PetscUseTypeMethod() errors if the type does
   not implement multdiagonalblock */
2623: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
/* Bump y's object state so cached norms/derived data are invalidated */
2624: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2625: PetscFunctionReturn(PETSC_SUCCESS);
2626: }
2628: /*@
2629: MatMult - Computes the matrix-vector product, $y = Ax$.
2631: Neighbor-wise Collective
2633: Input Parameters:
2634: + mat - the matrix
2635: - x - the vector to be multiplied
2637: Output Parameter:
2638: . y - the result
2640: Level: beginner
2642: Note:
2643: The vectors `x` and `y` cannot be the same. I.e., one cannot
2644: call `MatMult`(A,y,y).
2646: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2647: @*/
2648: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2649: {
2650: PetscFunctionBegin;
2654: VecCheckAssembled(x);
2656: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2657: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2658: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
/* Both global and local (per-process) dimensions must be compatible:
   x matches the column layout, y matches the row layout */
2659: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2660: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2661: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2662: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2663: PetscCall(VecSetErrorIfLocked(y, 3));
/* Optionally verify x contains no Inf/NaN before the multiply */
2664: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2665: MatCheckPreallocated(mat, 1);
/* x is locked read-only for the duration of the multiply */
2667: PetscCall(VecLockReadPush(x));
2668: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2669: PetscUseTypeMethod(mat, mult, x, y);
2670: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2671: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2672: PetscCall(VecLockReadPop(x));
2673: PetscFunctionReturn(PETSC_SUCCESS);
2674: }
2676: /*@
2677: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2679: Neighbor-wise Collective
2681: Input Parameters:
2682: + mat - the matrix
2683: - x - the vector to be multiplied
2685: Output Parameter:
2686: . y - the result
2688: Level: beginner
2690: Notes:
2691: The vectors `x` and `y` cannot be the same. I.e., one cannot
2692: call `MatMultTranspose`(A,y,y).
2694: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiple,
2695: use `MatMultHermitianTranspose()`
2697: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2698: @*/
2699: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2700: {
2701: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2703: PetscFunctionBegin;
2707: VecCheckAssembled(x);
2710: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2711: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2712: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
/* Transpose product: x matches the ROW layout and y the COLUMN layout
   (the reverse of MatMult) */
2713: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2714: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2715: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2716: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2717: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2718: MatCheckPreallocated(mat, 1);
/* If no transpose kernel exists, a matrix KNOWN symmetric may use the plain
   multiply (A^T == A); otherwise this is unsupported for the type */
2720: if (!mat->ops->multtranspose) {
2721: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2722: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2723: } else op = mat->ops->multtranspose;
2724: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2725: PetscCall(VecLockReadPush(x));
2726: PetscCall((*op)(mat, x, y));
2727: PetscCall(VecLockReadPop(x));
2728: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2729: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2730: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2731: PetscFunctionReturn(PETSC_SUCCESS);
2732: }
2734: /*@
2735: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2737: Neighbor-wise Collective
2739: Input Parameters:
2740: + mat - the matrix
2741: - x - the vector to be multiplied
2743: Output Parameter:
2744: . y - the result
2746: Level: beginner
2748: Notes:
2749: The vectors `x` and `y` cannot be the same. I.e., one cannot
2750: call `MatMultHermitianTranspose`(A,y,y).
2752: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2754: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2756: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2757: @*/
2758: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2759: {
2760: PetscFunctionBegin;
2766: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2767: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2768: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
/* Hermitian-transpose product: x matches the row layout, y the column layout */
2769: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2770: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2771: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2772: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2773: MatCheckPreallocated(mat, 1);
2775: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2776: #if defined(PETSC_USE_COMPLEX)
/* Complex scalars: prefer a dedicated kernel, or the plain multiply when the
   matrix is known Hermitian (A^H == A) */
2777: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2778: PetscCall(VecLockReadPush(x));
2779: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2780: else PetscUseTypeMethod(mat, mult, x, y);
2781: PetscCall(VecLockReadPop(x));
2782: } else {
/* Fallback: y = conj(A^T conj(x)) computed through a work vector */
2783: Vec w;
2784: PetscCall(VecDuplicate(x, &w));
2785: PetscCall(VecCopy(x, w));
2786: PetscCall(VecConjugate(w));
2787: PetscCall(MatMultTranspose(mat, w, y));
2788: PetscCall(VecDestroy(&w));
2789: PetscCall(VecConjugate(y));
2790: }
2791: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2792: #else
/* Real scalars: the Hermitian transpose is just the transpose */
2793: PetscCall(MatMultTranspose(mat, x, y));
2794: #endif
2795: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2796: PetscFunctionReturn(PETSC_SUCCESS);
2797: }
2799: /*@
2800: MatMultAdd - Computes $v3 = v2 + A * v1$.
2802: Neighbor-wise Collective
2804: Input Parameters:
2805: + mat - the matrix
2806: . v1 - the vector to be multiplied by `mat`
2807: - v2 - the vector to be added to the result
2809: Output Parameter:
2810: . v3 - the result
2812: Level: beginner
2814: Note:
2815: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2816: call `MatMultAdd`(A,v1,v2,v1).
2818: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2819: @*/
2820: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2821: {
2822: PetscFunctionBegin;
2829: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2830: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2831: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
/* The global-size checks on v2/v3 below are deliberately disabled; only
   the per-process (local) sizes are verified */
2832: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2833: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2834: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2835: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
/* v2 == v3 is allowed; only v1 must differ from the output */
2836: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2837: MatCheckPreallocated(mat, 1);
2839: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2840: PetscCall(VecLockReadPush(v1));
2841: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2842: PetscCall(VecLockReadPop(v1));
2843: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2844: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2845: PetscFunctionReturn(PETSC_SUCCESS);
2846: }
2848: /*@
2849: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2851: Neighbor-wise Collective
2853: Input Parameters:
2854: + mat - the matrix
2855: . v1 - the vector to be multiplied by the transpose of the matrix
2856: - v2 - the vector to be added to the result
2858: Output Parameter:
2859: . v3 - the result
2861: Level: beginner
2863: Note:
2864: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2865: call `MatMultTransposeAdd`(A,v1,v2,v1).
2867: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2868: @*/
2869: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2870: {
/* Fall back to multadd only when the matrix is KNOWN symmetric (then
   A^T v1 == A v1). mat->symmetric is a PetscBool3, and PETSC_BOOL3_UNKNOWN
   is nonzero, so testing it for plain truth (as the previous code did) would
   silently substitute A*v1 for A^T*v1 when symmetry has merely not been
   established. This now matches the test used in MatMultTranspose(). */
2871: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric == PETSC_BOOL3_TRUE) ? mat->ops->multadd : mat->ops->multtransposeadd;
2873: PetscFunctionBegin;
2880: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2881: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
/* Transpose product: v1 matches the row layout; v2/v3 match the column layout */
2882: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2883: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2884: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
/* v2 == v3 is allowed; only v1 must differ from the output */
2885: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2886: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2887: MatCheckPreallocated(mat, 1);
2889: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2890: PetscCall(VecLockReadPush(v1));
2891: PetscCall((*op)(mat, v1, v2, v3));
2892: PetscCall(VecLockReadPop(v1));
2893: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2894: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2895: PetscFunctionReturn(PETSC_SUCCESS);
2896: }
2898: /*@
2899: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2901: Neighbor-wise Collective
2903: Input Parameters:
2904: + mat - the matrix
2905: . v1 - the vector to be multiplied by the Hermitian transpose
2906: - v2 - the vector to be added to the result
2908: Output Parameter:
2909: . v3 - the result
2911: Level: beginner
2913: Note:
2914: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2915: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2917: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2918: @*/
2919: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2920: {
2921: PetscFunctionBegin;
2928: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2929: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2930: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
/* Hermitian-transpose product: v1 matches the row layout; v2/v3 match the column layout */
2931: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2932: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2933: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2934: MatCheckPreallocated(mat, 1);
2936: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2937: PetscCall(VecLockReadPush(v1));
2938: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2939: else {
/* Fallback: v3 = v2 + conj(A^T conj(v1)); w holds conj(v1) and z the
   transpose product before conjugation */
2940: Vec w, z;
2941: PetscCall(VecDuplicate(v1, &w));
2942: PetscCall(VecCopy(v1, w));
2943: PetscCall(VecConjugate(w));
2944: PetscCall(VecDuplicate(v3, &z));
2945: PetscCall(MatMultTranspose(mat, w, z));
2946: PetscCall(VecDestroy(&w));
2947: PetscCall(VecConjugate(z));
/* VecWAXPY requires distinct vectors, so add in place when v2 aliases v3 */
2948: if (v2 != v3) PetscCall(VecWAXPY(v3, 1.0, v2, z));
2949: else PetscCall(VecAXPY(v3, 1.0, z));
2950: PetscCall(VecDestroy(&z));
2951: }
2952: PetscCall(VecLockReadPop(v1));
2953: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2954: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2955: PetscFunctionReturn(PETSC_SUCCESS);
2956: }
/* Default implementation of the A-inner product: val = (A x) . y, using a
   work vector cached on the matrix (created lazily, reused across calls) */
2958: PetscErrorCode MatADot_Default(Mat mat, Vec x, Vec y, PetscScalar *val)
2959: {
2960: PetscFunctionBegin;
2961: if (!mat->dot_vec) PetscCall(MatCreateVecs(mat, &mat->dot_vec, NULL));
2962: PetscCall(MatMult(mat, x, mat->dot_vec));
2963: PetscCall(VecDot(mat->dot_vec, y, val));
2964: PetscFunctionReturn(PETSC_SUCCESS);
2965: }
/* Default implementation of the A-norm: sqrt((x, x)_A). Validates that the
   quadratic form behaves like one from a Hermitian positive-definite matrix:
   a negative real part or a non-negligible imaginary part triggers an error */
2967: PetscErrorCode MatANorm_Default(Mat mat, Vec x, PetscReal *val)
2968: {
2969: PetscScalar sval;
2971: PetscFunctionBegin;
2972: PetscCall(MatADot_Default(mat, x, x, &sval));
2973: PetscCheck(PetscRealPart(sval) >= 0.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix argument is not positive definite");
/* Allow a small imaginary component from floating-point roundoff */
2974: PetscCheck(PetscAbsReal(PetscImaginaryPart(sval)) < 100 * PETSC_MACHINE_EPSILON, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix argument is not Hermitian");
2975: *val = PetscSqrtReal(PetscRealPart(sval));
2976: PetscFunctionReturn(PETSC_SUCCESS);
2977: }
2979: /*@
2980: MatADot - Computes the inner product with respect to a matrix, i.e., $(x, y)_A = y^H A x$ where $A$ is symmetric (Hermitian when using complex)
2981: positive definite.
2983: Collective
2985: Input Parameters:
2986: + mat - matrix used to define the inner product
2987: . x - first vector
2988: - y - second vector
2990: Output Parameter:
2991: . val - the dot product with respect to `A`
2993: Level: intermediate
2995: Note:
2996: For complex vectors, `MatADot()` computes
2997: $$
2998: val = (x,y)_A = y^H A x,
2999: $$
3000: where $y^H$ denotes the conjugate transpose of `y`. Note that this corresponds to the "mathematicians" complex
3001: inner product where the SECOND argument gets the complex conjugate.
3003: .seealso: [](ch_matrices), `Mat`, `MatANorm()`, `VecDot()`, `VecNorm()`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`
3004: @*/
3005: PetscErrorCode MatADot(Mat mat, Vec x, Vec y, PetscScalar *val)
3006: {
3007: PetscFunctionBegin;
3011: VecCheckAssembled(x);
3013: VecCheckAssembled(y);
3016: PetscAssertPointer(val, 4);
3017: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3018: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
/* x must conform to the columns and y to the rows (val = y^H A x) */
3019: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3020: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
3021: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
3022: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
3023: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
3024: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_TRUE));
3025: MatCheckPreallocated(mat, 1);
/* Both input vectors are locked read-only around the type dispatch */
3027: PetscCall(VecLockReadPush(x));
3028: PetscCall(VecLockReadPush(y));
3029: PetscCall(PetscLogEventBegin(MAT_ADot, mat, x, y, 0));
3030: PetscUseTypeMethod(mat, adot, x, y, val);
3031: PetscCall(PetscLogEventEnd(MAT_ADot, mat, x, y, 0));
3032: PetscCall(VecLockReadPop(y));
3033: PetscCall(VecLockReadPop(x));
3034: PetscFunctionReturn(PETSC_SUCCESS);
3035: }
3037: /*@
3038: MatANorm - Computes the norm with respect to a matrix, i.e., $(x, x)_A^{1/2} = (x^H A x)^{1/2}$ where $A$ is symmetric (Hermitian when using complex)
3039: positive definite.
3041: Collective
3043: Input Parameters:
3044: + mat - matrix used to define norm
3045: - x - the vector to compute the norm of
3047: Output Parameter:
3048: . val - the norm with respect to `A`
3050: Level: intermediate
3052: Note:
3053: For complex vectors, `MatANorm()` computes
3054: $$
3055: val = (x,x)_A^{1/2} = (x^H A x)^{1/2},
3056: $$
3057: where $x^H$ denotes the conjugate transpose of `x`.
3059: .seealso: [](ch_matrices), `Mat`, `MatADot()`, `VecDot()`, `VecNorm()`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`
3060: @*/
3061: PetscErrorCode MatANorm(Mat mat, Vec x, PetscReal *val)
3062: {
3063: PetscFunctionBegin;
3067: VecCheckAssembled(x);
3069: PetscAssertPointer(val, 3);
3070: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3071: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
/* x must conform to both rows and columns, which implicitly requires the
   matrix to be square with matching layouts */
3072: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3073: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
3074: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
3075: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
3076: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
3077: MatCheckPreallocated(mat, 1);
3079: PetscCall(VecLockReadPush(x));
3080: PetscCall(PetscLogEventBegin(MAT_ANorm, mat, x, 0, 0));
3081: PetscUseTypeMethod(mat, anorm, x, val);
3082: PetscCall(PetscLogEventEnd(MAT_ANorm, mat, x, 0, 0));
3083: PetscCall(VecLockReadPop(x));
3084: PetscFunctionReturn(PETSC_SUCCESS);
3085: }
3087: /*@
3088: MatGetFactorType - gets the type of factorization a matrix is
3090: Not Collective
3092: Input Parameter:
3093: . mat - the matrix
3095: Output Parameter:
3096: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3098: Level: intermediate
3100: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3101: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3102: @*/
3103: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3104: {
3105: PetscFunctionBegin;
3108: PetscAssertPointer(t, 2);
/* simple accessor: report the factorization type stored on the matrix */
3109: *t = mat->factortype;
3110: PetscFunctionReturn(PETSC_SUCCESS);
3111: }
3113: /*@
3114: MatSetFactorType - sets the type of factorization a matrix is
3116: Logically Collective
3118: Input Parameters:
3119: + mat - the matrix
3120: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3122: Level: intermediate
3124: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3125: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3126: @*/
3127: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3128: {
3129: PetscFunctionBegin;
/* simple setter: record the factorization type; no object state change is performed here */
3132: mat->factortype = t;
3133: PetscFunctionReturn(PETSC_SUCCESS);
3134: }
3136: /*@
3137: MatGetInfo - Returns information about matrix storage (number of
3138: nonzeros, memory, etc.).
3140: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3142: Input Parameters:
3143: + mat - the matrix
3144: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3146: Output Parameter:
3147: . info - matrix information context
3149: Options Database Key:
3150: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3152: Level: intermediate
3154: Notes:
3155: The `MatInfo` context contains a variety of matrix data, including
3156: number of nonzeros allocated and used, number of mallocs during
3157: matrix assembly, etc. Additional information for factored matrices
3158: is provided (such as the fill ratio, number of mallocs during
3159: factorization, etc.).
3161: Example:
3162: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3163: data within the `MatInfo` context. For example,
3164: .vb
3165: MatInfo info;
3166: Mat A;
3167: double mal, nz_a, nz_u;
3169: MatGetInfo(A, MAT_LOCAL, &info);
3170: mal = info.mallocs;
3171: nz_a = info.nz_allocated;
3172: .ve
3174: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3175: @*/
3176: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3177: {
3178: PetscFunctionBegin;
3181: PetscAssertPointer(info, 3);
3182: MatCheckPreallocated(mat, 1);
/* delegate to the type-specific implementation; collective only for MAT_GLOBAL_MAX / MAT_GLOBAL_SUM (see manual page above) */
3183: PetscUseTypeMethod(mat, getinfo, flag, info);
3184: PetscFunctionReturn(PETSC_SUCCESS);
3185: }
3187: /*
3188: This is used by external packages where it is not easy to get the info from the actual
3189: matrix factorization.
3190: */
3191: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3192: {
3193: PetscFunctionBegin;
/* A and flag are intentionally unused: the external factorization package provides no statistics, so report all zeros */
3194: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3195: PetscFunctionReturn(PETSC_SUCCESS);
3196: }
3198: /*@
3199: MatLUFactor - Performs in-place LU factorization of matrix.
3201: Collective
3203: Input Parameters:
3204: + mat - the matrix
3205: . row - row permutation
3206: . col - column permutation
3207: - info - options for factorization, includes
3208: .vb
3209: fill - expected fill as ratio of original fill.
3210: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3211: Run with the option -info to determine an optimal value to use
3212: .ve
3214: Level: developer
3216: Notes:
3217: Most users should employ the `KSP` interface for linear solvers
3218: instead of working directly with matrix algebra routines such as this.
3219: See, e.g., `KSPCreate()`.
3221: This changes the state of the matrix to a factored matrix; it cannot be used
3222: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3224: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3225: when not using `KSP`.
3227: Fortran Note:
3228: A valid (non-null) `info` argument must be provided
3230: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3231: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3232: @*/
3233: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3234: {
3235: MatFactorInfo tinfo;
3237: PetscFunctionBegin;
3241: if (info) PetscAssertPointer(info, 4);
3243: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3244: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3245: MatCheckPreallocated(mat, 1);
/* substitute default factorization options when the caller passed info = NULL */
3246: if (!info) {
3247: PetscCall(MatFactorInfoInitialize(&tinfo));
3248: info = &tinfo;
3249: }
3251: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3252: PetscUseTypeMethod(mat, lufactor, row, col, info);
3253: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
/* the matrix contents changed (in-place factorization), so bump its object state */
3254: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3255: PetscFunctionReturn(PETSC_SUCCESS);
3256: }
3258: /*@
3259: MatILUFactor - Performs in-place ILU factorization of matrix.
3261: Collective
3263: Input Parameters:
3264: + mat - the matrix
3265: . row - row permutation
3266: . col - column permutation
3267: - info - structure containing
3268: .vb
3269: levels - number of levels of fill.
3270: expected fill - as ratio of original fill.
3271: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3272: missing diagonal entries)
3273: .ve
3275: Level: developer
3277: Notes:
3278: Most users should employ the `KSP` interface for linear solvers
3279: instead of working directly with matrix algebra routines such as this.
3280: See, e.g., `KSPCreate()`.
3282: Probably really in-place only when level of fill is zero, otherwise allocates
3283: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3284: when not using `KSP`.
3286: Fortran Note:
3287: A valid (non-null) `info` argument must be provided
3289: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3290: @*/
3291: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3292: {
3293: PetscFunctionBegin;
/* NOTE(review): unlike MatLUFactor(), a non-NULL info is required here — no default MatFactorInfo is substituted */
3297: PetscAssertPointer(info, 4);
3299: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3300: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3301: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3302: MatCheckPreallocated(mat, 1);
3304: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3305: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3306: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
/* in-place factorization modified the matrix: invalidate cached state */
3307: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3308: PetscFunctionReturn(PETSC_SUCCESS);
3309: }
3311: /*@
3312: MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
3313: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3315: Collective
3317: Input Parameters:
3318: + fact - the factor matrix obtained with `MatGetFactor()`
3319: . mat - the matrix
3320: . row - the row permutation
3321: . col - the column permutation
3322: - info - options for factorization, includes
3323: .vb
3324: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3325: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3326: .ve
3328: Level: developer
3330: Notes:
3331: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3333: Most users should employ the simplified `KSP` interface for linear solvers
3334: instead of working directly with matrix algebra routines such as this.
3335: See, e.g., `KSPCreate()`.
3337: Fortran Note:
3338: A valid (non-null) `info` argument must be provided
3340: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3341: @*/
3342: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3343: {
3344: MatFactorInfo tinfo;
3346: PetscFunctionBegin;
3351: if (info) PetscAssertPointer(info, 5);
3354: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3355: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3356: MatCheckPreallocated(mat, 2);
/* substitute default factorization options when the caller passed info = NULL */
3357: if (!info) {
3358: PetscCall(MatFactorInfoInitialize(&tinfo));
3359: info = &tinfo;
3360: }
/* skip event logging when the symbolic phase is trivial for this factor type */
3362: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3363: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3364: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3365: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3366: PetscFunctionReturn(PETSC_SUCCESS);
3367: }
3369: /*@
3370: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3371: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3373: Collective
3375: Input Parameters:
3376: + fact - the factor matrix obtained with `MatGetFactor()`
3377: . mat - the matrix
3378: - info - options for factorization
3380: Level: developer
3382: Notes:
3383: See `MatLUFactor()` for in-place factorization. See
3384: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3386: Most users should employ the `KSP` interface for linear solvers
3387: instead of working directly with matrix algebra routines such as this.
3388: See, e.g., `KSPCreate()`.
3390: Fortran Note:
3391: A valid (non-null) `info` argument must be provided
3393: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3394: @*/
3395: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3396: {
3397: MatFactorInfo tinfo;
3399: PetscFunctionBegin;
3404: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
/* the factor matrix must have been created (MatGetFactor()/MatLUFactorSymbolic()) with the same global dimensions as mat */
3405: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3406: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3408: MatCheckPreallocated(mat, 2);
/* substitute default factorization options when the caller passed info = NULL */
3409: if (!info) {
3410: PetscCall(MatFactorInfoInitialize(&tinfo));
3411: info = &tinfo;
3412: }
/* when the symbolic phase was trivial, the numeric work is logged as the full MAT_LUFactor event instead */
3414: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3415: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3416: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3417: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3418: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
/* honor the -mat_factor_view option on the completed factor */
3419: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3420: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3421: PetscFunctionReturn(PETSC_SUCCESS);
3422: }
3424: /*@
3425: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3426: symmetric matrix.
3428: Collective
3430: Input Parameters:
3431: + mat - the matrix
3432: . perm - row and column permutations
3433: - info - expected fill as ratio of original fill
3435: Level: developer
3437: Notes:
3438: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3439: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3441: Most users should employ the `KSP` interface for linear solvers
3442: instead of working directly with matrix algebra routines such as this.
3443: See, e.g., `KSPCreate()`.
3445: Fortran Note:
3446: A valid (non-null) `info` argument must be provided
3448: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3449: `MatGetOrdering()`
3450: @*/
3451: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3452: {
3453: MatFactorInfo tinfo;
3455: PetscFunctionBegin;
3458: if (info) PetscAssertPointer(info, 3);
/* Cholesky requires a square (symmetric) matrix */
3460: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3461: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3462: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3463: MatCheckPreallocated(mat, 1);
/* substitute default factorization options when the caller passed info = NULL */
3464: if (!info) {
3465: PetscCall(MatFactorInfoInitialize(&tinfo));
3466: info = &tinfo;
3467: }
3469: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3470: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3471: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
/* in-place factorization modified the matrix: invalidate cached state */
3472: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3473: PetscFunctionReturn(PETSC_SUCCESS);
3474: }
3476: /*@
3477: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3478: of a symmetric matrix.
3480: Collective
3482: Input Parameters:
3483: + fact - the factor matrix obtained with `MatGetFactor()`
3484: . mat - the matrix
3485: . perm - row and column permutations
3486: - info - options for factorization, includes
3487: .vb
3488: fill - expected fill as ratio of original fill.
3489: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3490: Run with the option -info to determine an optimal value to use
3491: .ve
3493: Level: developer
3495: Notes:
3496: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3497: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3499: Most users should employ the `KSP` interface for linear solvers
3500: instead of working directly with matrix algebra routines such as this.
3501: See, e.g., `KSPCreate()`.
3503: Fortran Note:
3504: A valid (non-null) `info` argument must be provided
3506: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3507: `MatGetOrdering()`
3508: @*/
3509: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3510: {
3511: MatFactorInfo tinfo;
3513: PetscFunctionBegin;
3517: if (info) PetscAssertPointer(info, 4);
/* Cholesky requires a square (symmetric) matrix */
3520: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3521: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3522: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3523: MatCheckPreallocated(mat, 2);
/* substitute default factorization options when the caller passed info = NULL */
3524: if (!info) {
3525: PetscCall(MatFactorInfoInitialize(&tinfo));
3526: info = &tinfo;
3527: }
/* skip event logging when the symbolic phase is trivial for this factor type */
3529: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3530: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3531: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3532: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3533: PetscFunctionReturn(PETSC_SUCCESS);
3534: }
3536: /*@
3537: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3538: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3539: `MatCholeskyFactorSymbolic()`.
3541: Collective
3543: Input Parameters:
3544: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3545: . mat - the initial matrix that is to be factored
3546: - info - options for factorization
3548: Level: developer
3550: Note:
3551: Most users should employ the `KSP` interface for linear solvers
3552: instead of working directly with matrix algebra routines such as this.
3553: See, e.g., `KSPCreate()`.
3555: Fortran Note:
3556: A valid (non-null) `info` argument must be provided
3558: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3559: @*/
3560: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3561: {
3562: MatFactorInfo tinfo;
3564: PetscFunctionBegin;
3569: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
/* fact must have been created with matching global dimensions (MatGetFactor()/MatCholeskyFactorSymbolic()) */
3570: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3571: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3572: MatCheckPreallocated(mat, 2);
/* substitute default factorization options when the caller passed info = NULL */
3573: if (!info) {
3574: PetscCall(MatFactorInfoInitialize(&tinfo));
3575: info = &tinfo;
3576: }
/* when the symbolic phase was trivial, log the numeric work as the full MAT_CholeskyFactor event instead */
3578: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3579: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3580: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3581: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3582: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
/* honor the -mat_factor_view option on the completed factor */
3583: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3584: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3585: PetscFunctionReturn(PETSC_SUCCESS);
3586: }
3588: /*@
3589: MatQRFactor - Performs in-place QR factorization of matrix.
3591: Collective
3593: Input Parameters:
3594: + mat - the matrix
3595: . col - column permutation
3596: - info - options for factorization, includes
3597: .vb
3598: fill - expected fill as ratio of original fill.
3599: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3600: Run with the option -info to determine an optimal value to use
3601: .ve
3603: Level: developer
3605: Notes:
3606: Most users should employ the `KSP` interface for linear solvers
3607: instead of working directly with matrix algebra routines such as this.
3608: See, e.g., `KSPCreate()`.
3610: This changes the state of the matrix to a factored matrix; it cannot be used
3611: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3613: Fortran Note:
3614: A valid (non-null) `info` argument must be provided
3616: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3617: `MatSetUnfactored()`
3618: @*/
3619: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3620: {
3621: PetscFunctionBegin;
3624: if (info) PetscAssertPointer(info, 3);
3626: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3627: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3628: MatCheckPreallocated(mat, 1);
3629: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
/* QR is dispatched via a composed method ("MatQRFactor_C") rather than an ops-table entry;
   NOTE(review): a NULL info is passed through unchanged here (no default is substituted, unlike MatLUFactor()) */
3630: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3631: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
/* in-place factorization modified the matrix: invalidate cached state */
3632: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3633: PetscFunctionReturn(PETSC_SUCCESS);
3634: }
3636: /*@
3637: MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
3638: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3640: Collective
3642: Input Parameters:
3643: + fact - the factor matrix obtained with `MatGetFactor()`
3644: . mat - the matrix
3645: . col - column permutation
3646: - info - options for factorization, includes
3647: .vb
3648: fill - expected fill as ratio of original fill.
3649: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3650: Run with the option -info to determine an optimal value to use
3651: .ve
3653: Level: developer
3655: Note:
3656: Most users should employ the `KSP` interface for linear solvers
3657: instead of working directly with matrix algebra routines such as this.
3658: See, e.g., `KSPCreate()`.
3660: Fortran Note:
3661: A valid (non-null) `info` argument must be provided
3663: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3664: @*/
3665: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3666: {
3667: MatFactorInfo tinfo;
3669: PetscFunctionBegin;
3673: if (info) PetscAssertPointer(info, 4);
3676: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3677: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3678: MatCheckPreallocated(mat, 2);
/* substitute default factorization options when the caller passed info = NULL */
3679: if (!info) {
3680: PetscCall(MatFactorInfoInitialize(&tinfo));
3681: info = &tinfo;
3682: }
/* dispatched via a composed method; skip event logging when the symbolic phase is trivial */
3684: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3685: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3686: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3687: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3688: PetscFunctionReturn(PETSC_SUCCESS);
3689: }
3691: /*@
3692: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3693: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3695: Collective
3697: Input Parameters:
3698: + fact - the factor matrix obtained with `MatGetFactor()`
3699: . mat - the matrix
3700: - info - options for factorization
3702: Level: developer
3704: Notes:
3705: See `MatQRFactor()` for in-place factorization.
3707: Most users should employ the `KSP` interface for linear solvers
3708: instead of working directly with matrix algebra routines such as this.
3709: See, e.g., `KSPCreate()`.
3711: Fortran Note:
3712: A valid (non-null) `info` argument must be provided
3714: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3715: @*/
3716: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3717: {
3718: MatFactorInfo tinfo;
3720: PetscFunctionBegin;
3725: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
/* fact must have been created with matching global dimensions (MatGetFactor()/MatQRFactorSymbolic()) */
3726: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3727: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3729: MatCheckPreallocated(mat, 2);
/* substitute default factorization options when the caller passed info = NULL */
3730: if (!info) {
3731: PetscCall(MatFactorInfoInitialize(&tinfo));
3732: info = &tinfo;
3733: }
/* when the symbolic phase was trivial, log the numeric work as the full MAT_QRFactor event instead */
3735: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3736: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3737: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3738: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3739: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
/* honor the -mat_factor_view option on the completed factor */
3740: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3741: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3742: PetscFunctionReturn(PETSC_SUCCESS);
3743: }
3745: /*@
3746: MatSolve - Solves $A x = b$, given a factored matrix.
3748: Neighbor-wise Collective
3750: Input Parameters:
3751: + mat - the factored matrix
3752: - b - the right-hand-side vector
3754: Output Parameter:
3755: . x - the result vector
3757: Level: developer
3759: Notes:
3760: The vectors `b` and `x` cannot be the same. I.e., one cannot
3761: call `MatSolve`(A,x,x).
3763: Most users should employ the `KSP` interface for linear solvers
3764: instead of working directly with matrix algebra routines such as this.
3765: See, e.g., `KSPCreate()`.
3767: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3768: @*/
3769: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3770: {
3771: PetscFunctionBegin;
3776: PetscCheckSameComm(mat, 1, b, 2);
3777: PetscCheckSameComm(mat, 1, x, 3);
/* in-place solve is not supported: the solution and right-hand side must be distinct vectors */
3778: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3779: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3780: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3781: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
/* 0x0 matrix: nothing to solve */
3782: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3783: MatCheckPreallocated(mat, 1);
3785: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
/* if the factorization previously failed, flag x via VecFlag() and skip the solve */
3786: PetscCall(VecFlag(x, mat->factorerrortype));
3787: if (mat->factorerrortype) PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3788: else PetscUseTypeMethod(mat, solve, b, x);
3789: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
/* x was written: bump its object state */
3790: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3791: PetscFunctionReturn(PETSC_SUCCESS);
3792: }
/* Fallback implementation of MatMatSolve()/MatMatSolveTranspose(): solves A X = B (or A^T X = B when
   trans is PETSC_TRUE) one dense column at a time using the single-vector solve/solvetranspose method. */
3794: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3795: {
3796: Vec b, x;
3797: PetscInt N;
3798: PetscErrorCode (*f)(Mat, Vec, Vec);
3799: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3801: PetscFunctionBegin;
/* a recorded factorization failure short-circuits the solve: mark X via MatSetInf() and return */
3802: if (A->factorerrortype) {
3803: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3804: PetscCall(MatSetInf(X));
3805: PetscFunctionReturn(PETSC_SUCCESS);
3806: }
/* for a transpose solve, fall back to the plain solve when A is symmetric and no solvetranspose is provided */
3807: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3808: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
/* when A is not bound to the CPU, convert host dense B/X to the matching device dense type (CUDA/HIP) */
3809: PetscCall(MatBoundToCPU(A, &Abound));
3810: if (!Abound) {
3811: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3812: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3813: }
3814: #if PetscDefined(HAVE_CUDA)
3815: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3816: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3817: #elif PetscDefined(HAVE_HIP)
3818: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3819: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3820: #endif
/* solve column by column: read column i of B, write column i of X */
3821: PetscCall(MatGetSize(B, NULL, &N));
3822: for (PetscInt i = 0; i < N; i++) {
3823: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3824: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3825: PetscCall((*f)(A, b, x));
3826: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3827: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3828: }
/* convert back in place so the caller sees the original host dense type */
3829: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3830: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3831: PetscFunctionReturn(PETSC_SUCCESS);
3832: }
3834: /*@
3835: MatMatSolve - Solves $A X = B$, given a factored matrix.
3837: Neighbor-wise Collective
3839: Input Parameters:
3840: + A - the factored matrix
3841: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3843: Output Parameter:
3844: . X - the result matrix (dense matrix)
3846: Level: developer
3848: Note:
3849: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3850: otherwise, `B` and `X` cannot be the same.
3852: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3853: @*/
3854: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3855: {
3856: PetscFunctionBegin;
3861: PetscCheckSameComm(A, 1, B, 2);
3862: PetscCheckSameComm(A, 1, X, 3);
3863: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3864: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3865: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
/* 0x0 matrix: nothing to solve */
3866: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3867: MatCheckPreallocated(A, 1);
3869: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
/* use the type-specific matsolve when available, otherwise the column-by-column fallback */
3870: if (!A->ops->matsolve) {
3871: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3872: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3873: } else PetscUseTypeMethod(A, matsolve, B, X);
3874: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
/* X was written: bump its object state */
3875: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3876: PetscFunctionReturn(PETSC_SUCCESS);
3877: }
3879: /*@
3880: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3882: Neighbor-wise Collective
3884: Input Parameters:
3885: + A - the factored matrix
3886: - B - the right-hand-side matrix (`MATDENSE` matrix)
3888: Output Parameter:
3889: . X - the result matrix (dense matrix)
3891: Level: developer
3893: Note:
3894: The matrices `B` and `X` cannot be the same. I.e., one cannot
3895: call `MatMatSolveTranspose`(A,X,X).
3897: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3898: @*/
PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
{
  PetscFunctionBegin;
  PetscCheckSameComm(A, 1, B, 2);
  PetscCheckSameComm(A, 1, X, 3);
  PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
  /* Conformity for A^T X = B; note X may have MORE columns than B (>=), the extra
     columns are simply untouched. */
  PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
  PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
  PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
  PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
  if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
  MatCheckPreallocated(A, 1);

  /* Logged under the same MAT_MatSolve event as MatMatSolve() */
  PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
  if (!A->ops->matsolvetranspose) {
    /* PETSC_TRUE selects the transposed solve inside the generic fallback */
    PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
    PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
  } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
  PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)X)); /* X changed: invalidate cached state */
  PetscFunctionReturn(PETSC_SUCCESS);
}
3926: /*@
3927: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3929: Neighbor-wise Collective
3931: Input Parameters:
3932: + A - the factored matrix
3933: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3935: Output Parameter:
3936: . X - the result matrix (dense matrix)
3938: Level: developer
3940: Note:
3941: For MUMPS, it only supports centralized sparse compressed column format on the host processor for right-hand side matrix. User must create `Bt` in sparse compressed row
3942: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3944: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3945: @*/
3946: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3947: {
3948: PetscFunctionBegin;
3953: PetscCheckSameComm(A, 1, Bt, 2);
3954: PetscCheckSameComm(A, 1, X, 3);
3956: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3957: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3958: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3959: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3960: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3961: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3962: MatCheckPreallocated(A, 1);
3964: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3965: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3966: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3967: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3968: PetscFunctionReturn(PETSC_SUCCESS);
3969: }
3971: /*@
3972: MatForwardSolve - Solves $ L x = b $, given a factored matrix, $A = LU $, or
3973: $U^T*D^(1/2) x = b$, given a factored symmetric matrix, $A = U^T*D*U$,
3975: Neighbor-wise Collective
3977: Input Parameters:
3978: + mat - the factored matrix
3979: - b - the right-hand-side vector
3981: Output Parameter:
3982: . x - the result vector
3984: Level: developer
3986: Notes:
3987: `MatSolve()` should be used for most applications, as it performs
3988: a forward solve followed by a backward solve.
3990: The vectors `b` and `x` cannot be the same, i.e., one cannot
3991: call `MatForwardSolve`(A,x,x).
3993: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3994: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
3995: `MatForwardSolve()` solves $U^T*D y = b$, and
3996: `MatBackwardSolve()` solves $U x = y$.
3997: Thus they do not provide a symmetric preconditioner.
3999: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
4000: @*/
PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCheckSameComm(mat, 1, b, 2);
  PetscCheckSameComm(mat, 1, x, 3);
  PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  /* Conformity of mat with the solution x and right-hand side b (global and local) */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
  PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
  MatCheckPreallocated(mat, 1);

  /* The type must supply forwardsolve; PetscUseTypeMethod errors if it does not */
  PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
  PetscUseTypeMethod(mat, forwardsolve, b, x);
  PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached state */
  PetscFunctionReturn(PETSC_SUCCESS);
}
4024: /*@
MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$, or
$D^{1/2} U x = b$, given a factored symmetric matrix, $A = U^T D U$
4028: Neighbor-wise Collective
4030: Input Parameters:
4031: + mat - the factored matrix
4032: - b - the right-hand-side vector
4034: Output Parameter:
4035: . x - the result vector
4037: Level: developer
4039: Notes:
4040: `MatSolve()` should be used for most applications, as it performs
4041: a forward solve followed by a backward solve.
4043: The vectors `b` and `x` cannot be the same. I.e., one cannot
4044: call `MatBackwardSolve`(A,x,x).
4046: For matrix in `MATSEQBAIJ` format with block size larger than 1,
4047: the diagonal blocks are not implemented as $D = D^(1/2) * D^(1/2)$ yet.
4048: `MatForwardSolve()` solves $U^T*D y = b$, and
4049: `MatBackwardSolve()` solves $U x = y$.
4050: Thus they do not provide a symmetric preconditioner.
4052: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
4053: @*/
PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCheckSameComm(mat, 1, b, 2);
  PetscCheckSameComm(mat, 1, x, 3);
  PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  /* Conformity of mat with the solution x and right-hand side b (global and local);
     same checks as MatForwardSolve() */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
  PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
  MatCheckPreallocated(mat, 1);

  /* The type must supply backwardsolve; PetscUseTypeMethod errors if it does not */
  PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
  PetscUseTypeMethod(mat, backwardsolve, b, x);
  PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached state */
  PetscFunctionReturn(PETSC_SUCCESS);
}
4077: /*@
4078: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
4080: Neighbor-wise Collective
4082: Input Parameters:
4083: + mat - the factored matrix
4084: . b - the right-hand-side vector
4085: - y - the vector to be added to
4087: Output Parameter:
4088: . x - the result vector
4090: Level: developer
4092: Note:
4093: The vectors `b` and `x` cannot be the same. I.e., one cannot
4094: call `MatSolveAdd`(A,x,y,x).
4096: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
4097: @*/
4098: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4099: {
4100: PetscScalar one = 1.0;
4101: Vec tmp;
4103: PetscFunctionBegin;
4109: PetscCheckSameComm(mat, 1, b, 2);
4110: PetscCheckSameComm(mat, 1, y, 3);
4111: PetscCheckSameComm(mat, 1, x, 4);
4112: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4113: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4114: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4115: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4116: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4117: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4118: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4119: MatCheckPreallocated(mat, 1);
4121: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4122: PetscCall(VecFlag(x, mat->factorerrortype));
4123: if (mat->factorerrortype) {
4124: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4125: } else if (mat->ops->solveadd) {
4126: PetscUseTypeMethod(mat, solveadd, b, y, x);
4127: } else {
4128: /* do the solve then the add manually */
4129: if (x != y) {
4130: PetscCall(MatSolve(mat, b, x));
4131: PetscCall(VecAXPY(x, one, y));
4132: } else {
4133: PetscCall(VecDuplicate(x, &tmp));
4134: PetscCall(VecCopy(x, tmp));
4135: PetscCall(MatSolve(mat, b, x));
4136: PetscCall(VecAXPY(x, one, tmp));
4137: PetscCall(VecDestroy(&tmp));
4138: }
4139: }
4140: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4141: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4142: PetscFunctionReturn(PETSC_SUCCESS);
4143: }
4145: /*@
4146: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4148: Neighbor-wise Collective
4150: Input Parameters:
4151: + mat - the factored matrix
4152: - b - the right-hand-side vector
4154: Output Parameter:
4155: . x - the result vector
4157: Level: developer
4159: Notes:
4160: The vectors `b` and `x` cannot be the same. I.e., one cannot
4161: call `MatSolveTranspose`(A,x,x).
4163: Most users should employ the `KSP` interface for linear solvers
4164: instead of working directly with matrix algebra routines such as this.
4165: See, e.g., `KSPCreate()`.
4167: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4168: @*/
PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
{
  /* If the type lacks a dedicated transpose solve but the matrix is flagged symmetric,
     A^T x = b coincides with A x = b, so fall back to the plain solve routine. */
  PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;

  PetscFunctionBegin;
  PetscCheckSameComm(mat, 1, b, 2);
  PetscCheckSameComm(mat, 1, x, 3);
  PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
  /* Note the transposed roles: rows(mat) match x, columns match b */
  PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
  PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
  MatCheckPreallocated(mat, 1);
  PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
  PetscCall(VecFlag(x, mat->factorerrortype));
  if (mat->factorerrortype) {
    /* Factorization previously failed: report and skip the solve */
    PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
  } else {
    PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
    PetscCall((*f)(mat, b, x));
  }
  PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached state */
  PetscFunctionReturn(PETSC_SUCCESS);
}
4198: /*@
MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
factored matrix.
4202: Neighbor-wise Collective
4204: Input Parameters:
4205: + mat - the factored matrix
4206: . b - the right-hand-side vector
4207: - y - the vector to be added to
4209: Output Parameter:
4210: . x - the result vector
4212: Level: developer
4214: Note:
4215: The vectors `b` and `x` cannot be the same. I.e., one cannot
4216: call `MatSolveTransposeAdd`(A,x,y,x).
4218: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4219: @*/
4220: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4221: {
4222: PetscScalar one = 1.0;
4223: Vec tmp;
4224: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4226: PetscFunctionBegin;
4232: PetscCheckSameComm(mat, 1, b, 2);
4233: PetscCheckSameComm(mat, 1, y, 3);
4234: PetscCheckSameComm(mat, 1, x, 4);
4235: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4236: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4237: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4238: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4239: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4240: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4241: MatCheckPreallocated(mat, 1);
4243: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4244: PetscCall(VecFlag(x, mat->factorerrortype));
4245: if (mat->factorerrortype) {
4246: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4247: } else if (f) {
4248: PetscCall((*f)(mat, b, y, x));
4249: } else {
4250: /* do the solve then the add manually */
4251: if (x != y) {
4252: PetscCall(MatSolveTranspose(mat, b, x));
4253: PetscCall(VecAXPY(x, one, y));
4254: } else {
4255: PetscCall(VecDuplicate(x, &tmp));
4256: PetscCall(VecCopy(x, tmp));
4257: PetscCall(MatSolveTranspose(mat, b, x));
4258: PetscCall(VecAXPY(x, one, tmp));
4259: PetscCall(VecDestroy(&tmp));
4260: }
4261: }
4262: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4263: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4264: PetscFunctionReturn(PETSC_SUCCESS);
4265: }
4267: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4268: /*@
4269: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4271: Neighbor-wise Collective
4273: Input Parameters:
4274: + mat - the matrix
4275: . b - the right-hand side
4276: . omega - the relaxation factor
4277: . flag - flag indicating the type of SOR (see below)
4278: . shift - diagonal shift
4279: . its - the number of iterations
4280: - lits - the number of local iterations
4282: Output Parameter:
4283: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4285: SOR Flags:
4286: + `SOR_FORWARD_SWEEP` - forward SOR
4287: . `SOR_BACKWARD_SWEEP` - backward SOR
4288: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4289: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
. `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4291: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4292: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4293: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies upper/lower triangular part of matrix to vector (with `omega`)
4294: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4296: Level: developer
4298: Notes:
4299: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4300: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4301: on each processor.
4303: Application programmers will not generally use `MatSOR()` directly,
4304: but instead will employ `PCSOR` or `PCEISENSTAT`
4306: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with inodes, this does a block SOR smoothing, otherwise it does a pointwise smoothing.
4307: For `MATAIJ` matrices with inodes, the block sizes are determined by the inode sizes, not the block size set with `MatSetBlockSize()`
4309: Vectors `x` and `b` CANNOT be the same
4311: The flags are implemented as bitwise inclusive or operations.
4312: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4313: to specify a zero initial guess for SSOR.
4315: Developer Note:
4316: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4318: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4319: @*/
PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
{
  PetscFunctionBegin;
  PetscCheckSameComm(mat, 1, b, 2);
  PetscCheckSameComm(mat, 1, x, 8);
  /* SOR operates on the assembled, unfactored matrix itself */
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* Size conformity of mat with solution x and right-hand side b */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
  PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
  /* Both the global and the local iteration counts must be positive */
  PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
  PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
  PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");

  MatCheckPreallocated(mat, 1);
  /* The type must supply sor; PetscUseTypeMethod errors if it does not */
  PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
  PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
  PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached state */
  PetscFunctionReturn(PETSC_SUCCESS);
}
4346: /*
4347: Default matrix copy routine.
4348: */
4349: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4350: {
4351: PetscInt i, rstart = 0, rend = 0, nz;
4352: const PetscInt *cwork;
4353: const PetscScalar *vwork;
4355: PetscFunctionBegin;
4356: if (B->assembled) PetscCall(MatZeroEntries(B));
4357: if (str == SAME_NONZERO_PATTERN) {
4358: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4359: for (i = rstart; i < rend; i++) {
4360: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4361: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4362: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4363: }
4364: } else {
4365: PetscCall(MatAYPX(B, 0.0, A, str));
4366: }
4367: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4368: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4369: PetscFunctionReturn(PETSC_SUCCESS);
4370: }
4372: /*@
4373: MatCopy - Copies a matrix to another matrix.
4375: Collective
4377: Input Parameters:
4378: + A - the matrix
4379: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4381: Output Parameter:
4382: . B - where the copy is put
4384: Level: intermediate
4386: Notes:
4387: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4389: `MatCopy()` copies the matrix entries of a matrix to another existing
4390: matrix (after first zeroing the second matrix). A related routine is
4391: `MatConvert()`, which first creates a new matrix and then copies the data.
4393: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4394: @*/
4395: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4396: {
4397: PetscInt i;
4399: PetscFunctionBegin;
4404: PetscCheckSameComm(A, 1, B, 2);
4405: MatCheckPreallocated(B, 2);
4406: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4407: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4408: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4409: A->cmap->N, B->cmap->N);
4410: MatCheckPreallocated(A, 1);
4411: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4413: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4414: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4415: else PetscCall(MatCopy_Basic(A, B, str));
4417: B->stencil.dim = A->stencil.dim;
4418: B->stencil.noc = A->stencil.noc;
4419: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4420: B->stencil.dims[i] = A->stencil.dims[i];
4421: B->stencil.starts[i] = A->stencil.starts[i];
4422: }
4424: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4425: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4426: PetscFunctionReturn(PETSC_SUCCESS);
4427: }
4429: /*@
4430: MatConvert - Converts a matrix to another matrix, either of the same
4431: or different type.
4433: Collective
4435: Input Parameters:
4436: + mat - the matrix
4437: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4438: same type as the original matrix.
4439: - reuse - denotes if the destination matrix is to be created or reused.
4440: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4441: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4443: Output Parameter:
4444: . M - pointer to place new matrix
4446: Level: intermediate
4448: Notes:
4449: `MatConvert()` first creates a new matrix and then copies the data from
4450: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4451: entries of one matrix to another already existing matrix context.
4453: Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4454: the MPI communicator of the generated matrix is always the same as the communicator
4455: of the input matrix.
4457: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4458: @*/
PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
{
  PetscBool sametype, issame, flg;
  PetscBool3 issymmetric, ishermitian, isspd;
  char convname[256], mtype[256];
  Mat B;

  PetscFunctionBegin;
  PetscAssertPointer(M, 4);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* Allow the command line to override the requested target type */
  PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
  if (flg) newtype = mtype;

  PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
  PetscCall(PetscStrcmp(newtype, "same", &issame));
  PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
  }

  /* In-place conversion to the same type is trivially done */
  if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
    PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* Cache Mat options because some converters use MatHeaderReplace() */
  issymmetric = mat->symmetric;
  ishermitian = mat->hermitian;
  isspd       = mat->spd;

  if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
    /* Same type into a new matrix is just a duplicate with values */
    PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
    PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
  } else {
    PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
    const char *prefix[3] = {"seq", "mpi", ""};
    PetscInt    i;
    /*
       Order of precedence:
       0) See if newtype is a superclass of the current matrix.
       1) See if a specialized converter is known to the current matrix.
       2) See if a specialized converter is known to the desired matrix class.
       3) See if a good general converter is registered for the desired class
          (as of 6/27/03 only MATMPIADJ falls into this category).
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 0) See if newtype is a superclass of the current matrix.
          i.e mat is mpiaij and newtype is aij */
    for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
      /* Try "seq"+newtype, "mpi"+newtype, then newtype itself against the current type name */
      PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
      PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
      PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
      PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
      if (flg) {
        if (reuse == MAT_INPLACE_MATRIX) {
          PetscCall(PetscInfo(mat, "Early return\n"));
          PetscFunctionReturn(PETSC_SUCCESS);
        } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
          PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
          PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
          PetscFunctionReturn(PETSC_SUCCESS);
        } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
          PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
          PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
          PetscFunctionReturn(PETSC_SUCCESS);
        }
      }
    }
    /* 1) See if a specialized converter is known to the current matrix and the desired class */
    for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
      /* Build the composed-function name "MatConvert_<fromtype>_<prefix><totype>_C"
         and query it on the source matrix */
      PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
      PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
      PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
      PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
      PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
      PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
      PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
      PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
      if (conv) goto foundconv;
    }

    /* 2) See if a specialized converter is known to the desired matrix class. */
    /* A throwaway matrix of the target type is created just to query its converters */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
    PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
    PetscCall(MatSetType(B, newtype));
    for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
      PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
      PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
      PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
      PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
      PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
      PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
      PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
      PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
      if (conv) {
        PetscCall(MatDestroy(&B));
        goto foundconv;
      }
    }

    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
    PetscCall(MatDestroy(&B));
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (mat->ops->convert) conv = mat->ops->convert;
    PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
    if (conv) goto foundconv;

    /* 5) Use a really basic converter. */
    PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
    conv = MatConvert_Basic;

  foundconv:
    PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
    PetscCall((*conv)(mat, newtype, reuse, M));
    if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
      /* the block sizes must be same if the mappings are copied over */
      (*M)->rmap->bs = mat->rmap->bs;
      (*M)->cmap->bs = mat->cmap->bs;
      PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
      PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
      (*M)->rmap->mapping = mat->rmap->mapping;
      (*M)->cmap->mapping = mat->cmap->mapping;
    }
    /* Carry over stencil metadata to the converted matrix */
    (*M)->stencil.dim = mat->stencil.dim;
    (*M)->stencil.noc = mat->stencil.noc;
    for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
      (*M)->stencil.dims[i]   = mat->stencil.dims[i];
      (*M)->stencil.starts[i] = mat->stencil.starts[i];
    }
    PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
  }
  PetscCall(PetscObjectStateIncrease((PetscObject)*M));

  /* Reset Mat options */
  if (issymmetric != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PetscBool3ToBool(issymmetric)));
  if (ishermitian != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PetscBool3ToBool(ishermitian)));
  if (isspd != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SPD, PetscBool3ToBool(isspd)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
4611: /*@
4612: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4614: Not Collective
4616: Input Parameter:
4617: . mat - the matrix, must be a factored matrix
4619: Output Parameter:
4620: . type - the string name of the package (do not free this string)
4622: Level: intermediate
4624: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4625: @*/
4626: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4627: {
4628: PetscErrorCode (*conv)(Mat, MatSolverType *);
4630: PetscFunctionBegin;
4633: PetscAssertPointer(type, 2);
4634: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4635: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4636: if (conv) PetscCall((*conv)(mat, type));
4637: else *type = MATSOLVERPETSC;
4638: PetscFunctionReturn(PETSC_SUCCESS);
4639: }
/* Linked-list node holding, for one matrix type, the factor-creation routines a
   solver package provides. (The "Specifc" spelling is the established internal
   name; renaming it would touch every user of the registry.) */
typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
struct _MatSolverTypeForSpecifcType {
  MatType mtype; /* the matrix type these factorization routines apply to */
  /* no entry for MAT_FACTOR_NONE */
  PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
  MatSolverTypeForSpecifcType next; /* next matrix type registered for the same package */
};

/* Linked-list node for one registered solver package (e.g. "petsc", "superlu") */
typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
struct _MatSolverTypeHolder {
  char                       *name;     /* package name, compared case-insensitively */
  MatSolverTypeForSpecifcType handlers; /* per-matrix-type factorization routines */
  MatSolverTypeHolder         next;
};

/* Head of the global solver-package registry; populated by MatSolverTypeRegister()
   and torn down by MatSolverTypeDestroy() */
static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4658: /*@C
4659: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4661: Logically Collective, No Fortran Support
4663: Input Parameters:
4664: + package - name of the package, for example `petsc` or `superlu`
4665: . mtype - the matrix type that works with this package
4666: . ftype - the type of factorization supported by the package
4667: - createfactor - routine that will create the factored matrix ready to be used
4669: Level: developer
4671: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4672: `MatGetFactor()`
4673: @*/
PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
{
  MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
  PetscBool flg;
  MatSolverTypeForSpecifcType inext, iprev = NULL;

  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  /* Empty registry: create the first package holder with a single matrix-type handler */
  if (!next) {
    PetscCall(PetscNew(&MatSolverTypeHolders));
    PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
    PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
    PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
    /* createfactor[] has no slot for MAT_FACTOR_NONE, hence the -1 index shift */
    MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Look for an existing holder whose package name matches (case-insensitively) */
  while (next) {
    PetscCall(PetscStrcasecmp(package, next->name, &flg));
    if (flg) {
      PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
      inext = next->handlers;
      /* Package found: look for an existing handler for this matrix type */
      while (inext) {
        PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
        if (flg) {
          /* Matrix type already registered: overwrite the slot for this factor type */
          inext->createfactor[(int)ftype - 1] = createfactor;
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        iprev = inext;
        inext = inext->next;
      }
      /* Matrix type not yet registered for this package: append a new handler */
      PetscCall(PetscNew(&iprev->next));
      PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
      iprev->next->createfactor[(int)ftype - 1] = createfactor;
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    prev = next;
    next = next->next;
  }
  /* Package not yet registered: append a new holder at the tail of the list */
  PetscCall(PetscNew(&prev->next));
  PetscCall(PetscStrallocpy(package, &prev->next->name));
  PetscCall(PetscNew(&prev->next->handlers));
  PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
  prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
  PetscFunctionReturn(PETSC_SUCCESS);
}
4720: /*@C
4721: MatSolverTypeGet - Gets the function that creates the factor matrix if it exist
4723: Input Parameters:
4724: + type - name of the package, for example `petsc` or `superlu`, if this is 'NULL', then the first result that satisfies the other criteria is returned
4725: . ftype - the type of factorization supported by the type
4726: - mtype - the matrix type that works with this type
4728: Output Parameters:
4729: + foundtype - `PETSC_TRUE` if the type was registered
4730: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4731: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4733: Calling sequence of `createfactor`:
4734: + A - the matrix providing the factor matrix
4735: . ftype - the `MatFactorType` of the factor requested
4736: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4738: Level: developer
4740: Note:
4741: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4742: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4743: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4745: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4746: `MatInitializePackage()`
4747: @*/
PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
{
  MatSolverTypeHolder next = MatSolverTypeHolders;
  PetscBool flg;
  MatSolverTypeForSpecifcType inext;

  PetscFunctionBegin;
  /* all outputs are optional; initialize the ones the caller asked for */
  if (foundtype) *foundtype = PETSC_FALSE;
  if (foundmtype) *foundmtype = PETSC_FALSE;
  if (createfactor) *createfactor = NULL;

  if (type) {
    /* specific package requested: locate it by (case-insensitive) name ... */
    while (next) {
      PetscCall(PetscStrcasecmp(type, next->name, &flg));
      if (flg) {
        if (foundtype) *foundtype = PETSC_TRUE;
        inext = next->handlers;
        /* ... then match mtype, accepting a registered base class as a prefix
           of the requested type (e.g. "seqaij" matches "seqaijcusparse") */
        while (inext) {
          PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
          if (flg) {
            if (foundmtype) *foundmtype = PETSC_TRUE;
            /* may be NULL if this package never registered this factor type */
            if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
            PetscFunctionReturn(PETSC_SUCCESS);
          }
          inext = inext->next;
        }
      }
      next = next->next;
    }
  } else {
    /* no package requested: first pass requires an exact mtype match AND a
       non-NULL routine for the requested factor type */
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
        if (flg && inext->createfactor[(int)ftype - 1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype) *foundmtype = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        inext = inext->next;
      }
      next = next->next;
    }
    /* try with base classes inext->mtype */
    next = MatSolverTypeHolders;
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
        if (flg && inext->createfactor[(int)ftype - 1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype) *foundmtype = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
          PetscFunctionReturn(PETSC_SUCCESS);
        }
        inext = inext->next;
      }
      next = next->next;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
4812: PetscErrorCode MatSolverTypeDestroy(void)
4813: {
4814: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4815: MatSolverTypeForSpecifcType inext, iprev;
4817: PetscFunctionBegin;
4818: while (next) {
4819: PetscCall(PetscFree(next->name));
4820: inext = next->handlers;
4821: while (inext) {
4822: PetscCall(PetscFree(inext->mtype));
4823: iprev = inext;
4824: inext = inext->next;
4825: PetscCall(PetscFree(iprev));
4826: }
4827: prev = next;
4828: next = next->next;
4829: PetscCall(PetscFree(prev));
4830: }
4831: MatSolverTypeHolders = NULL;
4832: PetscFunctionReturn(PETSC_SUCCESS);
4833: }
4835: /*@
4836: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4838: Logically Collective
4840: Input Parameter:
4841: . mat - the matrix
4843: Output Parameter:
4844: . flg - `PETSC_TRUE` if uses the ordering
4846: Level: developer
4848: Note:
4849: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4850: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4852: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4853: @*/
4854: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4855: {
4856: PetscFunctionBegin;
4857: *flg = mat->canuseordering;
4858: PetscFunctionReturn(PETSC_SUCCESS);
4859: }
4861: /*@
4862: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4864: Logically Collective
4866: Input Parameters:
4867: + mat - the matrix obtained with `MatGetFactor()`
4868: - ftype - the factorization type to be used
4870: Output Parameter:
4871: . otype - the preferred ordering type
4873: Level: developer
4875: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4876: @*/
4877: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4878: {
4879: PetscFunctionBegin;
4880: *otype = mat->preferredordering[ftype];
4881: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4882: PetscFunctionReturn(PETSC_SUCCESS);
4883: }
4885: /*@
4886: MatGetFactor - Returns a matrix suitable to calls to routines such as `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatILUFactorSymbolic()`,
4887: `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactorNumeric()`, `MatILUFactorNumeric()`, and
4888: `MatICCFactorNumeric()`
4890: Collective
4892: Input Parameters:
4893: + mat - the matrix
4894: . type - name of solver type, for example, `superlu_dist`, `petsc` (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies
4895: the other criteria is returned
4896: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4898: Output Parameter:
4899: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4901: Options Database Keys:
4902: + -pc_factor_mat_solver_type type - choose the type at run time. When using `KSP` solvers
4903: . -pc_factor_mat_factor_on_host (true|false) - do matrix factorization on host (with device matrices). Default is doing it on device
4904: - -pc_factor_mat_solve_on_host (true|false) - do matrix solve on host (with device matrices). Default is doing it on device
4906: Level: intermediate
4908: Notes:
4909: Some of the packages, such as MUMPS, have options for controlling the factorization, these are in the form `-prefix_mat_packagename_packageoption`
4910: (for example, `-mat_mumps_icntl_6 1`) where `prefix` is normally set automatically from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly,
4911: without using a `PC`, one can set the prefix by
4912: calling `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4914: Some PETSc matrix formats have alternative solvers available that are provided by alternative packages
4915: such as PaStiX, SuperLU_DIST, MUMPS etc. PETSc must have been configured to use the external solver,
4916: using the corresponding `./configure` option such as `--download-package` or `--with-package-dir`.
4918: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4919: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4920: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4922: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4923: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4925: Developer Note:
4926: This should actually be called `MatCreateFactor()` since it creates a new factor object
4928: The `MatGetFactor()` implementations should not be accessing the PETSc options database or making other decisions about solver options,
4929: that should be delayed until the later operations. This is to ensure the correct options prefix has been set in the factor matrix.
4931: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4932: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`,
4933: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`,
4934: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatILUFactorSymbolic()`,
4935: `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactorNumeric()`, `MatILUFactorNumeric()`,
4936: `MatICCFactorNumeric()`
4937: @*/
4938: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4939: {
4940: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4941: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4943: PetscFunctionBegin;
4947: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4948: MatCheckPreallocated(mat, 1);
4950: PetscCall(MatIsShell(mat, &shell));
4951: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4952: if (hasop) {
4953: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4954: PetscFunctionReturn(PETSC_SUCCESS);
4955: }
4957: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4958: if (!foundtype) {
4959: if (type) {
4960: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4961: ((PetscObject)mat)->type_name, type);
4962: } else {
4963: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4964: }
4965: }
4966: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4967: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4969: PetscCall((*conv)(mat, ftype, f));
4970: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4971: PetscFunctionReturn(PETSC_SUCCESS);
4972: }
4974: /*@
4975: MatGetFactorAvailable - Returns a flag if matrix supports particular type and factor type
4977: Not Collective
4979: Input Parameters:
4980: + mat - the matrix
4981: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's default)
4982: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4984: Output Parameter:
4985: . flg - PETSC_TRUE if the factorization is available
4987: Level: intermediate
4989: Notes:
4990: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4991: such as pastix, superlu, mumps etc.
4993: PETSc must have been ./configure to use the external solver, using the option --download-package
4995: Developer Note:
4996: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4998: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4999: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
5000: @*/
5001: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
5002: {
5003: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
5005: PetscFunctionBegin;
5007: PetscAssertPointer(flg, 4);
5009: *flg = PETSC_FALSE;
5010: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
5012: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5013: MatCheckPreallocated(mat, 1);
5015: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
5016: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
5017: PetscFunctionReturn(PETSC_SUCCESS);
5018: }
5020: /*@
5021: MatDuplicate - Duplicates a matrix including the non-zero structure.
5023: Collective
5025: Input Parameters:
5026: + mat - the matrix
5027: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
5028: See the manual page for `MatDuplicateOption()` for an explanation of these options.
5030: Output Parameter:
5031: . M - pointer to place new matrix
5033: Level: intermediate
5035: Notes:
5036: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
5038: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
5040: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
5042: When original mat is a product of matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
5043: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
5044: User should not use `MatDuplicate()` to create new matrix `M` if `M` is intended to be reused as the product of matrix operation.
5046: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
5047: @*/
5048: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
5049: {
5050: Mat B;
5051: VecType vtype;
5052: PetscInt i;
5053: PetscObject dm, container_h, container_d;
5054: PetscErrorCodeFn *viewf;
5056: PetscFunctionBegin;
5059: PetscAssertPointer(M, 3);
5060: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
5061: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5062: MatCheckPreallocated(mat, 1);
5064: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
5065: PetscUseTypeMethod(mat, duplicate, op, M);
5066: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
5067: B = *M;
5069: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
5070: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
5071: PetscCall(MatGetVecType(mat, &vtype));
5072: PetscCall(MatSetVecType(B, vtype));
5074: B->stencil.dim = mat->stencil.dim;
5075: B->stencil.noc = mat->stencil.noc;
5076: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
5077: B->stencil.dims[i] = mat->stencil.dims[i];
5078: B->stencil.starts[i] = mat->stencil.starts[i];
5079: }
5081: B->nooffproczerorows = mat->nooffproczerorows;
5082: B->nooffprocentries = mat->nooffprocentries;
5084: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
5085: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
5086: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
5087: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
5088: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
5089: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
5090: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
5091: PetscCall(PetscObjectStateIncrease((PetscObject)B));
5092: PetscFunctionReturn(PETSC_SUCCESS);
5093: }
5095: /*@
5096: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
5098: Logically Collective
5100: Input Parameter:
5101: . mat - the matrix
5103: Output Parameter:
5104: . v - the diagonal of the matrix
5106: Level: intermediate
5108: Note:
5109: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5110: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5111: is larger than `ndiag`, the values of the remaining entries are unspecified.
5113: Currently only correct in parallel for square matrices.
5115: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5116: @*/
5117: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5118: {
5119: PetscFunctionBegin;
5123: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5124: MatCheckPreallocated(mat, 1);
5125: if (PetscDefined(USE_DEBUG)) {
5126: PetscInt nv, row, col, ndiag;
5128: PetscCall(VecGetLocalSize(v, &nv));
5129: PetscCall(MatGetLocalSize(mat, &row, &col));
5130: ndiag = PetscMin(row, col);
5131: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5132: }
5134: PetscUseTypeMethod(mat, getdiagonal, v);
5135: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5136: PetscFunctionReturn(PETSC_SUCCESS);
5137: }
5139: /*@
5140: MatGetRowMin - Gets the minimum value (of the real part) of each
5141: row of the matrix
5143: Logically Collective
5145: Input Parameter:
5146: . mat - the matrix
5148: Output Parameters:
5149: + v - the vector for storing the maximums
5150: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5152: Level: intermediate
5154: Note:
5155: The result of this call are the same as if one converted the matrix to dense format
5156: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5158: This code is only implemented for a couple of matrix formats.
5160: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5161: `MatGetRowMax()`
5162: @*/
5163: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5164: {
5165: PetscFunctionBegin;
5169: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5171: if (!mat->cmap->N) {
5172: PetscCall(VecSet(v, PETSC_MAX_REAL));
5173: if (idx) {
5174: PetscInt i, m = mat->rmap->n;
5175: for (i = 0; i < m; i++) idx[i] = -1;
5176: }
5177: } else {
5178: MatCheckPreallocated(mat, 1);
5179: }
5180: PetscUseTypeMethod(mat, getrowmin, v, idx);
5181: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5182: PetscFunctionReturn(PETSC_SUCCESS);
5183: }
5185: /*@
5186: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5187: row of the matrix
5189: Logically Collective
5191: Input Parameter:
5192: . mat - the matrix
5194: Output Parameters:
5195: + v - the vector for storing the minimums
5196: - idx - the indices of the column found for each row (or `NULL` if not needed)
5198: Level: intermediate
5200: Notes:
5201: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5202: row is 0 (the first column).
5204: This code is only implemented for a couple of matrix formats.
5206: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5207: @*/
5208: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5209: {
5210: PetscFunctionBegin;
5214: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5215: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5217: if (!mat->cmap->N) {
5218: PetscCall(VecSet(v, 0.0));
5219: if (idx) {
5220: PetscInt i, m = mat->rmap->n;
5221: for (i = 0; i < m; i++) idx[i] = -1;
5222: }
5223: } else {
5224: MatCheckPreallocated(mat, 1);
5225: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5226: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5227: }
5228: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5229: PetscFunctionReturn(PETSC_SUCCESS);
5230: }
5232: /*@
5233: MatGetRowMax - Gets the maximum value (of the real part) of each
5234: row of the matrix
5236: Logically Collective
5238: Input Parameter:
5239: . mat - the matrix
5241: Output Parameters:
5242: + v - the vector for storing the maximums
5243: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5245: Level: intermediate
5247: Notes:
5248: The result of this call are the same as if one converted the matrix to dense format
5249: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5251: This code is only implemented for a couple of matrix formats.
5253: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5254: @*/
5255: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5256: {
5257: PetscFunctionBegin;
5261: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5263: if (!mat->cmap->N) {
5264: PetscCall(VecSet(v, PETSC_MIN_REAL));
5265: if (idx) {
5266: PetscInt i, m = mat->rmap->n;
5267: for (i = 0; i < m; i++) idx[i] = -1;
5268: }
5269: } else {
5270: MatCheckPreallocated(mat, 1);
5271: PetscUseTypeMethod(mat, getrowmax, v, idx);
5272: }
5273: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5274: PetscFunctionReturn(PETSC_SUCCESS);
5275: }
5277: /*@
5278: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5279: row of the matrix
5281: Logically Collective
5283: Input Parameter:
5284: . mat - the matrix
5286: Output Parameters:
5287: + v - the vector for storing the maximums
5288: - idx - the indices of the column found for each row (or `NULL` if not needed)
5290: Level: intermediate
5292: Notes:
5293: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5294: row is 0 (the first column).
5296: This code is only implemented for a couple of matrix formats.
5298: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5299: @*/
5300: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5301: {
5302: PetscFunctionBegin;
5306: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5308: if (!mat->cmap->N) {
5309: PetscCall(VecSet(v, 0.0));
5310: if (idx) {
5311: PetscInt i, m = mat->rmap->n;
5312: for (i = 0; i < m; i++) idx[i] = -1;
5313: }
5314: } else {
5315: MatCheckPreallocated(mat, 1);
5316: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5317: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5318: }
5319: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5320: PetscFunctionReturn(PETSC_SUCCESS);
5321: }
5323: /*@
5324: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5326: Logically Collective
5328: Input Parameter:
5329: . mat - the matrix
5331: Output Parameter:
5332: . v - the vector for storing the sum
5334: Level: intermediate
  Note:
  This code is only implemented for a couple of matrix formats.
5338: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5339: @*/
5340: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5341: {
5342: PetscFunctionBegin;
5346: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5348: if (!mat->cmap->N) PetscCall(VecSet(v, 0.0));
5349: else {
5350: MatCheckPreallocated(mat, 1);
5351: PetscUseTypeMethod(mat, getrowsumabs, v);
5352: }
5353: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5354: PetscFunctionReturn(PETSC_SUCCESS);
5355: }
5357: /*@
5358: MatGetRowSum - Gets the sum of each row of the matrix
5360: Logically or Neighborhood Collective
5362: Input Parameter:
5363: . mat - the matrix
5365: Output Parameter:
5366: . v - the vector for storing the sum of rows
5368: Level: intermediate
5370: Note:
5371: This code is slow since it is not currently specialized for different formats
5373: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5374: @*/
5375: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5376: {
5377: Vec ones;
5379: PetscFunctionBegin;
5383: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5384: MatCheckPreallocated(mat, 1);
5385: PetscCall(MatCreateVecs(mat, &ones, NULL));
5386: PetscCall(VecSet(ones, 1.));
5387: PetscCall(MatMult(mat, ones, v));
5388: PetscCall(VecDestroy(&ones));
5389: PetscFunctionReturn(PETSC_SUCCESS);
5390: }
5392: /*@
5393: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5394: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5396: Collective
5398: Input Parameter:
5399: . mat - the matrix to provide the transpose
5401: Output Parameter:
5402: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5404: Level: advanced
5406: Note:
5407: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5408: routine allows bypassing that call.
5410: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5411: @*/
5412: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5413: {
5414: MatParentState *rb = NULL;
5416: PetscFunctionBegin;
5417: PetscCall(PetscNew(&rb));
5418: rb->id = ((PetscObject)mat)->id; /* remember which matrix B is the transpose of */
5419: rb->state = 0; /* 0 never matches mat's state, so the next MAT_REUSE_MATRIX transpose is forced to recompute */
5420: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
/* Attach the parent record to B; the container's destroy callback frees it when B is destroyed */
5421: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5422: PetscFunctionReturn(PETSC_SUCCESS);
5423: }
/* Shared implementation of MatTranspose() (conjugate == PETSC_FALSE) and MatHermitianTranspose() (conjugate == PETSC_TRUE) */
5425: static PetscErrorCode MatTranspose_Private(Mat mat, MatReuse reuse, Mat *B, PetscBool conjugate)
5426: {
5427: PetscContainer rB = NULL;
5428: MatParentState *rb = NULL;
5429: PetscErrorCode (*f)(Mat, MatReuse, Mat *) = NULL;
5431: PetscFunctionBegin;
5434: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5435: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5436: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5437: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5438: MatCheckPreallocated(mat, 1);
5439: if (reuse == MAT_REUSE_MATRIX) {
/* *B must carry a MatTransposeParent record (from MAT_INITIAL_MATRIX or MatTransposeSetPrecursor()) that matches mat */
5440: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5441: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5442: PetscCall(PetscContainerGetPointer(rB, &rb));
5443: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5444: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS); /* mat unchanged since the last transpose: *B is already current */
5445: }
5447: if (conjugate) {
5448: f = mat->ops->hermitiantranspose; /* prefer a type-specific Hermitian transpose when the implementation provides one */
5449: if (f) PetscCall((*f)(mat, reuse, B));
5450: }
/* Generic path: transpose then conjugate. Skipped entirely for an in-place Hermitian transpose of a known-Hermitian matrix (a no-op) */
5451: if (!f && !(reuse == MAT_INPLACE_MATRIX && mat->hermitian == PETSC_BOOL3_TRUE && conjugate)) {
5452: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5453: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) { /* in-place transpose of a known-symmetric matrix is a no-op */
5454: PetscUseTypeMethod(mat, transpose, reuse, B);
5455: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5456: }
5457: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5458: if (conjugate) PetscCall(MatConjugate(*B));
5459: }
5461: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5462: if (reuse != MAT_INPLACE_MATRIX) {
/* Record mat's current state in *B so an unchanged mat makes the next MAT_REUSE_MATRIX call return early above */
5463: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5464: PetscCall(PetscContainerGetPointer(rB, &rb));
5465: rb->state = ((PetscObject)mat)->state;
5466: rb->nonzerostate = mat->nonzerostate;
5467: }
5468: PetscFunctionReturn(PETSC_SUCCESS);
5469: }
5471: /*@
5472: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5474: Collective
5476: Input Parameters:
5477: + mat - the matrix to transpose
5478: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5480: Output Parameter:
5481: . B - the transpose of the matrix
5483: Level: intermediate
5485: Notes:
5486: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5488: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5489: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5491: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5493: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5494: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5496: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5498: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5500: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5501: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5502: @*/
5503: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5504: {
5505: PetscFunctionBegin;
5506: PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_FALSE)); /* PETSC_FALSE: plain transpose, no complex conjugation */
5507: PetscFunctionReturn(PETSC_SUCCESS);
5508: }
5510: /*@
5511: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5513: Collective
5515: Input Parameter:
5516: . A - the matrix to transpose
5518: Output Parameter:
5519: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5520: numerical portion.
5522: Level: intermediate
5524: Note:
5525: This is not supported for many matrix types, use `MatTranspose()` in those cases
5527: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5528: @*/
5529: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5530: {
5531: PetscFunctionBegin;
5534: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5535: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5536: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5537: PetscUseTypeMethod(A, transposesymbolic, B); /* errors if the matrix type does not implement a symbolic transpose */
5538: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
/* Mark *B as A's transpose so MatTranspose(A, MAT_REUSE_MATRIX, B) can later fill in the numerical values */
5540: PetscCall(MatTransposeSetPrecursor(A, *B));
5541: PetscFunctionReturn(PETSC_SUCCESS);
5542: }
/* Verify that B was generated as the transpose of A and that A's nonzero structure has not changed since then */
5544: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5545: {
5546: PetscContainer rB;
5547: MatParentState *rb;
5549: PetscFunctionBegin;
5552: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5553: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5554: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5555: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5556: PetscCall(PetscContainerGetPointer(rB, &rb));
5557: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5558: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5559: PetscFunctionReturn(PETSC_SUCCESS);
5560: }
5562: /*@
5563: MatIsTranspose - Test whether a matrix is another one's transpose,
5564: or its own, in which case it tests symmetry.
5566: Collective
5568: Input Parameters:
5569: + A - the matrix to test
5570: . B - the matrix to test against, this can equal the first parameter
5571: - tol - tolerance, differences between entries smaller than this are counted as zero
5573: Output Parameter:
5574: . flg - the result
5576: Level: intermediate
5578: Notes:
5579: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5580: test involves parallel copies of the block off-diagonal parts of the matrix.
5582: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5583: @*/
5584: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5585: {
5586: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5588: PetscFunctionBegin;
5591: PetscAssertPointer(flg, 4);
/* Look up the type-specific comparator registered on each matrix */
5592: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5593: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5594: *flg = PETSC_FALSE;
5595: if (f && g) {
5596: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5597: PetscCall((*f)(A, B, tol, flg));
5598: } else {
5599: MatType mattype;
/* f ? B : A selects whichever matrix lacks the comparator, to name the unsupported type in the error */
5601: PetscCall(MatGetType(f ? B : A, &mattype));
5602: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5603: }
5604: PetscFunctionReturn(PETSC_SUCCESS);
5605: }
5607: /*@
5608: MatHermitianTranspose - Computes the Hermitian (conjugate) transpose of a matrix, either in-place or out-of-place.
5610: Collective
5612: Input Parameters:
5613: + mat - the matrix to transpose and complex conjugate
5614: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5616: Output Parameter:
5617: . B - the Hermitian transpose
5619: Level: intermediate
5621: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5622: @*/
5623: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5624: {
5625: PetscFunctionBegin;
5626: PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_TRUE)); /* PETSC_TRUE: transpose with complex conjugation */
5627: PetscFunctionReturn(PETSC_SUCCESS);
5628: }
5630: /*@
5631: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5633: Collective
5635: Input Parameters:
5636: + A - the matrix to test
5637: . B - the matrix to test against, this can equal the first parameter
5638: - tol - tolerance, differences between entries smaller than this are counted as zero
5640: Output Parameter:
5641: . flg - the result
5643: Level: intermediate
5645: Notes:
5646: Only available for `MATAIJ` matrices.
5648: The sequential algorithm
5649: has a running time of the order of the number of nonzeros; the parallel
5650: test involves parallel copies of the block off-diagonal parts of the matrix.
5652: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5653: @*/
5654: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5655: {
5656: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5658: PetscFunctionBegin;
5661: PetscAssertPointer(flg, 4);
/* Look up the type-specific comparator registered on each matrix */
5662: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5663: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5664: if (f && g) {
5665: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5666: PetscCall((*f)(A, B, tol, flg));
5667: } else {
5668: MatType mattype;
/* f ? B : A selects whichever matrix lacks the comparator, to name the unsupported type in the error */
5670: PetscCall(MatGetType(f ? B : A, &mattype));
5671: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for Hermitian transpose", mattype);
5672: }
5673: PetscFunctionReturn(PETSC_SUCCESS);
5674: }
5676: /*@
5677: MatPermute - Creates a new matrix with rows and columns permuted from the
5678: original.
5680: Collective
5682: Input Parameters:
5683: + mat - the matrix to permute
5684: . row - row permutation, each processor supplies only the permutation for its rows
5685: - col - column permutation, each processor supplies only the permutation for its columns
5687: Output Parameter:
5688: . B - the permuted matrix
5690: Level: advanced
5692: Note:
5693: The index sets map from row/col of permuted matrix to row/col of original matrix.
5694: The index sets should be on the same communicator as mat and have the same local sizes.
5696: Developer Note:
5697: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5698: exploit the fact that row and col are permutations, consider implementing the
5699: more general `MatCreateSubMatrix()` instead.
5701: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5702: @*/
5703: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5704: {
5705: PetscFunctionBegin;
5710: PetscAssertPointer(B, 4);
5711: PetscCheckSameComm(mat, 1, row, 2);
5712: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5713: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5714: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5715: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5716: MatCheckPreallocated(mat, 1);
5718: if (mat->ops->permute) {
5719: PetscUseTypeMethod(mat, permute, row, col, B);
5720: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5721: } else {
/* No type-specific permute: a permutation is a special case of extracting a submatrix with permuted row/column index sets */
5722: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5723: }
5724: PetscFunctionReturn(PETSC_SUCCESS);
5725: }
5727: /*@
5728: MatEqual - Compares two matrices.
5730: Collective
5732: Input Parameters:
5733: + A - the first matrix
5734: - B - the second matrix
5736: Output Parameter:
5737: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5739: Level: intermediate
5741: Note:
5742: If either of the matrices is "matrix-free", meaning its entries are not stored explicitly, then equality is determined by comparing
5743: the results of several matrix-vector products using randomly created vectors, see `MatMultEqual()`.
5745: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5746: @*/
5747: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5748: {
5749: PetscFunctionBegin;
5754: PetscAssertPointer(flg, 3);
5755: PetscCheckSameComm(A, 1, B, 2);
5756: MatCheckPreallocated(A, 1);
5757: MatCheckPreallocated(B, 2);
5758: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5759: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5760: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5761: B->cmap->N);
/* Entrywise comparison only when both matrices share the same type-specific implementation; otherwise fall back to comparing 10 matrix-vector products */
5762: if (A->ops->equal && A->ops->equal == B->ops->equal) PetscUseTypeMethod(A, equal, B, flg);
5763: else PetscCall(MatMultEqual(A, B, 10, flg));
5764: PetscFunctionReturn(PETSC_SUCCESS);
5765: }
5767: /*@
5768: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5769: matrices that are stored as vectors. Either of the two scaling
5770: matrices can be `NULL`.
5772: Collective
5774: Input Parameters:
5775: + mat - the matrix to be scaled
5776: . l - the left scaling vector (or `NULL`)
5777: - r - the right scaling vector (or `NULL`)
5779: Level: intermediate
5781: Note:
5782: `MatDiagonalScale()` computes $A = LAR$, where
5783: L = a diagonal matrix (stored as a vector), R = a diagonal matrix (stored as a vector)
5784: The L scales the rows of the matrix, the R scales the columns of the matrix.
5786: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5787: @*/
5788: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5789: {
5790: PetscBool flg = PETSC_FALSE;
5792: PetscFunctionBegin;
5795: if (l) {
5797: PetscCheckSameComm(mat, 1, l, 2);
5798: }
5799: if (r) {
5801: PetscCheckSameComm(mat, 1, r, 3);
5802: }
5803: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5804: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5805: MatCheckPreallocated(mat, 1);
5806: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS); /* both scalings absent: nothing to do */
5808: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5809: PetscUseTypeMethod(mat, diagonalscale, l, r);
5810: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5811: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
/* Update cached symmetry flags: scaling with different left and right vectors generally destroys (Hermitian) symmetry */
5812: if (l != r && (PetscBool3ToBool(mat->symmetric) || PetscBool3ToBool(mat->hermitian))) {
5813: if (!PetscDefined(USE_COMPLEX) || PetscBool3ToBool(mat->symmetric)) {
5814: if (l && r) PetscCall(VecEqual(l, r, &flg)); /* symmetry survives only if l and r are entrywise equal */
5815: if (!flg) {
/* SBAIJ stores only half the matrix, so a nonsymmetric scaling cannot be represented */
5816: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
5817: PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be the same");
5818: mat->symmetric = mat->spd = PETSC_BOOL3_FALSE;
5819: if (!PetscDefined(USE_COMPLEX)) mat->hermitian = PETSC_BOOL3_FALSE; /* real case: Hermitian == symmetric */
5820: else mat->hermitian = PETSC_BOOL3_UNKNOWN;
5821: }
5822: }
5823: if (PetscDefined(USE_COMPLEX) && PetscBool3ToBool(mat->hermitian)) {
5824: flg = PETSC_FALSE;
5825: if (l && r) {
/* Hermitian structure survives only if r equals the complex conjugate of l */
5826: Vec conjugate;
5828: PetscCall(VecDuplicate(l, &conjugate));
5829: PetscCall(VecCopy(l, conjugate));
5830: PetscCall(VecConjugate(conjugate));
5831: PetscCall(VecEqual(conjugate, r, &flg));
5832: PetscCall(VecDestroy(&conjugate));
5833: }
5834: if (!flg) {
5835: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
5836: PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format and Hermitian matrix, left and right scaling vectors must be conjugate one of the other");
5837: mat->hermitian = PETSC_BOOL3_FALSE;
5838: mat->symmetric = mat->spd = PETSC_BOOL3_UNKNOWN;
5839: }
5840: }
5841: }
5842: PetscFunctionReturn(PETSC_SUCCESS);
5843: }
5845: /*@
5846: MatScale - Scales all elements of a matrix by a given number.
5848: Logically Collective
5850: Input Parameters:
5851: + mat - the matrix to be scaled
5852: - a - the scaling value
5854: Level: intermediate
5856: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5857: @*/
5858: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5859: {
5860: PetscFunctionBegin;
5863: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5864: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5866: MatCheckPreallocated(mat, 1);
5868: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5869: if (a != (PetscScalar)1.0) { /* scaling by 1 is a no-op: skip the work and the state bump */
5870: PetscUseTypeMethod(mat, scale, a);
5871: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5872: }
5873: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5874: PetscFunctionReturn(PETSC_SUCCESS);
5875: }
5877: /*@
5878: MatNorm - Calculates various norms of a matrix.
5880: Collective
5882: Input Parameters:
5883: + mat - the matrix
5884: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5886: Output Parameter:
5887: . nrm - the resulting norm
5889: Level: intermediate
5891: .seealso: [](ch_matrices), `Mat`
5892: @*/
5893: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5894: {
5895: PetscFunctionBegin;
5898: PetscAssertPointer(nrm, 3);
5900: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5901: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5902: MatCheckPreallocated(mat, 1);
5904: PetscUseTypeMethod(mat, norm, type, nrm); /* errors if the matrix type does not implement the requested norm */
5905: PetscFunctionReturn(PETSC_SUCCESS);
5906: }
5908: /*
5909: This variable is used to prevent counting of MatAssemblyBegin() that
5910: are called from within a MatAssemblyEnd().
5911: Incremented/decremented by MatAssemblyEnd(); checked by MatAssemblyBegin() to suppress nested event logging.
5912: */
5912: static PetscInt MatAssemblyEnd_InUse = 0;
5913: /*@
5914: MatAssemblyBegin - Begins assembling the matrix. This routine should
5915: be called after completing all calls to `MatSetValues()`.
5917: Collective
5919: Input Parameters:
5920: + mat - the matrix
5921: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5923: Level: beginner
5925: Notes:
5926: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5927: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5929: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5930: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5931: using the matrix.
5933: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5934: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5935: a global collective operation requiring all processes that share the matrix.
5937: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5938: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5939: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5941: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5942: @*/
5943: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5944: {
5945: PetscFunctionBegin;
5948: MatCheckPreallocated(mat, 1);
5949: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5950: if (mat->assembled) {
/* Re-assembling an already assembled matrix: remember it was assembled and mark it as not currently usable */
5951: mat->was_assembled = PETSC_TRUE;
5952: mat->assembled = PETSC_FALSE;
5953: }
/* Log the event only when not called from within MatAssemblyEnd(), so nested calls are not double-counted */
5955: if (!MatAssemblyEnd_InUse) {
5956: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5957: PetscTryTypeMethod(mat, assemblybegin, type);
5958: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5959: } else PetscTryTypeMethod(mat, assemblybegin, type);
5960: PetscFunctionReturn(PETSC_SUCCESS);
5961: }
5963: /*@
5964: MatAssembled - Indicates if a matrix has been assembled and is ready for
5965: use; for example, in matrix-vector product.
5967: Not Collective
5969: Input Parameter:
5970: . mat - the matrix
5972: Output Parameter:
5973: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5975: Level: advanced
5977: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5978: @*/
5979: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5980: {
5981: PetscFunctionBegin;
5983: PetscAssertPointer(assembled, 2);
5984: *assembled = mat->assembled; /* reports the cached flag only; no computation or communication */
5985: PetscFunctionReturn(PETSC_SUCCESS);
5986: }
5988: /*@
5989: MatAssemblyEnd - Completes assembling the matrix. This routine should
5990: be called after `MatAssemblyBegin()`.
5992: Collective
5994: Input Parameters:
5995: + mat - the matrix
5996: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5998: Options Database Key:
5999: . -mat_view [viewertype][:...] - option name and values. See `MatViewFromOptions()`/`PetscObjectViewFromOptions()` for the possible arguments
6001: Level: beginner
6003: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`,
6004: `MatViewFromOptions()`, `PetscObjectViewFromOptions()`
6005: @*/
6006: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
6007: {
6008: static PetscInt inassm = 0; /* recursion depth: viewing/symmetry checks below run only at the outermost level */
6009: PetscBool flg = PETSC_FALSE;
6011: PetscFunctionBegin;
6015: inassm++;
6016: MatAssemblyEnd_InUse++;
6017: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
6018: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
6019: PetscTryTypeMethod(mat, assemblyend, type);
6020: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
6021: } else PetscTryTypeMethod(mat, assemblyend, type);
6023: /* Flush assembly is not a true assembly */
6024: if (type != MAT_FLUSH_ASSEMBLY) {
6025: if (mat->num_ass) {
/* Values may have changed since the previous assembly: drop cached properties unless they were declared eternal */
6026: if (!mat->symmetry_eternal) {
6027: mat->symmetric = PETSC_BOOL3_UNKNOWN;
6028: mat->hermitian = PETSC_BOOL3_UNKNOWN;
6029: }
6030: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
6031: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
6032: }
6033: mat->num_ass++;
6034: mat->assembled = PETSC_TRUE;
6035: mat->ass_nonzerostate = mat->nonzerostate;
6036: }
6038: mat->insertmode = NOT_SET_VALUES;
6039: MatAssemblyEnd_InUse--;
6040: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
/* Options-driven viewing and optional symmetry/null-space checks: outermost final assembly only */
6041: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
6042: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6044: if (mat->checksymmetryonassembly) {
6045: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
6046: if (flg) {
6047: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
6048: } else {
6049: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
6050: }
6051: }
6052: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
6053: }
6054: inassm--;
6055: PetscFunctionReturn(PETSC_SUCCESS);
6056: }
6058: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
6059: /*@
6060: MatSetOption - Sets a parameter option for a matrix. Some options
6061: may be specific to certain storage formats. Some options
6062: determine how values will be inserted (or added). Sorted,
6063: row-oriented input will generally assemble the fastest. The default
6064: is row-oriented.
6066: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
6068: Input Parameters:
6069: + mat - the matrix
6070: . op - the option, one of those listed below (and possibly others),
6071: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6073: Options Describing Matrix Structure:
6074: + `MAT_SPD` - symmetric positive definite
6075: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
6076: . `MAT_HERMITIAN` - transpose is the complex conjugation
6077: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
6078: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
6079: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
6080: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
6082: These are not really options of the matrix, they are knowledge about the structure of the matrix that users may provide so that they
6083: do not need to be computed (usually at a high cost)
6085: Options For Use with `MatSetValues()`:
6086: Insert a logically dense subblock, which can be
6087: . `MAT_ROW_ORIENTED` - row-oriented (default)
6089: These options reflect the data you pass in with `MatSetValues()`; it has
6090: nothing to do with how the data is stored internally in the matrix
6091: data structure.
6093: When (re)assembling a matrix, we can restrict the input for
6094: efficiency/debugging purposes. These options include
6095: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
6096: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
6097: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
6098: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
6099: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
6100: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
6101: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
6102: performance for very large process counts.
6103: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
6104: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
6105: functions, instead sending only neighbor messages.
6107: Level: intermediate
6109: Notes:
6110: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
6112: Some options are relevant only for particular matrix types and
6113: are thus ignored by others. Other options are not supported by
6114: certain matrix types and will generate an error message if set.
6116: If using Fortran to compute a matrix, one may need to
6117: use the column-oriented option (or convert to the row-oriented
6118: format).
6120: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
6121: that would generate a new entry in the nonzero structure is instead
6122: ignored. Thus, if memory has not already been allocated for this particular
6123: data, then the insertion is ignored. For dense matrices, in which
6124: the entire array is allocated, no entries are ever ignored.
6125: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6127: `MAT_NEW_NONZERO_LOCATION_ERR` set to PETSC_TRUE indicates that any add or insertion
6128: that would generate a new entry in the nonzero structure instead produces
6129: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6131: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6132: that would generate a new entry that has not been preallocated will
6133: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6134: only.) This is a useful flag when debugging matrix memory preallocation.
6135: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6137: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6138: other processors should be dropped, rather than stashed.
6139: This is useful if you know that the "owning" processor is also
6140: always generating the correct matrix entries, so that PETSc need
6141: not transfer duplicate entries generated on another processor.
6143: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6144: searches during matrix assembly. When this flag is set, the hash table
6145: is created during the first matrix assembly. This hash table is
6146: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6147: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6148: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6149: supported by `MATMPIBAIJ` format only.
6151: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6152: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6154: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6155: a zero location in the matrix
6157: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6159: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6160: zero row routines and thus improves performance for very large process counts.
6162: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6163: part of the matrix (since they should match the upper triangular part).
6165: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6166: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6167: with finite difference schemes with non-periodic boundary conditions.
6169: Developer Note:
6170: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6171: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6172: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6173: not changed.
6175: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6176: @*/
PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
{
  PetscFunctionBegin;
  if (op > 0) { /* NOTE(review): body is empty in this excerpt -- presumably collective argument validation was elided here; confirm against the full source */
  }
  /* op must lie strictly inside the enum's sentinel bounds */
  PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
  /* Options stored on the generic Mat header are handled here.  Cases that
     PetscFunctionReturn() early are purely generic and are NOT forwarded to
     the type-specific setoption method; cases that break fall through to it. */
  switch (op) {
  case MAT_FORCE_DIAGONAL_ENTRIES:
    mat->force_diagonals = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ENTRIES:
    mat->nooffprocentries = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SUBSET_OFF_PROC_ENTRIES:
    mat->assembly_subset = flg;
    if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
#if !defined(PETSC_HAVE_MPIUNI)
      /* discard the cached communication pattern built by the first assembly */
      PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
#endif
      mat->stash.first_assembly_done = PETSC_FALSE;
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ZERO_ROWS:
    mat->nooffproczerorows = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SPD:
    if (flg) {
      /* SPD implies symmetric and structurally symmetric; in real arithmetic it implies Hermitian as well */
      mat->spd = PETSC_BOOL3_TRUE;
      mat->symmetric = PETSC_BOOL3_TRUE;
      mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
      mat->hermitian = PETSC_BOOL3_TRUE;
#endif
    } else {
      /* clearing SPD says nothing about the symmetry flags, so only spd changes */
      mat->spd = PETSC_BOOL3_FALSE;
    }
    break;
  case MAT_SYMMETRIC:
    mat->symmetric = PetscBoolToBool3(flg);
    /* symmetric values imply a symmetric nonzero pattern */
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    /* in real arithmetic symmetric and Hermitian coincide */
    mat->hermitian = PetscBoolToBool3(flg);
#endif
    break;
  case MAT_HERMITIAN:
    mat->hermitian = PetscBoolToBool3(flg);
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    mat->symmetric = PetscBoolToBool3(flg);
#endif
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    mat->structurally_symmetric = PetscBoolToBool3(flg);
    break;
  case MAT_SYMMETRY_ETERNAL:
    /* the eternal flag pins a known value; it is meaningless while the value is unknown */
    PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
    mat->symmetry_eternal = flg;
    if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
    break;
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
    PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
    mat->structural_symmetry_eternal = flg;
    break;
  case MAT_SPD_ETERNAL:
    PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
    mat->spd_eternal = flg;
    if (flg) {
      /* eternal SPD pins the implied symmetry flags as well */
      mat->structural_symmetry_eternal = PETSC_TRUE;
      mat->symmetry_eternal = PETSC_TRUE;
    }
    break;
  case MAT_STRUCTURE_ONLY:
    mat->structure_only = flg;
    break;
  case MAT_SORTED_FULL:
    mat->sortedfull = flg;
    break;
  default:
    break;
  }
  /* give the implementation a chance to act on the option (no-op if it has no setoption method) */
  PetscTryTypeMethod(mat, setoption, op, flg);
  PetscFunctionReturn(PETSC_SUCCESS);
}
6266: /*@
6267: MatGetOption - Gets a parameter option that has been set for a matrix.
6269: Logically Collective
6271: Input Parameters:
6272: + mat - the matrix
6273: - op - the option, this only responds to certain options, check the code for which ones
6275: Output Parameter:
6276: . flg - the current value of the option, `PETSC_TRUE` (on) or `PETSC_FALSE` (off)
6278: Level: intermediate
6280: Notes:
6281: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6283: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6284: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6286: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6287: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6288: @*/
6289: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6290: {
6291: PetscFunctionBegin;
6295: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6296: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6298: switch (op) {
6299: case MAT_NO_OFF_PROC_ENTRIES:
6300: *flg = mat->nooffprocentries;
6301: break;
6302: case MAT_NO_OFF_PROC_ZERO_ROWS:
6303: *flg = mat->nooffproczerorows;
6304: break;
6305: case MAT_SYMMETRIC:
6306: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6307: break;
6308: case MAT_HERMITIAN:
6309: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6310: break;
6311: case MAT_STRUCTURALLY_SYMMETRIC:
6312: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6313: break;
6314: case MAT_SPD:
6315: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6316: break;
6317: case MAT_SYMMETRY_ETERNAL:
6318: *flg = mat->symmetry_eternal;
6319: break;
6320: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6321: *flg = mat->symmetry_eternal;
6322: break;
6323: default:
6324: break;
6325: }
6326: PetscFunctionReturn(PETSC_SUCCESS);
6327: }
6329: /*@
6330: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6331: this routine retains the old nonzero structure.
6333: Logically Collective
6335: Input Parameter:
6336: . mat - the matrix
6338: Level: intermediate
6340: Note:
6341: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6342: See the Performance chapter of the users manual for information on preallocating matrices.
6344: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6345: @*/
PetscErrorCode MatZeroEntries(Mat mat)
{
  PetscFunctionBegin;
  /* zeroing a factored matrix would destroy the stored factorization */
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* values inserted via MatSetValues() but not yet assembled would be lost inconsistently */
  PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
  MatCheckPreallocated(mat, 1);
  PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
  /* dispatch to the type-specific implementation (required method) */
  PetscUseTypeMethod(mat, zeroentries);
  PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
  /* values changed: bump the object state so cached derived data is invalidated */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6362: /*@
6363: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6364: of a set of rows and columns of a matrix.
6366: Collective
6368: Input Parameters:
6369: + mat - the matrix
6370: . numRows - the number of rows/columns to zero
6371: . rows - the global row indices
6372: . diag - value put in the diagonal of the eliminated rows
6373: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6374: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6376: Level: intermediate
6378: Notes:
6379: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6381: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6382: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6384: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6385: Krylov method to take advantage of the known solution on the zeroed rows.
6387: For the parallel case, all processes that share the matrix (i.e.,
6388: those in the communicator used for matrix creation) MUST call this
6389: routine, regardless of whether any rows being zeroed are owned by
6390: them.
6392: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6393: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6394: missing.
6396: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6397: list only rows local to itself).
6399: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6401: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6402: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6403: @*/
PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when no rows are being zeroed */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* dispatch to the type-specific implementation (required method) */
  PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
  /* values changed: bump the object state so cached derived data is invalidated */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6420: /*@
6421: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6422: of a set of rows and columns of a matrix.
6424: Collective
6426: Input Parameters:
6427: + mat - the matrix
6428: . is - the rows to zero
6429: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6430: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6431: - b - optional vector of right-hand side, that will be adjusted by provided solution
6433: Level: intermediate
6435: Note:
6436: See `MatZeroRowsColumns()` for details on how this routine operates.
6438: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6439: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6440: @*/
6441: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6442: {
6443: PetscInt numRows;
6444: const PetscInt *rows;
6446: PetscFunctionBegin;
6451: PetscCall(ISGetLocalSize(is, &numRows));
6452: PetscCall(ISGetIndices(is, &rows));
6453: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6454: PetscCall(ISRestoreIndices(is, &rows));
6455: PetscFunctionReturn(PETSC_SUCCESS);
6456: }
6458: /*@
6459: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6460: of a set of rows of a matrix.
6462: Collective
6464: Input Parameters:
6465: + mat - the matrix
6466: . numRows - the number of rows to zero
6467: . rows - the global row indices
6468: . diag - value put in the diagonal of the zeroed rows
6469: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6470: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6472: Level: intermediate
6474: Notes:
6475: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6477: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6479: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6480: Krylov method to take advantage of the known solution on the zeroed rows.
6482: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns)
6483: from the matrix.
6485: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6486: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6487: formats this does not alter the nonzero structure.
6489: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) the nonzero structure
6490: of the matrix is not changed the values are
6491: merely zeroed.
6493: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6494: formats can optionally remove the main diagonal entry from the
6495: nonzero structure as well, by passing 0.0 as the final argument).
6497: For the parallel case, all processes that share the matrix (i.e.,
6498: those in the communicator used for matrix creation) MUST call this
6499: routine, regardless of whether any rows being zeroed are owned by
6500: them.
6502: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6503: list only rows local to itself).
6505: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6506: owns that are to be zeroed. This saves a global synchronization in the implementation.
6508: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6509: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6510: @*/
PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when no rows are being zeroed */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* dispatch to the type-specific implementation (required method) */
  PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
  /* values changed: bump the object state so cached derived data is invalidated */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6527: /*@
6528: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6529: of a set of rows of a matrix indicated by an `IS`
6531: Collective
6533: Input Parameters:
6534: + mat - the matrix
6535: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6536: . diag - value put in all diagonals of eliminated rows
6537: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6538: - b - optional vector of right-hand side, that will be adjusted by provided solution
6540: Level: intermediate
6542: Note:
6543: See `MatZeroRows()` for details on how this routine operates.
6545: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6546: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6547: @*/
6548: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6549: {
6550: PetscInt numRows = 0;
6551: const PetscInt *rows = NULL;
6553: PetscFunctionBegin;
6556: if (is) {
6558: PetscCall(ISGetLocalSize(is, &numRows));
6559: PetscCall(ISGetIndices(is, &rows));
6560: }
6561: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6562: if (is) PetscCall(ISRestoreIndices(is, &rows));
6563: PetscFunctionReturn(PETSC_SUCCESS);
6564: }
6566: /*@
6567: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6568: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6570: Collective
6572: Input Parameters:
6573: + mat - the matrix
6574: . numRows - the number of rows to remove
6575: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6576: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6577: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6578: - b - optional vector of right-hand side, that will be adjusted by provided solution
6580: Level: intermediate
6582: Notes:
6583: See `MatZeroRows()` for details on how this routine operates.
6585: The grid coordinates are across the entire grid, not just the local portion
6587: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6588: obtained by wrapping values from right edge). For values to the right of the last entry, use that index plus one,
6589: etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
6590: `DM_BOUNDARY_PERIODIC` boundary type.
6592: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6593: a single value per point) you can skip filling those indices.
6595: Fortran Note:
6596: `idxm` and `idxn` should be declared as
6597: .vb
6598: MatStencil idxm(4, m)
6599: .ve
6600: and the values inserted using
6601: .vb
6602: idxm(MatStencil_i, 1) = i
6603: idxm(MatStencil_j, 1) = j
6604: idxm(MatStencil_k, 1) = k
6605: idxm(MatStencil_c, 1) = c
6606: etc
6607: .ve
6609: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6610: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6611: @*/
6612: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6613: {
6614: PetscInt dim = mat->stencil.dim;
6615: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6616: PetscInt *dims = mat->stencil.dims + 1;
6617: PetscInt *starts = mat->stencil.starts;
6618: PetscInt *dxm = (PetscInt *)rows;
6619: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6621: PetscFunctionBegin;
6624: if (numRows) PetscAssertPointer(rows, 3);
6626: PetscCall(PetscMalloc1(numRows, &jdxm));
6627: for (i = 0; i < numRows; ++i) {
6628: /* Skip unused dimensions (they are ordered k, j, i, c) */
6629: for (j = 0; j < 3 - sdim; ++j) dxm++;
6630: /* Local index in X dir */
6631: tmp = *dxm++ - starts[0];
6632: /* Loop over remaining dimensions */
6633: for (j = 0; j < dim - 1; ++j) {
6634: /* If nonlocal, set index to be negative */
6635: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6636: /* Update local index */
6637: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6638: }
6639: /* Skip component slot if necessary */
6640: if (mat->stencil.noc) dxm++;
6641: /* Local row number */
6642: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6643: }
6644: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6645: PetscCall(PetscFree(jdxm));
6646: PetscFunctionReturn(PETSC_SUCCESS);
6647: }
6649: /*@
6650: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6651: of a set of rows and columns of a matrix.
6653: Collective
6655: Input Parameters:
6656: + mat - the matrix
6657: . numRows - the number of rows/columns to remove
6658: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6659: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6660: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6661: - b - optional vector of right-hand side, that will be adjusted by provided solution
6663: Level: intermediate
6665: Notes:
6666: See `MatZeroRowsColumns()` for details on how this routine operates.
6668: The grid coordinates are across the entire grid, not just the local portion
6670: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6671: obtained by wrapping values from right edge). For values to the right of the last entry, use that index plus one,
6672: etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
6673: `DM_BOUNDARY_PERIODIC` boundary type.
6675: For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
6676: a single value per point) you can skip filling those indices.
6678: Fortran Note:
6679: `idxm` and `idxn` should be declared as
6680: .vb
6681: MatStencil idxm(4, m)
6682: .ve
6683: and the values inserted using
6684: .vb
6685: idxm(MatStencil_i, 1) = i
6686: idxm(MatStencil_j, 1) = j
6687: idxm(MatStencil_k, 1) = k
6688: idxm(MatStencil_c, 1) = c
6689: etc
6690: .ve
6692: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6693: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6694: @*/
6695: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6696: {
6697: PetscInt dim = mat->stencil.dim;
6698: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6699: PetscInt *dims = mat->stencil.dims + 1;
6700: PetscInt *starts = mat->stencil.starts;
6701: PetscInt *dxm = (PetscInt *)rows;
6702: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6704: PetscFunctionBegin;
6707: if (numRows) PetscAssertPointer(rows, 3);
6709: PetscCall(PetscMalloc1(numRows, &jdxm));
6710: for (i = 0; i < numRows; ++i) {
6711: /* Skip unused dimensions (they are ordered k, j, i, c) */
6712: for (j = 0; j < 3 - sdim; ++j) dxm++;
6713: /* Local index in X dir */
6714: tmp = *dxm++ - starts[0];
6715: /* Loop over remaining dimensions */
6716: for (j = 0; j < dim - 1; ++j) {
6717: /* If nonlocal, set index to be negative */
6718: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6719: /* Update local index */
6720: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6721: }
6722: /* Skip component slot if necessary */
6723: if (mat->stencil.noc) dxm++;
6724: /* Local row number */
6725: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6726: }
6727: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6728: PetscCall(PetscFree(jdxm));
6729: PetscFunctionReturn(PETSC_SUCCESS);
6730: }
6732: /*@
6733: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6734: of a set of rows of a matrix; using local numbering of rows.
6736: Collective
6738: Input Parameters:
6739: + mat - the matrix
6740: . numRows - the number of rows to remove
6741: . rows - the local row indices
6742: . diag - value put in all diagonals of eliminated rows
6743: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6744: - b - optional vector of right-hand side, that will be adjusted by provided solution
6746: Level: intermediate
6748: Notes:
6749: Before calling `MatZeroRowsLocal()`, the user must first set the
6750: local-to-global mapping by calling MatSetLocalToGlobalMapping(), this is often already set for matrices obtained with `DMCreateMatrix()`.
6752: See `MatZeroRows()` for details on how this routine operates.
6754: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6755: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6756: @*/
PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when no rows are being zeroed */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  if (mat->ops->zerorowslocal) {
    /* the implementation understands local numbering directly */
    PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
  } else {
    /* fallback: map local row indices to global numbering, then use zerorows */
    IS is, newis;
    PetscInt *newRows, nl = 0;
    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
    PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
    /* compact the mapped indices in place, dropping negative (unmapped) entries;
       note this deliberately mutates the IS's index array through the const cast */
    for (PetscInt i = 0; i < numRows; i++)
      if (newRows[i] > -1) newRows[nl++] = newRows[i];
    PetscUseTypeMethod(mat, zerorows, nl, newRows, diag, x, b);
    PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
    PetscCall(ISDestroy(&newis));
    PetscCall(ISDestroy(&is));
  }
  /* values changed: bump the object state so cached derived data is invalidated */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6788: /*@
6789: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6790: of a set of rows of a matrix; using local numbering of rows.
6792: Collective
6794: Input Parameters:
6795: + mat - the matrix
6796: . is - index set of rows to remove
6797: . diag - value put in all diagonals of eliminated rows
6798: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6799: - b - optional vector of right-hand side, that will be adjusted by provided solution
6801: Level: intermediate
6803: Notes:
6804: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6805: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6807: See `MatZeroRows()` for details on how this routine operates.
6809: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6810: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6811: @*/
6812: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6813: {
6814: PetscInt numRows;
6815: const PetscInt *rows;
6817: PetscFunctionBegin;
6821: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6822: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6823: MatCheckPreallocated(mat, 1);
6825: PetscCall(ISGetLocalSize(is, &numRows));
6826: PetscCall(ISGetIndices(is, &rows));
6827: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6828: PetscCall(ISRestoreIndices(is, &rows));
6829: PetscFunctionReturn(PETSC_SUCCESS);
6830: }
6832: /*@
6833: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6834: of a set of rows and columns of a matrix; using local numbering of rows.
6836: Collective
6838: Input Parameters:
6839: + mat - the matrix
6840: . numRows - the number of rows to remove
6841: . rows - the global row indices
6842: . diag - value put in all diagonals of eliminated rows
6843: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6844: - b - optional vector of right-hand side, that will be adjusted by provided solution
6846: Level: intermediate
6848: Notes:
6849: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6850: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6852: See `MatZeroRowsColumns()` for details on how this routine operates.
6854: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6855: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6856: @*/
PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when no rows are being zeroed */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  if (mat->ops->zerorowscolumnslocal) {
    /* the implementation understands local numbering directly */
    PetscUseTypeMethod(mat, zerorowscolumnslocal, numRows, rows, diag, x, b);
  } else {
    /* fallback: map local row indices to global numbering, then use zerorowscolumns */
    IS is, newis;
    PetscInt *newRows, nl = 0;
    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
    PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
    /* compact the mapped indices in place, dropping negative (unmapped) entries;
       note this deliberately mutates the IS's index array through the const cast */
    for (PetscInt i = 0; i < numRows; i++)
      if (newRows[i] > -1) newRows[nl++] = newRows[i];
    PetscUseTypeMethod(mat, zerorowscolumns, nl, newRows, diag, x, b);
    PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
    PetscCall(ISDestroy(&newis));
    PetscCall(ISDestroy(&is));
  }
  /* values changed: bump the object state so cached derived data is invalidated */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6888: /*@
6889: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6890: of a set of rows and columns of a matrix; using local numbering of rows.
6892: Collective
6894: Input Parameters:
6895: + mat - the matrix
6896: . is - index set of rows to remove
6897: . diag - value put in all diagonals of eliminated rows
6898: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6899: - b - optional vector of right-hand side, that will be adjusted by provided solution
6901: Level: intermediate
6903: Notes:
6904: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6905: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6907: See `MatZeroRowsColumns()` for details on how this routine operates.
6909: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6910: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6911: @*/
6912: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6913: {
6914:   PetscInt        numRows;
6915:   const PetscInt *rows;
6917:   PetscFunctionBegin;
6921:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6922:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6923:   MatCheckPreallocated(mat, 1);
  /* Thin wrapper: unpack the IS into a raw index array and delegate */
6925:   PetscCall(ISGetLocalSize(is, &numRows));
6926:   PetscCall(ISGetIndices(is, &rows));
6927:   PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6928:   PetscCall(ISRestoreIndices(is, &rows));
6929:   PetscFunctionReturn(PETSC_SUCCESS);
6930: }
6932: /*@
6933: MatGetSize - Returns the numbers of rows and columns in a matrix.
6935: Not Collective
6937: Input Parameter:
6938: . mat - the matrix
6940: Output Parameters:
6941: + m - the number of global rows
6942: - n - the number of global columns
6944: Level: beginner
6946: Note:
6947: Both output parameters can be `NULL` on input.
6949: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6950: @*/
6951: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6952: {
6953:   PetscFunctionBegin;
  /* Global sizes come straight from the row/column PetscLayouts; NULL outputs are skipped */
6955:   if (m) *m = mat->rmap->N;
6956:   if (n) *n = mat->cmap->N;
6957:   PetscFunctionReturn(PETSC_SUCCESS);
6958: }
6960: /*@
6961: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, Returns the number of local rows and local columns
6962: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6964: Not Collective
6966: Input Parameter:
6967: . mat - the matrix
6969: Output Parameters:
6970: + m - the number of local rows, use `NULL` to not obtain this value
6971: - n - the number of local columns, use `NULL` to not obtain this value
6973: Level: beginner
6975: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6976: @*/
6977: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6978: {
6979:   PetscFunctionBegin;
6981:   if (m) PetscAssertPointer(m, 2);
6982:   if (n) PetscAssertPointer(n, 3);
  /* Local (per-process) sizes from the row/column PetscLayouts; NULL outputs are skipped */
6983:   if (m) *m = mat->rmap->n;
6984:   if (n) *n = mat->cmap->n;
6985:   PetscFunctionReturn(PETSC_SUCCESS);
6986: }
6988: /*@
6989: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
6990: vector one multiplies this matrix by that are owned by this processor.
6992: Not Collective, unless matrix has not been allocated, then collective
6994: Input Parameter:
6995: . mat - the matrix
6997: Output Parameters:
6998: + m - the global index of the first local column, use `NULL` to not obtain this value
6999: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
7001: Level: developer
7003: Notes:
7004: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7006: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7007: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7009: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7010: the local values in the matrix.
7012: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
7013: Layouts](sec_matlayout) for details on matrix layouts.
7015: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
7016: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
7017: @*/
7018: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
7019: {
7020:   PetscFunctionBegin;
7023:   if (m) PetscAssertPointer(m, 2);
7024:   if (n) PetscAssertPointer(n, 3);
7025:   MatCheckPreallocated(mat, 1);
  /* Column ownership half-open range [rstart, rend) from the column layout */
7026:   if (m) *m = mat->cmap->rstart;
7027:   if (n) *n = mat->cmap->rend;
7028:   PetscFunctionReturn(PETSC_SUCCESS);
7029: }
7031: /*@
7032: MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
7033: this MPI process.
7035: Not Collective
7037: Input Parameter:
7038: . mat - the matrix
7040: Output Parameters:
7041: + m - the global index of the first local row, use `NULL` to not obtain this value
7042: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
7044: Level: beginner
7046: Notes:
7047: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7049: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7050: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7052: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7053: the local values in the matrix.
7055: The high argument is one more than the last element stored locally.
7057: For all matrices it returns the range of matrix rows associated with rows of a vector that
7058: would contain the result of a matrix vector product with this matrix. See [Matrix
7059: Layouts](sec_matlayout) for details on matrix layouts.
7061: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
7062: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
7063: @*/
7064: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
7065: {
7066:   PetscFunctionBegin;
7069:   if (m) PetscAssertPointer(m, 2);
7070:   if (n) PetscAssertPointer(n, 3);
7071:   MatCheckPreallocated(mat, 1);
  /* Row ownership half-open range [rstart, rend) from the row layout */
7072:   if (m) *m = mat->rmap->rstart;
7073:   if (n) *n = mat->rmap->rend;
7074:   PetscFunctionReturn(PETSC_SUCCESS);
7075: }
7077: /*@C
7078: MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
7079: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
7081: Not Collective, unless matrix has not been allocated
7083: Input Parameter:
7084: . mat - the matrix
7086: Output Parameter:
7087: . ranges - start of each processors portion plus one more than the total length at the end, of length `size` + 1
7088: where `size` is the number of MPI processes used by `mat`
7090: Level: beginner
7092: Notes:
7093: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7095: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7096: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7098: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7099: the local values in the matrix.
7101: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
7102: would contain the result of a matrix vector product with this matrix. See [Matrix
7103: Layouts](sec_matlayout) for details on matrix layouts.
7105: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
7106: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
7107: `DMDAGetGhostCorners()`, `DM`
7108: @*/
7109: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
7110: {
7111:   PetscFunctionBegin;
7114:   MatCheckPreallocated(mat, 1);
  /* Delegate to the row PetscLayout, which owns the size+1 ranges array */
7115:   PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
7116:   PetscFunctionReturn(PETSC_SUCCESS);
7117: }
7119: /*@C
7120: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
7121:   vector one multiplies this matrix by that are owned by each processor.
7123: Not Collective, unless matrix has not been allocated
7125: Input Parameter:
7126: . mat - the matrix
7128: Output Parameter:
7129: . ranges - start of each processors portion plus one more than the total length at the end
7131: Level: beginner
7133: Notes:
7134: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
7136: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
7137: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
7139: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
7140: the local values in the matrix.
7142: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
7143: Layouts](sec_matlayout) for details on matrix layouts.
7145: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
7146: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
7147: `DMDAGetGhostCorners()`, `DM`
7148: @*/
7149: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
7150: {
7151:   PetscFunctionBegin;
7154:   MatCheckPreallocated(mat, 1);
  /* Same as MatGetOwnershipRanges() but queries the column layout */
7155:   PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7156:   PetscFunctionReturn(PETSC_SUCCESS);
7157: }
7159: /*@
7160:   MatGetOwnershipIS - Get row and column ownership of a matrix's values as index sets.
7162: Not Collective
7164: Input Parameter:
7165: . A - matrix
7167: Output Parameters:
7168: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7169: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7171: Level: intermediate
7173: Note:
7174: You should call `ISDestroy()` on the returned `IS`
7176: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7177: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7178: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7179: details on matrix layouts.
7181: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7182: @*/
7183: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7184: {
7185:   PetscErrorCode (*f)(Mat, IS *, IS *);
7187:   PetscFunctionBegin;
7190:   MatCheckPreallocated(A, 1);
  /* Type-specific override (e.g. for non-row-based layouts) registered as "MatGetOwnershipIS_C" */
7191:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7192:   if (f) {
7193:     PetscCall((*f)(A, rows, cols));
7194:   } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7195:     if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7196:     if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7197:   }
7198:   PetscFunctionReturn(PETSC_SUCCESS);
7199: }
7201: /*@
7202: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
7203: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7204: to complete the factorization.
7206: Collective
7208: Input Parameters:
7209: + fact - the factorized matrix obtained with `MatGetFactor()`
7210: . mat - the matrix
7211: . row - row permutation
7212: . col - column permutation
7213: - info - structure containing
7214: .vb
7215: levels - number of levels of fill.
7216: expected fill - as ratio of original fill.
7217: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7218: missing diagonal entries)
7219: .ve
7221: Level: developer
7223: Notes:
7224: See [Matrix Factorization](sec_matfactor) for additional information.
7226: Most users should employ the `KSP` interface for linear solvers
7227: instead of working directly with matrix algebra routines such as this.
7228: See, e.g., `KSPCreate()`.
7230: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7232: Fortran Note:
7233: A valid (non-null) `info` argument must be provided
7235: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
7236: `MatGetOrdering()`, `MatFactorInfo`
7237: @*/
7238: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7239: {
7240:   PetscFunctionBegin;
7245:   PetscAssertPointer(info, 5);
7246:   PetscAssertPointer(fact, 1);
  /* Validate the user-supplied MatFactorInfo before dispatching */
7247:   PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7248:   PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7249:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7250:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7251:   MatCheckPreallocated(mat, 2);
  /* Skip event logging when the factor type's symbolic phase is trivial */
7253:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7254:   PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7255:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7256:   PetscFunctionReturn(PETSC_SUCCESS);
7257: }
7259: /*@
7260: MatICCFactorSymbolic - Performs symbolic incomplete
7261: Cholesky factorization for a symmetric matrix. Use
7262: `MatCholeskyFactorNumeric()` to complete the factorization.
7264: Collective
7266: Input Parameters:
7267: + fact - the factorized matrix obtained with `MatGetFactor()`
7268: . mat - the matrix to be factored
7269: . perm - row and column permutation
7270: - info - structure containing
7271: .vb
7272: levels - number of levels of fill.
7273: expected fill - as ratio of original fill.
7274: .ve
7276: Level: developer
7278: Notes:
7279: Most users should employ the `KSP` interface for linear solvers
7280: instead of working directly with matrix algebra routines such as this.
7281: See, e.g., `KSPCreate()`.
7283: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7285: Fortran Note:
7286: A valid (non-null) `info` argument must be provided
7288: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7289: @*/
7290: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7291: {
7292:   PetscFunctionBegin;
7296:   PetscAssertPointer(info, 4);
7297:   PetscAssertPointer(fact, 1);
  /* Validate the user-supplied MatFactorInfo before dispatching */
7298:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7299:   PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7300:   PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7301:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7302:   MatCheckPreallocated(mat, 2);
  /* Skip event logging when the factor type's symbolic phase is trivial */
7304:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7305:   PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7306:   if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7307:   PetscFunctionReturn(PETSC_SUCCESS);
7308: }
7310: /*@C
7311: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7312: points to an array of valid matrices, they may be reused to store the new
7313: submatrices.
7315: Collective
7317: Input Parameters:
7318: + mat - the matrix
7319: . n - the number of submatrixes to be extracted (on this processor, may be zero)
7320: . irow - index set of rows to extract
7321: . icol - index set of columns to extract
7322: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7324: Output Parameter:
7325: . submat - the array of submatrices
7327: Level: advanced
7329: Notes:
7330: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7331: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7332: to extract a parallel submatrix.
7334: Some matrix types place restrictions on the row and column
7335: indices, such as that they be sorted or that they be equal to each other.
7337: The index sets may not have duplicate entries.
7339: When extracting submatrices from a parallel matrix, each processor can
7340: form a different submatrix by setting the rows and columns of its
7341: individual index sets according to the local submatrix desired.
7343: When finished using the submatrices, the user should destroy
7344: them with `MatDestroySubMatrices()`.
7346: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7347: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7349: This routine creates the matrices in submat; you should NOT create them before
7350: calling it. It also allocates the array of matrix pointers submat.
7352: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7353: request one row/column in a block, they must request all rows/columns that are in
7354: that block. For example, if the block size is 2 you cannot request just row 0 and
7355: column 0.
7357: Fortran Note:
7358: .vb
7359: Mat, pointer :: submat(:)
7360: .ve
7362: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7363: @*/
7364: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7365: {
7366:   PetscInt  i;
7367:   PetscBool eq;
7369:   PetscFunctionBegin;
7372:   if (n) {
7373:     PetscAssertPointer(irow, 3);
7375:     PetscAssertPointer(icol, 4);
7377:   }
7378:   PetscAssertPointer(submat, 6);
  /* On reuse the caller must hand back the array produced by the initial call */
7379:   if (n && scall == MAT_REUSE_MATRIX) {
7380:     PetscAssertPointer(*submat, 6);
7382:   }
7383:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7384:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7385:   MatCheckPreallocated(mat, 1);
7386:   PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7387:   PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7388:   PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
  /* Post-process each extracted submatrix */
7389:   for (i = 0; i < n; i++) {
7390:     (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
  /* A principal (rows == cols) submatrix inherits the parent's symmetry flags */
7391:     PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7392:     if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7393: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
  /* Propagate CPU binding to submatrices on GPU-capable builds when requested */
7394:     if (mat->boundtocpu && mat->bindingpropagates) {
7395:       PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7396:       PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7397:     }
7398: #endif
7399:   }
7400:   PetscFunctionReturn(PETSC_SUCCESS);
7401: }
7403: /*@C
7404: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7406: Collective
7408: Input Parameters:
7409: + mat - the matrix
7410: . n - the number of submatrixes to be extracted
7411: . irow - index set of rows to extract
7412: . icol - index set of columns to extract
7413: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7415: Output Parameter:
7416: . submat - the array of submatrices
7418: Level: advanced
7420: Note:
7421: This is used by `PCGASM`
7423: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7424: @*/
7425: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7426: {
7427:   PetscInt  i;
7428:   PetscBool eq;
7430:   PetscFunctionBegin;
7433:   if (n) {
7434:     PetscAssertPointer(irow, 3);
7436:     PetscAssertPointer(icol, 4);
7438:   }
7439:   PetscAssertPointer(submat, 6);
  /* On reuse the caller must hand back the array produced by the initial call */
7440:   if (n && scall == MAT_REUSE_MATRIX) {
7441:     PetscAssertPointer(*submat, 6);
7443:   }
7444:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7445:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7446:   MatCheckPreallocated(mat, 1);
7448:   PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7449:   PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7450:   PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
  /* A principal (rows == cols) submatrix inherits the parent's symmetry flags */
7451:   for (i = 0; i < n; i++) {
7452:     PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7453:     if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7454:   }
7455:   PetscFunctionReturn(PETSC_SUCCESS);
7456: }
7458: /*@C
7459: MatDestroyMatrices - Destroys an array of matrices
7461: Collective
7463: Input Parameters:
7464: + n - the number of local matrices
7465: - mat - the matrices (this is a pointer to the array of matrices)
7467: Level: advanced
7469: Notes:
7470: Frees not only the matrices, but also the array that contains the matrices
7472: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7474: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7475: @*/
7476: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7477: {
7478:   PetscInt i;
7480:   PetscFunctionBegin;
  /* A NULL array is a no-op, e.g. when the matrices were never created */
7481:   if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7482:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7483:   PetscAssertPointer(mat, 2);
7485:   for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7487:   /* memory is allocated even if n = 0 */
7488:   PetscCall(PetscFree(*mat));
7489:   PetscFunctionReturn(PETSC_SUCCESS);
7490: }
7492: /*@C
7493: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7495: Collective
7497: Input Parameters:
7498: + n - the number of local matrices
7499: - mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)
7501: Level: advanced
7503: Note:
7504: Frees not only the matrices, but also the array that contains the matrices
7506: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7507: @*/
7508: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7509: {
7510:   Mat mat0;
7512:   PetscFunctionBegin;
7513:   if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7514:   /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7515:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7516:   PetscAssertPointer(mat, 2);
  /* Let the first submatrix's type handle destruction if it provides a hook
     (some implementations share data among the submatrices); otherwise fall
     back to destroying each matrix and the array itself */
7518:   mat0 = (*mat)[0];
7519:   if (mat0 && mat0->ops->destroysubmatrices) {
7520:     PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7521:   } else {
7522:     PetscCall(MatDestroyMatrices(n, mat));
7523:   }
7524:   PetscFunctionReturn(PETSC_SUCCESS);
7525: }
7527: /*@
7528: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7530: Collective
7532: Input Parameter:
7533: . mat - the matrix
7535: Output Parameter:
7536: . matstruct - the sequential matrix with the nonzero structure of `mat`
7538: Level: developer
7540: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7541: @*/
7542: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7543: {
7544:   PetscFunctionBegin;
7546:   PetscAssertPointer(matstruct, 2);
7549:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7550:   MatCheckPreallocated(mat, 1);
  /* Dispatch to the type-specific implementation, wrapped in event logging */
7552:   PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7553:   PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7554:   PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7555:   PetscFunctionReturn(PETSC_SUCCESS);
7556: }
7558: /*@C
7559: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7561: Collective
7563: Input Parameter:
7564: . mat - the matrix
7566: Level: advanced
7568: Note:
7569: This is not needed, one can just call `MatDestroy()`
7571: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7572: @*/
7573: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7574: {
7575:   PetscFunctionBegin;
7576:   PetscAssertPointer(mat, 1);
  /* Just a MatDestroy(); kept for symmetry with MatGetSeqNonzeroStructure() */
7577:   PetscCall(MatDestroy(mat));
7578:   PetscFunctionReturn(PETSC_SUCCESS);
7579: }
7581: /*@
7582: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7583: replaces the index sets by larger ones that represent submatrices with
7584: additional overlap.
7586: Collective
7588: Input Parameters:
7589: + mat - the matrix
7590: . n - the number of index sets
7591: . is - the array of index sets (these index sets will changed during the call)
7592: - ov - the additional overlap requested
7594: Options Database Key:
7595: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7597: Level: developer
7599: Note:
7600: The computed overlap preserves the matrix block sizes when the blocks are square.
7601: That is: if a matrix nonzero for a given block would increase the overlap all columns associated with
7602: that block are included in the overlap regardless of whether each specific column would increase the overlap.
7604: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7605: @*/
7606: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7607: {
7608:   PetscInt i, bs, cbs;
7610:   PetscFunctionBegin;
7614:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7615:   if (n) {
7616:     PetscAssertPointer(is, 3);
7618:   }
7619:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7620:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7621:   MatCheckPreallocated(mat, 1);
  /* Nothing to do when no extra overlap is requested or there are no index sets */
7623:   if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7624:   PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7625:   PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7626:   PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
  /* Overlap computation preserves square blocks, so tag the enlarged index
     sets with the matrix block size when row and column block sizes match */
7627:   PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7628:   if (bs == cbs) {
7629:     for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7630:   }
7631:   PetscFunctionReturn(PETSC_SUCCESS);
7632: }
7634: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7636: /*@
7637: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7638: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7639: additional overlap.
7641: Collective
7643: Input Parameters:
7644: + mat - the matrix
7645: . n - the number of index sets
7646: . is - the array of index sets (these index sets will changed during the call)
7647: - ov - the additional overlap requested
7649:   Options Database Key:
7650: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7652: Level: developer
7654: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7655: @*/
7656: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7657: {
7658:   PetscInt i;
7660:   PetscFunctionBegin;
7663:   PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7664:   if (n) {
7665:     PetscAssertPointer(is, 3);
7667:   }
7668:   PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7669:   PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7670:   MatCheckPreallocated(mat, 1);
  /* Zero extra overlap is a no-op */
7671:   if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7672:   PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
  /* Each index set (possibly living on a sub-communicator) is enlarged independently */
7673:   for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7674:   PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7675:   PetscFunctionReturn(PETSC_SUCCESS);
7676: }
7678: /*@
7679: MatGetBlockSize - Returns the matrix block size.
7681: Not Collective
7683: Input Parameter:
7684: . mat - the matrix
7686: Output Parameter:
7687: . bs - block size
7689: Level: intermediate
7691: Notes:
7692:   The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7694: If the block size has not been set yet this routine returns 1.
7696: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7697: @*/
7698: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7699: {
7700:   PetscFunctionBegin;
7702:   PetscAssertPointer(bs, 2);
  /* The (row) block size is stored on the row layout */
7703:   *bs = mat->rmap->bs;
7704:   PetscFunctionReturn(PETSC_SUCCESS);
7705: }
7707: /*@
7708: MatGetBlockSizes - Returns the matrix block row and column sizes.
7710: Not Collective
7712: Input Parameter:
7713: . mat - the matrix
7715: Output Parameters:
7716: + rbs - row block size
7717: - cbs - column block size
7719: Level: intermediate
7721: Notes:
7722: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7723: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7725: If a block size has not been set yet this routine returns 1.
7727: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7728: @*/
7729: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7730: {
7731: PetscFunctionBegin;
7733: if (rbs) PetscAssertPointer(rbs, 2); /* either output may be passed as NULL if not wanted */
7734: if (cbs) PetscAssertPointer(cbs, 3);
7735: if (rbs) *rbs = mat->rmap->bs;
7736: if (cbs) *cbs = mat->cmap->bs;
7737: PetscFunctionReturn(PETSC_SUCCESS);
7738: }
7740: /*@
7741: MatSetBlockSize - Sets the matrix block size.
7743: Logically Collective
7745: Input Parameters:
7746: + mat - the matrix
7747: - bs - block size
7749: Level: intermediate
7751: Notes:
7752: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7753: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7755: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7756: is compatible with the matrix local sizes.
7758: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7759: @*/
7760: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7761: {
7762: PetscFunctionBegin;
7765: PetscCall(MatSetBlockSizes(mat, bs, bs)); /* a single block size means square blocks: row and column block sizes are both bs */
7766: PetscFunctionReturn(PETSC_SUCCESS);
7767: }
7769: typedef struct {
7770: PetscInt n; /* number of local diagonal blocks in the envelope */
7771: IS *is; /* per-block index sets (global row/column numbers) used to extract the blocks */
7772: Mat *mat; /* scratch array of extracted sequential block submatrices (filled/freed in MatInvertVariableBlockEnvelope()) */
7773: PetscObjectState nonzerostate; /* nonzero state of the matrix when the envelope was computed; used to detect stale envelopes */
7774: Mat C; /* preallocated matrix that receives the inverted block diagonal */
7775: } EnvelopeData;
7776: /* Container destructor for the "EnvelopeData" context attached to a Mat: frees the per-block index sets and the struct itself. edata->C is intentionally NOT destroyed here since MatInvertVariableBlockEnvelope() returns it to the caller */
7777: static PetscErrorCode EnvelopeDataDestroy(PetscCtxRt ptr)
7778: {
7779: EnvelopeData *edata = *(EnvelopeData **)ptr;
7781: PetscFunctionBegin;
7782: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7783: PetscCall(PetscFree(edata->is));
7784: PetscCall(PetscFree(edata));
7785: PetscFunctionReturn(PETSC_SUCCESS);
7786: }
7788: /*@
7789: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7790: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7792: Collective
7794: Input Parameter:
7795: . mat - the matrix
7797: Level: intermediate
7799: Notes:
7800: There can be zeros within the blocks
7802: The blocks can overlap between processes, including laying on more than two processes
7804: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7805: @*/
7806: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7807: {
7808: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7809: PetscInt *diag, *odiag, sc;
7810: VecScatter scatter;
7811: PetscScalar *seqv;
7812: const PetscScalar *parv;
7813: const PetscInt *ia, *ja;
7814: PetscBool set, flag, done;
7815: Mat AA = mat, A;
7816: MPI_Comm comm;
7817: PetscMPIInt rank, size, tag;
7818: MPI_Status status;
7819: PetscContainer container;
7820: EnvelopeData *edata;
7821: Vec seq, par;
7822: IS isglobal;
7824: PetscFunctionBegin;
7826: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7827: if (!set || !flag) { /* symmetry unknown or false: symmetrize the nonzero structure as AA = A^T + A so the envelope is well defined */
7828: /* TODO: only needs nonzero structure of transpose */
7829: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7830: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7831: }
7832: PetscCall(MatAIJGetLocalMat(AA, &A));
7833: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7834: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7836: PetscCall(MatGetLocalSize(mat, &n, NULL));
7837: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7838: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7839: PetscCallMPI(MPI_Comm_size(comm, &size));
7840: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7842: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7844: if (rank > 0) { /* sequential pipeline: receive the running envelope column (env) and current block start (tbs) from the previous rank */
7845: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7846: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7847: }
7848: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7849: for (i = 0; i < n; i++) {
7850: env = PetscMax(env, ja[ia[i + 1] - 1]); /* last column entry of row i; assumes AIJ columns are sorted so it is the row maximum */
7851: II = rstart + i;
7852: if (env == II) { /* envelope closes exactly on the diagonal: a block ends at global row II */
7853: starts[lblocks] = tbs;
7854: sizes[lblocks++] = 1 + II - tbs;
7855: tbs = 1 + II; /* next block starts just past this one */
7856: }
7857: }
7858: if (rank < size - 1) { /* forward the running state to the next rank in the pipeline */
7859: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7860: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7861: }
7863: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7864: if (!set || !flag) PetscCall(MatDestroy(&AA)); /* AA was created above only in the non-symmetric case */
7865: PetscCall(MatDestroy(&A));
7867: PetscCall(PetscNew(&edata));
7868: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7869: edata->n = lblocks;
7870: /* create IS needed for extracting blocks from the original matrix */
7871: PetscCall(PetscMalloc1(lblocks, &edata->is));
7872: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7874: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7875: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7876: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7877: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7878: PetscCall(MatSetType(edata->C, MATAIJ));
7880: /* Communicate the start and end of each row, from each block to the correct rank */
7881: /* TODO: Use PetscSF instead of VecScatter */
7882: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i]; /* total number of local rows covered by blocks */
7883: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7884: PetscCall(VecGetArrayWrite(seq, &seqv));
7885: for (PetscInt i = 0; i < lblocks; i++) { /* for every row of every block, record the (start, end) of its owning block */
7886: for (PetscInt j = 0; j < sizes[i]; j++) {
7887: seqv[cnt] = starts[i];
7888: seqv[cnt + 1] = starts[i] + sizes[i];
7889: cnt += 2;
7890: }
7891: }
7892: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7893: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7894: sc -= cnt; /* convert the inclusive scan to an exclusive prefix sum: sc = this rank's starting offset in the global vector */
7895: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7896: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7897: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7898: PetscCall(ISDestroy(&isglobal));
7899: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7900: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7901: PetscCall(VecScatterDestroy(&scatter));
7902: PetscCall(VecDestroy(&seq));
7903: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7904: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7905: PetscCall(VecGetArrayRead(par, &parv));
7906: cnt = 0;
7907: PetscCall(MatGetSize(mat, NULL, &n)); /* n is reused here as the global number of columns */
7908: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7909: PetscInt start, end, d = 0, od = 0;
7911: start = (PetscInt)PetscRealPart(parv[cnt]);
7912: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7913: cnt += 2;
7915: if (start < cstart) { /* count columns of [start, n) split into diagonal [cstart, cend) and off-diagonal parts ... */
7916: od += cstart - start + n - cend;
7917: d += cend - cstart;
7918: } else if (start < cend) {
7919: od += n - cend;
7920: d += cend - start;
7921: } else od += n - start;
7922: if (end <= cstart) { /* ... then subtract the columns of [end, n), leaving the counts for this row's block [start, end) */
7923: od -= cstart - end + n - cend;
7924: d -= cend - cstart;
7925: } else if (end < cend) {
7926: od -= n - cend;
7927: d -= cend - end;
7928: } else od -= n - end;
7930: odiag[i] = od;
7931: diag[i] = d;
7932: }
7933: PetscCall(VecRestoreArrayRead(par, &parv));
7934: PetscCall(VecDestroy(&par));
7935: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7936: PetscCall(PetscFree2(diag, odiag));
7937: PetscCall(PetscFree2(sizes, starts));
7939: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7940: PetscCall(PetscContainerSetPointer(container, edata));
7941: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy)); /* container owns edata from here on */
7942: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7943: PetscCall(PetscObjectDereference((PetscObject)container)); /* mat now holds the only reference to the container */
7944: PetscFunctionReturn(PETSC_SUCCESS);
7945: }
7947: /*@
7948: MatInvertVariableBlockEnvelope - set matrix C to be the inverted block diagonal of matrix A
7950: Collective
7952: Input Parameters:
7953: + A - the matrix
7954: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7956: Output Parameter:
7957: . C - matrix with inverted block diagonal of `A`
7959: Level: advanced
7961: Note:
7962: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7964: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7965: @*/
7966: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7967: {
7968: PetscContainer container;
7969: EnvelopeData *edata;
7970: PetscObjectState nonzerostate;
7972: PetscFunctionBegin;
7973: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7974: if (!container) { /* lazily compute and cache the envelope on first use */
7975: PetscCall(MatComputeVariableBlockEnvelope(A));
7976: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7977: }
7978: PetscCall(PetscContainerGetPointer(container, &edata));
7979: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7980: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7981: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7983: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7984: *C = edata->C; /* hand the cached, preallocated result matrix to the caller */
7986: for (PetscInt i = 0; i < edata->n; i++) {
7987: Mat D;
7988: PetscScalar *dvalues;
7990: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D)); /* densify the block so it can be inverted in place */
7991: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE)); /* dense arrays are column-oriented */
7992: PetscCall(MatSeqDenseInvert(D));
7993: PetscCall(MatDenseGetArray(D, &dvalues)); /* NOTE(review): dvalues is never restored before MatDestroy() below — confirm MatDenseRestoreArray() is not required here */
7994: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7995: PetscCall(MatDestroy(&D));
7996: }
7997: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7998: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7999: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
8000: PetscFunctionReturn(PETSC_SUCCESS);
8001: }
8003: /*@
8004: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
8006: Not Collective
8008: Input Parameters:
8009: + mat - the matrix
8010: . nblocks - the number of blocks on this process, each block can only exist on a single process
8011: - bsizes - the block sizes
8013: Level: intermediate
8015: Notes:
8016: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
8018: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
8020: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
8021: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
8022: @*/
8023: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
8024: {
8025: PetscInt ncnt = 0, nlocal;
8027: PetscFunctionBegin;
8029: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
8030: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
8031: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
8032: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
8033: PetscCall(PetscFree(mat->bsizes)); /* discard any previously set block sizes before storing a private copy of the new ones */
8034: mat->nblocks = nblocks;
8035: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
8036: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
8037: PetscFunctionReturn(PETSC_SUCCESS);
8038: }
8040: /*@C
8041: MatGetVariableBlockSizes - Gets the diagonal block sizes of the matrix set with `MatSetVariableBlockSizes()`; the blocks need not be of the same size
8043: Not Collective; No Fortran Support
8045: Input Parameter:
8046: . mat - the matrix
8048: Output Parameters:
8049: + nblocks - the number of blocks on this process
8050: - bsizes - the block sizes
8052: Level: intermediate
8054: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
8055: @*/
8056: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
8057: {
8058: PetscFunctionBegin;
8060: if (nblocks) *nblocks = mat->nblocks; /* either output may be NULL; bsizes aliases internal storage, do not free it */
8061: if (bsizes) *bsizes = mat->bsizes;
8062: PetscFunctionReturn(PETSC_SUCCESS);
8063: }
8065: /*@
8066: MatSelectVariableBlockSizes - When creating a submatrix, pass on the variable block sizes
8068: Not Collective
8070: Input Parameters:
8071: + subA - the submatrix
8072: . A - the original matrix
8073: - isrow - The `IS` of selected rows for the submatrix, must be sorted
8075: Level: developer
8077: Notes:
8078: If the index set is not sorted or contains off-process entries, this function will do nothing.
8080: .seealso: [](ch_matrices), `Mat`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
8081: @*/
8082: PetscErrorCode MatSelectVariableBlockSizes(Mat subA, Mat A, IS isrow)
8083: {
8084: const PetscInt *rows;
8085: PetscInt n, rStart, rEnd, Nb = 0;
8086: PetscBool flg = A->bsizes ? PETSC_TRUE : PETSC_FALSE;
8088: PetscFunctionBegin;
8089: // The code for block size extraction does not support an unsorted IS
8090: if (flg) PetscCall(ISSorted(isrow, &flg));
8091: // We don't support originally off-diagonal blocks
8092: if (flg) {
8093: PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
8094: PetscCall(ISGetLocalSize(isrow, &n));
8095: PetscCall(ISGetIndices(isrow, &rows));
8096: for (PetscInt i = 0; i < n && flg; ++i) {
8097: if (rows[i] < rStart || rows[i] >= rEnd) flg = PETSC_FALSE; // any off-process row disables block-size propagation
8098: }
8099: PetscCall(ISRestoreIndices(isrow, &rows));
8100: }
8101: // quiet return if we can't extract block size
8102: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)subA))); // all ranks must agree, since nothing below is collective
8103: if (!flg) PetscFunctionReturn(PETSC_SUCCESS);
8105: // extract block sizes
8106: PetscCall(ISGetIndices(isrow, &rows));
8107: // First pass: count the blocks of A that contain at least one selected row (Nb)
8108: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) { // gr = global row at the start of block b; i = cursor into rows[]
8110: PetscBool occupied = PETSC_FALSE;
8110: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
8111: const PetscInt row = gr + br;
8113: if (i == n) break; // all selected rows consumed
8114: if (rows[i] == row) {
8115: occupied = PETSC_TRUE;
8116: ++i;
8117: }
8118: while (i < n && rows[i] < row) ++i; // skip duplicates/stale entries below the current row
8119: }
8120: gr += A->bsizes[b];
8121: if (occupied) ++Nb;
8122: }
8123: subA->nblocks = Nb;
8124: PetscCall(PetscFree(subA->bsizes));
8125: PetscCall(PetscMalloc1(subA->nblocks, &subA->bsizes));
8126: PetscInt sb = 0;
8127: // Second pass: same walk as above, now recording how many selected rows fall in each occupied block
8127: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
8128: if (sb < subA->nblocks) subA->bsizes[sb] = 0;
8129: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
8130: const PetscInt row = gr + br;
8132: if (i == n) break;
8133: if (rows[i] == row) {
8134: ++subA->bsizes[sb];
8135: ++i;
8136: }
8137: while (i < n && rows[i] < row) ++i;
8138: }
8139: gr += A->bsizes[b];
8140: if (sb < subA->nblocks && subA->bsizes[sb]) ++sb;
8141: }
8142: PetscCheck(sb == subA->nblocks, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of blocks %" PetscInt_FMT " != %" PetscInt_FMT, sb, subA->nblocks);
8143: PetscInt nlocal, ncnt = 0;
8144: PetscCall(MatGetLocalSize(subA, &nlocal, NULL));
8145: PetscCheck(subA->nblocks >= 0 && subA->nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", subA->nblocks, nlocal);
8146: for (PetscInt i = 0; i < subA->nblocks; i++) ncnt += subA->bsizes[i];
8147: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
8148: PetscCall(ISRestoreIndices(isrow, &rows));
8149: PetscFunctionReturn(PETSC_SUCCESS);
8150: }
8152: /*@
8153: MatSetBlockSizes - Sets the matrix block row and column sizes.
8155: Logically Collective
8157: Input Parameters:
8158: + mat - the matrix
8159: . rbs - row block size
8160: - cbs - column block size
8162: Level: intermediate
8164: Notes:
8165: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
8166: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
8167: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
8169: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
8170: are compatible with the matrix local sizes.
8172: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
8174: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
8175: @*/
8176: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
8177: {
8178: PetscFunctionBegin;
8182: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs); /* let the matrix type react first, if it implements the hook */
8183: if (mat->rmap->refcnt) { /* the row layout is shared with other objects: replace it with a private duplicate so changing its block size cannot affect them */
8184: ISLocalToGlobalMapping l2g = NULL;
8185: PetscLayout nmap = NULL;
8187: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
8188: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
8189: PetscCall(PetscLayoutDestroy(&mat->rmap));
8190: mat->rmap = nmap;
8191: mat->rmap->mapping = l2g;
8192: }
8193: if (mat->cmap->refcnt) { /* same for the shared column layout */
8194: ISLocalToGlobalMapping l2g = NULL;
8195: PetscLayout nmap = NULL;
8197: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
8198: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
8199: PetscCall(PetscLayoutDestroy(&mat->cmap));
8200: mat->cmap = nmap;
8201: mat->cmap->mapping = l2g;
8202: }
8203: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
8204: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
8205: PetscFunctionReturn(PETSC_SUCCESS);
8206: }
8208: /*@
8209: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8211: Logically Collective
8213: Input Parameters:
8214: + mat - the matrix
8215: . fromRow - matrix from which to copy row block size
8216: - fromCol - matrix from which to copy column block size (can be same as `fromRow`)
8218: Level: developer
8220: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8221: @*/
8222: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8223: {
8224: PetscFunctionBegin;
8228: PetscTryTypeMethod(mat, setblocksizes, fromRow->rmap->bs, fromCol->cmap->bs); /* optional type-specific hook, as in MatSetBlockSizes() */
8229: PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8230: PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8231: PetscFunctionReturn(PETSC_SUCCESS);
8232: }
8234: /*@
8235: MatResidual - Default routine to calculate the residual r = b - Ax
8237: Collective
8239: Input Parameters:
8240: + mat - the matrix
8241: . b - the right-hand-side
8242: - x - the approximate solution
8244: Output Parameter:
8245: . r - location to store the residual
8247: Level: developer
8249: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8250: @*/
8251: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8252: {
8253: PetscFunctionBegin;
8259: MatCheckPreallocated(mat, 1);
8260: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8261: if (!mat->ops->residual) { /* no type-specific residual: compute r = A x, then r = b - r */
8262: PetscCall(MatMult(mat, x, r));
8263: PetscCall(VecAYPX(r, -1.0, b));
8264: } else {
8265: PetscUseTypeMethod(mat, residual, b, x, r);
8266: }
8267: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8268: PetscFunctionReturn(PETSC_SUCCESS);
8269: }
8271: /*@C
8272: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8274: Collective
8276: Input Parameters:
8277: + mat - the matrix
8278: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8279: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8280: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8281: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8282: always used.
8284: Output Parameters:
8285: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8286: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8287: . ja - the column indices, use `NULL` if not needed
8288: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8289: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8291: Level: developer
8293: Notes:
8294: You CANNOT change any of the ia[] or ja[] values.
8296: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
8298: Fortran Notes:
8299: Use
8300: .vb
8301: PetscInt, pointer :: ia(:),ja(:)
8302: call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8303: ! Access the ith and jth entries via ia(i) and ja(j)
8304: .ve
8306: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8307: @*/
8308: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8309: {
8310: PetscFunctionBegin;
8313: if (n) PetscAssertPointer(n, 5);
8314: if (ia) PetscAssertPointer(ia, 6);
8315: if (ja) PetscAssertPointer(ja, 7);
8316: if (done) PetscAssertPointer(done, 8);
8317: MatCheckPreallocated(mat, 1);
8318: if (!mat->ops->getrowij && done) *done = PETSC_FALSE; /* unsupported type: report failure through done; if done is NULL the else branch errors instead */
8319: else {
8320: if (done) *done = PETSC_TRUE;
8321: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8322: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8323: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8324: }
8325: PetscFunctionReturn(PETSC_SUCCESS);
8326: }
8328: /*@C
8329: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8331: Collective
8333: Input Parameters:
8334: + mat - the matrix
8335: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8336: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8337: symmetrized
8338: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8339: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8340: always used.
8342: Output Parameters:
8343: + n - number of columns in the (possibly compressed) matrix
8344: . ia - the column pointers; that is ia[0] = 0, ia[col] = i[col-1] + number of elements in that col of the matrix
8345: . ja - the row indices
8346: - done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8348: Level: developer
8350: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8351: @*/
8352: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8353: {
8354: PetscFunctionBegin;
8357: PetscAssertPointer(n, 5); /* unlike MatGetRowIJ(), n and done are required here */
8358: if (ia) PetscAssertPointer(ia, 6);
8359: if (ja) PetscAssertPointer(ja, 7);
8360: PetscAssertPointer(done, 8);
8361: MatCheckPreallocated(mat, 1);
8362: if (!mat->ops->getcolumnij) *done = PETSC_FALSE; /* unsupported type: quietly report failure */
8363: else {
8364: *done = PETSC_TRUE;
8365: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8366: }
8367: PetscFunctionReturn(PETSC_SUCCESS);
8368: }
8370: /*@C
8371: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8373: Collective
8375: Input Parameters:
8376: + mat - the matrix
8377: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8378: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8379: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8380: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8381: always used.
8382: . n - size of (possibly compressed) matrix
8383: . ia - the row pointers
8384: - ja - the column indices
8386: Output Parameter:
8387: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating that the values have been returned
8389: Level: developer
8391: Note:
8392: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8393: use of the array after it has been restored. If you pass `NULL`, it will
8394: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8396: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8397: @*/
8398: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8399: {
8400: PetscFunctionBegin;
8403: if (ia) PetscAssertPointer(ia, 6);
8404: if (ja) PetscAssertPointer(ja, 7);
8405: if (done) PetscAssertPointer(done, 8);
8406: MatCheckPreallocated(mat, 1);
8408: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE; /* mirrors the MatGetRowIJ() handling of unsupported types */
8409: else {
8410: if (done) *done = PETSC_TRUE;
8411: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8412: if (n) *n = 0; /* invalidate the caller's copies so stale use is caught */
8413: if (ia) *ia = NULL;
8414: if (ja) *ja = NULL;
8415: }
8416: PetscFunctionReturn(PETSC_SUCCESS);
8417: }
8419: /*@C
8420: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8422: Collective
8424: Input Parameters:
8425: + mat - the matrix
8426: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8427: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8428: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8429: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8430: always used.
8432: Output Parameters:
8433: + n - size of (possibly compressed) matrix
8434: . ia - the column pointers
8435: . ja - the row indices
8436: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating that the values have been returned
8438: Level: developer
8440: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8441: @*/
8442: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8443: {
8444: PetscFunctionBegin;
8447: if (ia) PetscAssertPointer(ia, 6);
8448: if (ja) PetscAssertPointer(ja, 7);
8449: PetscAssertPointer(done, 8);
8450: MatCheckPreallocated(mat, 1);
8452: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE; /* mirrors the MatGetColumnIJ() handling of unsupported types */
8453: else {
8454: *done = PETSC_TRUE;
8455: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8456: if (n) *n = 0; /* invalidate the caller's copies so stale use is caught */
8457: if (ia) *ia = NULL;
8458: if (ja) *ja = NULL;
8459: }
8460: PetscFunctionReturn(PETSC_SUCCESS);
8461: }
8463: /*@
8464: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8465: `MatGetColumnIJ()`.
8467: Collective
8469: Input Parameters:
8470: + mat - the matrix
8471: . ncolors - maximum color value
8472: . n - number of entries in colorarray
8473: - colorarray - array indicating color for each column
8475: Output Parameter:
8476: . iscoloring - coloring generated using colorarray information
8478: Level: developer
8480: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8481: @*/
8482: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8483: {
8484: PetscFunctionBegin;
8487: PetscAssertPointer(colorarray, 4);
8488: PetscAssertPointer(iscoloring, 5);
8489: MatCheckPreallocated(mat, 1);
8491: if (!mat->ops->coloringpatch) { /* default: build the ISColoring directly; PETSC_OWN_POINTER means colorarray ownership passes to iscoloring */
8492: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8493: } else {
8494: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8495: }
8496: PetscFunctionReturn(PETSC_SUCCESS);
8497: }
8499: /*@
8500: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8502: Logically Collective
8504: Input Parameter:
8505: . mat - the factored matrix to be reset
8507: Level: developer
8509: Notes:
8510: This routine should be used only with factored matrices formed by in-place
8511: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8512: format). This option can save memory, for example, when solving nonlinear
8513: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8514: ILU(0) preconditioner.
8516: One can specify in-place ILU(0) factorization by calling
8517: .vb
8518: PCSetType(pc,PCILU);
8519: PCFactorSetUseInPlace(pc);
8520: .ve
8521: or by using the options -pc_type ilu -pc_factor_in_place
8523: In-place factorization ILU(0) can also be used as a local
8524: solver for the blocks within the block Jacobi or additive Schwarz
8525: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8526: for details on setting local solver options.
8528: Most users should employ the `KSP` interface for linear solvers
8529: instead of working directly with matrix algebra routines such as this.
8530: See, e.g., `KSPCreate()`.
8532: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8533: @*/
8534: PetscErrorCode MatSetUnfactored(Mat mat)
8535: {
8536: PetscFunctionBegin;
8539: MatCheckPreallocated(mat, 1);
8540: mat->factortype = MAT_FACTOR_NONE; /* clear the flag unconditionally, then let the type do any extra work */
8541: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8542: PetscUseTypeMethod(mat, setunfactored);
8543: PetscFunctionReturn(PETSC_SUCCESS);
8544: }
8546: /*@
8547: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8548: as the original matrix.
8550: Collective
8552: Input Parameters:
8553: + mat - the original matrix
8554: . isrow - parallel `IS` containing the rows this processor should obtain
8555: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in ITS "diagonal part" in the new matrix.
8556: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8558: Output Parameter:
8559: . newmat - the new submatrix, of the same type as the original matrix
8561: Level: advanced
8563: Notes:
8564: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8566: Some matrix types place restrictions on the row and column indices, such
8567: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8568: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8570: The index sets may not have duplicate entries.
8572: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8573: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8574: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8575: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8576: you are finished using it.
8578: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8579: the input matrix.
8581: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8583: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8584: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8586: Example usage:
8587: Consider the following 8x8 matrix with 34 non-zero values, that is
8588: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8589: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8590: as follows
8591: .vb
8592: 1 2 0 | 0 3 0 | 0 4
8593: Proc0 0 5 6 | 7 0 0 | 8 0
8594: 9 0 10 | 11 0 0 | 12 0
8595: -------------------------------------
8596: 13 0 14 | 15 16 17 | 0 0
8597: Proc1 0 18 0 | 19 20 21 | 0 0
8598: 0 0 0 | 22 23 0 | 24 0
8599: -------------------------------------
8600: Proc2 25 26 27 | 0 0 28 | 29 0
8601: 30 0 0 | 31 32 33 | 0 34
8602: .ve
8604: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8606: .vb
8607: 2 0 | 0 3 0 | 0
8608: Proc0 5 6 | 7 0 0 | 8
8609: -------------------------------
8610: Proc1 18 0 | 19 20 21 | 0
8611: -------------------------------
8612: Proc2 26 27 | 0 0 28 | 29
8613: 0 0 | 31 32 33 | 0
8614: .ve
8616: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8617: @*/
8618: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8619: {
8620: PetscMPIInt size;
8621: Mat *local;
8622: IS iscoltmp;
8623: PetscBool flg;
8625: PetscFunctionBegin;
8629: PetscAssertPointer(newmat, 5);
8632: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8633: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8634: PetscCheck(cll != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_INPLACE_MATRIX");
8636: MatCheckPreallocated(mat, 1);
8637: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  /* Fast path: when every process requests exactly its own contiguous row range
     (a stride-1 IS starting at this rank's first owned row, covering all owned rows),
     the submatrix is the whole matrix; just return it with an extra reference. */
8639: if (!iscol || isrow == iscol) {
8640: PetscBool stride;
8641: PetscMPIInt grabentirematrix = 0, grab;
8642: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8643: if (stride) {
8644: PetscInt first, step, n, rstart, rend;
8645: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8646: if (step == 1) {
8647: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8648: if (rstart == first) {
8649: PetscCall(ISGetLocalSize(isrow, &n));
8650: if (n == rend - rstart) grabentirematrix = 1;
8651: }
8652: }
8653: }
  /* MPI_MIN: only take the shortcut if EVERY rank qualifies */
8654: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8655: if (grab) {
8656: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8657: if (cll == MAT_INITIAL_MATRIX) {
8658: *newmat = mat;
8659: PetscCall(PetscObjectReference((PetscObject)mat));
8660: }
8661: PetscFunctionReturn(PETSC_SUCCESS);
8662: }
8663: }
  /* NULL iscol means "all columns": build a stride IS over this rank's column range */
8665: if (!iscol) {
8666: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8667: } else {
8668: iscoltmp = iscol;
8669: }
8671: /* if original matrix is on just one processor then use submatrix generated */
8672: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8673: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8674: goto setproperties;
8675: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8676: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
  /* MatCreateSubMatrices() returned an array of one matrix; take ownership and free the array */
8677: *newmat = *local;
8678: PetscCall(PetscFree(local));
8679: goto setproperties;
8680: } else if (!mat->ops->createsubmatrix) {
8681: /* Create a new matrix type that implements the operation using the full matrix */
8682: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8683: switch (cll) {
8684: case MAT_INITIAL_MATRIX:
8685: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8686: break;
8687: case MAT_REUSE_MATRIX:
8688: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8689: break;
8690: default:
8691: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8692: }
8693: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8694: goto setproperties;
8695: }
  /* General case: delegate to the matrix type's own createsubmatrix implementation */
8697: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8698: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8699: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
  /* Common epilogue: propagate symmetry flags when rows == columns were selected,
     clean up the temporary column IS, and bump the object state for new matrices. */
8701: setproperties:
8702: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8703: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8704: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8705: }
8706: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8707: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8708: if (!iscol || isrow == iscol) PetscCall(MatSelectVariableBlockSizes(*newmat, mat, isrow));
8709: PetscFunctionReturn(PETSC_SUCCESS);
8710: }
8712: /*@
8713: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8715: Not Collective
8717: Input Parameters:
8718: + A - the matrix we wish to propagate options from
8719: - B - the matrix we wish to propagate options to
8721: Level: beginner
8723: Note:
8724: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8726: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8727: @*/
8728: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8729: {
8730: PetscFunctionBegin;
  /* Straight field copy of all six symmetry-related flags from A to B;
     no validation of whether B actually shares A's structure is performed here. */
8733: B->symmetry_eternal = A->symmetry_eternal;
8734: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8735: B->symmetric = A->symmetric;
8736: B->structurally_symmetric = A->structurally_symmetric;
8737: B->spd = A->spd;
8738: B->hermitian = A->hermitian;
8739: PetscFunctionReturn(PETSC_SUCCESS);
8740: }
8742: /*@
8743: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8744: used during the assembly process to store values that belong to
8745: other processors.
8747: Not Collective
8749: Input Parameters:
8750: + mat - the matrix
8751: . size - the initial size of the stash.
8752: - bsize - the initial size of the block-stash(if used).
8754: Options Database Keys:
8755: + -matstash_initial_size size or size0,size1,...,sizep-1 - set initial size
8756: - -matstash_block_initial_size bsize or bsize0,bsize1,...,bsizep-1 - set initial block size
8758: Level: intermediate
8760: Notes:
8761: The block-stash is used for values set with `MatSetValuesBlocked()` while
8762: the stash is used for values set with `MatSetValues()`
8764: Run with the option -info and look for output of the form
8765: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8766: to determine the appropriate value, MM, to use for size and
8767: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8768: to determine the value, BMM to use for bsize
8770: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8771: @*/
8772: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8773: {
8774: PetscFunctionBegin;
  /* Size the scalar stash (MatSetValues) and the block stash (MatSetValuesBlocked) separately */
8777: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8778: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8779: PetscFunctionReturn(PETSC_SUCCESS);
8780: }
8782: /*@
8783: MatInterpolateAdd - $w = y + A*x$ or $A^T*x$ depending on the shape of
8784: the matrix
8786: Neighbor-wise Collective
8788: Input Parameters:
8789: + A - the matrix
8790: . x - the vector to be multiplied by the interpolation operator
8791: - y - the vector to be added to the result
8793: Output Parameter:
8794: . w - the resulting vector
8796: Level: intermediate
8798: Notes:
8799: `w` may be the same vector as `y`.
8801: This allows one to use either the restriction or interpolation (its transpose)
8802: matrix to do the interpolation
8804: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8805: @*/
8806: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8807: {
8808: PetscInt M, N, Ny;
8810: PetscFunctionBegin;
8815: PetscCall(MatGetSize(A, &M, &N));
8816: PetscCall(VecGetSize(y, &Ny));
  /* Decide between A and A^T by comparing the global row count with the size of y:
     if y matches the row space use A*x + y, otherwise use A^T*x + y. */
8817: if (M == Ny) PetscCall(MatMultAdd(A, x, y, w));
8818: else PetscCall(MatMultTransposeAdd(A, x, y, w));
8819: PetscFunctionReturn(PETSC_SUCCESS);
8820: }
8822: /*@
8823: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8824: the matrix
8826: Neighbor-wise Collective
8828: Input Parameters:
8829: + A - the matrix
8830: - x - the vector to be interpolated
8832: Output Parameter:
8833: . y - the resulting vector
8835: Level: intermediate
8837: Note:
8838: This allows one to use either the restriction or interpolation (its transpose)
8839: matrix to do the interpolation
8841: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8842: @*/
8843: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8844: {
8845: PetscInt M, N, Ny;
8847: PetscFunctionBegin;
8851: PetscCall(MatGetSize(A, &M, &N));
8852: PetscCall(VecGetSize(y, &Ny));
  /* If the output vector matches the row space apply A, otherwise apply A^T */
8853: if (M == Ny) PetscCall(MatMult(A, x, y));
8854: else PetscCall(MatMultTranspose(A, x, y));
8855: PetscFunctionReturn(PETSC_SUCCESS);
8856: }
8858: /*@
8859: MatRestrict - $y = A*x$ or $A^T*x$
8861: Neighbor-wise Collective
8863: Input Parameters:
8864: + A - the matrix
8865: - x - the vector to be restricted
8867: Output Parameter:
8868: . y - the resulting vector
8870: Level: intermediate
8872: Note:
8873: This allows one to use either the restriction or interpolation (its transpose)
8874: matrix to do the restriction
8876: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8877: @*/
8878: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8879: {
8880: PetscInt M, N, Nx;
8882: PetscFunctionBegin;
8886: PetscCall(MatGetSize(A, &M, &N));
8887: PetscCall(VecGetSize(x, &Nx));
  /* Note the test is on the INPUT vector here (opposite of MatInterpolate()):
     if x lives in the row space, restriction is A^T*x, otherwise A*x. */
8888: if (M == Nx) PetscCall(MatMultTranspose(A, x, y));
8889: else PetscCall(MatMult(A, x, y));
8890: PetscFunctionReturn(PETSC_SUCCESS);
8891: }
8893: /*@
8894: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8896: Neighbor-wise Collective
8898: Input Parameters:
8899: + A - the matrix
8900: . x - the input dense matrix to be multiplied
8901: - w - the input dense matrix to be added to the result
8903: Output Parameter:
8904: . y - the output dense matrix
8906: Level: intermediate
8908: Note:
8909: This allows one to use either the restriction or interpolation (its transpose)
8910: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8911: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8913: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8914: @*/
8915: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8916: {
8917: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8918: PetscBool trans = PETSC_TRUE;
8919: MatReuse reuse = MAT_INITIAL_MATRIX;
8921: PetscFunctionBegin;
8927: PetscCall(MatGetSize(A, &M, &N));
8928: PetscCall(MatGetSize(x, &Mx, &Nx));
  /* Pick A or A^T by shape: A*X needs N == Mx; otherwise A^T*X needs M == Mx */
8929: if (N == Mx) trans = PETSC_FALSE;
8930: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
  /* Mo = expected row count of the output */
8931: Mo = trans ? N : M;
8932: if (*y) {
8933: PetscCall(MatGetSize(*y, &My, &Ny));
8934: if (Mo == My && Nx == Ny) reuse = MAT_REUSE_MATRIX;
8935: else {
  /* NOTE(review): `w || *y != w` is always true in this branch (if w is non-NULL the
     first operand passes; if w is NULL then *y != NULL != w inside this if (*y) block),
     so this check can never fire — presumably `!w || *y != w` was intended; confirm upstream. */
8936: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8937: PetscCall(MatDestroy(y));
8938: }
8939: }
  /* When the caller aliases w and *y, stash a private copy of w on *y (composed under
     "__MatMatIntAdd_w") so the product can overwrite *y before the add. */
8941: if (w && *y == w) { /* this is to minimize changes in PCMG */
8942: PetscBool flg;
8944: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8945: if (w) {
8946: PetscInt My, Ny, Mw, Nw;
  /* Reuse the cached copy only if its type and sizes still match *y */
8948: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8949: PetscCall(MatGetSize(*y, &My, &Ny));
8950: PetscCall(MatGetSize(w, &Mw, &Nw));
8951: if (!flg || My != Mw || Ny != Nw) w = NULL;
8952: }
8953: if (!w) {
8954: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8955: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
  /* Composition took a reference; drop ours so *y owns the cached copy */
8956: PetscCall(PetscObjectDereference((PetscObject)w));
8957: } else PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8958: }
  /* Compute the product into *y, then add the (possibly cached copy of) w */
8959: if (!trans) PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8960: else PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8961: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8962: PetscFunctionReturn(PETSC_SUCCESS);
8963: }
8965: /*@
8966: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8968: Neighbor-wise Collective
8970: Input Parameters:
8971: + A - the matrix
8972: - x - the input dense matrix
8974: Output Parameter:
8975: . y - the output dense matrix
8977: Level: intermediate
8979: Note:
8980: This allows one to use either the restriction or interpolation (its transpose)
8981: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8982: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8984: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8985: @*/
8986: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8987: {
8988: PetscFunctionBegin;
  /* Interpolation without an additive term: delegate with w = NULL */
8989: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8990: PetscFunctionReturn(PETSC_SUCCESS);
8991: }
8993: /*@
8994: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8996: Neighbor-wise Collective
8998: Input Parameters:
8999: + A - the matrix
9000: - x - the input dense matrix
9002: Output Parameter:
9003: . y - the output dense matrix
9005: Level: intermediate
9007: Note:
9008: This allows one to use either the restriction or interpolation (its transpose)
9009: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
9010: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
9012: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
9013: @*/
9014: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
9015: {
9016: PetscFunctionBegin;
  /* Identical to MatMatInterpolate(): the A-vs-A^T choice is made by shape inside
     MatMatInterpolateAdd(), so restriction needs no separate implementation. */
9017: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
9018: PetscFunctionReturn(PETSC_SUCCESS);
9019: }
9021: /*@
9022: MatGetNullSpace - retrieves the null space of a matrix.
9024: Logically Collective
9026: Input Parameters:
9027: + mat - the matrix
9028: - nullsp - the null space object
9030: Level: developer
9032: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
9033: @*/
9034: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
9035: {
9036: PetscFunctionBegin;
9038: PetscAssertPointer(nullsp, 2);
  /* For a matrix known to be symmetric, the transpose null space equals the null
     space, so fall back to transnullsp when no nullsp was attached directly. */
9039: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
9040: PetscFunctionReturn(PETSC_SUCCESS);
9041: }
9043: /*@C
9044: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
9046: Logically Collective
9048: Input Parameters:
9049: + n - the number of matrices
9050: - mat - the array of matrices
9052: Output Parameters:
9053: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
9055: Level: developer
9057: Note:
9058: Call `MatRestoreNullSpaces()` to provide these to another array of matrices
9060: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9061: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
9062: @*/
9063: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9064: {
9065: PetscFunctionBegin;
9066: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9067: PetscAssertPointer(mat, 2);
9068: PetscAssertPointer(nullsp, 3);
  /* Layout of the 3*n array: [0,n) null spaces, [n,2n) near null spaces,
     [2n,3n) transpose null spaces; calloc leaves NULL for missing spaces. */
9070: PetscCall(PetscCalloc1(3 * n, nullsp));
9071: for (PetscInt i = 0; i < n; i++) {
9073: (*nullsp)[i] = mat[i]->nullsp;
  /* presumably PetscObjectReference() tolerates a NULL object here — TODO confirm */
9074: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
9075: (*nullsp)[n + i] = mat[i]->nearnullsp;
9076: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
9077: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
9078: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
9079: }
9080: PetscFunctionReturn(PETSC_SUCCESS);
9081: }
9083: /*@C
9084: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
9086: Logically Collective
9088: Input Parameters:
9089: + n - the number of matrices
9090: . mat - the array of matrices
9091: - nullsp - an array of null spaces
9093: Level: developer
9095: Note:
9096: Call `MatGetNullSpaces()` to create `nullsp`
9098: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9099: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9100: @*/
9101: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9102: {
9103: PetscFunctionBegin;
9104: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9105: PetscAssertPointer(mat, 2);
9106: PetscAssertPointer(nullsp, 3);
9107: PetscAssertPointer(*nullsp, 3);
  /* Mirror of MatGetNullSpaces(): same [0,n)/[n,2n)/[2n,3n) layout. Each MatSet*
     call takes its own reference, so the reference taken in MatGetNullSpaces()
     is dropped immediately afterwards, and the array itself is freed at the end. */
9109: for (PetscInt i = 0; i < n; i++) {
9111: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9112: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9113: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9114: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9115: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9116: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9117: }
9118: PetscCall(PetscFree(*nullsp));
9119: PetscFunctionReturn(PETSC_SUCCESS);
9120: }
9122: /*@
9123: MatSetNullSpace - attaches a null space to a matrix.
9125: Logically Collective
9127: Input Parameters:
9128: + mat - the matrix
9129: - nullsp - the null space object
9131: Level: advanced
9133: Notes:
9134: This null space is used by the `KSP` linear solvers to solve singular systems.
9136: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with an nullsp of `NULL`
9138: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9139: to zero but the linear system will still be solved in a least squares sense.
9141: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9142: the domain of a matrix $A$ (from $R^n$ to $R^m$ ($m$ rows, $n$ columns) $R^n$ = the direct sum of the null space of $A$, $n(A)$, plus the range of $A^T$, $R(A^T)$.
9143: Similarly $R^m$ = direct sum $n(A^T) + R(A)$. Hence the linear system $A x = b$ has a solution only if $b$ in $R(A)$ (or correspondingly $b$ is orthogonal to
9144: $n(A^T))$ and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution
9145: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$ where $\hat{b}$ is $b$ orthogonalized to the $n(A^T)$.
9146: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9148: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one has called
9149: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
9150: routine also automatically calls `MatSetTransposeNullSpace()`.
9152: The user should call `MatNullSpaceDestroy()`.
9154: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9155: `KSPSetPCSide()`
9156: @*/
9157: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9158: {
9159: PetscFunctionBegin;
  /* Reference the new space BEFORE destroying the old one, so passing the
     already-attached space (nullsp == mat->nullsp) is safe. */
9162: PetscCall(PetscObjectReference((PetscObject)nullsp));
9163: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9164: mat->nullsp = nullsp;
  /* A symmetric matrix's transpose null space equals its null space */
9165: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9166: PetscFunctionReturn(PETSC_SUCCESS);
9167: }
9169: /*@
9170: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9172: Logically Collective
9174: Input Parameters:
9175: + mat - the matrix
9176: - nullsp - the null space object
9178: Level: developer
9180: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9181: @*/
9182: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9183: {
9184: PetscFunctionBegin;
9187: PetscAssertPointer(nullsp, 2);
  /* Symmetric matrices share null space and transpose null space, so fall back
     to nullsp when no transpose null space was attached explicitly. */
9188: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9189: PetscFunctionReturn(PETSC_SUCCESS);
9190: }
9192: /*@
9193: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9195: Logically Collective
9197: Input Parameters:
9198: + mat - the matrix
9199: - nullsp - the null space object
9201: Level: advanced
9203: Notes:
9204: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9206: See `MatSetNullSpace()`
9208: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9209: @*/
9210: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9211: {
9212: PetscFunctionBegin;
  /* Reference-then-destroy ordering makes re-setting the same space safe */
9215: PetscCall(PetscObjectReference((PetscObject)nullsp));
9216: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9217: mat->transnullsp = nullsp;
9218: PetscFunctionReturn(PETSC_SUCCESS);
9219: }
9221: /*@
9222: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions
9223: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9225: Logically Collective
9227: Input Parameters:
9228: + mat - the matrix
9229: - nullsp - the null space object
9231: Level: advanced
9233: Notes:
9234: Overwrites any previous near null space that may have been attached
9236: You can remove the null space by calling this routine with an `nullsp` of `NULL`
9238: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9239: @*/
9240: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9241: {
9242: PetscFunctionBegin;
9246: MatCheckPreallocated(mat, 1);
  /* Reference-then-destroy ordering makes re-setting the same space safe;
     a NULL nullsp simply removes the currently attached near null space. */
9247: PetscCall(PetscObjectReference((PetscObject)nullsp));
9248: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9249: mat->nearnullsp = nullsp;
9250: PetscFunctionReturn(PETSC_SUCCESS);
9251: }
9253: /*@
9254: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9256: Not Collective
9258: Input Parameter:
9259: . mat - the matrix
9261: Output Parameter:
9262: . nullsp - the null space object, `NULL` if not set
9264: Level: advanced
9266: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9267: @*/
9268: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9269: {
9270: PetscFunctionBegin;
9273: PetscAssertPointer(nullsp, 2);
9274: MatCheckPreallocated(mat, 1);
  /* Borrowed reference: the caller must not destroy the returned object */
9275: *nullsp = mat->nearnullsp;
9276: PetscFunctionReturn(PETSC_SUCCESS);
9277: }
9279: /*@
9280: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9282: Collective
9284: Input Parameters:
9285: + mat - the matrix
9286: . row - row/column permutation
9287: - info - information on desired factorization process
9289: Level: developer
9291: Notes:
9292: Probably really in-place only when level of fill is zero, otherwise allocates
9293: new space to store factored matrix and deletes previous memory.
9295: Most users should employ the `KSP` interface for linear solvers
9296: instead of working directly with matrix algebra routines such as this.
9297: See, e.g., `KSPCreate()`.
9299: Fortran Note:
9300: A valid (non-null) `info` argument must be provided
9302: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9303: @*/
9304: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9305: {
9306: PetscFunctionBegin;
9310: PetscAssertPointer(info, 3);
  /* ICC requires a square, assembled, not-yet-factored matrix */
9311: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9312: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9313: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9314: MatCheckPreallocated(mat, 1);
9315: PetscUseTypeMethod(mat, iccfactor, row, info);
  /* The matrix values changed in place; invalidate cached state */
9316: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9317: PetscFunctionReturn(PETSC_SUCCESS);
9318: }
9320: /*@
9321: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9322: ghosted ones.
9324: Not Collective
9326: Input Parameters:
9327: + mat - the matrix
9328: - diag - the diagonal values, including ghost ones
9330: Level: developer
9332: Notes:
9333: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9335: This allows one to avoid during communication to perform the scaling that must be done with `MatDiagonalScale()`
9337: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9338: @*/
9339: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9340: {
9341: PetscMPIInt size;
9343: PetscFunctionBegin;
9348: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9349: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9350: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  /* Sequential case has no ghost columns: plain column scaling applies, but the
     diagonal length must then match the column count exactly. */
9351: if (size == 1) {
9352: PetscInt n, m;
9353: PetscCall(VecGetSize(diag, &n));
9354: PetscCall(MatGetSize(mat, NULL, &m));
9355: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9356: PetscCall(MatDiagonalScale(mat, NULL, diag));
  /* Parallel case: dispatch to the type-specific composed function, which
     handles the ghosted entries of diag */
9357: } else PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9358: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9359: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9360: PetscFunctionReturn(PETSC_SUCCESS);
9361: }
9363: /*@
9364: MatGetInertia - Gets the inertia from a factored matrix
9366: Collective
9368: Input Parameter:
9369: . mat - the matrix
9371: Output Parameters:
9372: + nneg - number of negative eigenvalues
9373: . nzero - number of zero eigenvalues
9374: - npos - number of positive eigenvalues
9376: Level: advanced
9378: Note:
9379: Matrix must have been factored by `MatCholeskyFactor()`
9381: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9382: @*/
9383: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9384: {
9385: PetscFunctionBegin;
  /* Inertia is only defined for an assembled, factored matrix */
9388: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9389: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9390: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9391: PetscFunctionReturn(PETSC_SUCCESS);
9392: }
9394: /*@C
9395: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9397: Neighbor-wise Collective
9399: Input Parameters:
9400: + mat - the factored matrix obtained with `MatGetFactor()`
9401: - b - the right-hand-side vectors
9403: Output Parameter:
9404: . x - the result vectors
9406: Level: developer
9408: Note:
9409: The vectors `b` and `x` cannot be the same. I.e., one cannot
9410: call `MatSolves`(A,x,x).
9412: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9413: @*/
9414: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9415: {
9416: PetscFunctionBegin;
9419: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9420: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
  /* 0x0 matrix: nothing to solve */
9421: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9423: MatCheckPreallocated(mat, 1);
9424: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9425: PetscUseTypeMethod(mat, solves, b, x);
9426: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9427: PetscFunctionReturn(PETSC_SUCCESS);
9428: }
/*@
  MatIsSymmetric - Test whether a matrix is symmetric

  Collective

  Input Parameters:
+ A - the matrix to test
- tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)

  Output Parameter:
. flg - the result

  Level: intermediate

  Notes:
  For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results

  If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`

  One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
  after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)

.seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
          `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
@*/
PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 3);
  /* the cached tri-state flag can only answer exact (tol == 0) queries */
  if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
  else {
    /* prefer a type-specific test; otherwise compare A against its own transpose */
    if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
    else PetscCall(MatIsTranspose(A, A, tol, flg));
    /* cache the answer, but only when it came from an exact comparison */
    if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatIsHermitian - Test whether a matrix is Hermitian

  Collective

  Input Parameters:
+ A - the matrix to test
- tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)

  Output Parameter:
. flg - the result

  Level: intermediate

  Notes:
  For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results

  If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`

  One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
  after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)

.seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
          `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
@*/
PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 3);
  /* the cached tri-state flag can only answer exact (tol == 0) queries */
  if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
  else {
    /* prefer a type-specific test; otherwise compare A against its own Hermitian transpose */
    if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
    else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
    /* cache the answer, but only when it came from an exact comparison */
    if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9508: /*@
9509: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9511: Not Collective
9513: Input Parameter:
9514: . A - the matrix to check
9516: Output Parameters:
9517: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9518: - flg - the result (only valid if set is `PETSC_TRUE`)
9520: Level: advanced
9522: Notes:
9523: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9524: if you want it explicitly checked
9526: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9527: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9529: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9530: @*/
9531: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9532: {
9533: PetscFunctionBegin;
9535: PetscAssertPointer(set, 2);
9536: PetscAssertPointer(flg, 3);
9537: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9538: *set = PETSC_TRUE;
9539: *flg = PetscBool3ToBool(A->symmetric);
9540: } else *set = PETSC_FALSE;
9541: PetscFunctionReturn(PETSC_SUCCESS);
9542: }
9544: /*@
9545: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9547: Not Collective
9549: Input Parameter:
9550: . A - the matrix to check
9552: Output Parameters:
9553: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9554: - flg - the result (only valid if set is `PETSC_TRUE`)
9556: Level: advanced
9558: Notes:
9559: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9561: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9562: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9564: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9565: @*/
9566: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9567: {
9568: PetscFunctionBegin;
9570: PetscAssertPointer(set, 2);
9571: PetscAssertPointer(flg, 3);
9572: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9573: *set = PETSC_TRUE;
9574: *flg = PetscBool3ToBool(A->spd);
9575: } else *set = PETSC_FALSE;
9576: PetscFunctionReturn(PETSC_SUCCESS);
9577: }
9579: /*@
9580: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9582: Not Collective
9584: Input Parameter:
9585: . A - the matrix to check
9587: Output Parameters:
9588: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9589: - flg - the result (only valid if set is `PETSC_TRUE`)
9591: Level: advanced
9593: Notes:
9594: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9595: if you want it explicitly checked
9597: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9598: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9600: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9601: @*/
9602: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9603: {
9604: PetscFunctionBegin;
9606: PetscAssertPointer(set, 2);
9607: PetscAssertPointer(flg, 3);
9608: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9609: *set = PETSC_TRUE;
9610: *flg = PetscBool3ToBool(A->hermitian);
9611: } else *set = PETSC_FALSE;
9612: PetscFunctionReturn(PETSC_SUCCESS);
9613: }
/*@
  MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric

  Collective

  Input Parameter:
. A - the matrix to test

  Output Parameter:
. flg - the result

  Level: intermediate

  Notes:
  If the matrix does not yet know it is structurally symmetric this can be an expensive operation, also available `MatIsStructurallySymmetricKnown()`

  One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
  symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)

.seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
@*/
PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 2);
  /* use the cached tri-state flag when a definite answer is already known */
  if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->structurally_symmetric);
  else {
    PetscUseTypeMethod(A, isstructurallysymmetric, flg);
    /* remember the answer so subsequent queries are cheap */
    PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
9649: /*@
9650: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9652: Not Collective
9654: Input Parameter:
9655: . A - the matrix to check
9657: Output Parameters:
9658: + set - PETSC_TRUE if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9659: - flg - the result (only valid if set is PETSC_TRUE)
9661: Level: advanced
9663: Notes:
9664: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9665: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9667: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9669: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9670: @*/
9671: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9672: {
9673: PetscFunctionBegin;
9675: PetscAssertPointer(set, 2);
9676: PetscAssertPointer(flg, 3);
9677: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9678: *set = PETSC_TRUE;
9679: *flg = PetscBool3ToBool(A->structurally_symmetric);
9680: } else *set = PETSC_FALSE;
9681: PetscFunctionReturn(PETSC_SUCCESS);
9682: }
/*@
  MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
  to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process

  Not Collective

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ nstash - the size of the stash
. reallocs - the number of additional mallocs incurred.
. bnstash - the size of the block stash
- breallocs - the number of additional mallocs incurred in the block stash

  Level: advanced

.seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
@*/
PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
{
  PetscFunctionBegin;
  /* query the scalar stash and the block stash separately */
  PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
  PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
  parallel layout, `PetscLayout` for rows and columns

  Collective

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ right - (optional) vector that the matrix can be multiplied against
- left - (optional) vector that the matrix vector product can be stored in

  Options Database Key:
. -mat_vec_type type - set the `VecType` of the created vectors during `MatSetFromOptions()`

  Level: advanced

  Notes:
  The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.

  The `VecType` of the created vectors is determined by the `MatType` of `mat`. This can be overridden by using `MatSetVecType()` or the option `-mat_vec_type`.

  These are new vectors which are not owned by the `mat`, they should be destroyed with `VecDestroy()` when no longer needed.

.seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`, `MatSetVecType()`
@*/
PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
{
  PetscFunctionBegin;
  /* let the matrix type create the vectors if it knows how; otherwise build them from the layouts */
  if (mat->ops->getvecs) {
    PetscUseTypeMethod(mat, getvecs, right, left);
  } else {
    if (right) {
      /* the right vector shares the column layout of the matrix */
      PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
      PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
      PetscCall(VecSetType(*right, mat->defaultvectype));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
      /* if the matrix is pinned to the CPU and propagates that binding, pin the new vector as well */
      if (mat->boundtocpu && mat->bindingpropagates) {
        PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
        PetscCall(VecBindToCPU(*right, PETSC_TRUE));
      }
#endif
    }
    if (left) {
      /* the left vector shares the row layout of the matrix */
      PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
      PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
      PetscCall(VecSetType(*left, mat->defaultvectype));
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
      /* if the matrix is pinned to the CPU and propagates that binding, pin the new vector as well */
      if (mat->boundtocpu && mat->bindingpropagates) {
        PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
        PetscCall(VecBindToCPU(*left, PETSC_TRUE));
      }
#endif
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
  with default values.

  Not Collective

  Input Parameter:
. info - the `MatFactorInfo` data structure

  Level: developer

  Notes:
  The solvers are generally used through the `KSP` and `PC` objects, for example
  `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`

  Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
@*/
PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* the default state is all-zero; callers then set only the fields relevant to their factorization */
  PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed

  Collective

  Input Parameters:
+ mat - the factored matrix
- is - the index set defining the Schur indices (0-based)

  Level: advanced

  Notes:
  Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.

  You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.

  This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
          `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
@*/
PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
{
  PetscErrorCode (*f)(Mat, IS);

  PetscFunctionBegin;
  PetscCheckSameComm(mat, 1, is, 2);
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
  /* the capability is solver-package specific, exposed through a composed function */
  PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
  PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
  /* discard any Schur complement created by a previous call before installing the new index set */
  PetscCall(MatDestroy(&mat->schur));
  PetscCall((*f)(mat, is));
  PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
. S - location where to return the Schur complement, can be `NULL`
- status - the status of the Schur complement matrix, can be `NULL`

  Level: advanced

  Notes:
  You must call `MatFactorSetSchurIS()` before calling this routine.

  This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`

  The routine provides a copy of the Schur matrix stored within the solver data structures.
  The caller must destroy the object when it is no longer needed.
  If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.

  Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)

  See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.

  Developer Note:
  The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
  matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
@*/
PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
{
  PetscFunctionBegin;
  if (S) PetscAssertPointer(S, 2);
  if (status) PetscAssertPointer(status, 3);
  if (S) {
    PetscErrorCode (*f)(Mat, Mat *);

    /* use the solver-specific copy routine when available, otherwise duplicate the stored Schur matrix */
    PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
    if (f) PetscCall((*f)(F, S));
    else PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
  }
  if (status) *status = F->schur_status;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
. S - location where to return the Schur complement, can be `NULL`
- status - the status of the Schur complement matrix, can be `NULL`

  Level: advanced

  Notes:
  You must call `MatFactorSetSchurIS()` before calling this routine.

  Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`

  The routine returns the Schur complement stored within the data structures of the solver.

  If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.

  The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.

  Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix

  See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
@*/
PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
{
  PetscFunctionBegin;
  /* hand out a borrowed reference to the internal Schur matrix; ownership stays with F */
  if (S) {
    PetscAssertPointer(S, 2);
    *S = F->schur;
  }
  if (status) {
    PetscAssertPointer(status, 3);
    *status = F->schur_status;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Bring the internal Schur complement matrix S into agreement with F->schur_status:
   when the status says S is not (or no longer) factored, strip the factored-solve
   kernels and factorization metadata so S behaves as a plain matrix again. */
static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
{
  Mat S = F->schur;

  PetscFunctionBegin;
  switch (F->schur_status) {
  case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
  case MAT_FACTOR_SCHUR_INVERTED:
    if (S) {
      /* clear every solve callback installed by the factorization */
      S->ops->solve = NULL;
      S->ops->matsolve = NULL;
      S->ops->solvetranspose = NULL;
      S->ops->matsolvetranspose = NULL;
      S->ops->solveadd = NULL;
      S->ops->solvetransposeadd = NULL;
      S->factortype = MAT_FACTOR_NONE;
      PetscCall(PetscFree(S->solvertype));
    }
    /* intentional fall-through: nothing further to do for any valid status */
  case MAT_FACTOR_SCHUR_FACTORED: // fall-through
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
. S - location where the Schur complement is stored
- status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
@*/
PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
{
  PetscFunctionBegin;
  /* invalidate the caller's borrowed reference; the matrix itself stays owned by F */
  if (S) {
    *S = NULL;
  }
  /* record the status reported by the caller and reconcile the internal Schur matrix with it */
  F->schur_status = status;
  PetscCall(MatFactorUpdateSchurStatus_Private(F));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
. rhs - location where the right-hand side of the Schur complement system is stored
- sol - location where the solution of the Schur complement system has to be returned

  Level: advanced

  Notes:
  The sizes of the vectors should match the size of the Schur complement

  Must be called after `MatFactorSetSchurIS()`

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
@*/
PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
{
  PetscFunctionBegin;
  PetscCheckSameComm(F, 1, rhs, 2);
  PetscCheckSameComm(F, 1, sol, 3);
  /* factorize on demand; a no-op when the Schur complement is already factored or inverted */
  PetscCall(MatFactorFactorizeSchurComplement(F));
  switch (F->schur_status) {
  case MAT_FACTOR_SCHUR_FACTORED:
    /* factored form: perform a transpose triangular solve */
    PetscCall(MatSolveTranspose(F->schur, rhs, sol));
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    /* explicit inverse available: the solve is a transpose multiplication */
    PetscCall(MatMultTranspose(F->schur, rhs, sol));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step

  Logically Collective

  Input Parameters:
+ F - the factored matrix obtained by calling `MatGetFactor()`
. rhs - location where the right-hand side of the Schur complement system is stored
- sol - location where the solution of the Schur complement system has to be returned

  Level: advanced

  Notes:
  The sizes of the vectors should match the size of the Schur complement

  Must be called after `MatFactorSetSchurIS()`

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
@*/
PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
{
  PetscFunctionBegin;
  PetscCheckSameComm(F, 1, rhs, 2);
  PetscCheckSameComm(F, 1, sol, 3);
  /* factorize on demand; a no-op when the Schur complement is already factored or inverted */
  PetscCall(MatFactorFactorizeSchurComplement(F));
  switch (F->schur_status) {
  case MAT_FACTOR_SCHUR_FACTORED:
    /* factored form: perform a triangular solve */
    PetscCall(MatSolve(F->schur, rhs, sol));
    break;
  case MAT_FACTOR_SCHUR_INVERTED:
    /* explicit inverse available: the solve is a multiplication */
    PetscCall(MatMult(F->schur, rhs, sol));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
10071: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
10072: #if PetscDefined(HAVE_CUDA)
10073: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
10074: #endif
/* Schur status updated in the interface */
/* Invert the internal Schur complement in place; only sequential dense matrices
   (MATSEQDENSE, and MATSEQDENSECUDA when CUDA is enabled) are supported. */
static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
{
  Mat S = F->schur;

  PetscFunctionBegin;
  if (S) {
    PetscMPIInt size;
    PetscBool isdense, isdensecuda;

    /* inversion of a distributed Schur complement is not implemented */
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
    PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
    PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
    PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
    PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
    PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
    if (isdense) {
      PetscCall(MatSeqDenseInvertFactors_Private(S));
    } else if (isdensecuda) {
#if defined(PETSC_HAVE_CUDA)
      PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
#endif
    }
    // HIP??????????????
    PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step

  Logically Collective

  Input Parameter:
. F - the factored matrix obtained by calling `MatGetFactor()`

  Level: advanced

  Notes:
  Must be called after `MatFactorSetSchurIS()`.

  Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
@*/
PetscErrorCode MatFactorInvertSchurComplement(Mat F)
{
  PetscFunctionBegin;
  /* already inverted: nothing to do */
  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
  /* the inversion routine operates on the factored form, so factorize first */
  PetscCall(MatFactorFactorizeSchurComplement(F));
  PetscCall(MatFactorInvertSchurComplement_Private(F));
  F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step

  Logically Collective

  Input Parameter:
. F - the factored matrix obtained by calling `MatGetFactor()`

  Level: advanced

  Note:
  Must be called after `MatFactorSetSchurIS()`

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
@*/
PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
{
  MatFactorInfo info;

  PetscFunctionBegin;
  /* nothing to do if the Schur complement is already factored or has been inverted */
  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
  /* use default factorization options */
  PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
  if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
    PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
  } else {
    PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
  }
  PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
  F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatPtAP - Creates the matrix product $C = P^T * A * P$

  Neighbor-wise Collective

  Input Parameters:
+ A - the matrix
. P - the projection matrix
. scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
          if the result is a dense matrix this is irrelevant

  Output Parameter:
. C - the product matrix

  Level: intermediate

  Notes:
  `C` will be created and must be destroyed by the user with `MatDestroy()`.

  This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_PtAP`
  functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.

  The deprecated `PETSC_DEFAULT` in `fill` also means use the current value

  Developer Note:
  For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.

.seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
@*/
PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

  if (scall == MAT_INITIAL_MATRIX) {
    /* set up a new MatProduct of type PtAP and run its symbolic phase */
    PetscCall(MatProductCreate(A, P, NULL, C));
    PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
    PetscCall(MatProductSetAlgorithm(*C, "default"));
    PetscCall(MatProductSetFill(*C, fill));

    /* mark the product as created through this convenience API */
    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetFromOptions(*C));
    PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
    PetscCall(MatProductSymbolic(*C));
  } else { /* scall == MAT_REUSE_MATRIX */
    /* reuse the existing symbolic product, replacing the operand matrices */
    PetscCall(MatProductReplaceMats(A, P, NULL, *C));
  }

  PetscCall(MatProductNumeric(*C));
  /* P^T A P preserves symmetry (and SPD-ness) of A, so propagate those flags to C */
  if (A->symmetric == PETSC_BOOL3_TRUE) {
    PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
    (*C)->spd = A->spd;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatRARt - Creates the matrix product $C = R * A * R^T$

  Neighbor-wise Collective

  Input Parameters:
+ A - the matrix
. R - the projection matrix
. scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
- fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
          if the result is a dense matrix this is irrelevant

  Output Parameter:
. C - the product matrix

  Level: intermediate

  Notes:
  `C` will be created and must be destroyed by the user with `MatDestroy()`.

  This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_RARt`
  functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.

  This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
  which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
  the parallel `MatRARt()` is implemented computing the explicit transpose of `R`, which can be very expensive.
  We recommend using `MatPtAP()` when possible.

  The deprecated `PETSC_DEFAULT` in `fill` also means use the current value

.seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
@*/
PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");

  if (scall == MAT_INITIAL_MATRIX) {
    /* set up a new MatProduct of type RARt and run its symbolic phase */
    PetscCall(MatProductCreate(A, R, NULL, C));
    PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
    PetscCall(MatProductSetAlgorithm(*C, "default"));
    PetscCall(MatProductSetFill(*C, fill));

    /* mark the product as created through this convenience API */
    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetFromOptions(*C));
    PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
    PetscCall(MatProductSymbolic(*C));
  } else { /* scall == MAT_REUSE_MATRIX */
    /* reuse the existing symbolic product, replacing the operand matrices */
    PetscCall(MatProductReplaceMats(A, R, NULL, *C));
  }

  PetscCall(MatProductNumeric(*C));
  /* R A R^T preserves symmetry of A, so propagate the flag to C */
  if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Shared driver for the convenience product routines (MatMatMult(), MatMatTransposeMult(), MatTransposeMatMult()):
   handles MAT_INITIAL_MATRIX vs MAT_REUSE_MATRIX bookkeeping, including the special case where the user
   supplies a pre-created dense C with MAT_REUSE_MATRIX without having called MatProductCreate(). */
static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
{
  PetscBool flg = PETSC_TRUE; /* after the branches below: whether the symbolic phase must (re)run */

  PetscFunctionBegin;
  PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
    PetscCall(MatProductCreate(A, B, NULL, C));
    PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
    PetscCall(MatProductSetFill(*C, fill));
  } else { /* scall == MAT_REUSE_MATRIX */
    Mat_Product *product = (*C)->product;

    /* a dense C may be reused across different product types; drop a stale product so it is rebuilt below */
    PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
    if (flg && product && product->type != ptype) {
      PetscCall(MatProductClear(*C));
      product = NULL;
    }
    PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
    if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
      /* only dense C is allowed here (flg was set by the base-type comparison above) */
      PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
      PetscCall(MatProductCreate_Private(A, B, NULL, *C));
      product        = (*C)->product;
      product->fill  = fill;
      product->clear = PETSC_TRUE;
    } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
      flg = PETSC_FALSE; /* symbolic phase was already done in a previous call; only numeric below */
      PetscCall(MatProductReplaceMats(A, B, NULL, *C));
    }
  }
  if (flg) { /* (re)configure the product and run the symbolic phase */
    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetType(*C, ptype));
    PetscCall(MatProductSetFromOptions(*C));
    PetscCall(MatProductSymbolic(*C));
  }
  PetscCall(MatProductNumeric(*C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10325: /*@
10326: MatMatMult - Performs matrix-matrix multiplication $ C=A*B $.
10328: Neighbor-wise Collective
10330: Input Parameters:
10331: + A - the left matrix
10332: . B - the right matrix
10333: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10334: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10335: if the result is a dense matrix this is irrelevant
10337: Output Parameter:
10338: . C - the product matrix
10340: Notes:
10341: Unless scall is `MAT_REUSE_MATRIX` C will be created.
10343: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10344: call to this function with `MAT_INITIAL_MATRIX`.
10346: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10348: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10349: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10351: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10353: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AB`
10354: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10356: Example of Usage:
10357: .vb
10358: MatProductCreate(A,B,NULL,&C);
10359: MatProductSetType(C,MATPRODUCT_AB);
10360: MatProductSymbolic(C);
10361: MatProductNumeric(C); // compute C=A * B
10362: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10363: MatProductNumeric(C);
10364: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10365: MatProductNumeric(C);
10366: .ve
10368: Level: intermediate
10370: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10371: @*/
PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  /* thin wrapper: all create/reuse handling lives in MatProduct_Private() */
  PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10379: /*@
10380: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10382: Neighbor-wise Collective
10384: Input Parameters:
10385: + A - the left matrix
10386: . B - the right matrix
10387: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10388: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10390: Output Parameter:
10391: . C - the product matrix
10393: Options Database Key:
10394: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10395: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10396: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10398: Level: intermediate
10400: Notes:
10401: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10403: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10405: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10406: actually needed.
10408: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10409: and for pairs of `MATMPIDENSE` matrices.
10411: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABt`
10412: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10414: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10416: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()`, `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10417: @*/
10418: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10419: {
10420: PetscFunctionBegin;
10421: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10422: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10423: PetscFunctionReturn(PETSC_SUCCESS);
10424: }
10426: /*@
10427: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10429: Neighbor-wise Collective
10431: Input Parameters:
10432: + A - the left matrix
10433: . B - the right matrix
10434: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10435: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10437: Output Parameter:
10438: . C - the product matrix
10440: Level: intermediate
10442: Notes:
10443: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10445: `MAT_REUSE_MATRIX` can only be used if `A` and `B` have the same nonzero pattern as in the previous call.
10447: This is a convenience routine that wraps the use of `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AtB`
10448: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10450: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10451: actually needed.
10453: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10454: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10456: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10458: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10459: @*/
PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
{
  PetscFunctionBegin;
  /* thin wrapper: all create/reuse handling lives in MatProduct_Private() */
  PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10467: /*@
10468: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10470: Neighbor-wise Collective
10472: Input Parameters:
10473: + A - the left matrix
10474: . B - the middle matrix
10475: . C - the right matrix
10476: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10477: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10478: if the result is a dense matrix this is irrelevant
10480: Output Parameter:
10481: . D - the product matrix
10483: Level: intermediate
10485: Notes:
10486: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10488: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10490: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABC`
10491: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10493: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10494: actually needed.
10496: If you have many matrices with the same non-zero structure to multiply, you
10497: should use `MAT_REUSE_MATRIX` in all calls but the first
10499: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10501: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10502: @*/
10503: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10504: {
10505: PetscFunctionBegin;
10506: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10507: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10509: if (scall == MAT_INITIAL_MATRIX) {
10510: PetscCall(MatProductCreate(A, B, C, D));
10511: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10512: PetscCall(MatProductSetAlgorithm(*D, "default"));
10513: PetscCall(MatProductSetFill(*D, fill));
10515: (*D)->product->api_user = PETSC_TRUE;
10516: PetscCall(MatProductSetFromOptions(*D));
10517: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10518: ((PetscObject)C)->type_name);
10519: PetscCall(MatProductSymbolic(*D));
10520: } else { /* user may change input matrices when REUSE */
10521: PetscCall(MatProductReplaceMats(A, B, C, *D));
10522: }
10523: PetscCall(MatProductNumeric(*D));
10524: PetscFunctionReturn(PETSC_SUCCESS);
10525: }
10527: /*@
10528: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10530: Collective
10532: Input Parameters:
10533: + mat - the matrix
10534: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10535: . subcomm - MPI communicator split from the communicator where mat resides in (or `MPI_COMM_NULL` if nsubcomm is used)
10536: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10538: Output Parameter:
10539: . matredundant - redundant matrix
10541: Level: advanced
10543: Notes:
10544: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10545: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10547: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10548: calling it.
10550: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
10552: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10553: @*/
PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
{
  MPI_Comm       comm;
  PetscMPIInt    size;
  PetscInt       mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
  Mat_Redundant *redund     = NULL;          /* reuse struct stashed on *matredundant */
  PetscSubcomm   psubcomm   = NULL;
  MPI_Comm       subcomm_in = subcomm;       /* remember whether the caller supplied a subcomm */
  Mat           *matseq;
  IS             isrow, iscol;
  PetscBool      newsubcomm = PETSC_FALSE;   /* PETSC_TRUE if we created (and thus own) the subcomm */

  PetscFunctionBegin;
  if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
    PetscAssertPointer(*matredundant, 5);
  }

  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  if (size == 1 || nsubcomm == 1) {
    /* trivial case: one rank or one subcommunicator, a plain copy suffices */
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
    } else {
      PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
      PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
  if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
    /* create psubcomm, then get subcomm */
    PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must between 1 and %d", size);

    PetscCall(PetscSubcommCreate(comm, &psubcomm));
    PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
    PetscCall(PetscSubcommSetFromOptions(psubcomm));
    /* duplicate the child communicator so it outlives psubcomm, which is destroyed next */
    PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
    newsubcomm = PETSC_TRUE;
    PetscCall(PetscSubcommDestroy(&psubcomm));
  }

  /* get isrow, iscol and a local sequential matrix matseq[0] */
  if (reuse == MAT_INITIAL_MATRIX) {
    mloc_sub = PETSC_DECIDE;
    nloc_sub = PETSC_DECIDE;
    /* split the global rows/columns over the subcomm, honoring the block size if set */
    if (bs < 1) {
      PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
      PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
    } else {
      PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
      PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
    }
    /* prefix-sum the local row counts to get this rank's global row range [rstart, rend) */
    PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
    rstart = rend - mloc_sub;
    PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
    /* every rank needs all columns (the matrix is fully redundant in the column direction) */
    PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
    PetscCall(ISSetIdentity(iscol));
  } else { /* reuse == MAT_REUSE_MATRIX */
    PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
    /* retrieve subcomm */
    PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
    /* recover the index sets and sequential matrix cached by the MAT_INITIAL_MATRIX call */
    redund = (*matredundant)->redundant;
    isrow  = redund->isrow;
    iscol  = redund->iscol;
    matseq = redund->matseq;
  }
  PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));

  /* get matredundant over subcomm */
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));

    /* create a supporting struct and attach it to C for reuse */
    PetscCall(PetscNew(&redund));
    (*matredundant)->redundant = redund;
    redund->isrow              = isrow;
    redund->iscol              = iscol;
    redund->matseq             = matseq;
    if (newsubcomm) {
      redund->subcomm = subcomm; /* we own it; it will be released when *matredundant is destroyed */
    } else {
      redund->subcomm = MPI_COMM_NULL; /* caller-owned communicator, do not take ownership */
    }
  } else {
    PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
  }
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
  /* propagate the CPU binding of the sequential pieces to the redundant matrix */
  if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
    PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
    PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
  }
#endif
  PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
10659: /*@C
10660: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10661: a given `Mat`. Each submatrix can span multiple procs.
10663: Collective
10665: Input Parameters:
10666: + mat - the matrix
10667: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10668: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10670: Output Parameter:
10671: . subMat - parallel sub-matrices each spanning a given `subcomm`
10673: Level: advanced
10675: Notes:
10676: The submatrix partition across processors is dictated by `subComm` a
10677: communicator obtained by `MPI_comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10678: is not restricted to be grouped with consecutive original MPI processes.
  Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
  maps directly to the layout of the original matrix [wrt the local
  row,col partitioning]. So the original 'DiagonalMat' naturally maps
  into the 'DiagonalMat' of the `subMat`, hence it is used directly from
  the `subMat`. However the offDiagMat loses some columns - and this is
  reconstructed with `MatSetValues()`
10687: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10689: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10690: @*/
10691: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10692: {
10693: PetscMPIInt commsize, subCommSize;
10695: PetscFunctionBegin;
10696: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10697: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10698: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommZize %d", commsize, subCommSize);
10700: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10701: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10702: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10703: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10704: PetscFunctionReturn(PETSC_SUCCESS);
10705: }
10707: /*@
10708: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10710: Not Collective
10712: Input Parameters:
10713: + mat - matrix to extract local submatrix from
10714: . isrow - local row indices for submatrix
10715: - iscol - local column indices for submatrix
10717: Output Parameter:
10718: . submat - the submatrix
10720: Level: intermediate
10722: Notes:
10723: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10725: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10726: the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communictor of `mat`'s.
10728: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10729: `MatSetValuesBlockedLocal()` will also be implemented.
10731: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10732: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
10734: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10735: @*/
10736: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10737: {
10738: PetscFunctionBegin;
10742: PetscCheckSameComm(isrow, 2, iscol, 3);
10743: PetscAssertPointer(submat, 4);
10744: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10746: if (mat->ops->getlocalsubmatrix) {
10747: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10748: } else {
10749: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10750: }
10751: (*submat)->assembled = mat->assembled;
10752: PetscFunctionReturn(PETSC_SUCCESS);
10753: }
10755: /*@
10756: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10758: Not Collective
10760: Input Parameters:
10761: + mat - matrix to extract local submatrix from
10762: . isrow - local row indices for submatrix
10763: . iscol - local column indices for submatrix
10764: - submat - the submatrix
10766: Level: intermediate
10768: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10769: @*/
10770: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10771: {
10772: PetscFunctionBegin;
10776: PetscCheckSameComm(isrow, 2, iscol, 3);
10777: PetscAssertPointer(submat, 4);
10780: if (mat->ops->restorelocalsubmatrix) {
10781: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10782: } else {
10783: PetscCall(MatDestroy(submat));
10784: }
10785: *submat = NULL;
10786: PetscFunctionReturn(PETSC_SUCCESS);
10787: }
10789: /*@
10790: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10792: Collective
10794: Input Parameter:
10795: . mat - the matrix
10797: Output Parameter:
10798: . is - if any rows have zero diagonals this contains the list of them
10800: Level: developer
10802: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10803: @*/
10804: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10805: {
10806: PetscFunctionBegin;
10809: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10810: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10812: if (!mat->ops->findzerodiagonals) {
10813: Vec diag;
10814: const PetscScalar *a;
10815: PetscInt *rows;
10816: PetscInt rStart, rEnd, r, nrow = 0;
10818: PetscCall(MatCreateVecs(mat, &diag, NULL));
10819: PetscCall(MatGetDiagonal(mat, diag));
10820: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10821: PetscCall(VecGetArrayRead(diag, &a));
10822: for (r = 0; r < rEnd - rStart; ++r)
10823: if (a[r] == 0.0) ++nrow;
10824: PetscCall(PetscMalloc1(nrow, &rows));
10825: nrow = 0;
10826: for (r = 0; r < rEnd - rStart; ++r)
10827: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10828: PetscCall(VecRestoreArrayRead(diag, &a));
10829: PetscCall(VecDestroy(&diag));
10830: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10831: } else {
10832: PetscUseTypeMethod(mat, findzerodiagonals, is);
10833: }
10834: PetscFunctionReturn(PETSC_SUCCESS);
10835: }
10837: /*@
10838: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10840: Collective
10842: Input Parameter:
10843: . mat - the matrix
10845: Output Parameter:
10846: . is - contains the list of rows with off block diagonal entries
10848: Level: developer
10850: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10851: @*/
PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");

  /* no generic fallback: the matrix type must implement this operation */
  PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
  PetscFunctionReturn(PETSC_SUCCESS);
}
10864: /*@C
10865: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10867: Collective; No Fortran Support
10869: Input Parameter:
10870: . mat - the matrix
10872: Output Parameter:
10873: . values - the block inverses in column major order (FORTRAN-like)
10875: Level: advanced
10877: Notes:
10878: The size of the blocks is determined by the block size of the matrix.
10880: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10882: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
10884: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10885: @*/
PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* no generic fallback: the matrix type must implement this operation;
     the returned array is owned by the matrix, do not free it */
  PetscUseTypeMethod(mat, invertblockdiagonal, values);
  PetscFunctionReturn(PETSC_SUCCESS);
}
10896: /*@
10897: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10899: Collective; No Fortran Support
10901: Input Parameters:
10902: + mat - the matrix
10903: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10904: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10906: Output Parameter:
10907: . values - the block inverses in column major order (FORTRAN-like)
10909: Level: advanced
10911: Notes:
10912: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10914: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10916: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10917: @*/
PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* no generic fallback: the matrix type must implement this operation */
  PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
  PetscFunctionReturn(PETSC_SUCCESS);
}
10928: /*@
10929: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10931: Collective
10933: Input Parameters:
10934: + A - the matrix
10935: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10937: Level: advanced
10939: Note:
10940: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10942: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10943: @*/
10944: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10945: {
10946: const PetscScalar *vals;
10947: PetscInt *dnnz;
10948: PetscInt m, rstart, rend, bs, i, j;
10950: PetscFunctionBegin;
10951: PetscCall(MatInvertBlockDiagonal(A, &vals));
10952: PetscCall(MatGetBlockSize(A, &bs));
10953: PetscCall(MatGetLocalSize(A, &m, NULL));
10954: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10955: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10956: PetscCall(PetscMalloc1(m / bs, &dnnz));
10957: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10958: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10959: PetscCall(PetscFree(dnnz));
10960: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10961: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10962: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10963: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
10964: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10965: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10966: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_FALSE));
10967: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10968: PetscFunctionReturn(PETSC_SUCCESS);
10969: }
10971: /*@
10972: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10973: via `MatTransposeColoringCreate()`.
10975: Collective
10977: Input Parameter:
10978: . c - coloring context
10980: Level: intermediate
10982: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10983: @*/
10984: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10985: {
10986: MatTransposeColoring matcolor = *c;
10988: PetscFunctionBegin;
10989: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10990: if (--((PetscObject)matcolor)->refct > 0) {
10991: matcolor = NULL;
10992: PetscFunctionReturn(PETSC_SUCCESS);
10993: }
10995: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10996: PetscCall(PetscFree(matcolor->rows));
10997: PetscCall(PetscFree(matcolor->den2sp));
10998: PetscCall(PetscFree(matcolor->colorforcol));
10999: PetscCall(PetscFree(matcolor->columns));
11000: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
11001: PetscCall(PetscHeaderDestroy(c));
11002: PetscFunctionReturn(PETSC_SUCCESS);
11003: }
/*@
  MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
  a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
  `MatTransposeColoring` to sparse `B`.

  Collective

  Input Parameters:
+ coloring - coloring context created with `MatTransposeColoringCreate()`
- B - sparse matrix

  Output Parameter:
. Btdense - dense matrix $B^T$

  Level: developer

  Note:
  These are used internally for some implementations of `MatRARt()`

.seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
@*/
PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
{
  PetscFunctionBegin;
  /* dispatch to the implementation provided by the matrix type of B */
  PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
  a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
  in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover sparse matrix
  $C_{sp}$ from $C_{den}$.

  Collective

  Input Parameters:
+ matcoloring - coloring context created with `MatTransposeColoringCreate()`
- Cden - matrix product of a sparse matrix and a dense matrix Btdense

  Output Parameter:
. Csp - sparse matrix

  Level: developer

  Note:
  These are used internally for some implementations of `MatRARt()`

.seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
@*/
PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
{
  PetscFunctionBegin;
  /* dispatch to the implementation provided by the matrix type of Csp */
  PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
  /* finalize Csp so it is ready for use by callers */
  PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.

  Collective

  Input Parameters:
+ mat - the matrix product C
- iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`

  Output Parameter:
. color - the new coloring context

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
          `MatTransColoringApplyDenToSp()`
@*/
PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
{
  MatTransposeColoring c;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscAssertPointer(color, 3);
  PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  /* create the PETSc object header; destruction goes through MatTransposeColoringDestroy() */
  PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
  c->ctype = iscoloring->ctype;
  /* the matrix type's implementation fills in the coloring data structures */
  PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
  *color = c;
  PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
  matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.

  Not Collective

  Input Parameter:
. mat - the matrix

  Output Parameter:
. state - the current state

  Level: intermediate

  Notes:
  You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
  different matrices

  Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix

  Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.

.seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
@*/
PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
{
  PetscFunctionBegin;
  /* report the cached counter; the Mat implementation is responsible for bumping it */
  *state = mat->nonzerostate;
  PetscFunctionReturn(PETSC_SUCCESS);
}
11139: /*@
11140: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11141: matrices from each processor
11143: Collective
11145: Input Parameters:
11146: + comm - the communicators the parallel matrix will live on
11147: . seqmat - the input sequential matrices
11148: . n - number of local columns (or `PETSC_DECIDE`)
11149: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11151: Output Parameter:
11152: . mpimat - the parallel matrix generated
11154: Level: developer
11156: Note:
11157: The number of columns of the matrix in EACH processor MUST be the same.
11159: .seealso: [](ch_matrices), `Mat`
11160: @*/
11161: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11162: {
11163: PetscMPIInt size;
11165: PetscFunctionBegin;
11166: PetscCallMPI(MPI_Comm_size(comm, &size));
11167: if (size == 1) {
11168: if (reuse == MAT_INITIAL_MATRIX) {
11169: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11170: } else {
11171: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11172: }
11173: PetscFunctionReturn(PETSC_SUCCESS);
11174: }
11176: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11178: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11179: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11180: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11181: PetscFunctionReturn(PETSC_SUCCESS);
11182: }
11184: /*@
11185: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11187: Collective
11189: Input Parameters:
11190: + A - the matrix to create subdomains from
11191: - N - requested number of subdomains
11193: Output Parameters:
11194: + n - number of subdomains resulting on this MPI process
11195: - iss - `IS` list with indices of subdomains on this MPI process
11197: Level: advanced
11199: Note:
11200: The number of subdomains must be smaller than the communicator size
11202: .seealso: [](ch_matrices), `Mat`, `IS`
11203: @*/
11204: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11205: {
11206: MPI_Comm comm, subcomm;
11207: PetscMPIInt size, rank, color;
11208: PetscInt rstart, rend, k;
11210: PetscFunctionBegin;
11211: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11212: PetscCallMPI(MPI_Comm_size(comm, &size));
11213: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11214: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11215: *n = 1;
11216: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11217: color = rank / k;
11218: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11219: PetscCall(PetscMalloc1(1, iss));
11220: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11221: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11222: PetscCallMPI(MPI_Comm_free(&subcomm));
11223: PetscFunctionReturn(PETSC_SUCCESS);
11224: }
11226: /*@
11227: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11229: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11230: If they are not the same, uses `MatMatMatMult()`.
11232: Once the coarse grid problem is constructed, correct for interpolation operators
11233: that are not of full rank, which can legitimately happen in the case of non-nested
11234: geometric multigrid.
11236: Input Parameters:
11237: + restrct - restriction operator
11238: . dA - fine grid matrix
11239: . interpolate - interpolation operator
11240: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11241: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_DETERMINE` if you do not have a good estimate
11243: Output Parameter:
11244: . A - the Galerkin coarse matrix
11246: Options Database Key:
11247: . -pc_mg_galerkin (both|pmat|mat|none) - for what matrices the Galerkin process should be used
11249: Level: developer
11251: Note:
11252: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
11254: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11255: @*/
11256: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11257: {
11258: IS zerorows;
11259: Vec diag;
11261: PetscFunctionBegin;
11262: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11263: /* Construct the coarse grid matrix */
11264: if (interpolate == restrct) {
11265: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11266: } else {
11267: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11268: }
11270: /* If the interpolation matrix is not of full rank, A will have zero rows.
11271: This can legitimately happen in the case of non-nested geometric multigrid.
11272: In that event, we set the rows of the matrix to the rows of the identity,
11273: ignoring the equations (as the RHS will also be zero). */
11275: PetscCall(MatFindZeroRows(*A, &zerorows));
11277: if (zerorows != NULL) { /* if there are any zero rows */
11278: PetscCall(MatCreateVecs(*A, &diag, NULL));
11279: PetscCall(MatGetDiagonal(*A, diag));
11280: PetscCall(VecISSet(diag, zerorows, 1.0));
11281: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11282: PetscCall(VecDestroy(&diag));
11283: PetscCall(ISDestroy(&zerorows));
11284: }
11285: PetscFunctionReturn(PETSC_SUCCESS);
11286: }
11288: /*@C
11289: MatSetOperation - Allows user to set a matrix operation for any matrix type
11291: Logically Collective
11293: Input Parameters:
11294: + mat - the matrix
11295: . op - the name of the operation
11296: - f - the function that provides the operation
11298: Level: developer
11300: Example Usage:
11301: .vb
11302: extern PetscErrorCode usermult(Mat, Vec, Vec);
11304: PetscCall(MatCreateXXX(comm, ..., &A));
11305: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)usermult));
11306: .ve
11308: Notes:
11309: See the file `include/petscmat.h` for a complete list of matrix
11310: operations, which all have the form MATOP_<OPERATION>, where
11311: <OPERATION> is the name (in all capital letters) of the
11312: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11314: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11315: sequence as the usual matrix interface routines, since they
11316: are intended to be accessed via the usual matrix interface
11317: routines, e.g.,
11318: .vb
11319: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11320: .ve
11322: In particular each function MUST return `PETSC_SUCCESS` on success and
11323: nonzero on failure.
11325: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11327: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11328: @*/
11329: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, PetscErrorCodeFn *f)
11330: {
11331: PetscFunctionBegin;
11333: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (PetscErrorCodeFn *)mat->ops->view) mat->ops->viewnative = mat->ops->view;
11334: (((PetscErrorCodeFn **)mat->ops)[op]) = f;
11335: PetscFunctionReturn(PETSC_SUCCESS);
11336: }
11338: /*@C
11339: MatGetOperation - Gets a matrix operation for any matrix type.
11341: Not Collective
11343: Input Parameters:
11344: + mat - the matrix
11345: - op - the name of the operation
11347: Output Parameter:
11348: . f - the function that provides the operation
11350: Level: developer
11352: Example Usage:
11353: .vb
11354: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11356: MatGetOperation(A, MATOP_MULT, (PetscErrorCodeFn **)&usermult);
11357: .ve
11359: Notes:
11360: See the file `include/petscmat.h` for a complete list of matrix
11361: operations, which all have the form MATOP_<OPERATION>, where
11362: <OPERATION> is the name (in all capital letters) of the
11363: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11365: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11367: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11368: @*/
11369: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, PetscErrorCodeFn **f)
11370: {
11371: PetscFunctionBegin;
11373: *f = (((PetscErrorCodeFn **)mat->ops)[op]);
11374: PetscFunctionReturn(PETSC_SUCCESS);
11375: }
11377: /*@
11378: MatHasOperation - Determines whether the given matrix supports the particular operation.
11380: Not Collective
11382: Input Parameters:
11383: + mat - the matrix
11384: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11386: Output Parameter:
11387: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11389: Level: advanced
11391: Note:
11392: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
11394: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11395: @*/
11396: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11397: {
11398: PetscFunctionBegin;
11400: PetscAssertPointer(has, 3);
11401: if (mat->ops->hasoperation) {
11402: PetscUseTypeMethod(mat, hasoperation, op, has);
11403: } else {
11404: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11405: else {
11406: *has = PETSC_FALSE;
11407: if (op == MATOP_CREATE_SUBMATRIX) {
11408: PetscMPIInt size;
11410: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11411: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11412: }
11413: }
11414: }
11415: PetscFunctionReturn(PETSC_SUCCESS);
11416: }
11418: /*@
11419: MatHasCongruentLayouts - Determines whether the rows and columns layouts of the matrix are congruent
11421: Collective
11423: Input Parameter:
11424: . mat - the matrix
11426: Output Parameter:
11427: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11429: Level: beginner
11431: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11432: @*/
11433: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11434: {
11435: PetscFunctionBegin;
11438: PetscAssertPointer(cong, 2);
11439: if (!mat->rmap || !mat->cmap) {
11440: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11441: PetscFunctionReturn(PETSC_SUCCESS);
11442: }
11443: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11444: PetscCall(PetscLayoutSetUp(mat->rmap));
11445: PetscCall(PetscLayoutSetUp(mat->cmap));
11446: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11447: if (*cong) mat->congruentlayouts = 1;
11448: else mat->congruentlayouts = 0;
11449: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11450: PetscFunctionReturn(PETSC_SUCCESS);
11451: }
/* Developer-only routine with no manual page: dispatches to the matrix type's `setinf`
   implementation (presumably fills the matrix entries with infinities — confirm against
   the type-specific implementations); raises PETSC_ERR_SUP if the type provides none. */
PetscErrorCode MatSetInf(Mat A)
{
  PetscFunctionBegin;
  PetscUseTypeMethod(A, setinf);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatCreateGraph - create a scalar matrix (that is a matrix with one vertex for each block vertex in the original matrix), for use in graph algorithms
  and possibly removes small values from the graph structure.

  Collective

  Input Parameters:
+ A - the matrix
. sym - `PETSC_TRUE` indicates that the graph should be symmetrized
. scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
. filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
. num_idx - size of 'index' array
- index - array of block indices to use for graph strength of connection weight

  Output Parameter:
. graph - the resulting graph

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
@*/
PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
{
  PetscFunctionBegin;
  PetscAssertPointer(graph, 7);
  /* graph construction is logged as its own event; the actual work is type-specific */
  PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
  PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
  PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatEliminateZeros - eliminate the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
  meaning the same memory is used for the matrix, and no new memory is allocated.

  Collective

  Input Parameters:
+ A - the matrix
- keep - if for a given row of `A`, the diagonal coefficient is zero, indicates whether it should be left in the structure or eliminated as well

  Level: intermediate

  Developer Note:
  The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the end
  of the arrays in the data structure are unneeded.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
@*/
PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
{
  PetscFunctionBegin;
  /* the elimination itself is type-specific; errors if the type provides no implementation */
  PetscUseTypeMethod(A, eliminatezeros, keep);
  PetscFunctionReturn(PETSC_SUCCESS);
}
11520: /*@C
11521: MatGetCurrentMemType - Get the memory location of the matrix
11523: Not Collective, but the result will be the same on all MPI processes
11525: Input Parameter:
11526: . A - the matrix whose memory type we are checking
11528: Output Parameter:
11529: . m - the memory type
11531: Level: intermediate
11533: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`, `PetscMemType`
11534: @*/
11535: PetscErrorCode MatGetCurrentMemType(Mat A, PetscMemType *m)
11536: {
11537: PetscFunctionBegin;
11539: PetscAssertPointer(m, 2);
11540: if (A->ops->getcurrentmemtype) PetscUseTypeMethod(A, getcurrentmemtype, m);
11541: else *m = PETSC_MEMTYPE_HOST;
11542: PetscFunctionReturn(PETSC_SUCCESS);
11543: }