/* matrix.c */
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
/* Logging support */
/* Class ids used to register the matrix-related object classes with the PETSc profiling/logging infrastructure */
PetscClassId MAT_CLASSID;
PetscClassId MAT_COLORING_CLASSID;
PetscClassId MAT_FDCOLORING_CLASSID;
PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
/* Events timing the core matrix operations: products, solves, factorizations, assembly, orderings, etc. */
PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
PetscLogEvent MAT_ADot, MAT_ANorm;
PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
PetscLogEvent MAT_TransposeColoringCreate;
PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
PetscLogEvent MAT_GetMultiProcBlock;
/* Events for the device (GPU) back ends and host<->device copies */
PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
PetscLogEvent MAT_CreateGraph;
PetscLogEvent MAT_SetValuesBatch;
PetscLogEvent MAT_ViennaCLCopyToGPU;
PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;

/* Strings matching the MatFactorType enum; the trailing entries (enum type name, option prefix, NULL) follow the layout expected by the PetscEnum string utilities */
const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
52: /*@
53: MatSetRandom - Sets all components of a matrix to random numbers.
55: Logically Collective
57: Input Parameters:
58: + x - the matrix
59: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL` and
60: it will create one internally.
62: Example:
63: .vb
64: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
65: MatSetRandom(x,rctx);
66: PetscRandomDestroy(rctx);
67: .ve
69: Level: intermediate
71: Notes:
72: For sparse matrices that have been preallocated but not been assembled, it randomly selects appropriate locations,
74: for sparse matrices that already have nonzero locations, it fills the locations with random numbers.
76: It generates an error if used on unassembled sparse matrices that have not been preallocated.
78: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
79: @*/
80: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
81: {
82: PetscRandom randObj = NULL;
84: PetscFunctionBegin;
88: MatCheckPreallocated(x, 1);
90: if (!rctx) {
91: MPI_Comm comm;
92: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
93: PetscCall(PetscRandomCreate(comm, &randObj));
94: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
95: PetscCall(PetscRandomSetFromOptions(randObj));
96: rctx = randObj;
97: }
98: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
99: PetscUseTypeMethod(x, setrandom, rctx);
100: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
102: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
104: PetscCall(PetscRandomDestroy(&randObj));
105: PetscFunctionReturn(PETSC_SUCCESS);
106: }
/*@
  MatCopyHashToXAIJ - copy hash table entries into an XAIJ matrix type

  Logically Collective

  Input Parameter:
. A - A matrix in unassembled, hash table form

  Output Parameter:
. B - The XAIJ matrix. This can either be `A` or some matrix of equivalent size, e.g. obtained from `A` via `MatDuplicate()`

  Example:
.vb
  PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
  PetscCall(MatCopyHashToXAIJ(A, B));
.ve

  Level: advanced

  Notes:
  If `B` is `A`, then the hash table data structure will be destroyed. `B` is assembled

.seealso: [](ch_matrices), `Mat`, `MAT_USE_HASH_TABLE`
@*/
PetscErrorCode MatCopyHashToXAIJ(Mat A, Mat B)
{
  PetscFunctionBegin;
  /* PetscUseTypeMethod() raises an error if A's type provides no copyhashtoxaij implementation */
  PetscUseTypeMethod(A, copyhashtoxaij, B);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in

  Logically Collective

  Input Parameter:
. mat - the factored matrix

  Output Parameters:
+ pivot - the pivot value computed
- row   - the row that the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
          share the matrix

  Level: advanced

  Notes:
  This routine does not work for factorizations done with external packages.

  This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`

  This can also be called on non-factored matrices that come from, for example, matrices used in SOR.

.seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
          `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
          `MAT_FACTOR_NUMERIC_ZEROPIVOT`
@*/
PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
{
  PetscFunctionBegin;
  PetscAssertPointer(pivot, 2);
  PetscAssertPointer(row, 3);
  /* simply report the values cached on the matrix object */
  *pivot = mat->factorerror_zeropivot_value;
  *row   = mat->factorerror_zeropivot_row;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatFactorGetError - gets the error code from a factorization

  Logically Collective

  Input Parameter:
. mat - the factored matrix

  Output Parameter:
. err - the error code

  Level: advanced

  Note:
  This can also be called on non-factored matrices that come from, for example, matrices used in SOR.

.seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
          `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
@*/
PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
{
  PetscFunctionBegin;
  PetscAssertPointer(err, 2);
  /* report the error type cached on the matrix object */
  *err = mat->factorerrortype;
  PetscFunctionReturn(PETSC_SUCCESS);
}
205: /*@
206: MatFactorClearError - clears the error code in a factorization
208: Logically Collective
210: Input Parameter:
211: . mat - the factored matrix
213: Level: developer
215: Note:
216: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
218: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
219: `MatGetErrorCode()`, `MatFactorError`
220: @*/
221: PetscErrorCode MatFactorClearError(Mat mat)
222: {
223: PetscFunctionBegin;
225: mat->factorerrortype = MAT_FACTOR_NOERROR;
226: mat->factorerror_zeropivot_value = 0.0;
227: mat->factorerror_zeropivot_row = 0;
228: PetscFunctionReturn(PETSC_SUCCESS);
229: }
231: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
232: {
233: Vec r, l;
234: const PetscScalar *al;
235: PetscInt i, nz, gnz, N, n, st;
237: PetscFunctionBegin;
238: PetscCall(MatCreateVecs(mat, &r, &l));
239: if (!cols) { /* nonzero rows */
240: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
241: PetscCall(MatGetSize(mat, &N, NULL));
242: PetscCall(MatGetLocalSize(mat, &n, NULL));
243: PetscCall(VecSet(l, 0.0));
244: PetscCall(VecSetRandom(r, NULL));
245: PetscCall(MatMult(mat, r, l));
246: PetscCall(VecGetArrayRead(l, &al));
247: } else { /* nonzero columns */
248: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
249: PetscCall(MatGetSize(mat, NULL, &N));
250: PetscCall(MatGetLocalSize(mat, NULL, &n));
251: PetscCall(VecSet(r, 0.0));
252: PetscCall(VecSetRandom(l, NULL));
253: PetscCall(MatMultTranspose(mat, l, r));
254: PetscCall(VecGetArrayRead(r, &al));
255: }
256: if (tol <= 0.0) {
257: for (i = 0, nz = 0; i < n; i++)
258: if (al[i] != 0.0) nz++;
259: } else {
260: for (i = 0, nz = 0; i < n; i++)
261: if (PetscAbsScalar(al[i]) > tol) nz++;
262: }
263: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
264: if (gnz != N) {
265: PetscInt *nzr;
266: PetscCall(PetscMalloc1(nz, &nzr));
267: if (nz) {
268: if (tol < 0) {
269: for (i = 0, nz = 0; i < n; i++)
270: if (al[i] != 0.0) nzr[nz++] = i + st;
271: } else {
272: for (i = 0, nz = 0; i < n; i++)
273: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
274: }
275: }
276: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
277: } else *nonzero = NULL;
278: if (!cols) { /* nonzero rows */
279: PetscCall(VecRestoreArrayRead(l, &al));
280: } else {
281: PetscCall(VecRestoreArrayRead(r, &al));
282: }
283: PetscCall(VecDestroy(&l));
284: PetscCall(VecDestroy(&r));
285: PetscFunctionReturn(PETSC_SUCCESS);
286: }
/*@
  MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix

  Input Parameter:
. mat - the matrix

  Output Parameter:
. keptrows - the rows that are not completely zero

  Level: intermediate

  Note:
  `keptrows` is set to `NULL` if all rows are nonzero.

  Developer Note:
  If `keptrows` is not `NULL`, it must be sorted.

.seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
@*/
PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
{
  PetscFunctionBegin;
  PetscAssertPointer(keptrows, 2);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* prefer the type-specific implementation; otherwise fall back to the generic matrix-vector-product based search */
  if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
  else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
  /* record the sortedness promised by the developer contract above (non-permanent info, flag = sorted) */
  if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
321: /*@
322: MatFindZeroRows - Locate all rows that are completely zero in the matrix
324: Input Parameter:
325: . mat - the matrix
327: Output Parameter:
328: . zerorows - the rows that are completely zero
330: Level: intermediate
332: Note:
333: `zerorows` is set to `NULL` if no rows are zero.
335: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
336: @*/
337: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
338: {
339: IS keptrows;
340: PetscInt m, n;
342: PetscFunctionBegin;
345: PetscAssertPointer(zerorows, 2);
346: PetscCall(MatFindNonzeroRows(mat, &keptrows));
347: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
348: In keeping with this convention, we set zerorows to NULL if there are no zero
349: rows. */
350: if (keptrows == NULL) {
351: *zerorows = NULL;
352: } else {
353: PetscCall(MatGetOwnershipRange(mat, &m, &n));
354: PetscCall(ISComplement(keptrows, m, n, zerorows));
355: PetscCall(ISDestroy(&keptrows));
356: }
357: PetscFunctionReturn(PETSC_SUCCESS);
358: }
360: /*@
361: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
363: Not Collective
365: Input Parameter:
366: . A - the matrix
368: Output Parameter:
369: . a - the diagonal part (which is a SEQUENTIAL matrix)
371: Level: advanced
373: Notes:
374: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
376: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
378: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
379: @*/
380: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
381: {
382: PetscFunctionBegin;
385: PetscAssertPointer(a, 2);
386: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
387: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
388: else {
389: PetscMPIInt size;
391: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
392: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
393: *a = A;
394: }
395: PetscFunctionReturn(PETSC_SUCCESS);
396: }
398: /*@
399: MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
401: Collective
403: Input Parameter:
404: . mat - the matrix
406: Output Parameter:
407: . trace - the sum of the diagonal entries
409: Level: advanced
411: .seealso: [](ch_matrices), `Mat`
412: @*/
413: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
414: {
415: Vec diag;
417: PetscFunctionBegin;
419: PetscAssertPointer(trace, 2);
420: PetscCall(MatCreateVecs(mat, &diag, NULL));
421: PetscCall(MatGetDiagonal(mat, diag));
422: PetscCall(VecSum(diag, trace));
423: PetscCall(VecDestroy(&diag));
424: PetscFunctionReturn(PETSC_SUCCESS);
425: }
/*@
  MatRealPart - Zeros out the imaginary part of the matrix

  Logically Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
@*/
PetscErrorCode MatRealPart(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* errors if the matrix type provides no realpart implementation */
  PetscUseTypeMethod(mat, realpart);
  PetscFunctionReturn(PETSC_SUCCESS);
}
451: /*@C
452: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
454: Collective
456: Input Parameter:
457: . mat - the matrix
459: Output Parameters:
460: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
461: - ghosts - the global indices of the ghost points
463: Level: advanced
465: Note:
466: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
468: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
469: @*/
470: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
471: {
472: PetscFunctionBegin;
475: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
476: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
477: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
478: else {
479: if (nghosts) *nghosts = 0;
480: if (ghosts) *ghosts = NULL;
481: }
482: PetscFunctionReturn(PETSC_SUCCESS);
483: }
/*@
  MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part

  Logically Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatRealPart()`
@*/
PetscErrorCode MatImaginaryPart(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* errors if the matrix type provides no imaginarypart implementation */
  PetscUseTypeMethod(mat, imaginarypart);
  PetscFunctionReturn(PETSC_SUCCESS);
}
// PetscClangLinter pragma disable: -fdoc-section-header-unknown
/*@C
  MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
  for each row that you get to ensure that your application does
  not bleed memory.

  Not Collective

  Input Parameters:
+ mat - the matrix
- row - the row to get

  Output Parameters:
+ ncols - if not `NULL`, the number of nonzeros in `row`
. cols  - if not `NULL`, the column numbers
- vals  - if not `NULL`, the numerical values

  Level: advanced

  Notes:
  This routine is provided for people who need to have direct access
  to the structure of a matrix. We hope that we provide enough
  high-level matrix routines that few users will need it.

  `MatGetRow()` always returns 0-based column indices, regardless of
  whether the internal representation is 0-based (default) or 1-based.

  For better efficiency, set `cols` and/or `vals` to `NULL` if you do
  not wish to extract these quantities.

  The user can only examine the values extracted with `MatGetRow()`;
  the values CANNOT be altered. To change the matrix entries, one
  must use `MatSetValues()`.

  You can only have one call to `MatGetRow()` outstanding for a particular
  matrix at a time, per processor. `MatGetRow()` can only obtain rows
  associated with the given processor, it cannot get rows from the
  other processors; for that we suggest using `MatCreateSubMatrices()`, then
  `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
  is in the global number of rows.

  Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.

  Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.

  Fortran Note:
.vb
  PetscInt, pointer :: cols(:)
  PetscScalar, pointer :: vals(:)
.ve

.seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
@*/
PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscInt incols; /* count returned by the type method; copied out only if the caller asked for it */

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* only rows owned by this process may be requested */
  PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
  PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
  /* the const casts match the non-const signature of the type method; callers must treat the output as read-only */
  PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
  if (ncols) *ncols = incols;
  PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatConjugate - replaces the matrix values with their complex conjugates

  Logically Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
@*/
PetscErrorCode MatConjugate(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  /* nothing to do for real scalars, or when the matrix is known both symmetric and
     Hermitian (such matrices have real entries, so conjugation is the identity) */
  if (PetscDefined(USE_COMPLEX) && !(mat->symmetric == PETSC_BOOL3_TRUE && mat->hermitian == PETSC_BOOL3_TRUE)) {
    PetscUseTypeMethod(mat, conjugate);
    /* the values changed, so bump the object state for dependent objects (e.g. preconditioners) */
    PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.

  Not Collective

  Input Parameters:
+ mat   - the matrix
. row   - the row to get
. ncols - the number of nonzeros
. cols  - the columns of the nonzeros
- vals  - if nonzero the column values

  Level: advanced

  Notes:
  This routine should be called after you have finished examining the entries.

  This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
  use of the array after it has been restored. If you pass `NULL`, it will
  not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.

  Fortran Note:
.vb
  PetscInt, pointer :: cols(:)
  PetscScalar, pointer :: vals(:)
.ve

.seealso: [](ch_matrices), `Mat`, `MatGetRow()`
@*/
PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
{
  PetscFunctionBegin;
  if (ncols) PetscAssertPointer(ncols, 3);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  /* restorerow is optional; types with nothing to free simply skip it */
  PetscTryTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
  /* zero the outputs (when provided) so stale pointers cannot be used accidentally */
  if (ncols) *ncols = 0;
  if (cols) *cols = NULL;
  if (vals) *vals = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
  You should call `MatRestoreRowUpperTriangular()` after calling` MatGetRow()` and `MatRestoreRow()` to disable the flag.

  Not Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

  Note:
  The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.

.seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
@*/
PetscErrorCode MatGetRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* no-op for matrix types that do not implement the flag (only relevant for SBAIJ-like formats) */
  PetscTryTypeMethod(mat, getrowuppertriangular);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatRestoreRowUpperTriangular - Disable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.

  Not Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

  Note:
  This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.

.seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
@*/
PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* no-op for matrix types that do not implement the flag (only relevant for SBAIJ-like formats) */
  PetscTryTypeMethod(mat, restorerowuppertriangular);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSetOptionsPrefix - Sets the prefix used for searching for all
  `Mat` options in the database.

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names

  Level: advanced

  Notes:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

  This is NOT used for options for the factorization of the matrix. Normally the
  prefix is automatically passed in from the PC calling the factorization. To set
  it directly use `MatSetOptionsPrefixFactor()`

.seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
@*/
PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
  /* give the matrix type a chance to propagate the prefix to any inner objects; no-op if not implemented */
  PetscTryMethod(A, "MatSetOptionsPrefix_C", (Mat, const char[]), (A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
  for matrices created with `MatGetFactor()`

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names for the factored matrix

  Level: developer

  Notes:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

  Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
  it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`

.seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
@*/
PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  if (prefix) {
    PetscAssertPointer(prefix, 2);
    PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
    /* guard against self-assignment: freeing first would invalidate the source string */
    if (prefix != A->factorprefix) {
      PetscCall(PetscFree(A->factorprefix));
      PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
    }
  } else PetscCall(PetscFree(A->factorprefix)); /* NULL clears any existing factor prefix */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
  for matrices created with `MatGetFactor()`

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names for the factored matrix

  Level: developer

  Notes:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

  Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
  it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`

.seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
          `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
          `MatSetOptionsPrefix()`
@*/
PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
{
  size_t len1, len2, new_len;

  PetscFunctionBegin;
  if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
  if (!A->factorprefix) {
    /* nothing to append to; behaves as a plain set */
    PetscCall(MatSetOptionsPrefixFactor(A, prefix));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");

  /* grow the existing string to hold both prefixes plus the NUL terminator, then copy
     the new prefix (len2 chars + NUL) directly after the old contents */
  PetscCall(PetscStrlen(A->factorprefix, &len1));
  PetscCall(PetscStrlen(prefix, &len2));
  new_len = len1 + len2 + 1;
  PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
  PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatAppendOptionsPrefix - Appends to the prefix used for searching for all
  matrix options in the database.

  Logically Collective

  Input Parameters:
+ A      - the matrix
- prefix - the prefix to prepend to all option names

  Level: advanced

  Note:
  A hyphen (-) must NOT be given at the beginning of the prefix name.
  The first character of all runtime options is AUTOMATICALLY the hyphen.

.seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
@*/
PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
  /* give the matrix type a chance to propagate the prefix to any inner objects; no-op if not implemented */
  PetscTryMethod(A, "MatAppendOptionsPrefix_C", (Mat, const char[]), (A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetOptionsPrefix - Gets the prefix used for searching for all
  matrix options in the database.

  Not Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. prefix - pointer to the prefix string used

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
@*/
PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
{
  PetscFunctionBegin;
  PetscAssertPointer(prefix, 2);
  /* thin wrapper: the prefix is stored on the base PetscObject */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`

  Not Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. state - the object state

  Level: advanced

  Note:
  Object state is an integer which gets increased every time
  the object is changed. By saving and later querying the object state
  one can determine whether information about the object is still current.

  See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
@*/
PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
{
  PetscFunctionBegin;
  PetscAssertPointer(state, 2);
  /* thin wrapper: the state counter lives on the base PetscObject */
  PetscCall(PetscObjectStateGet((PetscObject)A, state));
  PetscFunctionReturn(PETSC_SUCCESS);
}
895: /*@
896: MatResetPreallocation - Reset matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
898: Collective
900: Input Parameter:
901: . A - the matrix
903: Level: beginner
905: Notes:
906: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY` the matrix data structures represent the nonzeros assigned to the
907: matrix. If that space is less than the preallocated space that extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
908: makes all of the preallocation space available
910: Current values in the matrix are lost in this call
912: Currently only supported for `MATAIJ` matrices.
914: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
915: @*/
916: PetscErrorCode MatResetPreallocation(Mat A)
917: {
918: PetscFunctionBegin;
921: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A)); /* dispatch to the type-specific implementation registered under "MatResetPreallocation_C"; errors if the type provides none (only AIJ currently, per the manual page above) */
922: PetscFunctionReturn(PETSC_SUCCESS);
923: }
925: /*@
926: MatResetHash - Reset the matrix so that it will use a hash table for the next round of `MatSetValues()` and `MatAssemblyBegin()`/`MatAssemblyEnd()`.
928: Collective
930: Input Parameter:
931: . A - the matrix
933: Level: intermediate
935: Notes:
936: The matrix will again delete the hash table data structures after following calls to `MatAssemblyBegin()`/`MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
938: Currently only supported for `MATAIJ` matrices.
940: .seealso: [](ch_matrices), `Mat`, `MatResetPreallocation()`
941: @*/
942: PetscErrorCode MatResetHash(Mat A)
943: {
944: PetscFunctionBegin;
947: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset to hash state after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()"); /* pending unassembled values would be lost silently */
948: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS); /* never assembled: the hash-table state is still active, nothing to reset */
949: PetscUseMethod(A, "MatResetHash_C", (Mat), (A)); /* type-specific reset; errors if the type does not support it */
950: /* These flags are used to determine whether certain setups occur */
951: A->was_assembled = PETSC_FALSE;
952: A->assembled = PETSC_FALSE;
953: /* Log that the state of this object has changed; this will help guarantee that preconditioners get re-setup */
954: PetscCall(PetscObjectStateIncrease((PetscObject)A));
955: PetscFunctionReturn(PETSC_SUCCESS);
956: }
958: /*@
959: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
961: Collective
963: Input Parameter:
964: . A - the matrix
966: Level: advanced
968: Notes:
969: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
970: setting values in the matrix.
972: This routine is called internally by other `Mat` functions when needed so rarely needs to be called by users
974: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
975: @*/
976: PetscErrorCode MatSetUp(Mat A)
977: {
978: PetscFunctionBegin;
980: if (!((PetscObject)A)->type_name) { /* no type chosen yet: pick a default AIJ flavor based on communicator size */
981: PetscMPIInt size;
983: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
984: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
985: }
986: if (!A->preallocated) PetscTryTypeMethod(A, setup); /* type-provided setup is optional (Try, not Use); skipped if already preallocated */
987: PetscCall(PetscLayoutSetUp(A->rmap)); /* finalize row and column layouts so local/global sizes are consistent */
988: PetscCall(PetscLayoutSetUp(A->cmap));
989: A->preallocated = PETSC_TRUE;
990: PetscFunctionReturn(PETSC_SUCCESS);
991: }
993: #if defined(PETSC_HAVE_SAWS)
994: #include <petscviewersaws.h>
995: #endif
997: /*
998: If threadsafety is on extraneous matrices may be printed
1000: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
1001: */
1002: #if !defined(PETSC_HAVE_THREADSAFETY)
1003: static PetscInt insidematview = 0;
1004: #endif
1006: /*@
1007: MatViewFromOptions - View properties of the matrix based on options set in the options database
1009: Collective
1011: Input Parameters:
1012: + A - the matrix
1013: . obj - optional additional object that provides the options prefix to use
1014: - name - command line option
1016: Options Database Key:
1017: . -mat_view [viewertype]:... - the viewer and its options
1019: Level: intermediate
1021: Note:
1022: .vb
1023: If no value is provided ascii:stdout is used
1024: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
1025: for example ascii::ascii_info prints just the information about the object not all details
1026: unless :append is given filename opens in write mode, overwriting what was already there
1027: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1028: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1029: socket[:port] defaults to the standard output port
1030: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1031: .ve
1033: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1034: @*/
1035: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1036: {
1037: PetscFunctionBegin;
1039: #if !defined(PETSC_HAVE_THREADSAFETY)
1040: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS); /* suppress recursive viewing when invoked from inside MatView() (see insidematview note above) */
1041: #endif
1042: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name)); /* generic options-database-driven viewing */
1043: PetscFunctionReturn(PETSC_SUCCESS);
1044: }
1046: /*@
1047: MatView - display information about a matrix in a variety of ways
1049: Collective on viewer
1051: Input Parameters:
1052: + mat - the matrix
1053: - viewer - visualization context
1055: Options Database Keys:
1056: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1057: . -mat_view ::ascii_info_detail - Prints more detailed info
1058: . -mat_view - Prints matrix in ASCII format
1059: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1060: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1061: . -display name - Sets display name (default is host)
1062: . -draw_pause sec - Sets number of seconds to pause after display
1063: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1064: . -viewer_socket_machine machine - -
1065: . -viewer_socket_port port - -
1066: . -mat_view binary - save matrix to file in binary format
1067: - -viewer_binary_filename name - -
1069: Level: beginner
1071: Notes:
1072: The available visualization contexts include
1073: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1074: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1075: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1076: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1078: The user can open alternative visualization contexts with
1079: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1080: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1081: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1082: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1084: The user can call `PetscViewerPushFormat()` to specify the output
1085: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1086: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1087: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1088: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1089: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1090: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1091: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1092: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1093: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1095: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes,
1096: the program will seemingly hang and take hours for larger matrices, for larger matrices one should use the binary format.
1098: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1100: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1101: viewer is used.
1103: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1104: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1106: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1107: and then use the following mouse functions.
1108: .vb
1109: left mouse: zoom in
1110: middle mouse: zoom out
1111: right mouse: continue with the simulation
1112: .ve
1114: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1115: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1116: @*/
1117: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1118: {
1119: PetscInt rows, cols, rbs, cbs;
1120: PetscBool isascii, isstring, issaws;
1121: PetscViewerFormat format;
1122: PetscMPIInt size;
1124: PetscFunctionBegin;
1127: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer)); /* default viewer: stdout on the matrix's communicator */
1130: PetscCall(PetscViewerGetFormat(viewer, &format));
1131: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1132: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS); /* load-balance output is pointless on a single rank */
1134: #if !defined(PETSC_HAVE_THREADSAFETY)
1135: insidematview++; /* re-entrancy guard: keeps MatViewFromOptions() from triggering nested views; must be balanced on every return below */
1136: #endif
1137: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1138: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1139: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1140: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail"); /* factored matrices only support ASCII info output */
1142: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1143: if (isascii) {
1144: if (!mat->preallocated) { /* early out: nothing meaningful to print yet; balance counter and log event */
1145: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1146: #if !defined(PETSC_HAVE_THREADSAFETY)
1147: insidematview--;
1148: #endif
1149: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1150: PetscFunctionReturn(PETSC_SUCCESS);
1151: }
1152: if (!mat->assembled) { /* same early-out discipline as above */
1153: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1154: #if !defined(PETSC_HAVE_THREADSAFETY)
1155: insidematview--;
1156: #endif
1157: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1158: PetscFunctionReturn(PETSC_SUCCESS);
1159: }
1160: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1161: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1162: MatNullSpace nullsp, transnullsp;
1164: PetscCall(PetscViewerASCIIPushTab(viewer)); /* indent the info body; popped at the bottom of the function when format is info/info_detail */
1165: PetscCall(MatGetSize(mat, &rows, &cols));
1166: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1167: if (rbs != 1 || cbs != 1) { /* only mention block sizes when they are non-trivial */
1168: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1169: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1170: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1171: if (mat->factortype) {
1172: MatSolverType solver;
1173: PetscCall(MatFactorGetSolverType(mat, &solver));
1174: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1175: }
1176: if (mat->ops->getinfo) {
1177: PetscBool is_constant_or_diagonal;
1179: // Don't print nonzero information for constant or diagonal matrices, it just adds noise to the output
1180: PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &is_constant_or_diagonal, MATCONSTANTDIAGONAL, MATDIAGONAL, ""));
1181: if (!is_constant_or_diagonal) {
1182: MatInfo info;
1184: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1185: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1186: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1187: }
1188: }
1189: PetscCall(MatGetNullSpace(mat, &nullsp));
1190: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1191: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1192: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1193: PetscCall(MatGetNearNullSpace(mat, &nullsp)); /* nullsp is reused here for the near null space */
1194: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1195: PetscCall(PetscViewerASCIIPushTab(viewer));
1196: PetscCall(MatProductView(mat, viewer));
1197: PetscCall(PetscViewerASCIIPopTab(viewer));
1198: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) { /* detailed mode: dump the variable block sizes via a temporary IS */
1199: IS tmp;
1201: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp)); /* PETSC_USE_POINTER: IS borrows mat->bsizes, no copy */
1202: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1203: PetscCall(PetscViewerASCIIPushTab(viewer));
1204: PetscCall(ISView(tmp, viewer));
1205: PetscCall(PetscViewerASCIIPopTab(viewer));
1206: PetscCall(ISDestroy(&tmp));
1207: }
1208: }
1209: } else if (issaws) {
1210: #if defined(PETSC_HAVE_SAWS)
1211: PetscMPIInt rank;
1213: PetscCall(PetscObjectName((PetscObject)mat)); /* ensure the object has a name before publishing */
1214: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1215: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer)); /* publish once, from rank 0 only */
1216: #endif
1217: } else if (isstring) {
1218: const char *type;
1219: PetscCall(MatGetType(mat, &type));
1220: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1221: PetscTryTypeMethod(mat, view, viewer);
1222: }
1223: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) { /* prefer the native viewer for these formats when the type provides one */
1224: PetscCall(PetscViewerASCIIPushTab(viewer));
1225: PetscUseTypeMethod(mat, viewnative, viewer);
1226: PetscCall(PetscViewerASCIIPopTab(viewer));
1227: } else if (mat->ops->view) {
1228: PetscCall(PetscViewerASCIIPushTab(viewer));
1229: PetscUseTypeMethod(mat, view, viewer);
1230: PetscCall(PetscViewerASCIIPopTab(viewer));
1231: }
1232: if (isascii) {
1233: PetscCall(PetscViewerGetFormat(viewer, &format)); /* re-query: the type's view method may have changed the format */
1234: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer)); /* balances the PushTab from the info header above */
1235: }
1236: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1237: #if !defined(PETSC_HAVE_THREADSAFETY)
1238: insidematview--; /* balance the increment at entry */
1239: #endif
1240: PetscFunctionReturn(PETSC_SUCCESS);
1241: }
1243: #if defined(PETSC_USE_DEBUG)
1244: #include <../src/sys/totalview/tv_data_display.h>
1245: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat) /* TotalView debugger callback: pretty-prints a Mat's dimensions and type in the debugger's variable view */
1246: {
1247: TV_add_row("Local rows", "int", &mat->rmap->n);
1248: TV_add_row("Local columns", "int", &mat->cmap->n);
1249: TV_add_row("Global rows", "int", &mat->rmap->N);
1250: TV_add_row("Global columns", "int", &mat->cmap->N);
1251: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1252: return TV_format_OK;
1253: }
1254: #endif
1256: /*@
1257: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1258: with `MatView()`. The matrix format is determined from the options database.
1259: Generates a parallel MPI matrix if the communicator has more than one
1260: processor. The default matrix type is `MATAIJ`.
1262: Collective
1264: Input Parameters:
1265: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1266: or some related function before a call to `MatLoad()`
1267: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1269: Options Database Key:
1270: . -matload_block_size bs - set block size
1272: Level: beginner
1274: Notes:
1275: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1276: `Mat` before calling this routine if you wish to set it from the options database.
1278: `MatLoad()` automatically loads into the options database any options
1279: given in the file filename.info where filename is the name of the file
1280: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1281: file will be ignored if you use the -viewer_binary_skip_info option.
1283: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1284: sets the default matrix type AIJ and sets the local and global sizes.
1285: If type and/or size is already set, then the same are used.
1287: In parallel, each processor can load a subset of rows (or the
1288: entire matrix). This routine is especially useful when a large
1289: matrix is stored on disk and only part of it is desired on each
1290: processor. For example, a parallel solver may access only some of
1291: the rows from each processor. The algorithm used here reads
1292: relatively small blocks of data rather than reading the entire
1293: matrix and then subsetting it.
1295: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1296: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1297: or the sequence like
1298: .vb
1299: `PetscViewer` v;
1300: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1301: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1302: `PetscViewerSetFromOptions`(v);
1303: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1304: `PetscViewerFileSetName`(v,"datafile");
1305: .ve
1306: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1307: .vb
1308: -viewer_type {binary, hdf5}
1309: .ve
1311: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1312: and src/mat/tutorials/ex10.c with the second approach.
1314: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1315: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1316: Multiple objects, both matrices and vectors, can be stored within the same file.
1317: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1319: Most users should not need to know the details of the binary storage
1320: format, since `MatLoad()` and `MatView()` completely hide these details.
1321: But for anyone who is interested, the standard binary matrix storage
1322: format is
1324: .vb
1325: PetscInt MAT_FILE_CLASSID
1326: PetscInt number of rows
1327: PetscInt number of columns
1328: PetscInt total number of nonzeros
1329: PetscInt *number nonzeros in each row
1330: PetscInt *column indices of all nonzeros (starting index is zero)
1331: PetscScalar *values of all nonzeros
1332: .ve
1333: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1334: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1335: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1337: PETSc automatically does the byte swapping for
1338: machines that store the bytes reversed. Thus if you write your own binary
1339: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1340: and `PetscBinaryWrite()` to see how this may be done.
1342: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1343: Each processor's chunk is loaded independently by its owning MPI process.
1344: Multiple objects, both matrices and vectors, can be stored within the same file.
1345: They are looked up by their PetscObject name.
1347: As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1348: by default the same structure and naming of the AIJ arrays and column count
1349: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1350: .vb
1351: save example.mat A b -v7.3
1352: .ve
1353: can be directly read by this routine (see Reference 1 for details).
1355: Depending on your MATLAB version, this format might be a default,
1356: otherwise you can set it as default in Preferences.
1358: Unless -nocompression flag is used to save the file in MATLAB,
1359: PETSc must be configured with ZLIB package.
1361: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1363: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1365: Corresponding `MatView()` is not yet implemented.
1367: The loaded matrix is actually a transpose of the original one in MATLAB,
1368: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1369: With this format, matrix is automatically transposed by PETSc,
1370: unless the matrix is marked as SPD or symmetric
1371: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1373: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1375: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1376: @*/
1377: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1378: {
1379: PetscBool flg;
1381: PetscFunctionBegin;
1385: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ)); /* default to MATAIJ when the caller has not chosen a type (see manual page Notes) */
1387: flg = PETSC_FALSE;
1388: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL)); /* allow marking the loaded matrix symmetric from the options database */
1389: if (flg) {
1390: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1391: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE)); /* symmetry is declared permanent, surviving future value changes */
1392: }
1393: flg = PETSC_FALSE;
1394: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1395: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1397: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1398: PetscUseTypeMethod(mat, load, viewer); /* actual reading is type-specific; errors if the type cannot load */
1399: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1400: PetscFunctionReturn(PETSC_SUCCESS);
1401: }
1403: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1404: {
1405: Mat_Redundant *redund = *redundant; /* frees all storage held by a Mat_Redundant and nulls the caller's pointer */
1407: PetscFunctionBegin;
1408: if (redund) {
1409: if (redund->matseq) { /* via MatCreateSubMatrices() */
1410: PetscCall(ISDestroy(&redund->isrow));
1411: PetscCall(ISDestroy(&redund->iscol));
1412: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1413: } else { /* buffers were allocated directly; free send/receive arrays one by one */
1414: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1415: PetscCall(PetscFree(redund->sbuf_j));
1416: PetscCall(PetscFree(redund->sbuf_a));
1417: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1418: PetscCall(PetscFree(redund->rbuf_j[i]));
1419: PetscCall(PetscFree(redund->rbuf_a[i]));
1420: }
1421: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1422: }
1424: PetscCall(PetscCommDestroy(&redund->subcomm));
1425: PetscCall(PetscFree(redund));
*redundant = NULL; /* PetscFree() only nulls the local alias; null the caller's pointer so a repeat call is a harmless no-op and no dangling pointer remains */
1426: }
1427: PetscFunctionReturn(PETSC_SUCCESS);
1428: }
1430: /*@
1431: MatDestroy - Frees space taken by a matrix.
1433: Collective
1435: Input Parameter:
1436: . A - the matrix
1438: Level: beginner
1440: Developer Note:
1441: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1442: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1443: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1444: if changes are needed here.
1446: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1447: @*/
1448: PetscErrorCode MatDestroy(Mat *A)
1449: {
1450: PetscFunctionBegin;
1451: if (!*A) PetscFunctionReturn(PETSC_SUCCESS); /* destroying a NULL matrix is a no-op */
1453: if (--((PetscObject)*A)->refct > 0) { /* other holders remain; drop our reference and null the caller's pointer only */
1454: *A = NULL;
1455: PetscFunctionReturn(PETSC_SUCCESS);
1456: }
1458: /* if memory was published with SAWs then destroy it */
1459: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1460: PetscTryTypeMethod(*A, destroy); /* type-specific teardown first, while type data is still intact */
1462: PetscCall(PetscFree((*A)->factorprefix));
1463: PetscCall(PetscFree((*A)->defaultvectype));
1464: PetscCall(PetscFree((*A)->defaultrandtype));
1465: PetscCall(PetscFree((*A)->bsizes));
1466: PetscCall(PetscFree((*A)->solvertype));
1467: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1468: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL; /* break the self-reference so the redundant cleanup below does not try to destroy this matrix again */
1469: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1470: PetscCall(MatProductClear(*A));
1471: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1472: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1473: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1474: PetscCall(MatDestroy(&(*A)->schur));
1475: PetscCall(VecDestroy(&(*A)->dot_vec));
1476: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1477: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1478: PetscCall(PetscHeaderDestroy(A)); /* frees the header itself and sets *A to NULL */
1479: PetscFunctionReturn(PETSC_SUCCESS);
1480: }
1482: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1483: /*@
1484: MatSetValues - Inserts or adds a block of values into a matrix.
1485: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1486: MUST be called after all calls to `MatSetValues()` have been completed.
1488: Not Collective
1490: Input Parameters:
1491: + mat - the matrix
1492: . m - the number of rows
1493: . idxm - the global indices of the rows
1494: . n - the number of columns
1495: . idxn - the global indices of the columns
1496: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1497: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1498: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1500: Level: beginner
1502: Notes:
1503: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1504: options cannot be mixed without intervening calls to the assembly
1505: routines.
1507: `MatSetValues()` uses 0-based row and column numbers in Fortran
1508: as well as in C.
1510: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1511: simply ignored. This allows easily inserting element stiffness matrices
1512: with homogeneous Dirichlet boundary conditions that you don't want represented
1513: in the matrix.
1515: Efficiency Alert:
1516: The routine `MatSetValuesBlocked()` may offer much better efficiency
1517: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1519: Fortran Notes:
1520: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1521: .vb
1522: call MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
1523: .ve
1525: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1527: Developer Note:
1528: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1529: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1531: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1532: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1533: @*/
1534: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1535: {
1536: PetscFunctionBeginHot;
1539: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1540: PetscAssertPointer(idxm, 3);
1541: PetscAssertPointer(idxn, 5);
1542: MatCheckPreallocated(mat, 1);
1544: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv; /* first call since assembly fixes the mode; later calls must match */
1545: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1547: if (PetscDefined(USE_DEBUG)) { /* debug-only validation: finite values and index upper bounds (negative indices are allowed and ignored, per the manual page) */
1548: PetscInt i, j;
1550: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1551: if (v) {
1552: for (i = 0; i < m; i++) {
1553: for (j = 0; j < n; j++) {
1554: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j])) /* v is row-major m x n by default */
1555: #if defined(PETSC_USE_COMPLEX)
1556: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1557: #else
1558: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1559: #endif
1560: }
1561: }
1562: }
1563: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1564: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1565: }
1567: if (mat->assembled) { /* setting values on an assembled matrix puts it back into the unassembled state */
1568: mat->was_assembled = PETSC_TRUE;
1569: mat->assembled = PETSC_FALSE;
1570: }
1571: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1572: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv); /* delegate the actual insertion to the matrix type */
1573: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1574: PetscFunctionReturn(PETSC_SUCCESS);
1575: }
1577: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1578: /*@
1579: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1580: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1581: MUST be called after all calls to `MatSetValues()` have been completed.
1583: Not Collective
1585: Input Parameters:
1586: + mat - the matrix
1587: . ism - the rows to provide
1588: . isn - the columns to provide
1589: . v - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
1590: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1591: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1593: Level: beginner
1595: Notes:
1596: By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1598: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1599: options cannot be mixed without intervening calls to the assembly
1600: routines.
1602: `MatSetValues()` uses 0-based row and column numbers in Fortran
1603: as well as in C.
1605: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1606: simply ignored. This allows easily inserting element stiffness matrices
1607: with homogeneous Dirichlet boundary conditions that you don't want represented
1608: in the matrix.
1610: Fortran Note:
1611: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1613: Efficiency Alert:
1614: The routine `MatSetValuesBlocked()` may offer much better efficiency
1615: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1617: This is currently not optimized for any particular `ISType`
1619: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1620: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1621: @*/
1622: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1623: {
1624: PetscInt m, n;
1625: const PetscInt *rows, *cols;
1627: PetscFunctionBeginHot;
1629: PetscCall(ISGetIndices(ism, &rows));
1630: PetscCall(ISGetIndices(isn, &cols));
1631: PetscCall(ISGetLocalSize(ism, &m));
1632: PetscCall(ISGetLocalSize(isn, &n));
1633: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1634: PetscCall(ISRestoreIndices(ism, &rows));
1635: PetscCall(ISRestoreIndices(isn, &cols));
1636: PetscFunctionReturn(PETSC_SUCCESS);
1637: }
1639: /*@
1640: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1641: values into a matrix
1643: Not Collective
1645: Input Parameters:
1646: + mat - the matrix
1647: . row - the (block) row to set
1648: - v - a one-dimensional array that contains the values. For `MATBAIJ` they are implicitly stored as a two-dimensional array, by default in row-major order.
1649: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
1651: Level: intermediate
1653: Notes:
1654: The values, `v`, are column-oriented (for the block version) and sorted
1656: All the nonzero values in `row` must be provided
1658: The matrix must have previously had its column indices set, likely by having been assembled.
1660: `row` must belong to this MPI process
1662: Fortran Note:
1663: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1665: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1666: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1667: @*/
1668: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1669: {
1670: PetscInt globalrow;
1672: PetscFunctionBegin;
1675: PetscAssertPointer(v, 3);
1676: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1677: PetscCall(MatSetValuesRow(mat, globalrow, v));
1678: PetscFunctionReturn(PETSC_SUCCESS);
1679: }
/*@
  MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
  values into a matrix

  Not Collective

  Input Parameters:
+ mat - the matrix
. row - the (block) row to set
- v   - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values

  Level: advanced

  Notes:
  The values, `v`, are column-oriented for the block version.

  All the nonzeros in `row` must be provided

  THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.

  `row` must belong to this process

.seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
          `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
@*/
PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  PetscAssertPointer(v, 3);
  /* this routine always inserts, so it cannot follow unassembled ADD_VALUES calls */
  PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  mat->insertmode = INSERT_VALUES;

  if (mat->assembled) {
    /* inserting into an assembled matrix returns it to the unassembled state */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, setvaluesrow, row, v);
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
// PetscClangLinter pragma disable: -fdoc-section-header-unknown
/*@
  MatSetValuesStencil - Inserts or adds a block of values into a matrix.
  Using structured grid indexing

  Not Collective

  Input Parameters:
+ mat  - the matrix
. m    - number of rows being entered
. idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
. n    - number of columns being entered
. idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
. v    - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
         See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
- addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values

  Level: beginner

  Notes:
  By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.

  Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
  options cannot be mixed without intervening calls to the assembly
  routines.

  The grid coordinates are across the entire grid, not just the local portion

  `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
  as well as in C.

  For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine

  In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
  or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.

  The columns and rows in the stencil passed in MUST be contained within the
  ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
  if you create a `DMDA` with an overlap of one grid level and on a particular process its first
  local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
  first i index you can use in your column and row indices in `MatSetStencil()` is 5.

  For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
  obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
  etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
  `DM_BOUNDARY_PERIODIC` boundary type.

  For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
  a single value per point) you can skip filling those indices.

  Inspired by the structured grid interface to the HYPRE package
  (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)

  Fortran Note:
  If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array

  Efficiency Alert:
  The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
  for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).

.seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
          `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
@*/
PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscInt  buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  PetscInt  j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);

  /* use the stack buffer when it is large enough for both translated index arrays, otherwise allocate */
  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* flatten each row MatStencil coordinate into a single local index; a negative
     result marks an entry that MatSetValuesLocal() will ignore */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++; /* skip the unused leading coordinates of the MatStencil */
    tmp = *dxm++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1; /* outside the ghost region: ignore */
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    if (mat->stencil.noc) dxm++; /* single dof per point: skip the unused component entry */
    jdxm[i] = tmp;
  }
  /* same flattening for the column coordinates */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < dim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    if (mat->stencil.noc) dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
  Using structured grid indexing

  Not Collective

  Input Parameters:
+ mat  - the matrix
. m    - number of rows being entered
. idxm - grid coordinates for matrix rows being entered
. n    - number of columns being entered
. idxn - grid coordinates for matrix columns being entered
. v    - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
         See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
- addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values

  Level: beginner

  Notes:
  By default the values, `v`, are row-oriented and unsorted.
  See `MatSetOption()` for other options.

  Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
  options cannot be mixed without intervening calls to the assembly
  routines.

  The grid coordinates are across the entire grid, not just the local portion

  `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
  as well as in C.

  For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine

  In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
  or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.

  The columns and rows in the stencil passed in MUST be contained within the
  ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
  if you create a `DMDA` with an overlap of one grid level and on a particular process its first
  local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
  first i index you can use in your column and row indices in `MatSetStencil()` is 5.

  Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
  simply ignored. This allows easily inserting element stiffness matrices
  with homogeneous Dirichlet boundary conditions that you don't want represented
  in the matrix.

  Inspired by the structured grid interface to the HYPRE package
  (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)

  Fortran Notes:
  `idxm` and `idxn` should be declared as
.vb
  MatStencil idxm(4,m),idxn(4,n)
.ve
  and the values inserted using
.vb
  idxm(MatStencil_i,1) = i
  idxm(MatStencil_j,1) = j
  idxm(MatStencil_k,1) = k
  etc
.ve

  If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array

.seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
          `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
          `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
@*/
PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscInt  buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
  PetscInt  j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
  PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);

  /* use the stack buffer when it is large enough for both translated index arrays, otherwise allocate */
  if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
    jdxm = buf;
    jdxn = buf + m;
  } else {
    PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
    jdxm = bufm;
    jdxn = bufn;
  }
  /* flatten each row MatStencil coordinate into a single local block index; a negative
     result marks an entry that MatSetValuesBlockedLocal() will ignore */
  for (i = 0; i < m; i++) {
    for (j = 0; j < 3 - sdim; j++) dxm++; /* skip the unused leading coordinates of the MatStencil */
    tmp = *dxm++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1; /* outside the ghost region: ignore */
      else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
    }
    dxm++; /* component entry is not used for blocked insertion */
    jdxm[i] = tmp;
  }
  /* same flattening for the column coordinates */
  for (i = 0; i < n; i++) {
    for (j = 0; j < 3 - sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j = 0; j < sdim - 1; j++) {
      if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
      else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
    }
    dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
  PetscCall(PetscFree2(bufm, bufn));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1952: /*@
1953: MatSetStencil - Sets the grid information for setting values into a matrix via
1954: `MatSetValuesStencil()`
1956: Not Collective
1958: Input Parameters:
1959: + mat - the matrix
1960: . dim - dimension of the grid 1, 2, or 3
1961: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1962: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1963: - dof - number of degrees of freedom per node
1965: Level: beginner
1967: Notes:
1968: Inspired by the structured grid interface to the HYPRE package
1969: (www.llnl.gov/CASC/hyper)
1971: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1972: user.
1974: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1975: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1976: @*/
1977: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1978: {
1979: PetscFunctionBegin;
1981: PetscAssertPointer(dims, 3);
1982: PetscAssertPointer(starts, 4);
1984: mat->stencil.dim = dim + (dof > 1);
1985: for (PetscInt i = 0; i < dim; i++) {
1986: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1987: mat->stencil.starts[i] = starts[dim - i - 1];
1988: }
1989: mat->stencil.dims[dim] = dof;
1990: mat->stencil.starts[dim] = 0;
1991: mat->stencil.noc = (PetscBool)(dof == 1);
1992: PetscFunctionReturn(PETSC_SUCCESS);
1993: }
/*@
  MatSetValuesBlocked - Inserts or adds a block of values into a matrix.

  Not Collective

  Input Parameters:
+ mat  - the matrix
. m    - the number of block rows
. idxm - the global block indices
. n    - the number of block columns
. idxn - the global block indices
. v    - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
         See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
- addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values

  Level: intermediate

  Notes:
  If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
  MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.

  The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
  NOT the total number of rows/columns; for example, if the block size is 2 and
  you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
  The values in `idxm` would be 1 2; that is the first index for each block divided by
  the block size.

  You must call `MatSetBlockSize()` when constructing this matrix (before
  preallocating it).

  By default, the values, `v`, are stored in row-major order. See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.

  Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
  options cannot be mixed without intervening calls to the assembly
  routines.

  `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
  as well as in C.

  Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
  simply ignored. This allows easily inserting element stiffness matrices
  with homogeneous Dirichlet boundary conditions that you don't want represented
  in the matrix.

  Each time an entry is set within a sparse matrix via `MatSetValues()`,
  internal searching must be done to determine where to place the
  data in the matrix storage space. By instead inserting blocks of
  entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
  reduced.

  Example:
.vb
   Suppose m=n=2 and block size(bs) = 2 The array is

   1  2  | 3  4
   5  6  | 7  8
   - - - | - - -
   9  10 | 11 12
   13 14 | 15 16

   v[] should be passed in like
   v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]

   If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
   v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
.ve

  Fortran Notes:
  If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
.vb
  call MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES, ierr)
.ve

  If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array

.seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
@*/
PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
{
  PetscFunctionBeginHot;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  MatCheckPreallocated(mat, 1);
  /* the first call fixes the insert mode; subsequent calls must not mix modes until assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  if (PetscDefined(USE_DEBUG)) {
    /* verify every block index stays within the global matrix dimensions */
    PetscInt rbs, cbs, M, N, i;
    PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
    PetscCall(MatGetSize(mat, &M, &N));
    for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
    for (i = 0; i < n; i++)
      PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
  }
  if (mat->assembled) {
    /* inserting into an assembled matrix returns it to the unassembled state */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvaluesblocked) PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
  else {
    /* fallback: expand the block indices into point indices and use MatSetValues() */
    PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
    PetscInt i, j, bs, cbs;

    PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
    /* use the stack buffer when it can hold both expanded index arrays, otherwise allocate */
    if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      iidxm = buf;
      iidxn = buf + m * bs;
    } else {
      PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
      iidxm = bufr;
      iidxn = bufc;
    }
    for (i = 0; i < m; i++) {
      for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
    }
    if (m != n || bs != cbs || idxm != idxn) {
      for (i = 0; i < n; i++) {
        for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
      }
    } else iidxn = iidxm; /* identical row/column block indices: reuse the expanded row indices */
    PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
    PetscCall(PetscFree2(bufr, bufc));
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetValues - Gets a block of local values from a matrix.

  Not Collective; can only return values that are owned by the given process

  Input Parameters:
+ mat  - the matrix
. v    - a logically two-dimensional array for storing the values
. m    - the number of rows
. idxm - the global indices of the rows
. n    - the number of columns
- idxn - the global indices of the columns

  Level: advanced

  Notes:
  The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
  The values, `v`, are then returned in a row-oriented format,
  analogous to that used by default in `MatSetValues()`.

  `MatGetValues()` uses 0-based row and column numbers in
  Fortran as well as in C.

  `MatGetValues()` requires that the matrix has been assembled
  with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
  `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
  without intermediate matrix assembly.

  Negative row or column indices will be ignored and those locations in `v` will be
  left unchanged.

  For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
  That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
  from `MatGetOwnershipRange`(mat,&rstart,&rend).

.seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
@*/
PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* nothing requested */
  PetscAssertPointer(idxm, 3);
  PetscAssertPointer(idxn, 5);
  PetscAssertPointer(v, 6);
  /* values can only be read from a fully assembled, unfactored matrix */
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
  PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2185: /*@
2186: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2187: defined previously by `MatSetLocalToGlobalMapping()`
2189: Not Collective
2191: Input Parameters:
2192: + mat - the matrix
2193: . nrow - number of rows
2194: . irow - the row local indices
2195: . ncol - number of columns
2196: - icol - the column local indices
2198: Output Parameter:
2199: . y - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
2200: See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
2202: Level: advanced
2204: Notes:
2205: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2207: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2208: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2209: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2210: with `MatSetLocalToGlobalMapping()`.
2212: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2213: `MatSetValuesLocal()`, `MatGetValues()`
2214: @*/
2215: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2216: {
2217: PetscFunctionBeginHot;
2220: MatCheckPreallocated(mat, 1);
2221: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2222: PetscAssertPointer(irow, 3);
2223: PetscAssertPointer(icol, 5);
2224: if (PetscDefined(USE_DEBUG)) {
2225: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2226: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2227: }
2228: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2229: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2230: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2231: else {
2232: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2233: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2234: irowm = buf;
2235: icolm = buf + nrow;
2236: } else {
2237: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2238: irowm = bufr;
2239: icolm = bufc;
2240: }
2241: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2242: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2243: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2244: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2245: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2246: PetscCall(PetscFree2(bufr, bufc));
2247: }
2248: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2249: PetscFunctionReturn(PETSC_SUCCESS);
2250: }
2252: /*@
2253: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2254: the same size. Currently, this can only be called once and creates the given matrix.
2256: Not Collective
2258: Input Parameters:
2259: + mat - the matrix
2260: . nb - the number of blocks
2261: . bs - the number of rows (and columns) in each block
2262: . rows - a concatenation of the rows for each block
2263: - v - a concatenation of logically two-dimensional arrays of values
2265: Level: advanced
2267: Notes:
2268: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2270: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2272: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2273: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2274: @*/
2275: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2276: {
2277: PetscFunctionBegin;
2280: PetscAssertPointer(rows, 4);
2281: PetscAssertPointer(v, 5);
2282: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2284: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2285: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2286: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2287: PetscFunctionReturn(PETSC_SUCCESS);
2288: }
2290: /*@
2291: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2292: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2293: using a local (per-processor) numbering.
2295: Not Collective
2297: Input Parameters:
2298: + x - the matrix
2299: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2300: - cmapping - column mapping
2302: Level: intermediate
2304: Note:
2305: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2307: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2308: @*/
2309: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2310: {
2311: PetscFunctionBegin;
2316: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2317: else {
2318: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2319: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2320: }
2321: PetscFunctionReturn(PETSC_SUCCESS);
2322: }
2324: /*@
2325: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2327: Not Collective
2329: Input Parameter:
2330: . A - the matrix
2332: Output Parameters:
2333: + rmapping - row mapping
2334: - cmapping - column mapping
2336: Level: advanced
2338: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2339: @*/
2340: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2341: {
2342: PetscFunctionBegin;
2345: if (rmapping) {
2346: PetscAssertPointer(rmapping, 2);
2347: *rmapping = A->rmap->mapping;
2348: }
2349: if (cmapping) {
2350: PetscAssertPointer(cmapping, 3);
2351: *cmapping = A->cmap->mapping;
2352: }
2353: PetscFunctionReturn(PETSC_SUCCESS);
2354: }
/*@
  MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix

  Logically Collective

  Input Parameters:
+ A    - the matrix
. rmap - row layout
- cmap - column layout

  Level: advanced

  Note:
  The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.

.seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
@*/
PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
{
  PetscFunctionBegin;
  /* take references to the given layouts, dropping any previously held ones */
  PetscCall(PetscLayoutReference(rmap, &A->rmap));
  PetscCall(PetscLayoutReference(cmap, &A->cmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2382: /*@
2383: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2385: Not Collective
2387: Input Parameter:
2388: . A - the matrix
2390: Output Parameters:
2391: + rmap - row layout
2392: - cmap - column layout
2394: Level: advanced
2396: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2397: @*/
2398: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2399: {
2400: PetscFunctionBegin;
2403: if (rmap) {
2404: PetscAssertPointer(rmap, 2);
2405: *rmap = A->rmap;
2406: }
2407: if (cmap) {
2408: PetscAssertPointer(cmap, 3);
2409: *cmap = A->cmap;
2410: }
2411: PetscFunctionReturn(PETSC_SUCCESS);
2412: }
/*@
  MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
  using a local numbering of the rows and columns.

  Not Collective

  Input Parameters:
+ mat  - the matrix
. nrow - number of rows
. irow - the row local indices
. ncol - number of columns
. icol - the column local indices
. y    - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
         See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
- addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values

  Level: intermediate

  Notes:
  If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine

  Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
  options cannot be mixed without intervening calls to the assembly
  routines.

  These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
  MUST be called after all calls to `MatSetValuesLocal()` have been completed.

  Fortran Notes:
  If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
.vb
  call MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
.ve

  If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array

.seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
          `MatGetValuesLocal()`
@*/
PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(irow, 3);
  PetscAssertPointer(icol, 5);
  /* The first call fixes the insert mode; subsequent calls must use the same mode until assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  /* Setting values on an already-assembled matrix puts it back into the unassembled state */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
  else {
    /* No type-specific implementation: translate local indices to global ones and call MatSetValues().
       A stack buffer is used for small index sets to avoid heap allocation in this hot path. */
    PetscInt        buf[8192], *bufr = NULL, *bufc = NULL;
    const PetscInt *irowm, *icolm;

    if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
      bufr  = buf;
      bufc  = buf + nrow;
      irowm = bufr;
      icolm = bufc;
    } else {
      PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
      irowm = bufr;
      icolm = bufc;
    }
    if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
    else irowm = irow; /* no row mapping: the indices are already global */
    if (mat->cmap->mapping) {
      /* Reuse the already-translated row indices when rows and columns use the identical mapping and index set */
      if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
      else icolm = irowm;
    } else icolm = icol;
    PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
    if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
  using a local ordering of the nodes a block at a time.

  Not Collective

  Input Parameters:
+ mat  - the matrix
. nrow - number of rows
. irow - the row local indices
. ncol - number of columns
. icol - the column local indices
. y    - a one-dimensional array that contains the values implicitly stored as a two-dimensional array, by default in row-major order.
         See `MAT_ROW_ORIENTED` in `MatSetOption()` for how to use column-major order.
- addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values

  Level: intermediate

  Notes:
  If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
  before using this routine.

  Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
  options cannot be mixed without intervening calls to the assembly
  routines.

  These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
  MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.

  Fortran Notes:
  If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
.vb
  call MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES, ierr)
.ve

  If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array

.seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
          `MatSetValuesLocal()`, `MatSetValuesBlocked()`
@*/
PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
{
  PetscFunctionBeginHot;
  MatCheckPreallocated(mat, 1);
  if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
  PetscAssertPointer(irow, 3);
  PetscAssertPointer(icol, 5);
  /* The first call fixes the insert mode; subsequent calls must use the same mode until assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
    PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
  }
  /* Setting values on an already-assembled matrix puts it back into the unassembled state */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  /* Debug-only consistency checks: the matrix block sizes must match the block sizes of the local-to-global maps */
  if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
    PetscInt irbs, rbs;
    PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
    PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
  }
  if (PetscUnlikelyDebug(mat->cmap->mapping)) {
    PetscInt icbs, cbs;
    PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
    PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
  if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
  else {
    /* No type-specific implementation: translate local block indices to global block indices
       and call MatSetValuesBlocked(). A stack buffer avoids heap allocation for small index sets. */
    PetscInt        buf[8192], *bufr = NULL, *bufc = NULL;
    const PetscInt *irowm, *icolm;

    if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
      bufr  = buf;
      bufc  = buf + nrow;
      irowm = bufr;
      icolm = bufc;
    } else {
      PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
      irowm = bufr;
      icolm = bufc;
    }
    if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
    else irowm = irow; /* no row mapping: the indices are already global */
    if (mat->cmap->mapping) {
      /* Reuse the already-translated row indices when rows and columns use the identical mapping and index set */
      if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
      else icolm = irowm;
    } else icolm = icol;
    PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
    if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$. Where `D` is defined by the inode or block structure of the diagonal

  Collective

  Input Parameters:
+ mat - the matrix
- x   - the vector to be multiplied

  Output Parameter:
. y - the result

  Level: developer

  Note:
  The vectors `x` and `y` cannot be the same. I.e., one cannot
  call `MatMultDiagonalBlock`(A,y,y).

.seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
@*/
PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
{
  PetscFunctionBegin;
  /* Only defined for fully assembled, unfactored matrices, and x/y must be distinct */
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  MatCheckPreallocated(mat, 1);

  /* Dispatch to the type-specific implementation (errors if the type does not provide one) */
  PetscUseTypeMethod(mat, multdiagonalblock, x, y);
  PetscCall(PetscObjectStateIncrease((PetscObject)y)); /* y was modified */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMult - Computes the matrix-vector product, $y = Ax$.

  Neighbor-wise Collective

  Input Parameters:
+ mat - the matrix
- x   - the vector to be multiplied

  Output Parameter:
. y - the result

  Level: beginner

  Note:
  The vectors `x` and `y` cannot be the same. I.e., one cannot
  call `MatMult`(A,y,y).

.seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
@*/
PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
{
  PetscFunctionBegin;
  VecCheckAssembled(x);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  /* Dimension compatibility: x must match the column layout, y the row layout, globally and locally */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
  PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
  PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
  PetscCall(VecSetErrorIfLocked(y, 3)); /* y will be overwritten, so it must not be locked */
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  /* Lock x read-only for the duration of the type-specific multiply */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
  PetscUseTypeMethod(mat, mult, x, y);
  PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.

  Neighbor-wise Collective

  Input Parameters:
+ mat - the matrix
- x   - the vector to be multiplied

  Output Parameter:
. y - the result

  Level: beginner

  Notes:
  The vectors `x` and `y` cannot be the same. I.e., one cannot
  call `MatMultTranspose`(A,y,y).

  For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiple,
  use `MatMultHermitianTranspose()`

.seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
@*/
PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
{
  PetscErrorCode (*op)(Mat, Vec, Vec) = NULL; /* the routine actually dispatched below */

  PetscFunctionBegin;
  VecCheckAssembled(x);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  /* Dimension compatibility for the transpose product: x matches rows, y matches columns */
  PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
  PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
  PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
  PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  if (!mat->ops->multtranspose) {
    /* Fall back to the plain multiply only when the matrix is known symmetric (A^T = A) */
    if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
    PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
  } else op = mat->ops->multtranspose;
  PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
  PetscCall(VecLockReadPush(x)); /* x is read-only during the product */
  PetscCall((*op)(mat, x, y));
  PetscCall(VecLockReadPop(x));
  PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)y)); /* y was modified */
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.

  Neighbor-wise Collective

  Input Parameters:
+ mat - the matrix
- x   - the vector to be multiplied

  Output Parameter:
. y - the result

  Level: beginner

  Notes:
  The vectors `x` and `y` cannot be the same. I.e., one cannot
  call `MatMultHermitianTranspose`(A,y,y).

  Also called the conjugate transpose, complex conjugate transpose, or adjoint.

  For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.

.seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
@*/
PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
  /* Dimension compatibility for the (Hermitian-)transpose product: x matches rows, y matches columns */
  PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
  PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
  PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
  PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
#if defined(PETSC_USE_COMPLEX)
  if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
    /* Either a native A^H x implementation, or A is known Hermitian so A^H x = A x */
    PetscCall(VecLockReadPush(x));
    if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
    else PetscUseTypeMethod(mat, mult, x, y);
    PetscCall(VecLockReadPop(x));
  } else {
    /* Fallback via conjugation: A^H x = conj(A^T conj(x)), at the cost of a work vector */
    Vec w;
    PetscCall(VecDuplicate(x, &w));
    PetscCall(VecCopy(x, w));
    PetscCall(VecConjugate(w));
    PetscCall(MatMultTranspose(mat, w, y));
    PetscCall(VecDestroy(&w));
    PetscCall(VecConjugate(y));
  }
  PetscCall(PetscObjectStateIncrease((PetscObject)y));
#else
  /* For real scalars the Hermitian transpose is just the transpose */
  PetscCall(MatMultTranspose(mat, x, y));
#endif
  PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatMultAdd - Computes $v3 = v2 + A * v1$.

  Neighbor-wise Collective

  Input Parameters:
+ mat - the matrix
. v1  - the vector to be multiplied by `mat`
- v2  - the vector to be added to the result

  Output Parameter:
. v3 - the result

  Level: beginner

  Note:
  The vectors `v1` and `v3` cannot be the same. I.e., one cannot
  call `MatMultAdd`(A,v1,v2,v1).

.seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
@*/
PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
  /* Global checks on v2/v3 are intentionally disabled; only the local row sizes are enforced below.
     PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
     PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
  PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
  PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
  PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
  PetscCall(VecLockReadPush(v1)); /* v1 is read-only during the product */
  PetscUseTypeMethod(mat, multadd, v1, v2, v3);
  PetscCall(VecLockReadPop(v1));
  PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
  PetscCall(PetscObjectStateIncrease((PetscObject)v3)); /* v3 was modified */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2861: /*@
2862: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2864: Neighbor-wise Collective
2866: Input Parameters:
2867: + mat - the matrix
2868: . v1 - the vector to be multiplied by the transpose of the matrix
2869: - v2 - the vector to be added to the result
2871: Output Parameter:
2872: . v3 - the result
2874: Level: beginner
2876: Note:
2877: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2878: call `MatMultTransposeAdd`(A,v1,v2,v1).
2880: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2881: @*/
2882: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2883: {
2884: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2886: PetscFunctionBegin;
2893: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2894: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2895: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2896: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2897: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2898: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2899: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2900: MatCheckPreallocated(mat, 1);
2902: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2903: PetscCall(VecLockReadPush(v1));
2904: PetscCall((*op)(mat, v1, v2, v3));
2905: PetscCall(VecLockReadPop(v1));
2906: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2907: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2908: PetscFunctionReturn(PETSC_SUCCESS);
2909: }
/*@
  MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.

  Neighbor-wise Collective

  Input Parameters:
+ mat - the matrix
. v1  - the vector to be multiplied by the Hermitian transpose
- v2  - the vector to be added to the result

  Output Parameter:
. v3 - the result

  Level: beginner

  Note:
  The vectors `v1` and `v3` cannot be the same. I.e., one cannot
  call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).

.seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
@*/
PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
{
  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
  /* Dimension compatibility for the Hermitian-transpose product: v1 matches rows, v2/v3 match columns */
  PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
  PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
  PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
  PetscCall(VecLockReadPush(v1)); /* v1 is read-only during the product */
  if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
  else {
    /* Fallback via conjugation: A^H v1 = conj(A^T conj(v1)), computed into a work vector z,
       then v3 = v2 + z (VecWAXPY when v2 and v3 are distinct, VecAXPY when aliased) */
    Vec w, z;
    PetscCall(VecDuplicate(v1, &w));
    PetscCall(VecCopy(v1, w));
    PetscCall(VecConjugate(w));
    PetscCall(VecDuplicate(v3, &z));
    PetscCall(MatMultTranspose(mat, w, z));
    PetscCall(VecDestroy(&w));
    PetscCall(VecConjugate(z));
    if (v2 != v3) PetscCall(VecWAXPY(v3, 1.0, v2, z));
    else PetscCall(VecAXPY(v3, 1.0, z));
    PetscCall(VecDestroy(&z));
  }
  PetscCall(VecLockReadPop(v1));
  PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
  PetscCall(PetscObjectStateIncrease((PetscObject)v3)); /* v3 was modified */
  PetscFunctionReturn(PETSC_SUCCESS);
}
2971: PetscErrorCode MatADot_Default(Mat mat, Vec x, Vec y, PetscScalar *val)
2972: {
2973: PetscFunctionBegin;
2974: if (!mat->dot_vec) PetscCall(MatCreateVecs(mat, &mat->dot_vec, NULL));
2975: PetscCall(MatMult(mat, x, mat->dot_vec));
2976: PetscCall(VecDot(mat->dot_vec, y, val));
2977: PetscFunctionReturn(PETSC_SUCCESS);
2978: }
2980: PetscErrorCode MatANorm_Default(Mat mat, Vec x, PetscReal *val)
2981: {
2982: PetscScalar sval;
2984: PetscFunctionBegin;
2985: PetscCall(MatADot_Default(mat, x, x, &sval));
2986: PetscCheck(PetscRealPart(sval) >= 0.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix argument is not positive definite");
2987: PetscCheck(PetscAbsReal(PetscImaginaryPart(sval)) < 100 * PETSC_MACHINE_EPSILON, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix argument is not Hermitian");
2988: *val = PetscSqrtReal(PetscRealPart(sval));
2989: PetscFunctionReturn(PETSC_SUCCESS);
2990: }
/*@
  MatADot - Computes the inner product with respect to a matrix, i.e., $(x, y)_A = y^H A x$ where $A$ is symmetric (Hermitian when using complex)
  positive definite.

  Collective

  Input Parameters:
+ mat - matrix used to define the inner product
. x   - first vector
- y   - second vector

  Output Parameter:
. val - the dot product with respect to `A`

  Level: intermediate

  Note:
  For complex vectors, `MatADot()` computes
  $$
  val = (x,y)_A = y^H A x,
  $$
  where $y^H$ denotes the conjugate transpose of `y`. Note that this corresponds to the "mathematicians" complex
  inner product where the SECOND argument gets the complex conjugate.

.seealso: [](ch_matrices), `Mat`, `MatANorm()`, `VecDot()`, `VecNorm()`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`
@*/
PetscErrorCode MatADot(Mat mat, Vec x, Vec y, PetscScalar *val)
{
  PetscFunctionBegin;
  VecCheckAssembled(x);
  VecCheckAssembled(y);
  PetscAssertPointer(val, 4);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* Dimension compatibility: x against columns, y against rows, globally and locally */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
  PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
  PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  /* Both vectors are read-only inputs for the duration of the operation */
  PetscCall(VecLockReadPush(x));
  PetscCall(VecLockReadPush(y));
  PetscCall(PetscLogEventBegin(MAT_ADot, mat, x, y, 0));
  PetscUseTypeMethod(mat, adot, x, y, val);
  PetscCall(PetscLogEventEnd(MAT_ADot, mat, x, y, 0));
  PetscCall(VecLockReadPop(y));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatANorm - Computes the norm with respect to a matrix, i.e., $(x, x)_A^{1/2} = (x^H A x)^{1/2}$ where $A$ is symmetric (Hermitian when using complex)
  positive definite.

  Collective

  Input Parameters:
+ mat - matrix used to define norm
- x   - the vector to compute the norm of

  Output Parameter:
. val - the norm with respect to `A`

  Level: intermediate

  Note:
  For complex vectors, `MatANorm()` computes
  $$
  val = (x,x)_A^{1/2} = (x^H A x)^{1/2},
  $$
  where $x^H$ denotes the conjugate transpose of `x`.

.seealso: [](ch_matrices), `Mat`, `MatADot()`, `VecDot()`, `VecNorm()`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`
@*/
PetscErrorCode MatANorm(Mat mat, Vec x, PetscReal *val)
{
  PetscFunctionBegin;
  VecCheckAssembled(x);
  PetscAssertPointer(val, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* x must be compatible with BOTH layouts since it appears on both sides of x^H A x (A must be square) */
  PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
  PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
  PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
  PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
  if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  MatCheckPreallocated(mat, 1);

  PetscCall(VecLockReadPush(x)); /* x is a read-only input */
  PetscCall(PetscLogEventBegin(MAT_ANorm, mat, x, 0, 0));
  PetscUseTypeMethod(mat, anorm, x, val);
  PetscCall(PetscLogEventEnd(MAT_ANorm, mat, x, 0, 0));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3100: /*@
3101: MatGetFactorType - gets the type of factorization a matrix is
3103: Not Collective
3105: Input Parameter:
3106: . mat - the matrix
3108: Output Parameter:
3109: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3111: Level: intermediate
3113: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3114: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3115: @*/
3116: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
3117: {
3118: PetscFunctionBegin;
3121: PetscAssertPointer(t, 2);
3122: *t = mat->factortype; /* plain query of the stored factor type; no communication (Not Collective) */
3123: PetscFunctionReturn(PETSC_SUCCESS);
3124: }
3126: /*@
3127: MatSetFactorType - sets the type of factorization a matrix is
3129: Logically Collective
3131: Input Parameters:
3132: + mat - the matrix
3133: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3135: Level: intermediate
3137: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
3138: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
3139: @*/
3140: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3141: {
3142: PetscFunctionBegin;
3145: mat->factortype = t; /* directly overwrites the flag; no validation of t against the matrix state is performed here */
3146: PetscFunctionReturn(PETSC_SUCCESS);
3147: }
3149: /*@
3150: MatGetInfo - Returns information about matrix storage (number of
3151: nonzeros, memory, etc.).
3153: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3155: Input Parameters:
3156: + mat - the matrix
3157: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3159: Output Parameter:
3160: . info - matrix information context
3162: Options Database Key:
3163: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3165: Level: intermediate
3167: Notes:
3168: The `MatInfo` context contains a variety of matrix data, including
3169: number of nonzeros allocated and used, number of mallocs during
3170: matrix assembly, etc. Additional information for factored matrices
3171: is provided (such as the fill ratio, number of mallocs during
3172: factorization, etc.).
3174: Example:
3175: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3176: data within the `MatInfo` context. For example,
3177: .vb
3178: MatInfo info;
3179: Mat A;
3180: double mal, nz_a, nz_u;
3182: MatGetInfo(A, MAT_LOCAL, &info);
3183: mal = info.mallocs;
3184: nz_a = info.nz_allocated;
3185: .ve
3187: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3188: @*/
3189: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3190: {
3191: PetscFunctionBegin;
3194: PetscAssertPointer(info, 3);
3195: MatCheckPreallocated(mat, 1);
3196: PetscUseTypeMethod(mat, getinfo, flag, info); /* each matrix type fills the MatInfo struct itself, honoring flag (local/max/sum) */
3197: PetscFunctionReturn(PETSC_SUCCESS);
3198: }
3200: /*
3201: This is used by external packages where it is not easy to get the info from the actual
3202: matrix factorization.
3203: */
3204: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3205: {
3206: PetscFunctionBegin;
3207: PetscCall(PetscMemzero(info, sizeof(MatInfo))); /* external factorization packages cannot report stats, so return all zeros (see comment above) */
3208: PetscFunctionReturn(PETSC_SUCCESS);
3209: }
3211: /*@
3212: MatLUFactor - Performs in-place LU factorization of matrix.
3214: Collective
3216: Input Parameters:
3217: + mat - the matrix
3218: . row - row permutation
3219: . col - column permutation
3220: - info - options for factorization, includes
3221: .vb
3222: fill - expected fill as ratio of original fill.
3223: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3224: Run with the option -info to determine an optimal value to use
3225: .ve
3227: Level: developer
3229: Notes:
3230: Most users should employ the `KSP` interface for linear solvers
3231: instead of working directly with matrix algebra routines such as this.
3232: See, e.g., `KSPCreate()`.
3234: This changes the state of the matrix to a factored matrix; it cannot be used
3235: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3237: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3238: when not using `KSP`.
3240: Fortran Note:
3241: A valid (non-null) `info` argument must be provided
3243: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3244: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3245: @*/
3246: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3247: {
3248: MatFactorInfo tinfo; /* default options, used only when the caller passes info == NULL */
3250: PetscFunctionBegin;
3254: if (info) PetscAssertPointer(info, 4); /* info is optional here; defaults substituted below */
3256: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3257: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3258: MatCheckPreallocated(mat, 1);
3259: if (!info) { /* caller passed NULL: fall back to default factorization options */
3260: PetscCall(MatFactorInfoInitialize(&tinfo));
3261: info = &tinfo;
3262: }
3264: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3265: PetscUseTypeMethod(mat, lufactor, row, col, info); /* type-specific in-place LU */
3266: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3267: PetscCall(PetscObjectStateIncrease((PetscObject)mat)); /* values were replaced by the factors */
3268: PetscFunctionReturn(PETSC_SUCCESS);
3269: }
3271: /*@
3272: MatILUFactor - Performs in-place ILU factorization of matrix.
3274: Collective
3276: Input Parameters:
3277: + mat - the matrix
3278: . row - row permutation
3279: . col - column permutation
3280: - info - structure containing
3281: .vb
3282: levels - number of levels of fill.
3283: expected fill - as ratio of original fill.
3284: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3285: missing diagonal entries)
3286: .ve
3288: Level: developer
3290: Notes:
3291: Most users should employ the `KSP` interface for linear solvers
3292: instead of working directly with matrix algebra routines such as this.
3293: See, e.g., `KSPCreate()`.
3295: Probably really in-place only when level of fill is zero, otherwise allocates
3296: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3297: when not using `KSP`.
3299: Fortran Note:
3300: A valid (non-null) `info` argument must be provided
3302: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3303: @*/
3304: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3305: {
3306: PetscFunctionBegin;
3310: PetscAssertPointer(info, 4); /* NOTE(review): unlike MatLUFactor(), a NULL info is not accepted here — no default substitution */
3312: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3313: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3314: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3315: MatCheckPreallocated(mat, 1);
3317: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3318: PetscUseTypeMethod(mat, ilufactor, row, col, info); /* type-specific in-place ILU */
3319: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3320: PetscCall(PetscObjectStateIncrease((PetscObject)mat)); /* values were replaced by the factors */
3321: PetscFunctionReturn(PETSC_SUCCESS);
3322: }
3324: /*@
3325: MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
3326: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3328: Collective
3330: Input Parameters:
3331: + fact - the factor matrix obtained with `MatGetFactor()`
3332: . mat - the matrix
3333: . row - the row permutation
3334: . col - the column permutation
3335: - info - options for factorization, includes
3336: .vb
3337: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3338: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3339: .ve
3341: Level: developer
3343: Notes:
3344: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3346: Most users should employ the simplified `KSP` interface for linear solvers
3347: instead of working directly with matrix algebra routines such as this.
3348: See, e.g., `KSPCreate()`.
3350: Fortran Note:
3351: A valid (non-null) `info` argument must be provided
3353: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3354: @*/
3355: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3356: {
3357: MatFactorInfo tinfo; /* default options, used only when info == NULL */
3359: PetscFunctionBegin;
3364: if (info) PetscAssertPointer(info, 5);
3367: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3368: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3369: MatCheckPreallocated(mat, 2);
3370: if (!info) { /* caller passed NULL: fall back to default factorization options */
3371: PetscCall(MatFactorInfoInitialize(&tinfo));
3372: info = &tinfo;
3373: }
3375: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0)); /* skip logging when the symbolic phase is trivial for this factor type */
3376: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3377: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3378: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3379: PetscFunctionReturn(PETSC_SUCCESS);
3380: }
3382: /*@
3383: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3384: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3386: Collective
3388: Input Parameters:
3389: + fact - the factor matrix obtained with `MatGetFactor()`
3390: . mat - the matrix
3391: - info - options for factorization
3393: Level: developer
3395: Notes:
3396: See `MatLUFactor()` for in-place factorization. See
3397: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3399: Most users should employ the `KSP` interface for linear solvers
3400: instead of working directly with matrix algebra routines such as this.
3401: See, e.g., `KSPCreate()`.
3403: Fortran Note:
3404: A valid (non-null) `info` argument must be provided
3406: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3407: @*/
3408: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3409: {
3410: MatFactorInfo tinfo; /* default options, used only when info == NULL */
3412: PetscFunctionBegin;
3417: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3418: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3419: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N); /* factor must have the same global shape as the matrix being factored */
3421: MatCheckPreallocated(mat, 2);
3422: if (!info) { /* caller passed NULL: fall back to default factorization options */
3423: PetscCall(MatFactorInfoInitialize(&tinfo));
3424: info = &tinfo;
3425: }
3427: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0)); /* when symbolic was trivial, log this as a full LU factor instead */
3428: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3429: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3430: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3431: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3432: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view")); /* optional viewing of the computed factor */
3433: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3434: PetscFunctionReturn(PETSC_SUCCESS);
3435: }
3437: /*@
3438: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3439: symmetric matrix.
3441: Collective
3443: Input Parameters:
3444: + mat - the matrix
3445: . perm - row and column permutations
3446: - info - expected fill as ratio of original fill
3448: Level: developer
3450: Notes:
3451: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3452: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3454: Most users should employ the `KSP` interface for linear solvers
3455: instead of working directly with matrix algebra routines such as this.
3456: See, e.g., `KSPCreate()`.
3458: Fortran Note:
3459: A valid (non-null) `info` argument must be provided
3461: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3462: `MatGetOrdering()`
3463: @*/
3464: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3465: {
3466: MatFactorInfo tinfo; /* default options, used only when info == NULL */
3468: PetscFunctionBegin;
3471: if (info) PetscAssertPointer(info, 3);
3473: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3474: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3475: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3476: MatCheckPreallocated(mat, 1);
3477: if (!info) { /* caller passed NULL: fall back to default factorization options */
3478: PetscCall(MatFactorInfoInitialize(&tinfo));
3479: info = &tinfo;
3480: }
3482: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3483: PetscUseTypeMethod(mat, choleskyfactor, perm, info); /* type-specific in-place Cholesky */
3484: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3485: PetscCall(PetscObjectStateIncrease((PetscObject)mat)); /* values were replaced by the factors */
3486: PetscFunctionReturn(PETSC_SUCCESS);
3487: }
3489: /*@
3490: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3491: of a symmetric matrix.
3493: Collective
3495: Input Parameters:
3496: + fact - the factor matrix obtained with `MatGetFactor()`
3497: . mat - the matrix
3498: . perm - row and column permutations
3499: - info - options for factorization, includes
3500: .vb
3501: fill - expected fill as ratio of original fill.
3502: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3503: Run with the option -info to determine an optimal value to use
3504: .ve
3506: Level: developer
3508: Notes:
3509: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3510: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3512: Most users should employ the `KSP` interface for linear solvers
3513: instead of working directly with matrix algebra routines such as this.
3514: See, e.g., `KSPCreate()`.
3516: Fortran Note:
3517: A valid (non-null) `info` argument must be provided
3519: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3520: `MatGetOrdering()`
3521: @*/
3522: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3523: {
3524: MatFactorInfo tinfo; /* default options, used only when info == NULL */
3526: PetscFunctionBegin;
3530: if (info) PetscAssertPointer(info, 4);
3533: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3534: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3535: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3536: MatCheckPreallocated(mat, 2);
3537: if (!info) { /* caller passed NULL: fall back to default factorization options */
3538: PetscCall(MatFactorInfoInitialize(&tinfo));
3539: info = &tinfo;
3540: }
3542: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0)); /* skip logging when the symbolic phase is trivial */
3543: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3544: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3545: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3546: PetscFunctionReturn(PETSC_SUCCESS);
3547: }
3549: /*@
3550: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3551: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3552: `MatCholeskyFactorSymbolic()`.
3554: Collective
3556: Input Parameters:
3557: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3558: . mat - the initial matrix that is to be factored
3559: - info - options for factorization
3561: Level: developer
3563: Note:
3564: Most users should employ the `KSP` interface for linear solvers
3565: instead of working directly with matrix algebra routines such as this.
3566: See, e.g., `KSPCreate()`.
3568: Fortran Note:
3569: A valid (non-null) `info` argument must be provided
3571: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3572: @*/
3573: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3574: {
3575: MatFactorInfo tinfo; /* default options, used only when info == NULL */
3577: PetscFunctionBegin;
3582: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3583: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3584: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N); /* factor must have the same global shape as the matrix being factored */
3585: MatCheckPreallocated(mat, 2);
3586: if (!info) { /* caller passed NULL: fall back to default factorization options */
3587: PetscCall(MatFactorInfoInitialize(&tinfo));
3588: info = &tinfo;
3589: }
3591: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0)); /* when symbolic was trivial, log as a full Cholesky factor instead */
3592: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3593: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3594: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3595: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3596: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view")); /* optional viewing of the computed factor */
3597: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3598: PetscFunctionReturn(PETSC_SUCCESS);
3599: }
3601: /*@
3602: MatQRFactor - Performs in-place QR factorization of matrix.
3604: Collective
3606: Input Parameters:
3607: + mat - the matrix
3608: . col - column permutation
3609: - info - options for factorization, includes
3610: .vb
3611: fill - expected fill as ratio of original fill.
3612: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3613: Run with the option -info to determine an optimal value to use
3614: .ve
3616: Level: developer
3618: Notes:
3619: Most users should employ the `KSP` interface for linear solvers
3620: instead of working directly with matrix algebra routines such as this.
3621: See, e.g., `KSPCreate()`.
3623: This changes the state of the matrix to a factored matrix; it cannot be used
3624: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3626: Fortran Note:
3627: A valid (non-null) `info` argument must be provided
3629: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3630: `MatSetUnfactored()`
3631: @*/
3632: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3633: {
3634: PetscFunctionBegin;
3637: if (info) PetscAssertPointer(info, 3); /* NOTE(review): a NULL info is forwarded as-is here; no default substitution as in MatLUFactor() */
3639: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3640: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3641: MatCheckPreallocated(mat, 1);
3642: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3643: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info)); /* QR is provided through a composed function, not an ops-table entry */
3644: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3645: PetscCall(PetscObjectStateIncrease((PetscObject)mat)); /* values were replaced by the factors */
3646: PetscFunctionReturn(PETSC_SUCCESS);
3647: }
3649: /*@
3650: MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
3651: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3653: Collective
3655: Input Parameters:
3656: + fact - the factor matrix obtained with `MatGetFactor()`
3657: . mat - the matrix
3658: . col - column permutation
3659: - info - options for factorization, includes
3660: .vb
3661: fill - expected fill as ratio of original fill.
3662: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3663: Run with the option -info to determine an optimal value to use
3664: .ve
3666: Level: developer
3668: Note:
3669: Most users should employ the `KSP` interface for linear solvers
3670: instead of working directly with matrix algebra routines such as this.
3671: See, e.g., `KSPCreate()`.
3673: Fortran Note:
3674: A valid (non-null) `info` argument must be provided
3676: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3677: @*/
3678: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3679: {
3680: MatFactorInfo tinfo; /* default options, used only when info == NULL */
3682: PetscFunctionBegin;
3686: if (info) PetscAssertPointer(info, 4);
3689: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3690: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3691: MatCheckPreallocated(mat, 2);
3692: if (!info) { /* caller passed NULL: fall back to default factorization options */
3693: PetscCall(MatFactorInfoInitialize(&tinfo));
3694: info = &tinfo;
3695: }
3697: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0)); /* skip logging when the symbolic phase is trivial */
3698: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info)); /* composed function, not an ops-table entry */
3699: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3700: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3701: PetscFunctionReturn(PETSC_SUCCESS);
3702: }
3704: /*@
3705: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3706: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3708: Collective
3710: Input Parameters:
3711: + fact - the factor matrix obtained with `MatGetFactor()`
3712: . mat - the matrix
3713: - info - options for factorization
3715: Level: developer
3717: Notes:
3718: See `MatQRFactor()` for in-place factorization.
3720: Most users should employ the `KSP` interface for linear solvers
3721: instead of working directly with matrix algebra routines such as this.
3722: See, e.g., `KSPCreate()`.
3724: Fortran Note:
3725: A valid (non-null) `info` argument must be provided
3727: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3728: @*/
3729: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3730: {
3731: MatFactorInfo tinfo; /* default options, used only when info == NULL */
3733: PetscFunctionBegin;
3738: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3739: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3740: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N); /* factor must have the same global shape as the matrix being factored */
3742: MatCheckPreallocated(mat, 2);
3743: if (!info) { /* caller passed NULL: fall back to default factorization options */
3744: PetscCall(MatFactorInfoInitialize(&tinfo));
3745: info = &tinfo;
3746: }
3748: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0)); /* when symbolic was trivial, log as a full QR factor instead */
3749: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3750: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info)); /* composed function, not an ops-table entry */
3751: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3752: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3753: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view")); /* optional viewing of the computed factor */
3754: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3755: PetscFunctionReturn(PETSC_SUCCESS);
3756: }
3758: /*@
3759: MatSolve - Solves $A x = b$, given a factored matrix.
3761: Neighbor-wise Collective
3763: Input Parameters:
3764: + mat - the factored matrix
3765: - b - the right-hand-side vector
3767: Output Parameter:
3768: . x - the result vector
3770: Level: developer
3772: Notes:
3773: The vectors `b` and `x` cannot be the same. I.e., one cannot
3774: call `MatSolve`(A,x,x).
3776: Most users should employ the `KSP` interface for linear solvers
3777: instead of working directly with matrix algebra routines such as this.
3778: See, e.g., `KSPCreate()`.
3780: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3781: @*/
3782: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3783: {
3784: PetscFunctionBegin;
3789: PetscCheckSameComm(mat, 1, b, 2);
3790: PetscCheckSameComm(mat, 1, x, 3);
3791: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3792: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3793: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3794: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3795: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0x0 matrix: nothing to solve */
3796: MatCheckPreallocated(mat, 1);
3798: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3799: PetscCall(VecFlag(x, mat->factorerrortype)); /* presumably marks x invalid if the factorization failed — see VecFlag() */
3800: if (mat->factorerrortype) PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype)); /* skip the solve entirely on a failed factorization; log instead of erroring */
3801: else PetscUseTypeMethod(mat, solve, b, x);
3802: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3803: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x was (potentially) overwritten */
3804: PetscFunctionReturn(PETSC_SUCCESS);
3805: }
3807: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans) /* column-by-column fallback: solve A sol = rhs (or the transpose problem) for each column of B */
3808: {
3809: Vec rhs, sol;
3810: PetscInt ncol, col;
3811: PetscErrorCode (*solvefn)(Mat, Vec, Vec);
3812: PetscBool cpubound, convertB = PETSC_FALSE, convertX = PETSC_FALSE;
3814: PetscFunctionBegin;
3815: if (A->factorerrortype) {
3816: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3817: PetscCall(MatSetInf(X)); /* flag the whole solution as invalid rather than erroring */
3818: PetscFunctionReturn(PETSC_SUCCESS);
3819: }
3820: solvefn = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose; /* a symmetric A may reuse the plain solve for the transpose case */
3821: PetscCheck(solvefn, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3822: PetscCall(MatBoundToCPU(A, &cpubound));
3823: if (!cpubound) { /* only host dense matrices are candidates for device conversion below */
3824: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &convertB, MATSEQDENSE, MATMPIDENSE, ""));
3825: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &convertX, MATSEQDENSE, MATMPIDENSE, ""));
3826: }
3827: #if PetscDefined(HAVE_CUDA)
3828: if (convertB) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3829: if (convertX) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3830: #elif PetscDefined(HAVE_HIP)
3831: if (convertB) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3832: if (convertX) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3833: #endif
3834: PetscCall(MatGetSize(B, NULL, &ncol));
3835: for (col = 0; col < ncol; col++) { /* one triangular solve per right-hand-side column */
3836: PetscCall(MatDenseGetColumnVecRead(B, col, &rhs));
3837: PetscCall(MatDenseGetColumnVecWrite(X, col, &sol));
3838: PetscCall((*solvefn)(A, rhs, sol));
3839: PetscCall(MatDenseRestoreColumnVecWrite(X, col, &sol));
3840: PetscCall(MatDenseRestoreColumnVecRead(B, col, &rhs));
3841: }
3842: if (convertB) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B)); /* convert back so the caller sees the original types */
3843: if (convertX) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3844: PetscFunctionReturn(PETSC_SUCCESS);
3845: }
3847: /*@
3848: MatMatSolve - Solves $A X = B$, given a factored matrix.
3850: Neighbor-wise Collective
3852: Input Parameters:
3853: + A - the factored matrix
3854: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3856: Output Parameter:
3857: . X - the result matrix (dense matrix)
3859: Level: developer
3861: Note:
3862: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3863: otherwise, `B` and `X` cannot be the same.
3865: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3866: @*/
3867: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3868: {
3869: PetscFunctionBegin;
3874: PetscCheckSameComm(A, 1, B, 2);
3875: PetscCheckSameComm(A, 1, X, 3);
3876: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3877: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3878: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3879: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0x0 matrix: nothing to solve */
3880: MatCheckPreallocated(A, 1);
3882: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3883: if (!A->ops->matsolve) { /* fall back to a column-by-column solve when the type has no blocked kernel */
3884: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3885: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3886: } else PetscUseTypeMethod(A, matsolve, B, X);
3887: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3888: PetscCall(PetscObjectStateIncrease((PetscObject)X)); /* X was overwritten */
3889: PetscFunctionReturn(PETSC_SUCCESS);
3890: }
3892: /*@
3893: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3895: Neighbor-wise Collective
3897: Input Parameters:
3898: + A - the factored matrix
3899: - B - the right-hand-side matrix (`MATDENSE` matrix)
3901: Output Parameter:
3902: . X - the result matrix (dense matrix)
3904: Level: developer
3906: Note:
3907: The matrices `B` and `X` cannot be the same. I.e., one cannot
3908: call `MatMatSolveTranspose`(A,X,X).
3910: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3911: @*/
3912: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3913: {
3914: PetscFunctionBegin;
3919: PetscCheckSameComm(A, 1, B, 2);
3920: PetscCheckSameComm(A, 1, X, 3);
3921: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices"); /* the solve cannot run in place */
3922: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3923: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3924: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3925: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as rhs matrix"); /* message fixed: the test is >=, X may carry extra columns */
3926: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to solve */
3927: MatCheckPreallocated(A, 1);
3929: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0)); /* logged under MAT_MatSolve; no separate event for the transpose variant */
3930: if (!A->ops->matsolvetranspose) { /* no type-specific multi-RHS transpose kernel: use the generic fallback */
3931: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3932: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE)); /* PETSC_TRUE selects the transposed solve */
3933: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3934: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3935: PetscCall(PetscObjectStateIncrease((PetscObject)X)); /* X changed: invalidate cached derived data */
3936: PetscFunctionReturn(PETSC_SUCCESS);
3937: }
3939: /*@
3940: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3942: Neighbor-wise Collective
3944: Input Parameters:
3945: + A - the factored matrix
3946: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3948: Output Parameter:
3949: . X - the result matrix (dense matrix)
3951: Level: developer
3953: Note:
3954: For MUMPS, it only supports centralized sparse compressed column format on the host processor for right-hand side matrix. User must create `Bt` in sparse compressed row
3955: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3957: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3958: @*/
3959: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3960: {
3961: PetscFunctionBegin;
3966: PetscCheckSameComm(A, 1, Bt, 2);
3967: PetscCheckSameComm(A, 1, X, 3);
3969: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and Bt must be different matrices"); /* message fixed: the parameter is named Bt, not B */
3970: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3971: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N); /* Bt is the transpose of the rhs, so its columns pair with rows of A */
3972: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows"); /* message fixed: the test is >=, X may carry extra columns */
3973: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to solve */
3974: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3975: MatCheckPreallocated(A, 1);
3977: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3978: PetscUseTypeMethod(A, mattransposesolve, Bt, X); /* no generic fallback here: errors if the type lacks the kernel */
3979: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3980: PetscCall(PetscObjectStateIncrease((PetscObject)X)); /* X changed: invalidate cached derived data */
3981: PetscFunctionReturn(PETSC_SUCCESS);
3982: }
3984: /*@
3985: MatForwardSolve - Solves $ L x = b $, given a factored matrix, $A = LU $, or
3986: $U^T D^{1/2} x = b$, given a factored symmetric matrix, $A = U^T D U$,
3988: Neighbor-wise Collective
3990: Input Parameters:
3991: + mat - the factored matrix
3992: - b - the right-hand-side vector
3994: Output Parameter:
3995: . x - the result vector
3997: Level: developer
3999: Notes:
4000: `MatSolve()` should be used for most applications, as it performs
4001: a forward solve followed by a backward solve.
4003: The vectors `b` and `x` cannot be the same, i.e., one cannot
4004: call `MatForwardSolve`(A,x,x).
4006: For matrix in `MATSEQBAIJ` format with block size larger than 1,
4007: the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
4008: `MatForwardSolve()` solves $U^T*D y = b$, and
4009: `MatBackwardSolve()` solves $U x = y$.
4010: Thus they do not provide a symmetric preconditioner.
4012: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
4013: @*/
4014: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
4015: {
4016: PetscFunctionBegin;
4021: PetscCheckSameComm(mat, 1, b, 2);
4022: PetscCheckSameComm(mat, 1, x, 3);
4023: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); /* in-place forward solve is not supported */
4024: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4025: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4026: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n); /* local check, so error on PETSC_COMM_SELF */
4027: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
4028: MatCheckPreallocated(mat, 1);
4030: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
4031: PetscUseTypeMethod(mat, forwardsolve, b, x); /* errors if the matrix type provides no forwardsolve kernel */
4032: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
4033: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached derived data */
4034: PetscFunctionReturn(PETSC_SUCCESS);
4035: }
4037: /*@
4038: MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$, or
4039: $D^{1/2} U x = b$, given a factored symmetric matrix, $A = U^T D U$,
4041: Neighbor-wise Collective
4043: Input Parameters:
4044: + mat - the factored matrix
4045: - b - the right-hand-side vector
4047: Output Parameter:
4048: . x - the result vector
4050: Level: developer
4052: Notes:
4053: `MatSolve()` should be used for most applications, as it performs
4054: a forward solve followed by a backward solve.
4056: The vectors `b` and `x` cannot be the same. I.e., one cannot
4057: call `MatBackwardSolve`(A,x,x).
4059: For matrix in `MATSEQBAIJ` format with block size larger than 1,
4060: the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
4061: `MatForwardSolve()` solves $U^T*D y = b$, and
4062: `MatBackwardSolve()` solves $U x = y$.
4063: Thus they do not provide a symmetric preconditioner.
4065: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
4066: @*/
4067: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
4068: {
4069: PetscFunctionBegin;
4074: PetscCheckSameComm(mat, 1, b, 2);
4075: PetscCheckSameComm(mat, 1, x, 3);
4076: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); /* in-place backward solve is not supported */
4077: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4078: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4079: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n); /* local check, so error on PETSC_COMM_SELF */
4080: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
4081: MatCheckPreallocated(mat, 1);
4083: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
4084: PetscUseTypeMethod(mat, backwardsolve, b, x); /* errors if the matrix type provides no backwardsolve kernel */
4085: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
4086: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached derived data */
4087: PetscFunctionReturn(PETSC_SUCCESS);
4088: }
4090: /*@
4091: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
4093: Neighbor-wise Collective
4095: Input Parameters:
4096: + mat - the factored matrix
4097: . b - the right-hand-side vector
4098: - y - the vector to be added to
4100: Output Parameter:
4101: . x - the result vector
4103: Level: developer
4105: Note:
4106: The vectors `b` and `x` cannot be the same. I.e., one cannot
4107: call `MatSolveAdd`(A,x,y,x).
4109: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
4110: @*/
4111: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
4112: {
4113: PetscScalar one = 1.0;
4114: Vec tmp; /* scratch copy of y, only needed when x and y alias */
4116: PetscFunctionBegin;
4122: PetscCheckSameComm(mat, 1, b, 2);
4123: PetscCheckSameComm(mat, 1, y, 3);
4124: PetscCheckSameComm(mat, 1, x, 4);
4125: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); /* x == y is allowed and handled below; x == b is not */
4126: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4127: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4128: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4129: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4130: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4131: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
4132: MatCheckPreallocated(mat, 1);
4134: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4135: PetscCall(VecFlag(x, mat->factorerrortype)); /* presumably marks x invalid when the factorization failed -- confirm VecFlag semantics */
4136: if (mat->factorerrortype) { /* a failed factorization: report and skip the solve */
4137: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4138: } else if (mat->ops->solveadd) { /* type-specific fused solve+add */
4139: PetscUseTypeMethod(mat, solveadd, b, y, x);
4140: } else {
4141: /* do the solve then the add manually */
4142: if (x != y) {
4143: PetscCall(MatSolve(mat, b, x));
4144: PetscCall(VecAXPY(x, one, y));
4145: } else { /* x aliases y: save y before the solve overwrites it */
4146: PetscCall(VecDuplicate(x, &tmp));
4147: PetscCall(VecCopy(x, tmp));
4148: PetscCall(MatSolve(mat, b, x));
4149: PetscCall(VecAXPY(x, one, tmp));
4150: PetscCall(VecDestroy(&tmp));
4151: }
4152: }
4153: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4154: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached derived data */
4155: PetscFunctionReturn(PETSC_SUCCESS);
4156: }
4158: /*@
4159: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4161: Neighbor-wise Collective
4163: Input Parameters:
4164: + mat - the factored matrix
4165: - b - the right-hand-side vector
4167: Output Parameter:
4168: . x - the result vector
4170: Level: developer
4172: Notes:
4173: The vectors `b` and `x` cannot be the same. I.e., one cannot
4174: call `MatSolveTranspose`(A,x,x).
4176: Most users should employ the `KSP` interface for linear solvers
4177: instead of working directly with matrix algebra routines such as this.
4178: See, e.g., `KSPCreate()`.
4180: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4181: @*/
4182: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4183: {
4184: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose; /* a symmetric matrix satisfies A^T = A, so the plain solve kernel can stand in when no transpose kernel exists */
4186: PetscFunctionBegin;
4191: PetscCheckSameComm(mat, 1, b, 2);
4192: PetscCheckSameComm(mat, 1, x, 3);
4193: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); /* in-place solve is not supported */
4194: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N); /* roles of rmap/cmap are swapped vs MatSolve() because the system is transposed */
4195: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4196: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
4197: MatCheckPreallocated(mat, 1);
4198: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4199: PetscCall(VecFlag(x, mat->factorerrortype)); /* presumably marks x invalid when the factorization failed -- confirm VecFlag semantics */
4200: if (mat->factorerrortype) { /* a failed factorization: report and skip the solve */
4201: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4202: } else {
4203: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name); /* neither a transpose kernel nor the symmetric fallback is available */
4204: PetscCall((*f)(mat, b, x));
4205: }
4206: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4207: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached derived data */
4208: PetscFunctionReturn(PETSC_SUCCESS);
4209: }
4211: /*@
4212: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$
4213: factored matrix.
4215: Neighbor-wise Collective
4217: Input Parameters:
4218: + mat - the factored matrix
4219: . b - the right-hand-side vector
4220: - y - the vector to be added to
4222: Output Parameter:
4223: . x - the result vector
4225: Level: developer
4227: Note:
4228: The vectors `b` and `x` cannot be the same. I.e., one cannot
4229: call `MatSolveTransposeAdd`(A,x,y,x).
4231: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4232: @*/
4233: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4234: {
4235: PetscScalar one = 1.0;
4236: Vec tmp; /* scratch copy of y, only needed when x and y alias */
4237: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd; /* a symmetric matrix satisfies A^T = A, so solveadd can stand in when no transpose kernel exists */
4239: PetscFunctionBegin;
4245: PetscCheckSameComm(mat, 1, b, 2);
4246: PetscCheckSameComm(mat, 1, y, 3);
4247: PetscCheckSameComm(mat, 1, x, 4);
4248: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); /* x == y is allowed and handled below; x == b is not */
4249: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N); /* roles of rmap/cmap are swapped vs MatSolveAdd() because the system is transposed */
4250: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4251: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4252: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4253: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); /* 0 x 0 system: nothing to do */
4254: MatCheckPreallocated(mat, 1);
4256: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4257: PetscCall(VecFlag(x, mat->factorerrortype)); /* presumably marks x invalid when the factorization failed -- confirm VecFlag semantics */
4258: if (mat->factorerrortype) { /* a failed factorization: report and skip the solve */
4259: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4260: } else if (f) { /* fused solve+add kernel (or symmetric fallback) is available */
4261: PetscCall((*f)(mat, b, y, x));
4262: } else {
4263: /* do the solve then the add manually */
4264: if (x != y) {
4265: PetscCall(MatSolveTranspose(mat, b, x));
4266: PetscCall(VecAXPY(x, one, y));
4267: } else { /* x aliases y: save y before the solve overwrites it */
4268: PetscCall(VecDuplicate(x, &tmp));
4269: PetscCall(VecCopy(x, tmp));
4270: PetscCall(MatSolveTranspose(mat, b, x));
4271: PetscCall(VecAXPY(x, one, tmp));
4272: PetscCall(VecDestroy(&tmp));
4273: }
4274: }
4275: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4276: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached derived data */
4277: PetscFunctionReturn(PETSC_SUCCESS);
4278: }
4280: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4281: /*@
4282: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4284: Neighbor-wise Collective
4286: Input Parameters:
4287: + mat - the matrix
4288: . b - the right-hand side
4289: . omega - the relaxation factor
4290: . flag - flag indicating the type of SOR (see below)
4291: . shift - diagonal shift
4292: . its - the number of iterations
4293: - lits - the number of local iterations
4295: Output Parameter:
4296: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4298: SOR Flags:
4299: + `SOR_FORWARD_SWEEP` - forward SOR
4300: . `SOR_BACKWARD_SWEEP` - backward SOR
4301: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4302: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4303: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4304: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4305: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4306: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies upper/lower triangular part of matrix to vector (with `omega`)
4307: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4309: Level: developer
4311: Notes:
4312: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4313: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4314: on each processor.
4316: Application programmers will not generally use `MatSOR()` directly,
4317: but instead will employ `PCSOR` or `PCEISENSTAT`
4319: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with inodes, this does a block SOR smoothing, otherwise it does a pointwise smoothing.
4320: For `MATAIJ` matrices with inodes, the block sizes are determined by the inode sizes, not the block size set with `MatSetBlockSize()`
4322: Vectors `x` and `b` CANNOT be the same
4324: The flags are implemented as bitwise inclusive or operations.
4325: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4326: to specify a zero initial guess for SSOR.
4328: Developer Note:
4329: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4331: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4332: @*/
4333: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4334: {
4335: PetscFunctionBegin;
4340: PetscCheckSameComm(mat, 1, b, 2);
4341: PetscCheckSameComm(mat, 1, x, 8);
4342: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4343: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix"); /* SOR works on the original matrix, not on a factorization */
4344: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4345: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4346: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n); /* local check, so error on PETSC_COMM_SELF */
4347: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4348: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4349: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same"); /* in-place sweeps are not supported */
4351: MatCheckPreallocated(mat, 1);
4352: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4353: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x); /* errors if the matrix type provides no sor kernel */
4354: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4355: PetscCall(PetscObjectStateIncrease((PetscObject)x)); /* x changed: invalidate cached derived data */
4356: PetscFunctionReturn(PETSC_SUCCESS);
4357: }
4359: /*
4360: Default matrix copy routine.
4361: */
4362: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4363: {
4364: PetscInt i, rstart = 0, rend = 0, nz;
4365: const PetscInt *cwork;
4366: const PetscScalar *vwork;
4368: PetscFunctionBegin;
4369: if (B->assembled) PetscCall(MatZeroEntries(B)); /* start from zero so INSERT_VALUES below yields a true copy */
4370: if (str == SAME_NONZERO_PATTERN) {
4371: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4372: for (i = rstart; i < rend; i++) { /* copy the locally owned rows one at a time */
4373: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4374: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4375: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4376: }
4377: } else {
4378: PetscCall(MatAYPX(B, 0.0, A, str)); /* B = 0*B + A; lets MatAYPX handle the differing nonzero pattern */
4379: }
4380: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4381: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4382: PetscFunctionReturn(PETSC_SUCCESS);
4383: }
4385: /*@
4386: MatCopy - Copies a matrix to another matrix.
4388: Collective
4390: Input Parameters:
4391: + A - the matrix
4392: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4394: Output Parameter:
4395: . B - where the copy is put
4397: Level: intermediate
4399: Notes:
4400: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4402: `MatCopy()` copies the matrix entries of a matrix to another existing
4403: matrix (after first zeroing the second matrix). A related routine is
4404: `MatConvert()`, which first creates a new matrix and then copies the data.
4406: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4407: @*/
4408: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4409: {
4410: PetscInt i;
4412: PetscFunctionBegin;
4417: PetscCheckSameComm(A, 1, B, 2);
4418: MatCheckPreallocated(B, 2);
4419: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4420: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4421: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4422: A->cmap->N, B->cmap->N);
4423: MatCheckPreallocated(A, 1);
4424: if (A == B) PetscFunctionReturn(PETSC_SUCCESS); /* copying a matrix onto itself is a no-op */
4426: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4427: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str); /* type-specific fast path */
4428: else PetscCall(MatCopy_Basic(A, B, str)); /* generic row-by-row fallback */
4430: B->stencil.dim = A->stencil.dim; /* replicate stencil metadata -- presumably consumed by stencil-based assembly; confirm */
4431: B->stencil.noc = A->stencil.noc;
4432: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) { /* one extra slot when a components dimension is present (noc false) */
4433: B->stencil.dims[i] = A->stencil.dims[i];
4434: B->stencil.starts[i] = A->stencil.starts[i];
4435: }
4437: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4438: PetscCall(PetscObjectStateIncrease((PetscObject)B)); /* B changed: invalidate cached derived data */
4439: PetscFunctionReturn(PETSC_SUCCESS);
4440: }
4442: /*@
4443: MatConvert - Converts a matrix to another matrix, either of the same
4444: or different type.
4446: Collective
4448: Input Parameters:
4449: + mat - the matrix
4450: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4451: same type as the original matrix.
4452: - reuse - denotes if the destination matrix is to be created or reused.
4453: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input `Mat` to be changed to contain the matrix in the new format), otherwise use
4454: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4456: Output Parameter:
4457: . M - pointer to place new matrix
4459: Level: intermediate
4461: Notes:
4462: `MatConvert()` first creates a new matrix and then copies the data from
4463: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4464: entries of one matrix to another already existing matrix context.
4466: Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4467: the MPI communicator of the generated matrix is always the same as the communicator
4468: of the input matrix.
4470: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4471: @*/
4472: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4473: {
4474: PetscBool sametype, issame, flg;
4475: PetscBool3 issymmetric, ishermitian, isspd; /* cached three-state flags; restored on *M at the end */
4476: char convname[256], mtype[256];
4477: Mat B;
4479: PetscFunctionBegin;
4482: PetscAssertPointer(M, 4);
4483: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4484: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4485: MatCheckPreallocated(mat, 1);
4487: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4488: if (flg) newtype = mtype; /* allow the target type to be overridden from the options database */
4490: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4491: PetscCall(PetscStrcmp(newtype, "same", &issame)); /* MATSAME means "keep the current type" */
4492: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4493: if (reuse == MAT_REUSE_MATRIX) {
4495: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4496: }
4498: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) { /* in-place conversion to the same type: nothing to do */
4499: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4500: PetscFunctionReturn(PETSC_SUCCESS);
4501: }
4503: /* Cache Mat options because some converters use MatHeaderReplace() */
4504: issymmetric = mat->symmetric;
4505: ishermitian = mat->hermitian;
4506: isspd = mat->spd;
4508: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) { /* same type and fresh output: a duplicate is the conversion */
4509: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4510: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4511: } else {
4512: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4513: const char *prefix[3] = {"seq", "mpi", ""}; /* candidate type-name prefixes tried when building converter names */
4514: PetscInt i;
4515: /*
4516: Order of precedence:
4517: 0) See if newtype is a superclass of the current matrix.
4518: 1) See if a specialized converter is known to the current matrix.
4519: 2) See if a specialized converter is known to the desired matrix class.
4520: 3) See if a good general converter is registered for the desired class
4521: (as of 6/27/03 only MATMPIADJ falls into this category).
4522: 4) See if a good general converter is known for the current matrix.
4523: 5) Use a really basic converter.
4524: */
4526: /* 0) See if newtype is a superclass of the current matrix.
4527: i.e mat is mpiaij and newtype is aij */
4528: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4529: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4530: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4531: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4532: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4533: if (flg) { /* newtype is a superclass: conversion degenerates to no-op/duplicate/copy */
4534: if (reuse == MAT_INPLACE_MATRIX) {
4535: PetscCall(PetscInfo(mat, "Early return\n"));
4536: PetscFunctionReturn(PETSC_SUCCESS);
4537: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4538: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4539: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4540: PetscFunctionReturn(PETSC_SUCCESS);
4541: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4542: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4543: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4544: PetscFunctionReturn(PETSC_SUCCESS);
4545: }
4546: }
4547: }
4548: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4549: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4550: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname))); /* build "MatConvert_<from>_<prefix><to>_C" and query it on mat */
4551: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4552: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4553: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4554: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4555: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4556: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4557: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4558: if (conv) goto foundconv;
4559: }
4561: /* 2) See if a specialized converter is known to the desired matrix class. */
4562: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B)); /* temporary instance of the target type, only used to query its registered converters */
4563: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4564: PetscCall(MatSetType(B, newtype));
4565: for (i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(prefix); i++) {
4566: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4567: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4568: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4569: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4570: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4571: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4572: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4573: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4574: if (conv) {
4575: PetscCall(MatDestroy(&B)); /* done probing; the converter will build the real output */
4576: goto foundconv;
4577: }
4578: }
4580: /* 3) See if a good general converter is registered for the desired class */
4581: conv = B->ops->convertfrom;
4582: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4583: PetscCall(MatDestroy(&B));
4584: if (conv) goto foundconv;
4586: /* 4) See if a good general converter is known for the current matrix */
4587: if (mat->ops->convert) conv = mat->ops->convert;
4588: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4589: if (conv) goto foundconv;
4591: /* 5) Use a really basic converter. */
4592: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4593: conv = MatConvert_Basic;
4595: foundconv:
4596: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4597: PetscCall((*conv)(mat, newtype, reuse, M)); /* run the selected converter */
4598: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) { /* carry over local-to-global mappings if the converter did not */
4599: /* the block sizes must be same if the mappings are copied over */
4600: (*M)->rmap->bs = mat->rmap->bs;
4601: (*M)->cmap->bs = mat->cmap->bs;
4602: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4603: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4604: (*M)->rmap->mapping = mat->rmap->mapping;
4605: (*M)->cmap->mapping = mat->cmap->mapping;
4606: }
4607: (*M)->stencil.dim = mat->stencil.dim; /* replicate stencil metadata, same as MatCopy() does */
4608: (*M)->stencil.noc = mat->stencil.noc;
4609: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4610: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4611: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4612: }
4613: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4614: }
4615: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4617: /* Reset Mat options */
4618: if (issymmetric != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PetscBool3ToBool(issymmetric)));
4619: if (ishermitian != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PetscBool3ToBool(ishermitian)));
4620: if (isspd != PETSC_BOOL3_UNKNOWN) PetscCall(MatSetOption(*M, MAT_SPD, PetscBool3ToBool(isspd)));
4621: PetscFunctionReturn(PETSC_SUCCESS);
4622: }
4624: /*@
4625: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4627: Not Collective
4629: Input Parameter:
4630: . mat - the matrix, must be a factored matrix
4632: Output Parameter:
4633: . type - the string name of the package (do not free this string)
4635: Level: intermediate
4637: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4638: @*/
4639: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4640: {
4641: PetscErrorCode (*conv)(Mat, MatSolverType *);
4643: PetscFunctionBegin;
4646: PetscAssertPointer(type, 2);
4647: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4648: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4649: if (conv) PetscCall((*conv)(mat, type));
4650: else *type = MATSOLVERPETSC;
4651: PetscFunctionReturn(PETSC_SUCCESS);
4652: }
/* Linked-list node holding, for one matrix type, the factor-creation routines a solver package registered */
typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
struct _MatSolverTypeForSpecifcType {
  MatType mtype; /* matrix type these routines apply to */
  /* no entry for MAT_FACTOR_NONE */
  PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *); /* indexed by (int)ftype - 1 */
  MatSolverTypeForSpecifcType next;
};

/* Linked-list node for one registered solver package (e.g. "petsc", "superlu") and its per-matrix-type handlers */
typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
struct _MatSolverTypeHolder {
  char *name; /* package name, compared case-insensitively */
  MatSolverTypeForSpecifcType handlers;
  MatSolverTypeHolder next;
};

/* Head of the global registry of solver packages; populated by MatSolverTypeRegister(), freed by MatSolverTypeDestroy() */
static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4671: /*@C
4672: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4674: Logically Collective, No Fortran Support
4676: Input Parameters:
4677: + package - name of the package, for example `petsc` or `superlu`
4678: . mtype - the matrix type that works with this package
4679: . ftype - the type of factorization supported by the package
4680: - createfactor - routine that will create the factored matrix ready to be used
4682: Level: developer
4684: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4685: `MatGetFactor()`
4686: @*/
4687: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4688: {
4689: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4690: PetscBool flg;
4691: MatSolverTypeForSpecifcType inext, iprev = NULL;
4693: PetscFunctionBegin;
4694: PetscCall(MatInitializePackage());
4695: if (!next) {
4696: PetscCall(PetscNew(&MatSolverTypeHolders));
4697: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4698: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4699: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4700: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4701: PetscFunctionReturn(PETSC_SUCCESS);
4702: }
4703: while (next) {
4704: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4705: if (flg) {
4706: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4707: inext = next->handlers;
4708: while (inext) {
4709: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4710: if (flg) {
4711: inext->createfactor[(int)ftype - 1] = createfactor;
4712: PetscFunctionReturn(PETSC_SUCCESS);
4713: }
4714: iprev = inext;
4715: inext = inext->next;
4716: }
4717: PetscCall(PetscNew(&iprev->next));
4718: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4719: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4720: PetscFunctionReturn(PETSC_SUCCESS);
4721: }
4722: prev = next;
4723: next = next->next;
4724: }
4725: PetscCall(PetscNew(&prev->next));
4726: PetscCall(PetscStrallocpy(package, &prev->next->name));
4727: PetscCall(PetscNew(&prev->next->handlers));
4728: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4729: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4730: PetscFunctionReturn(PETSC_SUCCESS);
4731: }
4733: /*@C
4734: MatSolverTypeGet - Gets the function that creates the factor matrix if it exist
4736: Input Parameters:
4737: + type - name of the package, for example `petsc` or `superlu`, if this is 'NULL', then the first result that satisfies the other criteria is returned
4738: . ftype - the type of factorization supported by the type
4739: - mtype - the matrix type that works with this type
4741: Output Parameters:
4742: + foundtype - `PETSC_TRUE` if the type was registered
4743: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4744: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4746: Calling sequence of `createfactor`:
4747: + A - the matrix providing the factor matrix
4748: . ftype - the `MatFactorType` of the factor requested
4749: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4751: Level: developer
4753: Note:
4754: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4755: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4756: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4758: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4759: `MatInitializePackage()`
4760: @*/
4761: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4762: {
4763: MatSolverTypeHolder next = MatSolverTypeHolders;
4764: PetscBool flg;
4765: MatSolverTypeForSpecifcType inext;
4767: PetscFunctionBegin;
4768: if (foundtype) *foundtype = PETSC_FALSE;
4769: if (foundmtype) *foundmtype = PETSC_FALSE;
4770: if (createfactor) *createfactor = NULL;
4772: if (type) {
4773: while (next) {
4774: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4775: if (flg) {
4776: if (foundtype) *foundtype = PETSC_TRUE;
4777: inext = next->handlers;
4778: while (inext) {
4779: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4780: if (flg) {
4781: if (foundmtype) *foundmtype = PETSC_TRUE;
4782: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4783: PetscFunctionReturn(PETSC_SUCCESS);
4784: }
4785: inext = inext->next;
4786: }
4787: }
4788: next = next->next;
4789: }
4790: } else {
4791: while (next) {
4792: inext = next->handlers;
4793: while (inext) {
4794: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4795: if (flg && inext->createfactor[(int)ftype - 1]) {
4796: if (foundtype) *foundtype = PETSC_TRUE;
4797: if (foundmtype) *foundmtype = PETSC_TRUE;
4798: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4799: PetscFunctionReturn(PETSC_SUCCESS);
4800: }
4801: inext = inext->next;
4802: }
4803: next = next->next;
4804: }
4805: /* try with base classes inext->mtype */
4806: next = MatSolverTypeHolders;
4807: while (next) {
4808: inext = next->handlers;
4809: while (inext) {
4810: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4811: if (flg && inext->createfactor[(int)ftype - 1]) {
4812: if (foundtype) *foundtype = PETSC_TRUE;
4813: if (foundmtype) *foundmtype = PETSC_TRUE;
4814: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4815: PetscFunctionReturn(PETSC_SUCCESS);
4816: }
4817: inext = inext->next;
4818: }
4819: next = next->next;
4820: }
4821: }
4822: PetscFunctionReturn(PETSC_SUCCESS);
4823: }
4825: PetscErrorCode MatSolverTypeDestroy(void)
4826: {
4827: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4828: MatSolverTypeForSpecifcType inext, iprev;
4830: PetscFunctionBegin;
4831: while (next) {
4832: PetscCall(PetscFree(next->name));
4833: inext = next->handlers;
4834: while (inext) {
4835: PetscCall(PetscFree(inext->mtype));
4836: iprev = inext;
4837: inext = inext->next;
4838: PetscCall(PetscFree(iprev));
4839: }
4840: prev = next;
4841: next = next->next;
4842: PetscCall(PetscFree(prev));
4843: }
4844: MatSolverTypeHolders = NULL;
4845: PetscFunctionReturn(PETSC_SUCCESS);
4846: }
4848: /*@
4849: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4851: Logically Collective
4853: Input Parameter:
4854: . mat - the matrix
4856: Output Parameter:
4857: . flg - `PETSC_TRUE` if uses the ordering
4859: Level: developer
4861: Note:
4862: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4863: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4865: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4866: @*/
4867: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4868: {
4869: PetscFunctionBegin;
4870: *flg = mat->canuseordering;
4871: PetscFunctionReturn(PETSC_SUCCESS);
4872: }
4874: /*@
4875: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4877: Logically Collective
4879: Input Parameters:
4880: + mat - the matrix obtained with `MatGetFactor()`
4881: - ftype - the factorization type to be used
4883: Output Parameter:
4884: . otype - the preferred ordering type
4886: Level: developer
4888: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4889: @*/
4890: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4891: {
4892: PetscFunctionBegin;
4893: *otype = mat->preferredordering[ftype];
4894: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4895: PetscFunctionReturn(PETSC_SUCCESS);
4896: }
/*@
  MatGetFactor - Returns a matrix suitable to calls to routines such as `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatILUFactorSymbolic()`,
  `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactorNumeric()`, `MatILUFactorNumeric()`, and
  `MatICCFactorNumeric()`

  Collective

  Input Parameters:
+ mat   - the matrix
. type  - name of solver type, for example, `superlu_dist`, `petsc` (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies
          the other criteria is returned
- ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`

  Output Parameter:
. f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.

  Options Database Keys:
+ -pc_factor_mat_solver_type type       - choose the type at run time. When using `KSP` solvers
. -pc_factor_mat_factor_on_host (true|false) - do matrix factorization on host (with device matrices). Default is doing it on device
- -pc_factor_mat_solve_on_host (true|false)  - do matrix solve on host (with device matrices). Default is doing it on device

  Level: intermediate

  Notes:
  Some of the packages, such as MUMPS, have options for controlling the factorization, these are in the form `-prefix_mat_packagename_packageoption`
  (for example, `-mat_mumps_icntl_6 1`) where `prefix` is normally set automatically from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly,
  without using a `PC`, one can set the prefix by
  calling `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.

  Some PETSc matrix formats have alternative solvers available that are provided by alternative packages
  such as PaStiX, SuperLU_DIST, MUMPS etc. PETSc must have been configured to use the external solver,
  using the corresponding `./configure` option such as `--download-package` or `--with-package-dir`.

  When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
  Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
  For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.

  The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
  types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.

  Developer Note:
  This should actually be called `MatCreateFactor()` since it creates a new factor object

  The `MatGetFactor()` implementations should not be accessing the PETSc options database or making other decisions about solver options,
  that should be delayed until the later operations. This is to ensure the correct options prefix has been set in the factor matrix.

.seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
          `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`,
          `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`,
          `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatILUFactorSymbolic()`,
          `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactorNumeric()`, `MatILUFactorNumeric()`,
          `MatICCFactorNumeric()`
@*/
PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
{
  PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
  PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);

  PetscFunctionBegin;
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* a shell matrix may supply its own factor-creation operation; when it does, defer to it entirely */
  PetscCall(MatIsShell(mat, &shell));
  if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
  if (hasop) {
    PetscUseTypeMethod(mat, getfactor, type, ftype, f);
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* otherwise look the creation routine up in the global registry of solver packages */
  PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
  if (!foundtype) {
    /* distinct messages: a named package that is missing is usually a configure-time problem */
    if (type) {
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
              ((PetscObject)mat)->type_name, type);
    } else {
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
    }
  }
  PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
  PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);

  PetscCall((*conv)(mat, ftype, f));
  /* the new factor inherits any prefix set with MatSetOptionsPrefixFactor() on the originating matrix */
  if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
4987: /*@
4988: MatGetFactorAvailable - Returns a flag if matrix supports particular type and factor type
4990: Not Collective
4992: Input Parameters:
4993: + mat - the matrix
4994: . type - name of solver type, for example, `superlu`, `petsc` (to use PETSc's default)
4995: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4997: Output Parameter:
4998: . flg - PETSC_TRUE if the factorization is available
5000: Level: intermediate
5002: Notes:
5003: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
5004: such as pastix, superlu, mumps etc.
5006: PETSc must have been ./configure to use the external solver, using the option --download-package
5008: Developer Note:
5009: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
5011: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
5012: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
5013: @*/
5014: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
5015: {
5016: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
5018: PetscFunctionBegin;
5020: PetscAssertPointer(flg, 4);
5022: *flg = PETSC_FALSE;
5023: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
5025: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5026: MatCheckPreallocated(mat, 1);
5028: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
5029: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
5030: PetscFunctionReturn(PETSC_SUCCESS);
5031: }
/*@
  MatDuplicate - Duplicates a matrix including the non-zero structure.

  Collective

  Input Parameters:
+ mat - the matrix
- op  - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
        See the manual page for `MatDuplicateOption()` for an explanation of these options.

  Output Parameter:
. M - pointer to place new matrix

  Level: intermediate

  Notes:
  You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.

  If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.

  May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.

  When original mat is a product of matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
  is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
  User should not use `MatDuplicate()` to create new matrix `M` if `M` is intended to be reused as the product of matrix operation.

.seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
@*/
PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
{
  Mat B;
  VecType vtype;
  PetscInt i;
  PetscObject dm, container_h, container_d;
  PetscErrorCodeFn *viewf;

  PetscFunctionBegin;
  PetscAssertPointer(M, 3);
  PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* the type-specific duplicate does the actual copy; it is logged under the MAT_Convert event */
  PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, duplicate, op, M);
  PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
  B = *M;

  /* carry over a user-installed view operation and the vector type so the duplicate behaves like the original */
  PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
  if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
  PetscCall(MatGetVecType(mat, &vtype));
  PetscCall(MatSetVecType(B, vtype));

  /* copy the stencil description used by MatSetValuesStencil(); the entry count depends on whether noc is set */
  B->stencil.dim = mat->stencil.dim;
  B->stencil.noc = mat->stencil.noc;
  for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
    B->stencil.dims[i]   = mat->stencil.dims[i];
    B->stencil.starts[i] = mat->stencil.starts[i];
  }

  /* preserve the assembly-communication hints */
  B->nooffproczerorows = mat->nooffproczerorows;
  B->nooffprocentries  = mat->nooffprocentries;

  /* re-attach composed objects: the originating DM and any COO assembly structures */
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
  if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
  if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
  PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
  if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
  /* symmetry/hermitian/SPD flags only remain valid when the values were copied */
  if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
  PetscCall(PetscObjectStateIncrease((PetscObject)B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
5108: /*@
5109: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
5111: Logically Collective
5113: Input Parameter:
5114: . mat - the matrix
5116: Output Parameter:
5117: . v - the diagonal of the matrix
5119: Level: intermediate
5121: Note:
5122: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5123: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5124: is larger than `ndiag`, the values of the remaining entries are unspecified.
5126: Currently only correct in parallel for square matrices.
5128: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5129: @*/
5130: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5131: {
5132: PetscFunctionBegin;
5136: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5137: MatCheckPreallocated(mat, 1);
5138: if (PetscDefined(USE_DEBUG)) {
5139: PetscInt nv, row, col, ndiag;
5141: PetscCall(VecGetLocalSize(v, &nv));
5142: PetscCall(MatGetLocalSize(mat, &row, &col));
5143: ndiag = PetscMin(row, col);
5144: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5145: }
5147: PetscUseTypeMethod(mat, getdiagonal, v);
5148: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5149: PetscFunctionReturn(PETSC_SUCCESS);
5150: }
5152: /*@
5153: MatGetRowMin - Gets the minimum value (of the real part) of each
5154: row of the matrix
5156: Logically Collective
5158: Input Parameter:
5159: . mat - the matrix
5161: Output Parameters:
5162: + v - the vector for storing the maximums
5163: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5165: Level: intermediate
5167: Note:
5168: The result of this call are the same as if one converted the matrix to dense format
5169: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5171: This code is only implemented for a couple of matrix formats.
5173: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5174: `MatGetRowMax()`
5175: @*/
5176: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5177: {
5178: PetscFunctionBegin;
5182: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5184: if (!mat->cmap->N) {
5185: PetscCall(VecSet(v, PETSC_MAX_REAL));
5186: if (idx) {
5187: PetscInt i, m = mat->rmap->n;
5188: for (i = 0; i < m; i++) idx[i] = -1;
5189: }
5190: } else {
5191: MatCheckPreallocated(mat, 1);
5192: }
5193: PetscUseTypeMethod(mat, getrowmin, v, idx);
5194: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5195: PetscFunctionReturn(PETSC_SUCCESS);
5196: }
5198: /*@
5199: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5200: row of the matrix
5202: Logically Collective
5204: Input Parameter:
5205: . mat - the matrix
5207: Output Parameters:
5208: + v - the vector for storing the minimums
5209: - idx - the indices of the column found for each row (or `NULL` if not needed)
5211: Level: intermediate
5213: Notes:
5214: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5215: row is 0 (the first column).
5217: This code is only implemented for a couple of matrix formats.
5219: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5220: @*/
5221: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5222: {
5223: PetscFunctionBegin;
5227: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5228: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5230: if (!mat->cmap->N) {
5231: PetscCall(VecSet(v, 0.0));
5232: if (idx) {
5233: PetscInt i, m = mat->rmap->n;
5234: for (i = 0; i < m; i++) idx[i] = -1;
5235: }
5236: } else {
5237: MatCheckPreallocated(mat, 1);
5238: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5239: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5240: }
5241: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5242: PetscFunctionReturn(PETSC_SUCCESS);
5243: }
5245: /*@
5246: MatGetRowMax - Gets the maximum value (of the real part) of each
5247: row of the matrix
5249: Logically Collective
5251: Input Parameter:
5252: . mat - the matrix
5254: Output Parameters:
5255: + v - the vector for storing the maximums
5256: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5258: Level: intermediate
5260: Notes:
5261: The result of this call are the same as if one converted the matrix to dense format
5262: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5264: This code is only implemented for a couple of matrix formats.
5266: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5267: @*/
5268: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5269: {
5270: PetscFunctionBegin;
5274: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5276: if (!mat->cmap->N) {
5277: PetscCall(VecSet(v, PETSC_MIN_REAL));
5278: if (idx) {
5279: PetscInt i, m = mat->rmap->n;
5280: for (i = 0; i < m; i++) idx[i] = -1;
5281: }
5282: } else {
5283: MatCheckPreallocated(mat, 1);
5284: PetscUseTypeMethod(mat, getrowmax, v, idx);
5285: }
5286: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5287: PetscFunctionReturn(PETSC_SUCCESS);
5288: }
5290: /*@
5291: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5292: row of the matrix
5294: Logically Collective
5296: Input Parameter:
5297: . mat - the matrix
5299: Output Parameters:
5300: + v - the vector for storing the maximums
5301: - idx - the indices of the column found for each row (or `NULL` if not needed)
5303: Level: intermediate
5305: Notes:
5306: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5307: row is 0 (the first column).
5309: This code is only implemented for a couple of matrix formats.
5311: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5312: @*/
5313: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5314: {
5315: PetscFunctionBegin;
5319: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5321: if (!mat->cmap->N) {
5322: PetscCall(VecSet(v, 0.0));
5323: if (idx) {
5324: PetscInt i, m = mat->rmap->n;
5325: for (i = 0; i < m; i++) idx[i] = -1;
5326: }
5327: } else {
5328: MatCheckPreallocated(mat, 1);
5329: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5330: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5331: }
5332: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5333: PetscFunctionReturn(PETSC_SUCCESS);
5334: }
5336: /*@
5337: MatGetRowSumAbs - Gets the sum value (in absolute value) of each row of the matrix
5339: Logically Collective
5341: Input Parameter:
5342: . mat - the matrix
5344: Output Parameter:
5345: . v - the vector for storing the sum
5347: Level: intermediate
5349: This code is only implemented for a couple of matrix formats.
5351: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5352: @*/
5353: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5354: {
5355: PetscFunctionBegin;
5359: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5361: if (!mat->cmap->N) PetscCall(VecSet(v, 0.0));
5362: else {
5363: MatCheckPreallocated(mat, 1);
5364: PetscUseTypeMethod(mat, getrowsumabs, v);
5365: }
5366: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5367: PetscFunctionReturn(PETSC_SUCCESS);
5368: }
5370: /*@
5371: MatGetRowSum - Gets the sum of each row of the matrix
5373: Logically or Neighborhood Collective
5375: Input Parameter:
5376: . mat - the matrix
5378: Output Parameter:
5379: . v - the vector for storing the sum of rows
5381: Level: intermediate
5383: Note:
5384: This code is slow since it is not currently specialized for different formats
5386: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5387: @*/
5388: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5389: {
5390: Vec ones;
5392: PetscFunctionBegin;
5396: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5397: MatCheckPreallocated(mat, 1);
5398: PetscCall(MatCreateVecs(mat, &ones, NULL));
5399: PetscCall(VecSet(ones, 1.));
5400: PetscCall(MatMult(mat, ones, v));
5401: PetscCall(VecDestroy(&ones));
5402: PetscFunctionReturn(PETSC_SUCCESS);
5403: }
5405: /*@
5406: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5407: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5409: Collective
5411: Input Parameter:
5412: . mat - the matrix to provide the transpose
5414: Output Parameter:
5415: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5417: Level: advanced
5419: Note:
5420: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5421: routine allows bypassing that call.
5423: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5424: @*/
5425: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5426: {
5427: MatParentState *rb = NULL;
5429: PetscFunctionBegin;
5430: PetscCall(PetscNew(&rb));
5431: rb->id = ((PetscObject)mat)->id;
5432: rb->state = 0;
5433: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5434: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscCtxDestroyDefault));
5435: PetscFunctionReturn(PETSC_SUCCESS);
5436: }
/* Shared driver for MatTranspose() (conjugate == PETSC_FALSE) and
   MatHermitianTranspose() (conjugate == PETSC_TRUE). Handles the three MatReuse
   modes and maintains the "MatTransposeParent" bookkeeping on *B so that a reuse
   call with an unchanged input matrix can return immediately. */
static PetscErrorCode MatTranspose_Private(Mat mat, MatReuse reuse, Mat *B, PetscBool conjugate)
{
  PetscContainer rB = NULL;
  MatParentState *rb = NULL;
  PetscErrorCode (*f)(Mat, MatReuse, Mat *) = NULL;

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
  PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
  MatCheckPreallocated(mat, 1);
  if (reuse == MAT_REUSE_MATRIX) {
    /* *B must have been produced by MatTranspose(mat, MAT_INITIAL_MATRIX, ...) or marked with MatTransposeSetPrecursor() */
    PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
    PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
    PetscCall(PetscContainerGetPointer(rB, &rb));
    PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
    /* mat has not changed since the transpose was last computed: nothing to do */
    if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
  }

  if (conjugate) {
    /* prefer a native Hermitian-transpose implementation when the type provides one */
    f = mat->ops->hermitiantranspose;
    if (f) PetscCall((*f)(mat, reuse, B));
  }
  /* otherwise fall back to transpose followed by conjugation; skipped entirely for an
     in-place Hermitian transpose of a matrix already flagged Hermitian (a no-op) */
  if (!f && !(reuse == MAT_INPLACE_MATRIX && mat->hermitian == PETSC_BOOL3_TRUE && conjugate)) {
    PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
    if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
      /* an in-place transpose of a matrix flagged symmetric is likewise a no-op */
      PetscUseTypeMethod(mat, transpose, reuse, B);
      PetscCall(PetscObjectStateIncrease((PetscObject)*B));
    }
    PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
    if (conjugate) PetscCall(MatConjugate(*B));
  }

  if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
  if (reuse != MAT_INPLACE_MATRIX) {
    /* record the state of mat this transpose corresponds to, enabling the fast path above */
    PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
    PetscCall(PetscContainerGetPointer(rB, &rb));
    rb->state = ((PetscObject)mat)->state;
    rb->nonzerostate = mat->nonzerostate;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
5484: /*@
5485: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5487: Collective
5489: Input Parameters:
5490: + mat - the matrix to transpose
5491: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5493: Output Parameter:
5494: . B - the transpose of the matrix
5496: Level: intermediate
5498: Notes:
5499: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5501: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5502: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5504: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5506: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5507: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5509: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5511: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5513: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5514: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5515: @*/
PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
{
  PetscFunctionBegin;
  /* all argument checking and reuse bookkeeping lives in the shared private driver;
     PETSC_FALSE requests a plain transpose (no conjugation of the entries) */
  PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
5523: /*@
5524: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5526: Collective
5528: Input Parameter:
5529: . A - the matrix to transpose
5531: Output Parameter:
5532: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5533: numerical portion.
5535: Level: intermediate
5537: Note:
5538: This is not supported for many matrix types, use `MatTranspose()` in those cases
5540: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5541: @*/
PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
  /* errors if the matrix type does not implement a symbolic transpose */
  PetscUseTypeMethod(A, transposesymbolic, B);
  PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));

  /* mark *B so a later MatTranspose(A, MAT_REUSE_MATRIX, B) can fill in the numerical values */
  PetscCall(MatTransposeSetPrecursor(A, *B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
5557: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5558: {
5559: PetscContainer rB;
5560: MatParentState *rb;
5562: PetscFunctionBegin;
5565: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5566: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5567: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5568: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5569: PetscCall(PetscContainerGetPointer(rB, &rb));
5570: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5571: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5572: PetscFunctionReturn(PETSC_SUCCESS);
5573: }
5575: /*@
5576: MatIsTranspose - Test whether a matrix is another one's transpose,
5577: or its own, in which case it tests symmetry.
5579: Collective
5581: Input Parameters:
5582: + A - the matrix to test
5583: . B - the matrix to test against, this can equal the first parameter
5584: - tol - tolerance, differences between entries smaller than this are counted as zero
5586: Output Parameter:
5587: . flg - the result
5589: Level: intermediate
5591: Notes:
5592: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5593: test involves parallel copies of the block off-diagonal parts of the matrix.
5595: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5596: @*/
5597: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5598: {
5599: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5601: PetscFunctionBegin;
5604: PetscAssertPointer(flg, 4);
5605: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5606: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5607: *flg = PETSC_FALSE;
5608: if (f && g) {
5609: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5610: PetscCall((*f)(A, B, tol, flg));
5611: } else {
5612: MatType mattype;
5614: PetscCall(MatGetType(f ? B : A, &mattype));
5615: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5616: }
5617: PetscFunctionReturn(PETSC_SUCCESS);
5618: }
5620: /*@
  MatHermitianTranspose - Computes an in-place or out-of-place Hermitian transpose of a matrix, that is, the transpose of its complex conjugate.
5623: Collective
5625: Input Parameters:
5626: + mat - the matrix to transpose and complex conjugate
5627: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5629: Output Parameter:
5630: . B - the Hermitian transpose
5632: Level: intermediate
5634: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5635: @*/
PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
{
  PetscFunctionBegin;
  /* same driver as MatTranspose(); PETSC_TRUE additionally requests conjugation of the entries */
  PetscCall(MatTranspose_Private(mat, reuse, B, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
5643: /*@
  MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose, or its own, in which case it tests for being Hermitian.
5646: Collective
5648: Input Parameters:
5649: + A - the matrix to test
5650: . B - the matrix to test against, this can equal the first parameter
5651: - tol - tolerance, differences between entries smaller than this are counted as zero
5653: Output Parameter:
5654: . flg - the result
5656: Level: intermediate
5658: Notes:
5659: Only available for `MATAIJ` matrices.
5661: The sequential algorithm
5662: has a running time of the order of the number of nonzeros; the parallel
5663: test involves parallel copies of the block off-diagonal parts of the matrix.
5665: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5666: @*/
5667: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5668: {
5669: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5671: PetscFunctionBegin;
5674: PetscAssertPointer(flg, 4);
5675: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5676: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5677: if (f && g) {
5678: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5679: PetscCall((*f)(A, B, tol, flg));
5680: } else {
5681: MatType mattype;
5683: PetscCall(MatGetType(f ? B : A, &mattype));
5684: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for Hermitian transpose", mattype);
5685: }
5686: PetscFunctionReturn(PETSC_SUCCESS);
5687: }
5689: /*@
5690: MatPermute - Creates a new matrix with rows and columns permuted from the
5691: original.
5693: Collective
5695: Input Parameters:
5696: + mat - the matrix to permute
5697: . row - row permutation, each processor supplies only the permutation for its rows
5698: - col - column permutation, each processor supplies only the permutation for its columns
5700: Output Parameter:
5701: . B - the permuted matrix
5703: Level: advanced
5705: Note:
5706: The index sets map from row/col of permuted matrix to row/col of original matrix.
5707: The index sets should be on the same communicator as mat and have the same local sizes.
5709: Developer Note:
5710: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5711: exploit the fact that row and col are permutations, consider implementing the
5712: more general `MatCreateSubMatrix()` instead.
5714: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5715: @*/
5716: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5717: {
5718: PetscFunctionBegin;
5723: PetscAssertPointer(B, 4);
5724: PetscCheckSameComm(mat, 1, row, 2);
5725: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5726: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5727: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5728: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5729: MatCheckPreallocated(mat, 1);
5731: if (mat->ops->permute) {
5732: PetscUseTypeMethod(mat, permute, row, col, B);
5733: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5734: } else {
5735: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5736: }
5737: PetscFunctionReturn(PETSC_SUCCESS);
5738: }
5740: /*@
5741: MatEqual - Compares two matrices.
5743: Collective
5745: Input Parameters:
5746: + A - the first matrix
5747: - B - the second matrix
5749: Output Parameter:
5750: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5752: Level: intermediate
5754: Note:
  If either of the matrices is "matrix-free", meaning the matrix entries are not stored explicitly, then equality is determined by comparing
  the results of several matrix-vector products using randomly created vectors; see `MatMultEqual()`.
5758: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5759: @*/
PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscAssertPointer(flg, 3);
  PetscCheckSameComm(A, 1, B, 2);
  MatCheckPreallocated(A, 1);
  MatCheckPreallocated(B, 2);
  PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
             B->cmap->N);
  /* use the type's entrywise comparison only when both matrices share the same implementation;
     otherwise fall back to comparing 10 matrix-vector products with random vectors */
  if (A->ops->equal && A->ops->equal == B->ops->equal) PetscUseTypeMethod(A, equal, B, flg);
  else PetscCall(MatMultEqual(A, B, 10, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
5780: /*@
5781: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5782: matrices that are stored as vectors. Either of the two scaling
5783: matrices can be `NULL`.
5785: Collective
5787: Input Parameters:
5788: + mat - the matrix to be scaled
5789: . l - the left scaling vector (or `NULL`)
5790: - r - the right scaling vector (or `NULL`)
5792: Level: intermediate
5794: Note:
5795: `MatDiagonalScale()` computes $A = LAR$, where
5796: L = a diagonal matrix (stored as a vector), R = a diagonal matrix (stored as a vector)
5797: The L scales the rows of the matrix, the R scales the columns of the matrix.
5799: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5800: @*/
PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
{
  PetscBool flg = PETSC_FALSE;

  PetscFunctionBegin;
  if (l) {
    PetscCheckSameComm(mat, 1, l, 2);
  }
  if (r) {
    PetscCheckSameComm(mat, 1, r, 3);
  }
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  /* scaling by nothing on both sides is a no-op */
  if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, diagonalscale, l, r);
  PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  /* L A R preserves symmetry only when L == R (or L == conj(R) for Hermitian matrices),
     so the cached symmetry flags may need to be invalidated */
  if (l != r && (PetscBool3ToBool(mat->symmetric) || PetscBool3ToBool(mat->hermitian))) {
    if (!PetscDefined(USE_COMPLEX) || PetscBool3ToBool(mat->symmetric)) {
      if (l && r) PetscCall(VecEqual(l, r, &flg));
      if (!flg) {
        /* symmetric storage formats cannot represent the now-unsymmetric result */
        PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
        PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be the same");
        mat->symmetric = mat->spd = PETSC_BOOL3_FALSE;
        /* in real arithmetic symmetric and Hermitian coincide; in complex they may differ */
        if (!PetscDefined(USE_COMPLEX)) mat->hermitian = PETSC_BOOL3_FALSE;
        else mat->hermitian = PETSC_BOOL3_UNKNOWN;
      }
    }
    if (PetscDefined(USE_COMPLEX) && PetscBool3ToBool(mat->hermitian)) {
      flg = PETSC_FALSE;
      if (l && r) {
        Vec conjugate;

        /* Hermitian structure survives only when r == conj(l) */
        PetscCall(VecDuplicate(l, &conjugate));
        PetscCall(VecCopy(l, conjugate));
        PetscCall(VecConjugate(conjugate));
        PetscCall(VecEqual(conjugate, r, &flg));
        PetscCall(VecDestroy(&conjugate));
      }
      if (!flg) {
        PetscCall(PetscObjectTypeCompareAny((PetscObject)mat, &flg, MATSEQSBAIJ, MATMPISBAIJ, ""));
        PetscCheck(!flg, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format and Hermitian matrix, left and right scaling vectors must be conjugate one of the other");
        mat->hermitian = PETSC_BOOL3_FALSE;
        mat->symmetric = mat->spd = PETSC_BOOL3_UNKNOWN;
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
5858: /*@
5859: MatScale - Scales all elements of a matrix by a given number.
5861: Logically Collective
5863: Input Parameters:
5864: + mat - the matrix to be scaled
5865: - a - the scaling value
5867: Level: intermediate
5869: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5870: @*/
5871: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5872: {
5873: PetscFunctionBegin;
5876: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5877: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5879: MatCheckPreallocated(mat, 1);
5881: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5882: if (a != (PetscScalar)1.0) {
5883: PetscUseTypeMethod(mat, scale, a);
5884: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5885: }
5886: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5887: PetscFunctionReturn(PETSC_SUCCESS);
5888: }
5890: /*@
5891: MatNorm - Calculates various norms of a matrix.
5893: Collective
5895: Input Parameters:
5896: + mat - the matrix
5897: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5899: Output Parameter:
5900: . nrm - the resulting norm
5902: Level: intermediate
5904: .seealso: [](ch_matrices), `Mat`
5905: @*/
PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
{
  PetscFunctionBegin;
  PetscAssertPointer(nrm, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* errors if the matrix type does not implement the requested norm */
  PetscUseTypeMethod(mat, norm, type, nrm);
  PetscFunctionReturn(PETSC_SUCCESS);
}
5921: /*
5922: This variable is used to prevent counting of MatAssemblyBegin() that
5923: are called from within a MatAssemblyEnd().
5924: */
5925: static PetscInt MatAssemblyEnd_InUse = 0;
5926: /*@
5927: MatAssemblyBegin - Begins assembling the matrix. This routine should
5928: be called after completing all calls to `MatSetValues()`.
5930: Collective
5932: Input Parameters:
5933: + mat - the matrix
5934: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5936: Level: beginner
5938: Notes:
5939: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5940: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5942: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5943: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5944: using the matrix.
5946: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5947: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5948: a global collective operation requiring all processes that share the matrix.
5950: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5951: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5952: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5954: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5955: @*/
5956: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5957: {
5958: PetscFunctionBegin;
5961: MatCheckPreallocated(mat, 1);
5962: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5963: if (mat->assembled) {
5964: mat->was_assembled = PETSC_TRUE;
5965: mat->assembled = PETSC_FALSE;
5966: }
5968: if (!MatAssemblyEnd_InUse) {
5969: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5970: PetscTryTypeMethod(mat, assemblybegin, type);
5971: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5972: } else PetscTryTypeMethod(mat, assemblybegin, type);
5973: PetscFunctionReturn(PETSC_SUCCESS);
5974: }
5976: /*@
5977: MatAssembled - Indicates if a matrix has been assembled and is ready for
5978: use; for example, in matrix-vector product.
5980: Not Collective
5982: Input Parameter:
5983: . mat - the matrix
5985: Output Parameter:
5986: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5988: Level: advanced
5990: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5991: @*/
PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
{
  PetscFunctionBegin;
  PetscAssertPointer(assembled, 2);
  /* simply report the flag maintained by MatAssemblyBegin()/MatAssemblyEnd() */
  *assembled = mat->assembled;
  PetscFunctionReturn(PETSC_SUCCESS);
}
6001: /*@
6002: MatAssemblyEnd - Completes assembling the matrix. This routine should
6003: be called after `MatAssemblyBegin()`.
6005: Collective
6007: Input Parameters:
6008: + mat - the matrix
6009: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
6011: Options Database Keys:
6012: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
6013: . -mat_view ::ascii_info_detail - Prints more detailed info
6014: . -mat_view - Prints matrix in ASCII format
6015: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
6016: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
6017: . -display name - Sets display name (default is host)
6018: . -draw_pause sec - Sets number of seconds to pause after display
6019: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
6020: . -viewer_socket_machine machine - Machine to use for socket
6021: . -viewer_socket_port port - Port number to use for socket
6022: - -mat_view binary:filename[:append] - Save matrix to file in binary format
6024: Level: beginner
6026: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
6027: @*/
PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
{
  static PetscInt inassm = 0; /* depth of nested MatAssemblyEnd() calls on this process */
  PetscBool flg = PETSC_FALSE;

  PetscFunctionBegin;
  inassm++;
  MatAssemblyEnd_InUse++;
  if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
    PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
    PetscTryTypeMethod(mat, assemblyend, type);
    PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
  } else PetscTryTypeMethod(mat, assemblyend, type);

  /* Flush assembly is not a true assembly */
  if (type != MAT_FLUSH_ASSEMBLY) {
    /* after the first assembly, cached symmetry/SPD flags become stale unless
       the user declared them eternal */
    if (mat->num_ass) {
      if (!mat->symmetry_eternal) {
        mat->symmetric = PETSC_BOOL3_UNKNOWN;
        mat->hermitian = PETSC_BOOL3_UNKNOWN;
      }
      if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
      if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
    }
    mat->num_ass++;
    mat->assembled = PETSC_TRUE;
    mat->ass_nonzerostate = mat->nonzerostate;
  }

  mat->insertmode = NOT_SET_VALUES;
  MatAssemblyEnd_InUse--;
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  /* viewing and optional diagnostics run only at the outermost final assembly */
  if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
    PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));

    if (mat->checksymmetryonassembly) {
      PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
      if (flg) {
        PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
      } else {
        PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
      }
    }
    if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
  }
  inassm--;
  PetscFunctionReturn(PETSC_SUCCESS);
}
6080: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
6081: /*@
6082: MatSetOption - Sets a parameter option for a matrix. Some options
6083: may be specific to certain storage formats. Some options
6084: determine how values will be inserted (or added). Sorted,
6085: row-oriented input will generally assemble the fastest. The default
6086: is row-oriented.
6088: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
6090: Input Parameters:
6091: + mat - the matrix
6092: . op - the option, one of those listed below (and possibly others),
6093: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6095: Options Describing Matrix Structure:
6096: + `MAT_SPD` - symmetric positive definite
6097: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
6098: . `MAT_HERMITIAN` - transpose is the complex conjugation
6099: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
6100: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
6101: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
6102: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
6104: These are not really options of the matrix, they are knowledge about the structure of the matrix that users may provide so that they
6105: do not need to be computed (usually at a high cost)
6107: Options For Use with `MatSetValues()`:
6108: Insert a logically dense subblock, which can be
6109: . `MAT_ROW_ORIENTED` - row-oriented (default)
6111: These options reflect the data you pass in with `MatSetValues()`; it has
6112: nothing to do with how the data is stored internally in the matrix
6113: data structure.
6115: When (re)assembling a matrix, we can restrict the input for
6116: efficiency/debugging purposes. These options include
6117: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
6118: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
6119: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
6120: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
6121: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
6122: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
6123: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
6124: performance for very large process counts.
6125: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
6126: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
6127: functions, instead sending only neighbor messages.
6129: Level: intermediate
6131: Notes:
6132: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
6134: Some options are relevant only for particular matrix types and
6135: are thus ignored by others. Other options are not supported by
6136: certain matrix types and will generate an error message if set.
6138: If using Fortran to compute a matrix, one may need to
6139: use the column-oriented option (or convert to the row-oriented
6140: format).
6142: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
6143: that would generate a new entry in the nonzero structure is instead
6144: ignored. Thus, if memory has not already been allocated for this particular
6145: data, then the insertion is ignored. For dense matrices, in which
6146: the entire array is allocated, no entries are ever ignored.
6147: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6149: `MAT_NEW_NONZERO_LOCATION_ERR` set to PETSC_TRUE indicates that any add or insertion
6150: that would generate a new entry in the nonzero structure instead produces
6151: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
6153: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
6154: that would generate a new entry that has not been preallocated will
6155: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
6156: only.) This is a useful flag when debugging matrix memory preallocation.
If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes have one less global reduction
6159: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
6160: other processors should be dropped, rather than stashed.
6161: This is useful if you know that the "owning" processor is also
6162: always generating the correct matrix entries, so that PETSc need
6163: not transfer duplicate entries generated on another processor.
6165: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6166: searches during matrix assembly. When this flag is set, the hash table
6167: is created during the first matrix assembly. This hash table is
6168: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6169: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6170: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6171: supported by `MATMPIBAIJ` format only.
6173: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6174: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6176: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6177: a zero location in the matrix
6179: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6181: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6182: zero row routines and thus improves performance for very large process counts.
6184: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6185: part of the matrix (since they should match the upper triangular part).
6187: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6188: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6189: with finite difference schemes with non-periodic boundary conditions.
6191: Developer Note:
6192: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6193: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6194: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6195: not changed.
6197: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6198: @*/
PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
{
  PetscFunctionBegin;
  if (op > 0) {
    /* NOTE(review): empty branch -- collective validation of op/flg for positive
       (collective) option values appears to have been elided here; confirm against upstream. */
  }
  /* Reject values outside the MatOption enum range before consulting the switch */
  PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);

  switch (op) {
  /* The next four options are handled entirely on the Mat header and return
     immediately: they are never forwarded to the type-specific setoption method */
  case MAT_FORCE_DIAGONAL_ENTRIES:
    mat->force_diagonals = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ENTRIES:
    mat->nooffprocentries = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SUBSET_OFF_PROC_ENTRIES:
    mat->assembly_subset = flg;
    if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
      /* turning the optimization off discards the cached communication pattern
         so the next assembly rebuilds it from scratch */
#if !defined(PETSC_HAVE_MPIUNI)
      PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
#endif
      mat->stash.first_assembly_done = PETSC_FALSE;
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_NO_OFF_PROC_ZERO_ROWS:
    mat->nooffproczerorows = flg;
    PetscFunctionReturn(PETSC_SUCCESS);
  case MAT_SPD:
    /* SPD implies symmetric and structurally symmetric (and Hermitian for real
       scalars); turning SPD off says nothing about those weaker properties */
    if (flg) {
      mat->spd = PETSC_BOOL3_TRUE;
      mat->symmetric = PETSC_BOOL3_TRUE;
      mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
      mat->hermitian = PETSC_BOOL3_TRUE;
#endif
    } else {
      mat->spd = PETSC_BOOL3_FALSE;
    }
    break;
  case MAT_SYMMETRIC:
    /* flg is mapped into the three-valued bool; TRUE also implies structural symmetry */
    mat->symmetric = PetscBoolToBool3(flg);
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    /* for real scalars symmetric and Hermitian coincide */
    mat->hermitian = PetscBoolToBool3(flg);
#endif
    break;
  case MAT_HERMITIAN:
    mat->hermitian = PetscBoolToBool3(flg);
    if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
#if !defined(PETSC_USE_COMPLEX)
    /* for real scalars Hermitian and symmetric coincide */
    mat->symmetric = PetscBoolToBool3(flg);
#endif
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    mat->structurally_symmetric = PetscBoolToBool3(flg);
    break;
  /* The *_ETERNAL options require the corresponding property to have been
     explicitly set first (it must not be in the three-valued "unknown" state) */
  case MAT_SYMMETRY_ETERNAL:
    PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
    mat->symmetry_eternal = flg;
    if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
    break;
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
    PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
    mat->structural_symmetry_eternal = flg;
    break;
  case MAT_SPD_ETERNAL:
    PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
    mat->spd_eternal = flg;
    if (flg) {
      /* eternal SPD implies the weaker eternal properties as well */
      mat->structural_symmetry_eternal = PETSC_TRUE;
      mat->symmetry_eternal = PETSC_TRUE;
    }
    break;
  case MAT_STRUCTURE_ONLY:
    mat->structure_only = flg;
    break;
  case MAT_SORTED_FULL:
    mat->sortedfull = flg;
    break;
  default:
    /* remaining options are meaningful only to particular implementations */
    break;
  }
  /* give the matrix type a chance to act on the option (no-op if it has no setoption method) */
  PetscTryTypeMethod(mat, setoption, op, flg);
  PetscFunctionReturn(PETSC_SUCCESS);
}
6288: /*@
6289: MatGetOption - Gets a parameter option that has been set for a matrix.
6291: Logically Collective
6293: Input Parameters:
6294: + mat - the matrix
6295: - op - the option, this only responds to certain options, check the code for which ones
6297: Output Parameter:
6298: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6300: Level: intermediate
6302: Notes:
6303: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6305: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6306: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6308: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6309: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6310: @*/
6311: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6312: {
6313: PetscFunctionBegin;
6317: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6318: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6320: switch (op) {
6321: case MAT_NO_OFF_PROC_ENTRIES:
6322: *flg = mat->nooffprocentries;
6323: break;
6324: case MAT_NO_OFF_PROC_ZERO_ROWS:
6325: *flg = mat->nooffproczerorows;
6326: break;
6327: case MAT_SYMMETRIC:
6328: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6329: break;
6330: case MAT_HERMITIAN:
6331: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6332: break;
6333: case MAT_STRUCTURALLY_SYMMETRIC:
6334: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6335: break;
6336: case MAT_SPD:
6337: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6338: break;
6339: case MAT_SYMMETRY_ETERNAL:
6340: *flg = mat->symmetry_eternal;
6341: break;
6342: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6343: *flg = mat->symmetry_eternal;
6344: break;
6345: default:
6346: break;
6347: }
6348: PetscFunctionReturn(PETSC_SUCCESS);
6349: }
6351: /*@
6352: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6353: this routine retains the old nonzero structure.
6355: Logically Collective
6357: Input Parameter:
6358: . mat - the matrix
6360: Level: intermediate
6362: Note:
6363: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6364: See the Performance chapter of the users manual for information on preallocating matrices.
6366: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6367: @*/
PetscErrorCode MatZeroEntries(Mat mat)
{
  PetscFunctionBegin;
  /* zeroing a factored matrix would destroy the factorization */
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  /* refuse while MatSetValues() contributions are pending and unassembled */
  PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
  MatCheckPreallocated(mat, 1);

  PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
  /* dispatch to the type-specific implementation; errors if the type provides none */
  PetscUseTypeMethod(mat, zeroentries);
  PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
  /* values changed: bump the object state so dependents can detect the modification */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6384: /*@
6385: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6386: of a set of rows and columns of a matrix.
6388: Collective
6390: Input Parameters:
6391: + mat - the matrix
6392: . numRows - the number of rows/columns to zero
6393: . rows - the global row indices
6394: . diag - value put in the diagonal of the eliminated rows
6395: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6396: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6398: Level: intermediate
6400: Notes:
6401: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6403: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6404: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6406: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6407: Krylov method to take advantage of the known solution on the zeroed rows.
6409: For the parallel case, all processes that share the matrix (i.e.,
6410: those in the communicator used for matrix creation) MUST call this
6411: routine, regardless of whether any rows being zeroed are owned by
6412: them.
6414: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6415: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6416: missing.
6418: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6419: list only rows local to itself).
6421: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6423: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6424: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6425: @*/
PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when this process contributes no rows */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* all the real work is done by the type-specific implementation; errors if none exists */
  PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
  /* values changed: bump the object state so dependents can detect the modification */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6442: /*@
6443: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6444: of a set of rows and columns of a matrix.
6446: Collective
6448: Input Parameters:
6449: + mat - the matrix
6450: . is - the rows to zero
6451: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6452: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6453: - b - optional vector of right-hand side, that will be adjusted by provided solution
6455: Level: intermediate
6457: Note:
6458: See `MatZeroRowsColumns()` for details on how this routine operates.
6460: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6461: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6462: @*/
6463: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6464: {
6465: PetscInt numRows;
6466: const PetscInt *rows;
6468: PetscFunctionBegin;
6473: PetscCall(ISGetLocalSize(is, &numRows));
6474: PetscCall(ISGetIndices(is, &rows));
6475: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6476: PetscCall(ISRestoreIndices(is, &rows));
6477: PetscFunctionReturn(PETSC_SUCCESS);
6478: }
6480: /*@
6481: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6482: of a set of rows of a matrix.
6484: Collective
6486: Input Parameters:
6487: + mat - the matrix
6488: . numRows - the number of rows to zero
6489: . rows - the global row indices
6490: . diag - value put in the diagonal of the zeroed rows
6491: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6492: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6494: Level: intermediate
6496: Notes:
6497: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6499: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6501: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6502: Krylov method to take advantage of the known solution on the zeroed rows.
May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
from the matrix).
6507: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6508: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6509: formats this does not alter the nonzero structure.
If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) is set, the nonzero structure
of the matrix is not changed; the values are
merely zeroed.
6515: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6516: formats can optionally remove the main diagonal entry from the
6517: nonzero structure as well, by passing 0.0 as the final argument).
6519: For the parallel case, all processes that share the matrix (i.e.,
6520: those in the communicator used for matrix creation) MUST call this
6521: routine, regardless of whether any rows being zeroed are owned by
6522: them.
6524: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6525: list only rows local to itself).
6527: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6528: owns that are to be zeroed. This saves a global synchronization in the implementation.
6530: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6531: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6532: @*/
PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  /* rows may be NULL only when this process contributes no rows */
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* all the real work is done by the type-specific implementation; errors if none exists */
  PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
  PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
  /* values (and possibly nonzero structure) changed: bump the object state */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6549: /*@
6550: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6551: of a set of rows of a matrix indicated by an `IS`
6553: Collective
6555: Input Parameters:
6556: + mat - the matrix
6557: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6558: . diag - value put in all diagonals of eliminated rows
6559: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6560: - b - optional vector of right-hand side, that will be adjusted by provided solution
6562: Level: intermediate
6564: Note:
6565: See `MatZeroRows()` for details on how this routine operates.
6567: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6568: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6569: @*/
6570: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6571: {
6572: PetscInt numRows = 0;
6573: const PetscInt *rows = NULL;
6575: PetscFunctionBegin;
6578: if (is) {
6580: PetscCall(ISGetLocalSize(is, &numRows));
6581: PetscCall(ISGetIndices(is, &rows));
6582: }
6583: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6584: if (is) PetscCall(ISRestoreIndices(is, &rows));
6585: PetscFunctionReturn(PETSC_SUCCESS);
6586: }
6588: /*@
6589: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6590: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6592: Collective
6594: Input Parameters:
6595: + mat - the matrix
6596: . numRows - the number of rows to remove
6597: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6598: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6599: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6600: - b - optional vector of right-hand side, that will be adjusted by provided solution
6602: Level: intermediate
6604: Notes:
6605: See `MatZeroRows()` for details on how this routine operates.
6607: The grid coordinates are across the entire grid, not just the local portion
For periodic boundary conditions use negative indices for values to the left (below 0; these are
obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
`DM_BOUNDARY_PERIODIC` boundary type.
For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
a single value per point) you can skip filling those indices.
6617: Fortran Note:
6618: `idxm` and `idxn` should be declared as
6619: .vb
6620: MatStencil idxm(4, m)
6621: .ve
6622: and the values inserted using
6623: .vb
6624: idxm(MatStencil_i, 1) = i
6625: idxm(MatStencil_j, 1) = j
6626: idxm(MatStencil_k, 1) = k
6627: idxm(MatStencil_c, 1) = c
6628: etc
6629: .ve
6631: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6632: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6633: @*/
6634: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6635: {
6636: PetscInt dim = mat->stencil.dim;
6637: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6638: PetscInt *dims = mat->stencil.dims + 1;
6639: PetscInt *starts = mat->stencil.starts;
6640: PetscInt *dxm = (PetscInt *)rows;
6641: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6643: PetscFunctionBegin;
6646: if (numRows) PetscAssertPointer(rows, 3);
6648: PetscCall(PetscMalloc1(numRows, &jdxm));
6649: for (i = 0; i < numRows; ++i) {
6650: /* Skip unused dimensions (they are ordered k, j, i, c) */
6651: for (j = 0; j < 3 - sdim; ++j) dxm++;
6652: /* Local index in X dir */
6653: tmp = *dxm++ - starts[0];
6654: /* Loop over remaining dimensions */
6655: for (j = 0; j < dim - 1; ++j) {
6656: /* If nonlocal, set index to be negative */
6657: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6658: /* Update local index */
6659: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6660: }
6661: /* Skip component slot if necessary */
6662: if (mat->stencil.noc) dxm++;
6663: /* Local row number */
6664: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6665: }
6666: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6667: PetscCall(PetscFree(jdxm));
6668: PetscFunctionReturn(PETSC_SUCCESS);
6669: }
6671: /*@
6672: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6673: of a set of rows and columns of a matrix.
6675: Collective
6677: Input Parameters:
6678: + mat - the matrix
6679: . numRows - the number of rows/columns to remove
6680: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6681: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6682: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6683: - b - optional vector of right-hand side, that will be adjusted by provided solution
6685: Level: intermediate
6687: Notes:
6688: See `MatZeroRowsColumns()` for details on how this routine operates.
6690: The grid coordinates are across the entire grid, not just the local portion
For periodic boundary conditions use negative indices for values to the left (below 0; these are
obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
etc., to obtain values that are obtained by wrapping the values from the left edge. This does not work for anything but the
`DM_BOUNDARY_PERIODIC` boundary type.
For indices that don't mean anything for your case (like the `k` index when working in 2d, or the `c` index when you have
a single value per point) you can skip filling those indices.
6700: Fortran Note:
6701: `idxm` and `idxn` should be declared as
6702: .vb
6703: MatStencil idxm(4, m)
6704: .ve
6705: and the values inserted using
6706: .vb
6707: idxm(MatStencil_i, 1) = i
6708: idxm(MatStencil_j, 1) = j
6709: idxm(MatStencil_k, 1) = k
6710: idxm(MatStencil_c, 1) = c
6711: etc
6712: .ve
6714: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6715: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6716: @*/
6717: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6718: {
6719: PetscInt dim = mat->stencil.dim;
6720: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6721: PetscInt *dims = mat->stencil.dims + 1;
6722: PetscInt *starts = mat->stencil.starts;
6723: PetscInt *dxm = (PetscInt *)rows;
6724: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6726: PetscFunctionBegin;
6729: if (numRows) PetscAssertPointer(rows, 3);
6731: PetscCall(PetscMalloc1(numRows, &jdxm));
6732: for (i = 0; i < numRows; ++i) {
6733: /* Skip unused dimensions (they are ordered k, j, i, c) */
6734: for (j = 0; j < 3 - sdim; ++j) dxm++;
6735: /* Local index in X dir */
6736: tmp = *dxm++ - starts[0];
6737: /* Loop over remaining dimensions */
6738: for (j = 0; j < dim - 1; ++j) {
6739: /* If nonlocal, set index to be negative */
6740: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6741: /* Update local index */
6742: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6743: }
6744: /* Skip component slot if necessary */
6745: if (mat->stencil.noc) dxm++;
6746: /* Local row number */
6747: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6748: }
6749: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6750: PetscCall(PetscFree(jdxm));
6751: PetscFunctionReturn(PETSC_SUCCESS);
6752: }
6754: /*@
6755: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6756: of a set of rows of a matrix; using local numbering of rows.
6758: Collective
6760: Input Parameters:
6761: + mat - the matrix
6762: . numRows - the number of rows to remove
6763: . rows - the local row indices
6764: . diag - value put in all diagonals of eliminated rows
6765: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6766: - b - optional vector of right-hand side, that will be adjusted by provided solution
6768: Level: intermediate
6770: Notes:
6771: Before calling `MatZeroRowsLocal()`, the user must first set the
6772: local-to-global mapping by calling MatSetLocalToGlobalMapping(), this is often already set for matrices obtained with `DMCreateMatrix()`.
6774: See `MatZeroRows()` for details on how this routine operates.
6776: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6777: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6778: @*/
6779: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6780: {
6781: PetscFunctionBegin;
6784: if (numRows) PetscAssertPointer(rows, 3);
6785: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6786: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6787: MatCheckPreallocated(mat, 1);
6789: if (mat->ops->zerorowslocal) {
6790: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6791: } else {
6792: IS is, newis;
6793: PetscInt *newRows, nl = 0;
6795: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6796: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
6797: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6798: PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
6799: for (PetscInt i = 0; i < numRows; i++)
6800: if (newRows[i] > -1) newRows[nl++] = newRows[i];
6801: PetscUseTypeMethod(mat, zerorows, nl, newRows, diag, x, b);
6802: PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
6803: PetscCall(ISDestroy(&newis));
6804: PetscCall(ISDestroy(&is));
6805: }
6806: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6807: PetscFunctionReturn(PETSC_SUCCESS);
6808: }
6810: /*@
6811: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6812: of a set of rows of a matrix; using local numbering of rows.
6814: Collective
6816: Input Parameters:
6817: + mat - the matrix
6818: . is - index set of rows to remove
6819: . diag - value put in all diagonals of eliminated rows
6820: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6821: - b - optional vector of right-hand side, that will be adjusted by provided solution
6823: Level: intermediate
6825: Notes:
6826: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6827: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6829: See `MatZeroRows()` for details on how this routine operates.
6831: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6832: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6833: @*/
6834: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6835: {
6836: PetscInt numRows;
6837: const PetscInt *rows;
6839: PetscFunctionBegin;
6843: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6844: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6845: MatCheckPreallocated(mat, 1);
6847: PetscCall(ISGetLocalSize(is, &numRows));
6848: PetscCall(ISGetIndices(is, &rows));
6849: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6850: PetscCall(ISRestoreIndices(is, &rows));
6851: PetscFunctionReturn(PETSC_SUCCESS);
6852: }
/*@
  MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
  of a set of rows and columns of a matrix; using local numbering of rows.

  Collective

  Input Parameters:
+ mat     - the matrix
. numRows - the number of rows to remove
. rows    - the local row indices
. diag    - value put in all diagonals of eliminated rows
. x       - optional vector of solutions for zeroed rows (other entries in vector are not used)
- b       - optional vector of right-hand side, that will be adjusted by provided solution

  Level: intermediate

  Notes:
  Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
  local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.

  See `MatZeroRowsColumns()` for details on how this routine operates.

.seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
          `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
@*/
PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  if (numRows) PetscAssertPointer(rows, 3);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  if (mat->ops->zerorowscolumnslocal) {
    /* type provides a native local-index implementation */
    PetscUseTypeMethod(mat, zerorowscolumnslocal, numRows, rows, diag, x, b);
  } else {
    IS        is, newis;
    PetscInt *newRows, nl = 0;

    /* fallback: translate local indices to global ones and call the global-index method */
    PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_USE_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
    PetscCall(ISGetIndices(newis, (const PetscInt **)&newRows));
    /* compact in place, dropping negative (unmapped) global indices; newis is destroyed below, so the const cast mutation is harmless */
    for (PetscInt i = 0; i < numRows; i++)
      if (newRows[i] > -1) newRows[nl++] = newRows[i];
    PetscUseTypeMethod(mat, zerorowscolumns, nl, newRows, diag, x, b);
    PetscCall(ISRestoreIndices(newis, (const PetscInt **)&newRows));
    PetscCall(ISDestroy(&newis));
    PetscCall(ISDestroy(&is));
  }
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
6910: /*@
6911: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6912: of a set of rows and columns of a matrix; using local numbering of rows.
6914: Collective
6916: Input Parameters:
6917: + mat - the matrix
6918: . is - index set of rows to remove
6919: . diag - value put in all diagonals of eliminated rows
6920: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6921: - b - optional vector of right-hand side, that will be adjusted by provided solution
6923: Level: intermediate
6925: Notes:
6926: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6927: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6929: See `MatZeroRowsColumns()` for details on how this routine operates.
6931: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6932: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6933: @*/
6934: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6935: {
6936: PetscInt numRows;
6937: const PetscInt *rows;
6939: PetscFunctionBegin;
6943: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6944: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6945: MatCheckPreallocated(mat, 1);
6947: PetscCall(ISGetLocalSize(is, &numRows));
6948: PetscCall(ISGetIndices(is, &rows));
6949: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6950: PetscCall(ISRestoreIndices(is, &rows));
6951: PetscFunctionReturn(PETSC_SUCCESS);
6952: }
6954: /*@
6955: MatGetSize - Returns the numbers of rows and columns in a matrix.
6957: Not Collective
6959: Input Parameter:
6960: . mat - the matrix
6962: Output Parameters:
6963: + m - the number of global rows
6964: - n - the number of global columns
6966: Level: beginner
6968: Note:
6969: Both output parameters can be `NULL` on input.
6971: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6972: @*/
6973: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6974: {
6975: PetscFunctionBegin;
6977: if (m) *m = mat->rmap->N;
6978: if (n) *n = mat->cmap->N;
6979: PetscFunctionReturn(PETSC_SUCCESS);
6980: }
/*@
  MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
  of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.

  Not Collective

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ m - the number of local rows, use `NULL` to not obtain this value
- n - the number of local columns, use `NULL` to not obtain this value

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
@*/
PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
{
  PetscFunctionBegin;
  /* only validate the output pointers the caller actually requested */
  if (m) PetscAssertPointer(m, 2);
  if (n) PetscAssertPointer(n, 3);
  if (m) *m = mat->rmap->n;
  if (n) *n = mat->cmap->n;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
  vector one multiplies this matrix by that are owned by this processor.

  Not Collective, unless matrix has not been allocated, then collective

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ m - the global index of the first local column, use `NULL` to not obtain this value
- n - one more than the global index of the last local column, use `NULL` to not obtain this value

  Level: developer

  Notes:
  If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.

  If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
  If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.

  For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
  the local values in the matrix.

  Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
  Layouts](sec_matlayout) for details on matrix layouts.

.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
          `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
@*/
PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
{
  PetscFunctionBegin;
  if (m) PetscAssertPointer(m, 2);
  if (n) PetscAssertPointer(n, 3);
  MatCheckPreallocated(mat, 1);
  /* the column layout (cmap) holds the "diagonal block" column range */
  if (m) *m = mat->cmap->rstart;
  if (n) *n = mat->cmap->rend;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
  this MPI process.

  Not Collective

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ m - the global index of the first local row, use `NULL` to not obtain this value
- n - one more than the global index of the last local row, use `NULL` to not obtain this value

  Level: beginner

  Notes:
  If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.

  If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
  If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.

  For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
  the local values in the matrix.

  The high argument is one more than the last element stored locally.

  For all matrices it returns the range of matrix rows associated with rows of a vector that
  would contain the result of a matrix vector product with this matrix. See [Matrix
  Layouts](sec_matlayout) for details on matrix layouts.

.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
          `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
@*/
PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
{
  PetscFunctionBegin;
  if (m) PetscAssertPointer(m, 2);
  if (n) PetscAssertPointer(n, 3);
  MatCheckPreallocated(mat, 1);
  /* the row layout (rmap) holds this process's owned row range */
  if (m) *m = mat->rmap->rstart;
  if (n) *n = mat->rmap->rend;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
  `MATSCALAPACK`, returns the range of matrix rows owned by each process.

  Not Collective, unless matrix has not been allocated

  Input Parameter:
. mat - the matrix

  Output Parameter:
. ranges - start of each processors portion plus one more than the total length at the end, of length `size` + 1
           where `size` is the number of MPI processes used by `mat`

  Level: beginner

  Notes:
  If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.

  If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
  If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.

  For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
  the local values in the matrix.

  For all matrices it returns the ranges of matrix rows associated with rows of a vector that
  would contain the result of a matrix vector product with this matrix. See [Matrix
  Layouts](sec_matlayout) for details on matrix layouts.

.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
          `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
          `DMDAGetGhostCorners()`, `DM`
@*/
PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* delegate to the row layout; the returned array is owned by the layout, not the caller */
  PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
  vector one multiplies this matrix by that are owned by each processor.

  Not Collective, unless matrix has not been allocated

  Input Parameter:
. mat - the matrix

  Output Parameter:
. ranges - start of each processors portion plus one more than the total length at the end

  Level: beginner

  Notes:
  If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.

  If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
  If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.

  For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
  the local values in the matrix.

  Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
  Layouts](sec_matlayout) for details on matrix layouts.

.seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
          `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
          `DMDAGetGhostCorners()`, `DM`
@*/
PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* delegate to the column layout; the returned array is owned by the layout, not the caller */
  PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7181: /*@
7182: MatGetOwnershipIS - Get row and column ownership of a matrices' values as index sets.
7184: Not Collective
7186: Input Parameter:
7187: . A - matrix
7189: Output Parameters:
7190: + rows - rows in which this process owns elements, , use `NULL` to not obtain this value
7191: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7193: Level: intermediate
7195: Note:
7196: You should call `ISDestroy()` on the returned `IS`
7198: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7199: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7200: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7201: details on matrix layouts.
7203: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7204: @*/
7205: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7206: {
7207: PetscErrorCode (*f)(Mat, IS *, IS *);
7209: PetscFunctionBegin;
7212: MatCheckPreallocated(A, 1);
7213: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7214: if (f) {
7215: PetscCall((*f)(A, rows, cols));
7216: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7217: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7218: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7219: }
7220: PetscFunctionReturn(PETSC_SUCCESS);
7221: }
/*@
  MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
  Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
  to complete the factorization.

  Collective

  Input Parameters:
+ fact - the factorized matrix obtained with `MatGetFactor()`
. mat  - the matrix
. row  - row permutation
. col  - column permutation
- info - structure containing
.vb
      levels - number of levels of fill.
      expected fill - as ratio of original fill.
      1 or 0 - indicating force fill on diagonal (improves robustness for matrices
                missing diagonal entries)
.ve

  Level: developer

  Notes:
  See [Matrix Factorization](sec_matfactor) for additional information.

  Most users should employ the `KSP` interface for linear solvers
  instead of working directly with matrix algebra routines such as this.
  See, e.g., `KSPCreate()`.

  Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`

  Fortran Note:
  A valid (non-null) `info` argument must be provided

.seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
          `MatGetOrdering()`, `MatFactorInfo`
@*/
PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscAssertPointer(info, 5);
  PetscAssertPointer(fact, 1);
  /* sanity-check the user-supplied factorization parameters before dispatch */
  PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
  PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 2);
  /* skip event logging when the factor reports a trivial symbolic phase */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
  PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatICCFactorSymbolic - Performs symbolic incomplete
  Cholesky factorization for a symmetric matrix. Use
  `MatCholeskyFactorNumeric()` to complete the factorization.

  Collective

  Input Parameters:
+ fact - the factorized matrix obtained with `MatGetFactor()`
. mat  - the matrix to be factored
. perm - row and column permutation
- info - structure containing
.vb
      levels - number of levels of fill.
      expected fill - as ratio of original fill.
.ve

  Level: developer

  Notes:
  Most users should employ the `KSP` interface for linear solvers
  instead of working directly with matrix algebra routines such as this.
  See, e.g., `KSPCreate()`.

  This uses the definition of level of fill as in Y. Saad {cite}`saad2003`

  Fortran Note:
  A valid (non-null) `info` argument must be provided

.seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
@*/
PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscAssertPointer(info, 4);
  PetscAssertPointer(fact, 1);
  /* sanity-check matrix state and user-supplied factorization parameters before dispatch */
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
  PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  MatCheckPreallocated(mat, 2);
  /* skip event logging when the factor reports a trivial symbolic phase */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
  PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7332: /*@C
7333: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7334: points to an array of valid matrices, they may be reused to store the new
7335: submatrices.
7337: Collective
7339: Input Parameters:
7340: + mat - the matrix
7341: . n - the number of submatrixes to be extracted (on this processor, may be zero)
7342: . irow - index set of rows to extract
7343: . icol - index set of columns to extract
7344: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7346: Output Parameter:
7347: . submat - the array of submatrices
7349: Level: advanced
7351: Notes:
7352: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7353: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7354: to extract a parallel submatrix.
7356: Some matrix types place restrictions on the row and column
7357: indices, such as that they be sorted or that they be equal to each other.
7359: The index sets may not have duplicate entries.
7361: When extracting submatrices from a parallel matrix, each processor can
7362: form a different submatrix by setting the rows and columns of its
7363: individual index sets according to the local submatrix desired.
7365: When finished using the submatrices, the user should destroy
7366: them with `MatDestroySubMatrices()`.
7368: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7369: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7371: This routine creates the matrices in submat; you should NOT create them before
7372: calling it. It also allocates the array of matrix pointers submat.
7374: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7375: request one row/column in a block, they must request all rows/columns that are in
7376: that block. For example, if the block size is 2 you cannot request just row 0 and
7377: column 0.
7379: Fortran Note:
7380: .vb
7381: Mat, pointer :: submat(:)
7382: .ve
7384: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7385: @*/
7386: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7387: {
7388: PetscInt i;
7389: PetscBool eq;
7391: PetscFunctionBegin;
7394: if (n) {
7395: PetscAssertPointer(irow, 3);
7397: PetscAssertPointer(icol, 4);
7399: }
7400: PetscAssertPointer(submat, 6);
7401: if (n && scall == MAT_REUSE_MATRIX) {
7402: PetscAssertPointer(*submat, 6);
7404: }
7405: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7406: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7407: MatCheckPreallocated(mat, 1);
7408: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7409: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7410: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7411: for (i = 0; i < n; i++) {
7412: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7413: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7414: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7415: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7416: if (mat->boundtocpu && mat->bindingpropagates) {
7417: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7418: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7419: }
7420: #endif
7421: }
7422: PetscFunctionReturn(PETSC_SUCCESS);
7423: }
7425: /*@C
7426: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of `mat` (by pairs of `IS` that may live on subcomms).
7428: Collective
7430: Input Parameters:
7431: + mat - the matrix
7432: . n - the number of submatrixes to be extracted
7433: . irow - index set of rows to extract
7434: . icol - index set of columns to extract
7435: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7437: Output Parameter:
7438: . submat - the array of submatrices
7440: Level: advanced
7442: Note:
7443: This is used by `PCGASM`
7445: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7446: @*/
7447: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7448: {
7449: PetscInt i;
7450: PetscBool eq;
7452: PetscFunctionBegin;
7455: if (n) {
7456: PetscAssertPointer(irow, 3);
7458: PetscAssertPointer(icol, 4);
7460: }
7461: PetscAssertPointer(submat, 6);
7462: if (n && scall == MAT_REUSE_MATRIX) {
7463: PetscAssertPointer(*submat, 6);
7465: }
7466: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7467: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7468: MatCheckPreallocated(mat, 1);
7470: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7471: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7472: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7473: for (i = 0; i < n; i++) {
7474: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7475: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7476: }
7477: PetscFunctionReturn(PETSC_SUCCESS);
7478: }
7480: /*@C
7481: MatDestroyMatrices - Destroys an array of matrices
7483: Collective
7485: Input Parameters:
7486: + n - the number of local matrices
7487: - mat - the matrices (this is a pointer to the array of matrices)
7489: Level: advanced
7491: Notes:
7492: Frees not only the matrices, but also the array that contains the matrices
7494: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7496: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7497: @*/
7498: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7499: {
7500: PetscInt i;
7502: PetscFunctionBegin;
7503: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7504: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7505: PetscAssertPointer(mat, 2);
7507: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7509: /* memory is allocated even if n = 0 */
7510: PetscCall(PetscFree(*mat));
7511: PetscFunctionReturn(PETSC_SUCCESS);
7512: }
7514: /*@C
7515: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7517: Collective
7519: Input Parameters:
7520: + n - the number of local matrices
7521: - mat - the matrices (this is a pointer to the array of matrices, to match the calling sequence of `MatCreateSubMatrices()`)
7523: Level: advanced
7525: Note:
7526: Frees not only the matrices, but also the array that contains the matrices
7528: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7529: @*/
7530: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7531: {
7532: Mat mat0;
7534: PetscFunctionBegin;
7535: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7536: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7537: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7538: PetscAssertPointer(mat, 2);
7540: mat0 = (*mat)[0];
7541: if (mat0 && mat0->ops->destroysubmatrices) {
7542: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7543: } else {
7544: PetscCall(MatDestroyMatrices(n, mat));
7545: }
7546: PetscFunctionReturn(PETSC_SUCCESS);
7547: }
/*@
  MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process

  Collective

  Input Parameter:
. mat - the matrix

  Output Parameter:
. matstruct - the sequential matrix with the nonzero structure of `mat`

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
@*/
PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
{
  PetscFunctionBegin;
  PetscAssertPointer(matstruct, 2);
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);

  /* dispatch to the type-specific implementation, bracketed by event logging */
  PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
  PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.

  Collective

  Input Parameter:
. mat - the matrix

  Level: advanced

  Note:
  This is not needed, one can just call `MatDestroy()`

.seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
@*/
PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
{
  PetscFunctionBegin;
  PetscAssertPointer(mat, 1);
  /* kept for API symmetry with MatGetSeqNonzeroStructure(); simply destroys the matrix */
  PetscCall(MatDestroy(mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7603: /*@
7604: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7605: replaces the index sets by larger ones that represent submatrices with
7606: additional overlap.
7608: Collective
7610: Input Parameters:
7611: + mat - the matrix
7612: . n - the number of index sets
7613: . is - the array of index sets (these index sets will changed during the call)
7614: - ov - the additional overlap requested
7616: Options Database Key:
7617: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7619: Level: developer
7621: Note:
7622: The computed overlap preserves the matrix block sizes when the blocks are square.
7623: That is: if a matrix nonzero for a given block would increase the overlap all columns associated with
7624: that block are included in the overlap regardless of whether each specific column would increase the overlap.
7626: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7627: @*/
7628: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7629: {
7630: PetscInt i, bs, cbs;
7632: PetscFunctionBegin;
7636: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7637: if (n) {
7638: PetscAssertPointer(is, 3);
7640: }
7641: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7642: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7643: MatCheckPreallocated(mat, 1);
7645: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7646: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7647: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7648: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7649: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7650: if (bs == cbs) {
7651: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7652: }
7653: PetscFunctionReturn(PETSC_SUCCESS);
7654: }
7656: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7658: /*@
7659: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7660: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7661: additional overlap.
7663: Collective
7665: Input Parameters:
7666: + mat - the matrix
7667: . n - the number of index sets
7668: . is - the array of index sets (these index sets will changed during the call)
7669: - ov - the additional overlap requested
7671: ` Options Database Key:
7672: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7674: Level: developer
7676: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7677: @*/
7678: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7679: {
7680: PetscInt i;
7682: PetscFunctionBegin;
7685: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7686: if (n) {
7687: PetscAssertPointer(is, 3);
7689: }
7690: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7691: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7692: MatCheckPreallocated(mat, 1);
7693: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7694: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7695: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7696: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7697: PetscFunctionReturn(PETSC_SUCCESS);
7698: }
7700: /*@
7701: MatGetBlockSize - Returns the matrix block size.
7703: Not Collective
7705: Input Parameter:
7706: . mat - the matrix
7708: Output Parameter:
7709: . bs - block size
7711: Level: intermediate
7713: Notes:
7714: Block row formats are `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7716: If the block size has not been set yet this routine returns 1.
7718: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7719: @*/
7720: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7721: {
7722: PetscFunctionBegin;
7724: PetscAssertPointer(bs, 2);
7725: *bs = mat->rmap->bs;
7726: PetscFunctionReturn(PETSC_SUCCESS);
7727: }
7729: /*@
7730: MatGetBlockSizes - Returns the matrix block row and column sizes.
7732: Not Collective
7734: Input Parameter:
7735: . mat - the matrix
7737: Output Parameters:
7738: + rbs - row block size
7739: - cbs - column block size
7741: Level: intermediate
7743: Notes:
7744: Block row formats are `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7745: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7747: If a block size has not been set yet this routine returns 1.
7749: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7750: @*/
7751: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7752: {
7753: PetscFunctionBegin;
7755: if (rbs) PetscAssertPointer(rbs, 2);
7756: if (cbs) PetscAssertPointer(cbs, 3);
7757: if (rbs) *rbs = mat->rmap->bs;
7758: if (cbs) *cbs = mat->cmap->bs;
7759: PetscFunctionReturn(PETSC_SUCCESS);
7760: }
7762: /*@
7763: MatSetBlockSize - Sets the matrix block size.
7765: Logically Collective
7767: Input Parameters:
7768: + mat - the matrix
7769: - bs - block size
7771: Level: intermediate
7773: Notes:
7774: Block row formats are `MATBAIJ` and `MATSBAIJ` formats ALWAYS have square block storage in the matrix.
7775: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7777: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7778: is compatible with the matrix local sizes.
7780: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7781: @*/
7782: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7783: {
7784: PetscFunctionBegin;
7787: PetscCall(MatSetBlockSizes(mat, bs, bs));
7788: PetscFunctionReturn(PETSC_SUCCESS);
7789: }
/* Cached description of the variable diagonal-block envelope of a matrix,
   attached to the matrix as the "EnvelopeData" composed object by
   MatComputeVariableBlockEnvelope() */
typedef struct {
  PetscInt n;   /* number of diagonal blocks on this process */
  IS *is;       /* index set of (global) rows/columns for each block */
  Mat *mat;     /* work array for the extracted submatrices, one per block */
  PetscObjectState nonzerostate; /* nonzero state of the matrix when the envelope was computed */
  Mat C;        /* preallocated matrix that receives the inverted blocks */
} EnvelopeData;
/* Destroy callback for the "EnvelopeData" container composed with a matrix by
   MatComputeVariableBlockEnvelope(): frees the per-block index sets and the struct itself.
   NOTE(review): edata->C and edata->mat are not freed here; presumably edata->C is handed
   to the caller of MatInvertVariableBlockEnvelope() and edata->mat is destroyed after each
   use there -- confirm no leak when the envelope is computed but never inverted. */
static PetscErrorCode EnvelopeDataDestroy(PetscCtxRt ptr)
{
  EnvelopeData *edata = *(EnvelopeData **)ptr;

  PetscFunctionBegin;
  for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
  PetscCall(PetscFree(edata->is));
  PetscCall(PetscFree(edata));
  PetscFunctionReturn(PETSC_SUCCESS);
}
7810: /*@
7811: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7812: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7814: Collective
7816: Input Parameter:
7817: . mat - the matrix
7819: Level: intermediate
7821: Notes:
7822: There can be zeros within the blocks
7824: The blocks can overlap between processes, including laying on more than two processes
7826: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7827: @*/
7828: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7829: {
7830: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7831: PetscInt *diag, *odiag, sc;
7832: VecScatter scatter;
7833: PetscScalar *seqv;
7834: const PetscScalar *parv;
7835: const PetscInt *ia, *ja;
7836: PetscBool set, flag, done;
7837: Mat AA = mat, A;
7838: MPI_Comm comm;
7839: PetscMPIInt rank, size, tag;
7840: MPI_Status status;
7841: PetscContainer container;
7842: EnvelopeData *edata;
7843: Vec seq, par;
7844: IS isglobal;
7846: PetscFunctionBegin;
7848: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7849: if (!set || !flag) {
7850: /* TODO: only needs nonzero structure of transpose */
7851: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7852: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7853: }
7854: PetscCall(MatAIJGetLocalMat(AA, &A));
7855: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7856: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7858: PetscCall(MatGetLocalSize(mat, &n, NULL));
7859: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7860: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7861: PetscCallMPI(MPI_Comm_size(comm, &size));
7862: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7864: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7866: if (rank > 0) {
7867: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7868: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7869: }
7870: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7871: for (i = 0; i < n; i++) {
7872: env = PetscMax(env, ja[ia[i + 1] - 1]);
7873: II = rstart + i;
7874: if (env == II) {
7875: starts[lblocks] = tbs;
7876: sizes[lblocks++] = 1 + II - tbs;
7877: tbs = 1 + II;
7878: }
7879: }
7880: if (rank < size - 1) {
7881: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7882: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7883: }
7885: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7886: if (!set || !flag) PetscCall(MatDestroy(&AA));
7887: PetscCall(MatDestroy(&A));
7889: PetscCall(PetscNew(&edata));
7890: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7891: edata->n = lblocks;
7892: /* create IS needed for extracting blocks from the original matrix */
7893: PetscCall(PetscMalloc1(lblocks, &edata->is));
7894: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7896: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7897: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7898: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7899: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7900: PetscCall(MatSetType(edata->C, MATAIJ));
7902: /* Communicate the start and end of each row, from each block to the correct rank */
7903: /* TODO: Use PetscSF instead of VecScatter */
7904: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7905: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7906: PetscCall(VecGetArrayWrite(seq, &seqv));
7907: for (PetscInt i = 0; i < lblocks; i++) {
7908: for (PetscInt j = 0; j < sizes[i]; j++) {
7909: seqv[cnt] = starts[i];
7910: seqv[cnt + 1] = starts[i] + sizes[i];
7911: cnt += 2;
7912: }
7913: }
7914: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7915: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7916: sc -= cnt;
7917: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7918: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7919: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7920: PetscCall(ISDestroy(&isglobal));
7921: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7922: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7923: PetscCall(VecScatterDestroy(&scatter));
7924: PetscCall(VecDestroy(&seq));
7925: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7926: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7927: PetscCall(VecGetArrayRead(par, &parv));
7928: cnt = 0;
7929: PetscCall(MatGetSize(mat, NULL, &n));
7930: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7931: PetscInt start, end, d = 0, od = 0;
7933: start = (PetscInt)PetscRealPart(parv[cnt]);
7934: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7935: cnt += 2;
7937: if (start < cstart) {
7938: od += cstart - start + n - cend;
7939: d += cend - cstart;
7940: } else if (start < cend) {
7941: od += n - cend;
7942: d += cend - start;
7943: } else od += n - start;
7944: if (end <= cstart) {
7945: od -= cstart - end + n - cend;
7946: d -= cend - cstart;
7947: } else if (end < cend) {
7948: od -= n - cend;
7949: d -= cend - end;
7950: } else od -= n - end;
7952: odiag[i] = od;
7953: diag[i] = d;
7954: }
7955: PetscCall(VecRestoreArrayRead(par, &parv));
7956: PetscCall(VecDestroy(&par));
7957: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7958: PetscCall(PetscFree2(diag, odiag));
7959: PetscCall(PetscFree2(sizes, starts));
7961: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7962: PetscCall(PetscContainerSetPointer(container, edata));
7963: PetscCall(PetscContainerSetCtxDestroy(container, EnvelopeDataDestroy));
7964: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7965: PetscCall(PetscObjectDereference((PetscObject)container));
7966: PetscFunctionReturn(PETSC_SUCCESS);
7967: }
7969: /*@
7970: MatInvertVariableBlockEnvelope - set matrix C to be the inverted block diagonal of matrix A
7972: Collective
7974: Input Parameters:
7975: + A - the matrix
7976: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7978: Output Parameter:
7979: . C - matrix with inverted block diagonal of `A`
7981: Level: advanced
7983: Note:
7984: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7986: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7987: @*/
7988: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7989: {
7990: PetscContainer container;
7991: EnvelopeData *edata;
7992: PetscObjectState nonzerostate;
7994: PetscFunctionBegin;
7995: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7996: if (!container) {
7997: PetscCall(MatComputeVariableBlockEnvelope(A));
7998: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7999: }
8000: PetscCall(PetscContainerGetPointer(container, &edata));
8001: PetscCall(MatGetNonzeroState(A, &nonzerostate));
8002: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
8003: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
8005: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
8006: *C = edata->C;
8008: for (PetscInt i = 0; i < edata->n; i++) {
8009: Mat D;
8010: PetscScalar *dvalues;
8012: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
8013: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
8014: PetscCall(MatSeqDenseInvert(D));
8015: PetscCall(MatDenseGetArray(D, &dvalues));
8016: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
8017: PetscCall(MatDestroy(&D));
8018: }
8019: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
8020: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
8021: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
8022: PetscFunctionReturn(PETSC_SUCCESS);
8023: }
8025: /*@
8026: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
8028: Not Collective
8030: Input Parameters:
8031: + mat - the matrix
8032: . nblocks - the number of blocks on this process, each block can only exist on a single process
8033: - bsizes - the block sizes
8035: Level: intermediate
8037: Notes:
8038: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
8040: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
8042: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
8043: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
8044: @*/
8045: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
8046: {
8047: PetscInt ncnt = 0, nlocal;
8049: PetscFunctionBegin;
8051: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
8052: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
8053: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
8054: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
8055: PetscCall(PetscFree(mat->bsizes));
8056: mat->nblocks = nblocks;
8057: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
8058: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
8059: PetscFunctionReturn(PETSC_SUCCESS);
8060: }
/*@C
  MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix that need not be of the same size

  Not Collective; No Fortran Support

  Input Parameter:
. mat - the matrix

  Output Parameters:
+ nblocks - the number of blocks on this process
- bsizes  - the block sizes

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
@*/
PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
{
  PetscFunctionBegin;
  /* either output may be NULL; bsizes points at the matrix's own storage, do not free it */
  if (nblocks) *nblocks = mat->nblocks;
  if (bsizes) *bsizes = mat->bsizes;
  PetscFunctionReturn(PETSC_SUCCESS);
}
8087: /*@
8088: MatSelectVariableBlockSizes - When creating a submatrix, pass on the variable block sizes
8090: Not Collective
8092: Input Parameter:
8093: + subA - the submatrix
8094: . A - the original matrix
8095: - isrow - The `IS` of selected rows for the submatrix, must be sorted
8097: Level: developer
8099: Notes:
8100: If the index set is not sorted or contains off-process entries, this function will do nothing.
8102: .seealso: [](ch_matrices), `Mat`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
8103: @*/
8104: PetscErrorCode MatSelectVariableBlockSizes(Mat subA, Mat A, IS isrow)
8105: {
8106: const PetscInt *rows;
8107: PetscInt n, rStart, rEnd, Nb = 0;
8108: PetscBool flg = A->bsizes ? PETSC_TRUE : PETSC_FALSE;
8110: PetscFunctionBegin;
8111: // The code for block size extraction does not support an unsorted IS
8112: if (flg) PetscCall(ISSorted(isrow, &flg));
8113: // We don't support originally off-diagonal blocks
8114: if (flg) {
8115: PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
8116: PetscCall(ISGetLocalSize(isrow, &n));
8117: PetscCall(ISGetIndices(isrow, &rows));
8118: for (PetscInt i = 0; i < n && flg; ++i) {
8119: if (rows[i] < rStart || rows[i] >= rEnd) flg = PETSC_FALSE;
8120: }
8121: PetscCall(ISRestoreIndices(isrow, &rows));
8122: }
8123: // quiet return if we can't extract block size
8124: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)subA)));
8125: if (!flg) PetscFunctionReturn(PETSC_SUCCESS);
8127: // extract block sizes
8128: PetscCall(ISGetIndices(isrow, &rows));
8129: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
8130: PetscBool occupied = PETSC_FALSE;
8132: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
8133: const PetscInt row = gr + br;
8135: if (i == n) break;
8136: if (rows[i] == row) {
8137: occupied = PETSC_TRUE;
8138: ++i;
8139: }
8140: while (i < n && rows[i] < row) ++i;
8141: }
8142: gr += A->bsizes[b];
8143: if (occupied) ++Nb;
8144: }
8145: subA->nblocks = Nb;
8146: PetscCall(PetscFree(subA->bsizes));
8147: PetscCall(PetscMalloc1(subA->nblocks, &subA->bsizes));
8148: PetscInt sb = 0;
8149: for (PetscInt b = 0, gr = rStart, i = 0; b < A->nblocks; ++b) {
8150: if (sb < subA->nblocks) subA->bsizes[sb] = 0;
8151: for (PetscInt br = 0; br < A->bsizes[b]; ++br) {
8152: const PetscInt row = gr + br;
8154: if (i == n) break;
8155: if (rows[i] == row) {
8156: ++subA->bsizes[sb];
8157: ++i;
8158: }
8159: while (i < n && rows[i] < row) ++i;
8160: }
8161: gr += A->bsizes[b];
8162: if (sb < subA->nblocks && subA->bsizes[sb]) ++sb;
8163: }
8164: PetscCheck(sb == subA->nblocks, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid number of blocks %" PetscInt_FMT " != %" PetscInt_FMT, sb, subA->nblocks);
8165: PetscInt nlocal, ncnt = 0;
8166: PetscCall(MatGetLocalSize(subA, &nlocal, NULL));
8167: PetscCheck(subA->nblocks >= 0 && subA->nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", subA->nblocks, nlocal);
8168: for (PetscInt i = 0; i < subA->nblocks; i++) ncnt += subA->bsizes[i];
8169: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
8170: PetscCall(ISRestoreIndices(isrow, &rows));
8171: PetscFunctionReturn(PETSC_SUCCESS);
8172: }
8174: /*@
8175: MatSetBlockSizes - Sets the matrix block row and column sizes.
8177: Logically Collective
8179: Input Parameters:
8180: + mat - the matrix
8181: . rbs - row block size
8182: - cbs - column block size
8184: Level: intermediate
8186: Notes:
8187: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
8188: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
8189: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
8191: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
8192: are compatible with the matrix local sizes.
8194: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
8196: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
8197: @*/
8198: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
8199: {
8200: PetscFunctionBegin;
8204: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
8205: if (mat->rmap->refcnt) {
8206: ISLocalToGlobalMapping l2g = NULL;
8207: PetscLayout nmap = NULL;
8209: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
8210: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
8211: PetscCall(PetscLayoutDestroy(&mat->rmap));
8212: mat->rmap = nmap;
8213: mat->rmap->mapping = l2g;
8214: }
8215: if (mat->cmap->refcnt) {
8216: ISLocalToGlobalMapping l2g = NULL;
8217: PetscLayout nmap = NULL;
8219: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
8220: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
8221: PetscCall(PetscLayoutDestroy(&mat->cmap));
8222: mat->cmap = nmap;
8223: mat->cmap->mapping = l2g;
8224: }
8225: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
8226: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
8227: PetscFunctionReturn(PETSC_SUCCESS);
8228: }
8230: /*@
8231: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
8233: Logically Collective
8235: Input Parameters:
8236: + mat - the matrix
8237: . fromRow - matrix from which to copy row block size
8238: - fromCol - matrix from which to copy column block size (can be same as `fromRow`)
8240: Level: developer
8242: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
8243: @*/
8244: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
8245: {
8246: PetscFunctionBegin;
8250: PetscTryTypeMethod(mat, setblocksizes, fromRow->rmap->bs, fromCol->cmap->bs);
8251: PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
8252: PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
8253: PetscFunctionReturn(PETSC_SUCCESS);
8254: }
8256: /*@
8257: MatResidual - Default routine to calculate the residual r = b - Ax
8259: Collective
8261: Input Parameters:
8262: + mat - the matrix
8263: . b - the right-hand-side
8264: - x - the approximate solution
8266: Output Parameter:
8267: . r - location to store the residual
8269: Level: developer
8271: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8272: @*/
8273: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8274: {
8275: PetscFunctionBegin;
8281: MatCheckPreallocated(mat, 1);
8282: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8283: if (!mat->ops->residual) {
8284: PetscCall(MatMult(mat, x, r));
8285: PetscCall(VecAYPX(r, -1.0, b));
8286: } else {
8287: PetscUseTypeMethod(mat, residual, b, x, r);
8288: }
8289: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8290: PetscFunctionReturn(PETSC_SUCCESS);
8291: }
/*@C
  MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix

  Collective

  Input Parameters:
+ mat             - the matrix
. shift           - 0 or 1 indicating we want the indices starting at 0 or 1
. symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
- inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
                    inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
                    always used.

  Output Parameters:
+ n    - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
. ia   - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
. ja   - the column indices, use `NULL` if not needed
- done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
         are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set

  Level: developer

  Notes:
  You CANNOT change any of the ia[] or ja[] values.

  Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.

  Fortran Notes:
  Use
.vb
  PetscInt, pointer :: ia(:),ja(:)
  call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
  ! Access the ith and jth entries via ia(i) and ja(j)
.ve

.seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
@*/
PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  if (n) PetscAssertPointer(n, 5);
  if (ia) PetscAssertPointer(ia, 6);
  if (ja) PetscAssertPointer(ja, 7);
  if (done) PetscAssertPointer(done, 8);
  MatCheckPreallocated(mat, 1);
  /* no implementation: report failure through done when the caller supplied it;
     otherwise fall through so PetscUseTypeMethod() raises the missing-method error */
  if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
  else {
    if (done) *done = PETSC_TRUE;
    PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
    PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
    PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.

  Collective

  Input Parameters:
+ mat             - the matrix
. shift           - 0 or 1 indicating we want the indices starting at 0 or 1
. symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
                    symmetrized
- inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
                    inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
                    always used.

  Output Parameters:
+ n    - number of columns in the (possibly compressed) matrix
. ia   - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
. ja   - the row indices
- done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
@*/
PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  PetscAssertPointer(n, 5);
  if (ia) PetscAssertPointer(ia, 6);
  if (ja) PetscAssertPointer(ja, 7);
  PetscAssertPointer(done, 8);
  MatCheckPreallocated(mat, 1);
  /* unlike MatGetRowIJ(), n and done are mandatory here */
  if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
  else {
    *done = PETSC_TRUE;
    PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.

  Collective

  Input Parameters:
+ mat             - the matrix
. shift           - 1 or zero indicating we want the indices starting at 0 or 1
. symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
. inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
                    inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
                    always used.
. n               - size of (possibly compressed) matrix
. ia              - the row pointers
- ja              - the column indices

  Output Parameter:
. done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned

  Level: developer

  Note:
  This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
  use of the array after it has been restored. If you pass `NULL`, it will
  not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.

.seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
@*/
PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  if (ia) PetscAssertPointer(ia, 6);
  if (ja) PetscAssertPointer(ja, 7);
  if (done) PetscAssertPointer(done, 8);
  MatCheckPreallocated(mat, 1);

  if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
  else {
    if (done) *done = PETSC_TRUE;
    PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
    /* invalidate the caller's copies so stale use is caught quickly */
    if (n) *n = 0;
    if (ia) *ia = NULL;
    if (ja) *ja = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@C
  MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.

  Collective

  Input Parameters:
+ mat             - the matrix
. shift           - 1 or zero indicating we want the indices starting at 0 or 1
. symmetric       - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
- inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
                    inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
                    always used.

  Output Parameters:
+ n    - size of (possibly compressed) matrix
. ia   - the column pointers
. ja   - the row indices
- done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned

  Level: developer

.seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
@*/
PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  if (ia) PetscAssertPointer(ia, 6);
  if (ja) PetscAssertPointer(ja, 7);
  PetscAssertPointer(done, 8);
  MatCheckPreallocated(mat, 1);

  if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
  else {
    *done = PETSC_TRUE;
    PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
    /* invalidate the caller's copies so stale use is caught quickly */
    if (n) *n = 0;
    if (ia) *ia = NULL;
    if (ja) *ja = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
8485: /*@
8486: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8487: `MatGetColumnIJ()`.
8489: Collective
8491: Input Parameters:
8492: + mat - the matrix
8493: . ncolors - maximum color value
8494: . n - number of entries in colorarray
8495: - colorarray - array indicating color for each column
8497: Output Parameter:
8498: . iscoloring - coloring generated using colorarray information
8500: Level: developer
8502: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8503: @*/
8504: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8505: {
8506: PetscFunctionBegin;
8509: PetscAssertPointer(colorarray, 4);
8510: PetscAssertPointer(iscoloring, 5);
8511: MatCheckPreallocated(mat, 1);
8513: if (!mat->ops->coloringpatch) {
8514: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8515: } else {
8516: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8517: }
8518: PetscFunctionReturn(PETSC_SUCCESS);
8519: }
8521: /*@
8522: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8524: Logically Collective
8526: Input Parameter:
8527: . mat - the factored matrix to be reset
8529: Level: developer
8531: Notes:
8532: This routine should be used only with factored matrices formed by in-place
8533: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8534: format). This option can save memory, for example, when solving nonlinear
8535: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8536: ILU(0) preconditioner.
8538: One can specify in-place ILU(0) factorization by calling
8539: .vb
8540: PCType(pc,PCILU);
8541: PCFactorSeUseInPlace(pc);
8542: .ve
8543: or by using the options -pc_type ilu -pc_factor_in_place
8545: In-place factorization ILU(0) can also be used as a local
8546: solver for the blocks within the block Jacobi or additive Schwarz
8547: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8548: for details on setting local solver options.
8550: Most users should employ the `KSP` interface for linear solvers
8551: instead of working directly with matrix algebra routines such as this.
8552: See, e.g., `KSPCreate()`.
8554: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8555: @*/
8556: PetscErrorCode MatSetUnfactored(Mat mat)
8557: {
8558: PetscFunctionBegin;
8561: MatCheckPreallocated(mat, 1);
8562: mat->factortype = MAT_FACTOR_NONE;
8563: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8564: PetscUseTypeMethod(mat, setunfactored);
8565: PetscFunctionReturn(PETSC_SUCCESS);
8566: }
8568: /*@
8569: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8570: as the original matrix.
8572: Collective
8574: Input Parameters:
8575: + mat - the original matrix
8576: . isrow - parallel `IS` containing the rows this processor should obtain
8577: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in IT's "diagonal part" in the new matrix.
8578: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8580: Output Parameter:
8581: . newmat - the new submatrix, of the same type as the original matrix
8583: Level: advanced
8585: Notes:
8586: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8588: Some matrix types place restrictions on the row and column indices, such
8589: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8590: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8592: The index sets may not have duplicate entries.
8594: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8595: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8596: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8597: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8598: you are finished using it.
8600: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8601: the input matrix.
8603: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8605: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8606: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8608: Example usage:
8609: Consider the following 8x8 matrix with 34 non-zero values, that is
8610: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8611: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8612: as follows
8613: .vb
8614: 1 2 0 | 0 3 0 | 0 4
8615: Proc0 0 5 6 | 7 0 0 | 8 0
8616: 9 0 10 | 11 0 0 | 12 0
8617: -------------------------------------
8618: 13 0 14 | 15 16 17 | 0 0
8619: Proc1 0 18 0 | 19 20 21 | 0 0
8620: 0 0 0 | 22 23 0 | 24 0
8621: -------------------------------------
8622: Proc2 25 26 27 | 0 0 28 | 29 0
8623: 30 0 0 | 31 32 33 | 0 34
8624: .ve
8626: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8628: .vb
8629: 2 0 | 0 3 0 | 0
8630: Proc0 5 6 | 7 0 0 | 8
8631: -------------------------------
8632: Proc1 18 0 | 19 20 21 | 0
8633: -------------------------------
8634: Proc2 26 27 | 0 0 28 | 29
8635: 0 0 | 31 32 33 | 0
8636: .ve
8638: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8639: @*/
PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
{
  PetscMPIInt size;
  Mat        *local;
  IS          iscoltmp;
  PetscBool   flg;

  PetscFunctionBegin;
  PetscAssertPointer(newmat, 5);
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
  PetscCheck(cll != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_INPLACE_MATRIX");
  MatCheckPreallocated(mat, 1);
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));

  /* Fast path: if iscol selects the same thing as isrow (or all columns) and every
     process requests exactly its own contiguous row range with stride 1, the requested
     submatrix is the whole matrix; return it with an extra reference, no copy */
  if (!iscol || isrow == iscol) {
    PetscBool   stride;
    PetscMPIInt grabentirematrix = 0, grab;
    PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
    if (stride) {
      PetscInt first, step, n, rstart, rend;
      PetscCall(ISStrideGetInfo(isrow, &first, &step));
      if (step == 1) {
        PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
        if (rstart == first) {
          PetscCall(ISGetLocalSize(isrow, &n));
          if (n == rend - rstart) grabentirematrix = 1;
        }
      }
    }
    /* all ranks must agree; MPI_MIN means any rank requesting less disables the fast path */
    PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
    if (grab) {
      PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
      if (cll == MAT_INITIAL_MATRIX) {
        *newmat = mat;
        PetscCall(PetscObjectReference((PetscObject)mat));
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }

  /* NULL iscol means "all columns": build a temporary stride IS covering the local column range */
  if (!iscol) {
    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
  } else {
    iscoltmp = iscol;
  }

  /* if original matrix is on just one processor then use submatrix generated */
  if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
    PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
    goto setproperties;
  } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
    /* MatCreateSubMatrices() allocates an array of matrices; steal the single entry and free the array */
    PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
    *newmat = *local;
    PetscCall(PetscFree(local));
    goto setproperties;
  } else if (!mat->ops->createsubmatrix) {
    /* Create a new matrix type that implements the operation using the full matrix */
    PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
    switch (cll) {
    case MAT_INITIAL_MATRIX:
      PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
      break;
    case MAT_REUSE_MATRIX:
      PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
    }
    PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
    goto setproperties;
  }

  /* normal path: the matrix type implements createsubmatrix directly */
  PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
  PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
  PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));

setproperties:
  /* A principal submatrix (same row and column selection) inherits symmetry-type options,
     but only if the new matrix has not already had them determined */
  if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
    PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
    if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
  }
  if (!iscol) PetscCall(ISDestroy(&iscoltmp)); /* destroy only the temporary we created above */
  if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
  if (!iscol || isrow == iscol) PetscCall(MatSelectVariableBlockSizes(*newmat, mat, isrow));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8734: /*@
8735: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8737: Not Collective
8739: Input Parameters:
8740: + A - the matrix we wish to propagate options from
8741: - B - the matrix we wish to propagate options to
8743: Level: beginner
8745: Note:
8746: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8748: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8749: @*/
8750: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8751: {
8752: PetscFunctionBegin;
8755: B->symmetry_eternal = A->symmetry_eternal;
8756: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8757: B->symmetric = A->symmetric;
8758: B->structurally_symmetric = A->structurally_symmetric;
8759: B->spd = A->spd;
8760: B->hermitian = A->hermitian;
8761: PetscFunctionReturn(PETSC_SUCCESS);
8762: }
8764: /*@
8765: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8766: used during the assembly process to store values that belong to
8767: other processors.
8769: Not Collective
8771: Input Parameters:
8772: + mat - the matrix
8773: . size - the initial size of the stash.
8774: - bsize - the initial size of the block-stash(if used).
8776: Options Database Keys:
8777: + -matstash_initial_size size or size0,size1,...,sizep-1 - set initial size
8778: - -matstash_block_initial_size bsize or bsize0,bsize1,...,bsizep-1 - set initial block size
8780: Level: intermediate
8782: Notes:
8783: The block-stash is used for values set with `MatSetValuesBlocked()` while
8784: the stash is used for values set with `MatSetValues()`
8786: Run with the option -info and look for output of the form
8787: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8788: to determine the appropriate value, MM, to use for size and
8789: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8790: to determine the value, BMM to use for bsize
8792: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8793: @*/
PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
{
  PetscFunctionBegin;
  /* stash receives off-process entries from MatSetValues(); bstash from MatSetValuesBlocked() */
  PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
  PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
  PetscFunctionReturn(PETSC_SUCCESS);
}
8804: /*@
8805: MatInterpolateAdd - $w = y + A*x$ or $A^T*x$ depending on the shape of
8806: the matrix
8808: Neighbor-wise Collective
8810: Input Parameters:
8811: + A - the matrix
8812: . x - the vector to be multiplied by the interpolation operator
8813: - y - the vector to be added to the result
8815: Output Parameter:
8816: . w - the resulting vector
8818: Level: intermediate
8820: Notes:
8821: `w` may be the same vector as `y`.
8823: This allows one to use either the restriction or interpolation (its transpose)
8824: matrix to do the interpolation
8826: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8827: @*/
8828: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8829: {
8830: PetscInt M, N, Ny;
8832: PetscFunctionBegin;
8837: PetscCall(MatGetSize(A, &M, &N));
8838: PetscCall(VecGetSize(y, &Ny));
8839: if (M == Ny) PetscCall(MatMultAdd(A, x, y, w));
8840: else PetscCall(MatMultTransposeAdd(A, x, y, w));
8841: PetscFunctionReturn(PETSC_SUCCESS);
8842: }
8844: /*@
8845: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8846: the matrix
8848: Neighbor-wise Collective
8850: Input Parameters:
8851: + A - the matrix
8852: - x - the vector to be interpolated
8854: Output Parameter:
8855: . y - the resulting vector
8857: Level: intermediate
8859: Note:
8860: This allows one to use either the restriction or interpolation (its transpose)
8861: matrix to do the interpolation
8863: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8864: @*/
8865: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8866: {
8867: PetscInt M, N, Ny;
8869: PetscFunctionBegin;
8873: PetscCall(MatGetSize(A, &M, &N));
8874: PetscCall(VecGetSize(y, &Ny));
8875: if (M == Ny) PetscCall(MatMult(A, x, y));
8876: else PetscCall(MatMultTranspose(A, x, y));
8877: PetscFunctionReturn(PETSC_SUCCESS);
8878: }
8880: /*@
8881: MatRestrict - $y = A*x$ or $A^T*x$
8883: Neighbor-wise Collective
8885: Input Parameters:
8886: + A - the matrix
8887: - x - the vector to be restricted
8889: Output Parameter:
8890: . y - the resulting vector
8892: Level: intermediate
8894: Note:
8895: This allows one to use either the restriction or interpolation (its transpose)
8896: matrix to do the restriction
8898: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8899: @*/
8900: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8901: {
8902: PetscInt M, N, Nx;
8904: PetscFunctionBegin;
8908: PetscCall(MatGetSize(A, &M, &N));
8909: PetscCall(VecGetSize(x, &Nx));
8910: if (M == Nx) PetscCall(MatMultTranspose(A, x, y));
8911: else PetscCall(MatMult(A, x, y));
8912: PetscFunctionReturn(PETSC_SUCCESS);
8913: }
8915: /*@
8916: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8918: Neighbor-wise Collective
8920: Input Parameters:
8921: + A - the matrix
8922: . x - the input dense matrix to be multiplied
8923: - w - the input dense matrix to be added to the result
8925: Output Parameter:
8926: . y - the output dense matrix
8928: Level: intermediate
8930: Note:
8931: This allows one to use either the restriction or interpolation (its transpose)
8932: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8933: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8935: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8936: @*/
8937: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8938: {
8939: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8940: PetscBool trans = PETSC_TRUE;
8941: MatReuse reuse = MAT_INITIAL_MATRIX;
8943: PetscFunctionBegin;
8949: PetscCall(MatGetSize(A, &M, &N));
8950: PetscCall(MatGetSize(x, &Mx, &Nx));
8951: if (N == Mx) trans = PETSC_FALSE;
8952: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8953: Mo = trans ? N : M;
8954: if (*y) {
8955: PetscCall(MatGetSize(*y, &My, &Ny));
8956: if (Mo == My && Nx == Ny) reuse = MAT_REUSE_MATRIX;
8957: else {
8958: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8959: PetscCall(MatDestroy(y));
8960: }
8961: }
8963: if (w && *y == w) { /* this is to minimize changes in PCMG */
8964: PetscBool flg;
8966: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8967: if (w) {
8968: PetscInt My, Ny, Mw, Nw;
8970: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8971: PetscCall(MatGetSize(*y, &My, &Ny));
8972: PetscCall(MatGetSize(w, &Mw, &Nw));
8973: if (!flg || My != Mw || Ny != Nw) w = NULL;
8974: }
8975: if (!w) {
8976: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8977: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8978: PetscCall(PetscObjectDereference((PetscObject)w));
8979: } else PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8980: }
8981: if (!trans) PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8982: else PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8983: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8984: PetscFunctionReturn(PETSC_SUCCESS);
8985: }
8987: /*@
8988: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8990: Neighbor-wise Collective
8992: Input Parameters:
8993: + A - the matrix
8994: - x - the input dense matrix
8996: Output Parameter:
8997: . y - the output dense matrix
8999: Level: intermediate
9001: Note:
9002: This allows one to use either the restriction or interpolation (its transpose)
9003: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
9004: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
9006: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
9007: @*/
PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
{
  PetscFunctionBegin;
  /* same as MatMatInterpolateAdd() with no additive term */
  PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9015: /*@
9016: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
9018: Neighbor-wise Collective
9020: Input Parameters:
9021: + A - the matrix
9022: - x - the input dense matrix
9024: Output Parameter:
9025: . y - the output dense matrix
9027: Level: intermediate
9029: Note:
9030: This allows one to use either the restriction or interpolation (its transpose)
9031: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
9032: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
9034: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
9035: @*/
PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
{
  PetscFunctionBegin;
  /* restriction and interpolation share one implementation; the orientation
     (A vs A^T) is chosen inside MatMatInterpolateAdd() from the shapes */
  PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9043: /*@
9044: MatGetNullSpace - retrieves the null space of a matrix.
9046: Logically Collective
9048: Input Parameters:
9049: + mat - the matrix
9050: - nullsp - the null space object
9052: Level: developer
9054: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
9055: @*/
PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
{
  PetscFunctionBegin;
  PetscAssertPointer(nullsp, 2);
  /* for a symmetric matrix the transpose null space equals the null space,
     so fall back to it when no null space was attached directly */
  *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9065: /*@C
9066: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
9068: Logically Collective
9070: Input Parameters:
9071: + n - the number of matrices
9072: - mat - the array of matrices
9074: Output Parameters:
9075: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
9077: Level: developer
9079: Note:
  Call `MatRestoreNullSpaces()` to provide these to another array of matrices
9082: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9083: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
9084: @*/
9085: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9086: {
9087: PetscFunctionBegin;
9088: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9089: PetscAssertPointer(mat, 2);
9090: PetscAssertPointer(nullsp, 3);
9092: PetscCall(PetscCalloc1(3 * n, nullsp));
9093: for (PetscInt i = 0; i < n; i++) {
9095: (*nullsp)[i] = mat[i]->nullsp;
9096: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
9097: (*nullsp)[n + i] = mat[i]->nearnullsp;
9098: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
9099: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
9100: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
9101: }
9102: PetscFunctionReturn(PETSC_SUCCESS);
9103: }
9105: /*@C
9106: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
9108: Logically Collective
9110: Input Parameters:
9111: + n - the number of matrices
9112: . mat - the array of matrices
9113: - nullsp - an array of null spaces
9115: Level: developer
9117: Note:
9118: Call `MatGetNullSpaces()` to create `nullsp`
9120: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9121: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9122: @*/
9123: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9124: {
9125: PetscFunctionBegin;
9126: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9127: PetscAssertPointer(mat, 2);
9128: PetscAssertPointer(nullsp, 3);
9129: PetscAssertPointer(*nullsp, 3);
9131: for (PetscInt i = 0; i < n; i++) {
9133: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9134: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9135: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9136: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9137: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9138: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9139: }
9140: PetscCall(PetscFree(*nullsp));
9141: PetscFunctionReturn(PETSC_SUCCESS);
9142: }
9144: /*@
9145: MatSetNullSpace - attaches a null space to a matrix.
9147: Logically Collective
9149: Input Parameters:
9150: + mat - the matrix
9151: - nullsp - the null space object
9153: Level: advanced
9155: Notes:
9156: This null space is used by the `KSP` linear solvers to solve singular systems.
9158: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with an nullsp of `NULL`
9160: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9161: to zero but the linear system will still be solved in a least squares sense.
9163: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
9164: the domain of a matrix $A$ (from $R^n$ to $R^m$ ($m$ rows, $n$ columns) $R^n$ = the direct sum of the null space of $A$, $n(A)$, plus the range of $A^T$, $R(A^T)$.
9165: Similarly $R^m$ = direct sum $n(A^T) + R(A)$. Hence the linear system $A x = b$ has a solution only if $b$ in $R(A)$ (or correspondingly $b$ is orthogonal to
9166: $n(A^T))$ and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution
9167: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$ where $\hat{b}$ is $b$ orthogonalized to the $n(A^T)$.
9168: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9170: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one has called
9171: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
9172: routine also automatically calls `MatSetTransposeNullSpace()`.
9174: The user should call `MatNullSpaceDestroy()`.
9176: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9177: `KSPSetPCSide()`
9178: @*/
PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  /* reference the incoming space BEFORE destroying the old one so the call is
     safe when nullsp == mat->nullsp (and PetscObjectReference() tolerates NULL) */
  PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->nullsp));
  mat->nullsp = nullsp;
  /* for a known-symmetric matrix the transpose null space is the same space */
  if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9191: /*@
9192: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9194: Logically Collective
9196: Input Parameters:
9197: + mat - the matrix
9198: - nullsp - the null space object
9200: Level: developer
9202: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9203: @*/
PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
{
  PetscFunctionBegin;
  PetscAssertPointer(nullsp, 2);
  /* mirror of MatGetNullSpace(): for a symmetric matrix fall back to the
     ordinary null space when no transpose null space was attached directly */
  *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9214: /*@
9215: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9217: Logically Collective
9219: Input Parameters:
9220: + mat - the matrix
9221: - nullsp - the null space object
9223: Level: advanced
9225: Notes:
9226: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9228: See `MatSetNullSpace()`
9230: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9231: @*/
PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  /* reference before destroy so nullsp == mat->transnullsp (or NULL) is safe */
  PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
  mat->transnullsp = nullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9243: /*@
9244: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions
9245: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9247: Logically Collective
9249: Input Parameters:
9250: + mat - the matrix
9251: - nullsp - the null space object
9253: Level: advanced
9255: Notes:
9256: Overwrites any previous near null space that may have been attached
9258: You can remove the null space by calling this routine with an `nullsp` of `NULL`
9260: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9261: @*/
PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
{
  PetscFunctionBegin;
  MatCheckPreallocated(mat, 1);
  /* reference before destroy so nullsp == mat->nearnullsp (or NULL) is safe */
  PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
  mat->nearnullsp = nullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9275: /*@
9276: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9278: Not Collective
9280: Input Parameter:
9281: . mat - the matrix
9283: Output Parameter:
9284: . nullsp - the null space object, `NULL` if not set
9286: Level: advanced
9288: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9289: @*/
PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
{
  PetscFunctionBegin;
  PetscAssertPointer(nullsp, 2);
  MatCheckPreallocated(mat, 1);
  /* no reference is taken; the caller must not destroy the returned space */
  *nullsp = mat->nearnullsp;
  PetscFunctionReturn(PETSC_SUCCESS);
}
9301: /*@
9302: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9304: Collective
9306: Input Parameters:
9307: + mat - the matrix
9308: . row - row/column permutation
9309: - info - information on desired factorization process
9311: Level: developer
9313: Notes:
9314: Probably really in-place only when level of fill is zero, otherwise allocates
9315: new space to store factored matrix and deletes previous memory.
9317: Most users should employ the `KSP` interface for linear solvers
9318: instead of working directly with matrix algebra routines such as this.
9319: See, e.g., `KSPCreate()`.
9321: Fortran Note:
9322: A valid (non-null) `info` argument must be provided
9324: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9325: @*/
PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscAssertPointer(info, 3);
  /* incomplete Cholesky requires a square, assembled, not-yet-factored matrix */
  PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  MatCheckPreallocated(mat, 1);
  PetscUseTypeMethod(mat, iccfactor, row, info);
  /* factoring modifies the matrix, so bump its state for cached-data invalidation */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9342: /*@
9343: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9344: ghosted ones.
9346: Not Collective
9348: Input Parameters:
9349: + mat - the matrix
9350: - diag - the diagonal values, including ghost ones
9352: Level: developer
9354: Notes:
9355: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9357: This allows one to avoid during communication to perform the scaling that must be done with `MatDiagonalScale()`
9359: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9360: @*/
PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
  PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  if (size == 1) {
    /* sequential: there are no ghost columns, so diag must match the column count
       exactly and plain column scaling does the job */
    PetscInt n, m;
    PetscCall(VecGetSize(diag, &n));
    PetscCall(MatGetSize(mat, NULL, &m));
    PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
    PetscCall(MatDiagonalScale(mat, NULL, diag));
  } else PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag)); /* parallel: dispatch to the type-specific composed method that knows the ghost layout */
  PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
9385: /*@
9386: MatGetInertia - Gets the inertia from a factored matrix
9388: Collective
9390: Input Parameter:
9391: . mat - the matrix
9393: Output Parameters:
9394: + nneg - number of negative eigenvalues
9395: . nzero - number of zero eigenvalues
9396: - npos - number of positive eigenvalues
9398: Level: advanced
9400: Note:
9401: Matrix must have been factored by `MatCholeskyFactor()`
9403: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9404: @*/
PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
{
  PetscFunctionBegin;
  /* inertia is only defined on a completed (assembled) numeric factorization */
  PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
  PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
  PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
  PetscFunctionReturn(PETSC_SUCCESS);
}
9416: /*@C
9417: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9419: Neighbor-wise Collective
9421: Input Parameters:
9422: + mat - the factored matrix obtained with `MatGetFactor()`
9423: - b - the right-hand-side vectors
9425: Output Parameter:
9426: . x - the result vectors
9428: Level: developer
9430: Note:
9431: The vectors `b` and `x` cannot be the same. I.e., one cannot
9432: call `MatSolves`(A,x,x).
9434: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9435: @*/
9436: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9437: {
9438:   PetscFunctionBegin;
9441:   PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors"); // in-place solve is not supported
9442:   PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9443:   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS); // globally empty matrix: nothing to solve
9445:   MatCheckPreallocated(mat, 1);
9446:   PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9447:   PetscUseTypeMethod(mat, solves, b, x); // type-specific multi-vector triangular solve
9448:   PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9449:   PetscFunctionReturn(PETSC_SUCCESS);
9450: }
9452: /*@
9453: MatIsSymmetric - Test whether a matrix is symmetric
9455: Collective
9457: Input Parameters:
9458: + A - the matrix to test
9459: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9461: Output Parameter:
9462: . flg - the result
9464: Level: intermediate
9466: Notes:
9467: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9469: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9471: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9472: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9474: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9475: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9476: @*/
9477: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9478: {
9479:   PetscFunctionBegin;
9481:   PetscAssertPointer(flg, 3);
9482:   if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric); // exact (tol == 0) answer already cached on the matrix: reuse it
9483:   else {
9484:     if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9485:     else PetscCall(MatIsTranspose(A, A, tol, flg)); // fallback: compare A against its own transpose within tol
9486:     if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg)); // cache only the exact result; a tolerance-based answer is not cached
9487:   }
9488:   PetscFunctionReturn(PETSC_SUCCESS);
9489: }
9491: /*@
9492: MatIsHermitian - Test whether a matrix is Hermitian
9494: Collective
9496: Input Parameters:
9497: + A - the matrix to test
9498: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9500: Output Parameter:
9501: . flg - the result
9503: Level: intermediate
9505: Notes:
9506: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9508: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9510: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9511:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9513: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9514: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9515: @*/
9516: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9517: {
9518:   PetscFunctionBegin;
9520:   PetscAssertPointer(flg, 3);
9521:   if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian); // exact (tol == 0) answer already cached on the matrix: reuse it
9522:   else {
9523:     if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9524:     else PetscCall(MatIsHermitianTranspose(A, A, tol, flg)); // fallback: compare A against its own conjugate transpose within tol
9525:     if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg)); // cache only the exact result; a tolerance-based answer is not cached
9526:   }
9527:   PetscFunctionReturn(PETSC_SUCCESS);
9528: }
9530: /*@
9531: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9533: Not Collective
9535: Input Parameter:
9536: . A - the matrix to check
9538: Output Parameters:
9539: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9540: - flg - the result (only valid if set is `PETSC_TRUE`)
9542: Level: advanced
9544: Notes:
9545: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9546: if you want it explicitly checked
9548: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9549: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9551: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9552: @*/
9553: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9554: {
9555:   PetscFunctionBegin;
9557:   PetscAssertPointer(set, 2);
9558:   PetscAssertPointer(flg, 3);
9559:   if (A->symmetric != PETSC_BOOL3_UNKNOWN) { // only reports the cached tri-state flag; never inspects the matrix values
9560:     *set = PETSC_TRUE;
9561:     *flg = PetscBool3ToBool(A->symmetric);
9562:   } else *set = PETSC_FALSE; // *flg deliberately left untouched when the state is unknown
9563:   PetscFunctionReturn(PETSC_SUCCESS);
9564: }
9566: /*@
9567: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9569: Not Collective
9571: Input Parameter:
9572: . A - the matrix to check
9574: Output Parameters:
9575: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9576: - flg - the result (only valid if set is `PETSC_TRUE`)
9578: Level: advanced
9580: Notes:
9581: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9583: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9584: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9586: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9587: @*/
9588: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9589: {
9590:   PetscFunctionBegin;
9592:   PetscAssertPointer(set, 2);
9593:   PetscAssertPointer(flg, 3);
9594:   if (A->spd != PETSC_BOOL3_UNKNOWN) { // only reports the cached tri-state flag; never inspects the matrix values
9595:     *set = PETSC_TRUE;
9596:     *flg = PetscBool3ToBool(A->spd);
9597:   } else *set = PETSC_FALSE; // *flg deliberately left untouched when the state is unknown
9598:   PetscFunctionReturn(PETSC_SUCCESS);
9599: }
9601: /*@
9602: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9604: Not Collective
9606: Input Parameter:
9607: . A - the matrix to check
9609: Output Parameters:
9610: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9611: - flg - the result (only valid if set is `PETSC_TRUE`)
9613: Level: advanced
9615: Notes:
9616: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9617: if you want it explicitly checked
9619: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9620: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9622: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9623: @*/
9624: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9625: {
9626:   PetscFunctionBegin;
9628:   PetscAssertPointer(set, 2);
9629:   PetscAssertPointer(flg, 3);
9630:   if (A->hermitian != PETSC_BOOL3_UNKNOWN) { // only reports the cached tri-state flag; never inspects the matrix values
9631:     *set = PETSC_TRUE;
9632:     *flg = PetscBool3ToBool(A->hermitian);
9633:   } else *set = PETSC_FALSE; // *flg deliberately left untouched when the state is unknown
9634:   PetscFunctionReturn(PETSC_SUCCESS);
9635: }
9637: /*@
9638: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9640: Collective
9642: Input Parameter:
9643: . A - the matrix to test
9645: Output Parameter:
9646: . flg - the result
9648: Level: intermediate
9650: Notes:
9651:   If the matrix does not yet know whether it is structurally symmetric this can be an expensive operation, also available `MatIsStructurallySymmetricKnown()`
9653: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9654: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9656: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9657: @*/
9658: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9659: {
9660:   PetscFunctionBegin;
9662:   PetscAssertPointer(flg, 2);
9663:   if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) *flg = PetscBool3ToBool(A->structurally_symmetric); // answer already cached on the matrix: reuse it
9664:   else {
9665:     PetscUseTypeMethod(A, isstructurallysymmetric, flg); // errors if the matrix type provides no implementation
9666:     PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg)); // cache the computed result for future queries
9667:   }
9668:   PetscFunctionReturn(PETSC_SUCCESS);
9669: }
9671: /*@
9672: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9674: Not Collective
9676: Input Parameter:
9677: . A - the matrix to check
9679: Output Parameters:
9680: + set - PETSC_TRUE if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9681: - flg - the result (only valid if set is PETSC_TRUE)
9683: Level: advanced
9685: Notes:
9686: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9687: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9689: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9691: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9692: @*/
9693: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9694: {
9695:   PetscFunctionBegin;
9697:   PetscAssertPointer(set, 2);
9698:   PetscAssertPointer(flg, 3);
9699:   if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) { // only reports the cached tri-state flag; never inspects the nonzero structure
9700:     *set = PETSC_TRUE;
9701:     *flg = PetscBool3ToBool(A->structurally_symmetric);
9702:   } else *set = PETSC_FALSE; // *flg deliberately left untouched when the state is unknown
9703:   PetscFunctionReturn(PETSC_SUCCESS);
9704: }
9706: /*@
9707: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9708: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9710: Not Collective
9712: Input Parameter:
9713: . mat - the matrix
9715: Output Parameters:
9716: + nstash - the size of the stash
9717: . reallocs - the number of additional mallocs incurred.
9718: . bnstash - the size of the block stash
9719: - breallocs - the number of additional mallocs incurred in the block stash
9721: Level: advanced
9723: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9724: @*/
9725: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9726: {
9727:   PetscFunctionBegin;
9728:   PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));    // point-wise (scalar) stash
9729:   PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs)); // block stash
9730:   PetscFunctionReturn(PETSC_SUCCESS);
9731: }
9733: /*@
9734: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9735: parallel layout, `PetscLayout` for rows and columns
9737: Collective
9739: Input Parameter:
9740: . mat - the matrix
9742: Output Parameters:
9743: + right - (optional) vector that the matrix can be multiplied against
9744: - left - (optional) vector that the matrix vector product can be stored in
9746: Level: advanced
9748: Notes:
9749: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9751:   These are new vectors which are not owned by the mat, they should be destroyed with `VecDestroy()` when no longer needed
9753: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9754: @*/
9755: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9756: {
9757:   PetscFunctionBegin;
9760:   if (mat->ops->getvecs) { // matrix type supplies its own vector factory: defer to it
9761:     PetscUseTypeMethod(mat, getvecs, right, left);
9762:   } else { // generic path: build vectors directly from the matrix layouts
9763:     if (right) { // right vector shares the column layout (what the matrix multiplies against)
9764:       PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9765:       PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9766:       PetscCall(VecSetType(*right, mat->defaultvectype));
9767: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9768:       if (mat->boundtocpu && mat->bindingpropagates) { // propagate the matrix's CPU binding to the new vector on GPU builds
9769:         PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9770:         PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9771:       }
9772: #endif
9773:     }
9774:     if (left) { // left vector shares the row layout (where the matrix-vector product lives)
9775:       PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9776:       PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9777:       PetscCall(VecSetType(*left, mat->defaultvectype));
9778: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9779:       if (mat->boundtocpu && mat->bindingpropagates) { // propagate the matrix's CPU binding to the new vector on GPU builds
9780:         PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9781:         PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9782:       }
9783: #endif
9784:     }
9785:   }
9786:   PetscFunctionReturn(PETSC_SUCCESS);
9787: }
9789: /*@
9790: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9791: with default values.
9793: Not Collective
9795: Input Parameter:
9796: . info - the `MatFactorInfo` data structure
9798: Level: developer
9800: Notes:
9801: The solvers are generally used through the `KSP` and `PC` objects, for example
9802: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9804: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9806: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9807: @*/
9808: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9809: {
9810:   PetscFunctionBegin;
9811:   PetscCall(PetscMemzero(info, sizeof(MatFactorInfo))); // all-zero is the defined default state for MatFactorInfo
9812:   PetscFunctionReturn(PETSC_SUCCESS);
9813: }
9815: /*@
9816: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9818: Collective
9820: Input Parameters:
9821: + mat - the factored matrix
9822: - is - the index set defining the Schur indices (0-based)
9824: Level: advanced
9826: Notes:
9827: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9829: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9831: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9833: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9834: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9835: @*/
9836: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9837: {
9838:   PetscErrorCode (*f)(Mat, IS); // solver-specific hook looked up by composed-function name
9840:   PetscFunctionBegin;
9845:   PetscCheckSameComm(mat, 1, is, 2);
9846:   PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9847:   PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9848:   PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9849:   PetscCall(MatDestroy(&mat->schur)); // drop any Schur complement from a previous call before the solver rebuilds it
9850:   PetscCall((*f)(mat, is));
9851:   PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created"); // the solver callback is required to populate mat->schur
9852:   PetscFunctionReturn(PETSC_SUCCESS);
9853: }
9855: /*@
9856: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9858: Logically Collective
9860: Input Parameters:
9861: + F - the factored matrix obtained by calling `MatGetFactor()`
9862: . S - location where to return the Schur complement, can be `NULL`
9863: - status - the status of the Schur complement matrix, can be `NULL`
9865: Level: advanced
9867: Notes:
9868: You must call `MatFactorSetSchurIS()` before calling this routine.
9870: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9872: The routine provides a copy of the Schur matrix stored within the solver data structures.
9873: The caller must destroy the object when it is no longer needed.
9874: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9876: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9878: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9880: Developer Note:
9881: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9882: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9884: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9885: @*/
9886: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9887: {
9888:   PetscFunctionBegin;
9890:   if (S) PetscAssertPointer(S, 2);
9891:   if (status) PetscAssertPointer(status, 3);
9892:   if (S) {
9893:     PetscErrorCode (*f)(Mat, Mat *); // optional solver-specific copy routine
9895:     PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9896:     if (f) PetscCall((*f)(F, S)); // solver knows how to extract its own Schur representation
9897:     else PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S)); // generic fallback: deep-copy the stored Schur matrix; caller owns *S
9898:   }
9899:   if (status) *status = F->schur_status;
9900:   PetscFunctionReturn(PETSC_SUCCESS);
9901: }
9903: /*@
9904: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9906: Logically Collective
9908: Input Parameters:
9909: + F - the factored matrix obtained by calling `MatGetFactor()`
9910: . S - location where to return the Schur complement, can be `NULL`
9911: - status - the status of the Schur complement matrix, can be `NULL`
9913: Level: advanced
9915: Notes:
9916: You must call `MatFactorSetSchurIS()` before calling this routine.
9918: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9920:   The routine returns the Schur complement stored within the data structures of the solver.
9922: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9924: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9926: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9928: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9930: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9931: @*/
9932: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9933: {
9934:   PetscFunctionBegin;
9936:   if (S) {
9937:     PetscAssertPointer(S, 2);
9938:     *S = F->schur; // borrowed reference: F keeps ownership, caller must call MatFactorRestoreSchurComplement()
9939:   }
9940:   if (status) {
9941:     PetscAssertPointer(status, 3);
9942:     *status = F->schur_status;
9943:   }
9944:   PetscFunctionReturn(PETSC_SUCCESS);
9945: }
9947: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9948: {
9949:   Mat S = F->schur;
9951:   PetscFunctionBegin;
9952:   switch (F->schur_status) {
9953:   case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9954:   case MAT_FACTOR_SCHUR_INVERTED:
9955:     if (S) { // strip factorization state so S behaves as a plain (unfactored) matrix again
9956:       S->ops->solve             = NULL;
9957:       S->ops->matsolve          = NULL;
9958:       S->ops->solvetranspose    = NULL;
9959:       S->ops->matsolvetranspose = NULL;
9960:       S->ops->solveadd          = NULL;
9961:       S->ops->solvetransposeadd = NULL;
9962:       S->factortype             = MAT_FACTOR_NONE;
9963:       PetscCall(PetscFree(S->solvertype));
9964:     }
9965:   case MAT_FACTOR_SCHUR_FACTORED: // fall-through; in the factored state the solve callbacks are kept intact
9966:     break;
9967:   default:
9968:     SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9969:   }
9970:   PetscFunctionReturn(PETSC_SUCCESS);
9971: }
9973: /*@
9974: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9976: Logically Collective
9978: Input Parameters:
9979: + F - the factored matrix obtained by calling `MatGetFactor()`
9980: . S - location where the Schur complement is stored
9981: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9983: Level: advanced
9985: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9986: @*/
9987: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9988: {
9989:   PetscFunctionBegin;
9991:   if (S) {
9993:     *S = NULL; // the matrix was only borrowed from F (see MatFactorGetSchurComplement()); caller must not keep it
9994:   }
9995:   F->schur_status = status; // caller reports what it did to the Schur matrix while holding it
9996:   PetscCall(MatFactorUpdateSchurStatus_Private(F)); // reconcile the stored Schur matrix with the new status
9997:   PetscFunctionReturn(PETSC_SUCCESS);
9998: }
10000: /*@
10001: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
10003: Logically Collective
10005: Input Parameters:
10006: + F - the factored matrix obtained by calling `MatGetFactor()`
10007: . rhs - location where the right-hand side of the Schur complement system is stored
10008: - sol - location where the solution of the Schur complement system has to be returned
10010: Level: advanced
10012: Notes:
10013: The sizes of the vectors should match the size of the Schur complement
10015: Must be called after `MatFactorSetSchurIS()`
10017: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
10018: @*/
10019: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
10020: {
10021:   PetscFunctionBegin;
10028:   PetscCheckSameComm(F, 1, rhs, 2);
10029:   PetscCheckSameComm(F, 1, sol, 3);
10030:   PetscCall(MatFactorFactorizeSchurComplement(F)); // no-op if already factored or inverted
10031:   switch (F->schur_status) {
10032:   case MAT_FACTOR_SCHUR_FACTORED: // solve with the stored triangular factors
10033:     PetscCall(MatSolveTranspose(F->schur, rhs, sol));
10034:     break;
10035:   case MAT_FACTOR_SCHUR_INVERTED: // the explicit inverse is stored: a transpose mat-vec is the solve
10036:     PetscCall(MatMultTranspose(F->schur, rhs, sol));
10037:     break;
10038:   default:
10039:     SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10040:   }
10041:   PetscFunctionReturn(PETSC_SUCCESS);
10042: }
10044: /*@
10045: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
10047: Logically Collective
10049: Input Parameters:
10050: + F - the factored matrix obtained by calling `MatGetFactor()`
10051: . rhs - location where the right-hand side of the Schur complement system is stored
10052: - sol - location where the solution of the Schur complement system has to be returned
10054: Level: advanced
10056: Notes:
10057: The sizes of the vectors should match the size of the Schur complement
10059: Must be called after `MatFactorSetSchurIS()`
10061: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
10062: @*/
10063: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
10064: {
10065:   PetscFunctionBegin;
10072:   PetscCheckSameComm(F, 1, rhs, 2);
10073:   PetscCheckSameComm(F, 1, sol, 3);
10074:   PetscCall(MatFactorFactorizeSchurComplement(F)); // no-op if already factored or inverted
10075:   switch (F->schur_status) {
10076:   case MAT_FACTOR_SCHUR_FACTORED: // solve with the stored triangular factors
10077:     PetscCall(MatSolve(F->schur, rhs, sol));
10078:     break;
10079:   case MAT_FACTOR_SCHUR_INVERTED: // the explicit inverse is stored: a mat-vec is the solve
10080:     PetscCall(MatMult(F->schur, rhs, sol));
10081:     break;
10082:   default:
10083:     SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10084:   }
10085:   PetscFunctionReturn(PETSC_SUCCESS);
10086: }
10088: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
10089: #if PetscDefined(HAVE_CUDA)
10090: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
10091: #endif
10093: /* Schur status updated in the interface */
10094: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
10095: {
10096:   Mat S = F->schur;
10098:   PetscFunctionBegin;
10099:   if (S) { // nothing to do when no Schur complement has been set up
10100:     PetscMPIInt size;
10101:     PetscBool   isdense, isdensecuda;
10103:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
10104:     PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented"); // inversion is sequential-only
10105:     PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
10106:     PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
10107:     PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name); // only the two dense types have invert-from-factors support
10108:     PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
10109:     if (isdense) {
10110:       PetscCall(MatSeqDenseInvertFactors_Private(S));
10111:     } else if (isdensecuda) {
10112: #if defined(PETSC_HAVE_CUDA)
10113:       PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
10114: #endif
10115:     }
10116:     // TODO(review): no MATSEQDENSEHIP branch exists yet; HIP builds fall through the type check above — confirm intended
10117:     PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
10118:   }
10119:   PetscFunctionReturn(PETSC_SUCCESS);
10120: }
10122: /*@
10123: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10125: Logically Collective
10127: Input Parameter:
10128: . F - the factored matrix obtained by calling `MatGetFactor()`
10130: Level: advanced
10132: Notes:
10133: Must be called after `MatFactorSetSchurIS()`.
10135: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
10137: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10138: @*/
10139: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10140: {
10141:   PetscFunctionBegin;
10144:   if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS); // already inverted: nothing to do
10145:   PetscCall(MatFactorFactorizeSchurComplement(F)); // the inverse is computed from the factors, so factorize first
10146:   PetscCall(MatFactorInvertSchurComplement_Private(F));
10147:   F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10148:   PetscFunctionReturn(PETSC_SUCCESS);
10149: }
10151: /*@
10152: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10154: Logically Collective
10156: Input Parameter:
10157: . F - the factored matrix obtained by calling `MatGetFactor()`
10159: Level: advanced
10161: Note:
10162: Must be called after `MatFactorSetSchurIS()`
10164: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10165: @*/
10166: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10167: {
10168:   MatFactorInfo info;
10170:   PetscFunctionBegin;
10173:   if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS); // already done: idempotent
10174:   PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10175:   PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo))); // default factorization parameters
10176:   if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10177:     PetscCall(MatCholeskyFactor(F->schur, NULL, &info)); // symmetric factorization mirrors the outer factor type
10178:   } else {
10179:     PetscCall(MatLUFactor(F->schur, NULL, NULL, &info)); // general case: LU
10180:   }
10181:   PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10182:   F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10183:   PetscFunctionReturn(PETSC_SUCCESS);
10184: }
10186: /*@
10187: MatPtAP - Creates the matrix product $C = P^T * A * P$
10189: Neighbor-wise Collective
10191: Input Parameters:
10192: + A - the matrix
10193: . P - the projection matrix
10194: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10195: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10196: if the result is a dense matrix this is irrelevant
10198: Output Parameter:
10199: . C - the product matrix
10201: Level: intermediate
10203: Notes:
10204: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10206: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_PtAP`
10207: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10209: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10211: Developer Note:
10212: For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.
10214: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10215: @*/
10216: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10217: {
10218:   PetscFunctionBegin;
10219:   if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5); // reuse requires *C to be a previously created product matrix
10220:   PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10222:   if (scall == MAT_INITIAL_MATRIX) { // first call: build the product object and run the symbolic phase
10223:     PetscCall(MatProductCreate(A, P, NULL, C));
10224:     PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10225:     PetscCall(MatProductSetAlgorithm(*C, "default"));
10226:     PetscCall(MatProductSetFill(*C, fill));
10228:     (*C)->product->api_user = PETSC_TRUE; // mark as created through the convenience API (affects options prefixes)
10229:     PetscCall(MatProductSetFromOptions(*C));
10230:     PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10231:     PetscCall(MatProductSymbolic(*C));
10232:   } else { /* scall == MAT_REUSE_MATRIX */
10233:     PetscCall(MatProductReplaceMats(A, P, NULL, *C)); // keep the symbolic data, swap in (possibly updated) operands
10234:   }
10236:   PetscCall(MatProductNumeric(*C));
10237:   if (A->symmetric == PETSC_BOOL3_TRUE) { // P^T A P of a symmetric A is symmetric, and inherits A's SPD flag
10238:     PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10239:     (*C)->spd = A->spd;
10240:   }
10241:   PetscFunctionReturn(PETSC_SUCCESS);
10242: }
10244: /*@
10245: MatRARt - Creates the matrix product $C = R * A * R^T$
10247: Neighbor-wise Collective
10249: Input Parameters:
10250: + A - the matrix
10251: . R - the projection matrix
10252: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10253: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10254: if the result is a dense matrix this is irrelevant
10256: Output Parameter:
10257: . C - the product matrix
10259: Level: intermediate
10261: Notes:
10262: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10264: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_RARt`
10265: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10267: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10268: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10269: the parallel `MatRARt()` is implemented computing the explicit transpose of `R`, which can be very expensive.
10270: We recommend using `MatPtAP()` when possible.
10272: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10274: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10275: @*/
10276: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10277: {
10278: PetscFunctionBegin;
10279: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5); /* reuse requires *C to carry the MatProduct created by a prior MAT_INITIAL_MATRIX call */
10280: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10282: if (scall == MAT_INITIAL_MATRIX) {
/* first call: build the product object for C = R*A*R^T and run the symbolic phase once */
10283: PetscCall(MatProductCreate(A, R, NULL, C));
10284: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10285: PetscCall(MatProductSetAlgorithm(*C, "default"));
10286: PetscCall(MatProductSetFill(*C, fill));
10288: (*C)->product->api_user = PETSC_TRUE; /* mark C as produced through this convenience API rather than MatProductCreate() directly */
10289: PetscCall(MatProductSetFromOptions(*C));
10290: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10291: PetscCall(MatProductSymbolic(*C));
10292: } else { /* scall == MAT_REUSE_MATRIX */
10293: PetscCall(MatProductReplaceMats(A, R, NULL, *C)); /* swap in the (possibly updated) A and R; nonzero structure must match the prior call */
10294: }
10296: PetscCall(MatProductNumeric(*C)); /* numeric phase runs for both initial and reuse paths */
10297: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE)); /* R*A*R^T is symmetric whenever A is */
10298: PetscFunctionReturn(PETSC_SUCCESS);
10299: }
10301: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10302: {
10303: PetscBool flg = PETSC_TRUE; /* whether the product setup (type, options, symbolic phase) still needs to run */
10305: PetscFunctionBegin;
10306: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10307: if (scall == MAT_INITIAL_MATRIX) {
10308: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10309: PetscCall(MatProductCreate(A, B, NULL, C));
10310: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10311: PetscCall(MatProductSetFill(*C, fill));
10312: } else { /* scall == MAT_REUSE_MATRIX */
10313: Mat_Product *product = (*C)->product;
/* flg now doubles as "is *C dense?": only a dense C may legally be user-provided without a product attached */
10315: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10316: if (flg && product && product->type != ptype) { /* dense C reused for a different product type: drop the stale product and rebuild */
10317: PetscCall(MatProductClear(*C));
10318: product = NULL;
10319: }
10320: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10321: if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10322: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10323: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10324: product = (*C)->product;
10325: product->fill = fill;
10326: product->clear = PETSC_TRUE;
10327: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10328: flg = PETSC_FALSE; /* product already configured: skip the type/options/symbolic setup below */
10329: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10330: }
10331: }
10332: if (flg) { /* initial call, or a reused dense C that needed a fresh product */
10333: (*C)->product->api_user = PETSC_TRUE;
10334: PetscCall(MatProductSetType(*C, ptype));
10335: PetscCall(MatProductSetFromOptions(*C));
10336: PetscCall(MatProductSymbolic(*C));
10337: }
10338: PetscCall(MatProductNumeric(*C)); /* numeric phase always runs */
10339: PetscFunctionReturn(PETSC_SUCCESS);
10340: }
10342: /*@
10343: MatMatMult - Performs matrix-matrix multiplication $ C=A*B $.
10345: Neighbor-wise Collective
10347: Input Parameters:
10348: + A - the left matrix
10349: . B - the right matrix
10350: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10351: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10352: if the result is a dense matrix this is irrelevant
10354: Output Parameter:
10355: . C - the product matrix
10357: Notes:
10358: Unless scall is `MAT_REUSE_MATRIX` C will be created.
10360: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10361: call to this function with `MAT_INITIAL_MATRIX`.
10363: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10365: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10366: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10368: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10370: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AB`
10371: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10373: Example of Usage:
10374: .vb
10375: MatProductCreate(A,B,NULL,&C);
10376: MatProductSetType(C,MATPRODUCT_AB);
10377: MatProductSymbolic(C);
10378: MatProductNumeric(C); // compute C=A * B
10379: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10380: MatProductNumeric(C);
10381: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10382: MatProductNumeric(C);
10383: .ve
10385: Level: intermediate
10387: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10388: @*/
10389: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10390: {
10391: PetscFunctionBegin;
10392: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C)); /* thin wrapper: all create/reuse logic lives in MatProduct_Private() */
10393: PetscFunctionReturn(PETSC_SUCCESS);
10394: }
10396: /*@
10397: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10399: Neighbor-wise Collective
10401: Input Parameters:
10402: + A - the left matrix
10403: . B - the right matrix
10404: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10405: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10407: Output Parameter:
10408: . C - the product matrix
10410: Options Database Key:
10411: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10412: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10413: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10415: Level: intermediate
10417: Notes:
10418: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10420: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10422: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10423: actually needed.
10425: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10426: and for pairs of `MATMPIDENSE` matrices.
10428: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABt`
10429: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10431: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10433: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()`, `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10434: @*/
10435: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10436: {
10437: PetscFunctionBegin;
10438: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C)); /* thin wrapper: all create/reuse logic lives in MatProduct_Private() */
10439: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE)); /* A*A^T is symmetric by construction */
10440: PetscFunctionReturn(PETSC_SUCCESS);
10441: }
10443: /*@
10444: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10446: Neighbor-wise Collective
10448: Input Parameters:
10449: + A - the left matrix
10450: . B - the right matrix
10451: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10452: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10454: Output Parameter:
10455: . C - the product matrix
10457: Level: intermediate
10459: Notes:
10460: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10462: `MAT_REUSE_MATRIX` can only be used if `A` and `B` have the same nonzero pattern as in the previous call.
10464: This is a convenience routine that wraps the use of `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_AtB`
10465: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10467: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10468: actually needed.
10470: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10471: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10473: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10475: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10476: @*/
10477: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10478: {
10479: PetscFunctionBegin;
10480: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C)); /* thin wrapper: all create/reuse logic lives in MatProduct_Private() */
10481: PetscFunctionReturn(PETSC_SUCCESS);
10482: }
10484: /*@
10485: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10487: Neighbor-wise Collective
10489: Input Parameters:
10490: + A - the left matrix
10491: . B - the middle matrix
10492: . C - the right matrix
10493: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10494: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10495: if the result is a dense matrix this is irrelevant
10497: Output Parameter:
10498: . D - the product matrix
10500: Level: intermediate
10502: Notes:
10503: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10505: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10507: This is a convenience routine that wraps the use of the `MatProductCreate()` with a `MatProductType` of `MATPRODUCT_ABC`
10508: functionality into a single function call. For more involved matrix-matrix operations see `MatProductCreate()`.
10510: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10511: actually needed.
10513: If you have many matrices with the same non-zero structure to multiply, you
10514: should use `MAT_REUSE_MATRIX` in all calls but the first
10516: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10518: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10519: @*/
10520: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10521: {
10522: PetscFunctionBegin;
10523: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6); /* reuse requires *D to carry the MatProduct created by a prior MAT_INITIAL_MATRIX call */
10524: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10526: if (scall == MAT_INITIAL_MATRIX) {
/* first call: build the product object for D = A*B*C and run the symbolic phase once */
10527: PetscCall(MatProductCreate(A, B, C, D));
10528: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10529: PetscCall(MatProductSetAlgorithm(*D, "default"));
10530: PetscCall(MatProductSetFill(*D, fill));
10532: (*D)->product->api_user = PETSC_TRUE; /* mark D as produced through this convenience API rather than MatProductCreate() directly */
10533: PetscCall(MatProductSetFromOptions(*D));
10534: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10535: ((PetscObject)C)->type_name);
10536: PetscCall(MatProductSymbolic(*D));
10537: } else { /* user may change input matrices when REUSE */
10538: PetscCall(MatProductReplaceMats(A, B, C, *D));
10539: }
10540: PetscCall(MatProductNumeric(*D)); /* numeric phase runs for both initial and reuse paths */
10541: PetscFunctionReturn(PETSC_SUCCESS);
10542: }
10544: /*@
10545: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10547: Collective
10549: Input Parameters:
10550: + mat - the matrix
10551: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10552: . subcomm - MPI communicator split from the communicator where mat resides in (or `MPI_COMM_NULL` if nsubcomm is used)
10553: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10555: Output Parameter:
10556: . matredundant - redundant matrix
10558: Level: advanced
10560: Notes:
10561: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10562: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10564: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10565: calling it.
10567: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
10569: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10570: @*/
10571: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10572: {
10573: MPI_Comm comm;
10574: PetscMPIInt size;
10575: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10576: Mat_Redundant *redund = NULL;
10577: PetscSubcomm psubcomm = NULL;
10578: MPI_Comm subcomm_in = subcomm;
10579: Mat *matseq;
10580: IS isrow, iscol;
10581: PetscBool newsubcomm = PETSC_FALSE;
10583: PetscFunctionBegin;
10585: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10586: PetscAssertPointer(*matredundant, 5);
10588: }
/* trivial case: sequential input or a single subcommunicator means a plain duplicate/copy suffices */
10590: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10591: if (size == 1 || nsubcomm == 1) {
10592: if (reuse == MAT_INITIAL_MATRIX) {
10593: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10594: } else {
10595: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10596: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10597: }
10598: PetscFunctionReturn(PETSC_SUCCESS);
10599: }
10601: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10602: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10603: MatCheckPreallocated(mat, 1);
10605: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10606: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10607: /* create psubcomm, then get subcomm */
10608: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10609: PetscCallMPI(MPI_Comm_size(comm, &size));
10610: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10612: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10613: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10614: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10615: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10616: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL)); /* duplicate so subcomm outlives psubcomm */
10617: newsubcomm = PETSC_TRUE;
10618: PetscCall(PetscSubcommDestroy(&psubcomm));
10619: }
10621: /* get isrow, iscol and a local sequential matrix matseq[0] */
10622: if (reuse == MAT_INITIAL_MATRIX) {
/* split ownership of the global rows/cols over the processes of the subcommunicator */
10623: mloc_sub = PETSC_DECIDE;
10624: nloc_sub = PETSC_DECIDE;
10625: if (bs < 1) {
10626: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10627: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10628: } else {
10629: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10630: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10631: }
10632: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm)); /* prefix sum gives this process's row range end */
10633: rstart = rend - mloc_sub;
10634: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10635: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol)); /* every process takes all columns */
10636: PetscCall(ISSetIdentity(iscol));
10637: } else { /* reuse == MAT_REUSE_MATRIX */
10638: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10639: /* retrieve subcomm */
10640: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10641: redund = (*matredundant)->redundant; /* cached index sets and sequential matrix from the initial call */
10642: isrow = redund->isrow;
10643: iscol = redund->iscol;
10644: matseq = redund->matseq;
10645: }
10646: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10648: /* get matredundant over subcomm */
10649: if (reuse == MAT_INITIAL_MATRIX) {
10650: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10652: /* create a supporting struct and attach it to C for reuse */
10653: PetscCall(PetscNew(&redund));
10654: (*matredundant)->redundant = redund;
10655: redund->isrow = isrow;
10656: redund->iscol = iscol;
10657: redund->matseq = matseq;
10658: if (newsubcomm) {
10659: redund->subcomm = subcomm; /* we own this duplicated communicator; remember it for destruction */
10660: } else {
10661: redund->subcomm = MPI_COMM_NULL; /* user-provided communicator is not ours to free */
10662: }
10663: } else {
10664: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10665: }
10666: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10667: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) { /* propagate CPU binding from the sequential pieces to the redundant matrix */
10668: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10669: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10670: }
10671: #endif
10672: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10673: PetscFunctionReturn(PETSC_SUCCESS);
10674: }
10676: /*@C
10677: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10678: a given `Mat`. Each submatrix can span multiple procs.
10680: Collective
10682: Input Parameters:
10683: + mat - the matrix
10684: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10685: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10687: Output Parameter:
10688: . subMat - parallel sub-matrices each spanning a given `subcomm`
10690: Level: advanced
10692: Notes:
10693: The submatrix partition across processors is dictated by `subComm` a
10694: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10695: is not restricted to be grouped with consecutive original MPI processes.
10697: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10698: map directly to the layout of the original matrix [wrt the local
10699: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10700: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10701: the `subMat`. However the offDiagMat loses some columns - and this is
10702: reconstructed with `MatSetValues()`
10704: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10706: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10707: @*/
10708: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10709: {
10710: PetscMPIInt commsize, subCommSize;
10712: PetscFunctionBegin;
/* sanity: a subcommunicator cannot have more processes than the matrix's communicator */
10713: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10714: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10715: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10717: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10718: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10719: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat); /* no generic fallback: the matrix type must implement this */
10720: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10721: PetscFunctionReturn(PETSC_SUCCESS);
10722: }
10724: /*@
10725: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10727: Not Collective
10729: Input Parameters:
10730: + mat - matrix to extract local submatrix from
10731: . isrow - local row indices for submatrix
10732: - iscol - local column indices for submatrix
10734: Output Parameter:
10735: . submat - the submatrix
10737: Level: intermediate
10739: Notes:
10740: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10742: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10743: the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communicator of `mat`'s.
10745: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10746: `MatSetValuesBlockedLocal()` will also be implemented.
10748: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10749: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
10751: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10752: @*/
10753: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10754: {
10755: PetscFunctionBegin;
10759: PetscCheckSameComm(isrow, 2, iscol, 3);
10760: PetscAssertPointer(submat, 4);
10761: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10763: if (mat->ops->getlocalsubmatrix) {
10764: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat); /* type-specific implementation, when available */
10765: } else {
10766: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat)); /* generic fallback: lightweight reference into mat */
10767: }
10768: (*submat)->assembled = mat->assembled; /* the submatrix inherits the parent's assembly state */
10769: PetscFunctionReturn(PETSC_SUCCESS);
10770: }
10772: /*@
10773: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10775: Not Collective
10777: Input Parameters:
10778: + mat - matrix to extract local submatrix from
10779: . isrow - local row indices for submatrix
10780: . iscol - local column indices for submatrix
10781: - submat - the submatrix
10783: Level: intermediate
10785: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10786: @*/
10787: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10788: {
10789: PetscFunctionBegin;
10793: PetscCheckSameComm(isrow, 2, iscol, 3);
10794: PetscAssertPointer(submat, 4);
10797: if (mat->ops->restorelocalsubmatrix) {
10798: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat); /* type-specific restore, mirroring getlocalsubmatrix */
10799: } else {
10800: PetscCall(MatDestroy(submat)); /* generic fallback drops the reference created by MatCreateLocalRef() */
10801: }
10802: *submat = NULL; /* the caller's handle is always invalidated */
10803: PetscFunctionReturn(PETSC_SUCCESS);
10804: }
10806: /*@
10807: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10809: Collective
10811: Input Parameter:
10812: . mat - the matrix
10814: Output Parameter:
10815: . is - if any rows have zero diagonals this contains the list of them
10817: Level: developer
10819: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10820: @*/
10821: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10822: {
10823: PetscFunctionBegin;
10826: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10827: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10829: if (!mat->ops->findzerodiagonals) {
10830: Vec diag;
10831: const PetscScalar *a;
10832: PetscInt *rows;
10833: PetscInt rStart, rEnd, r, nrow = 0;
10835: PetscCall(MatCreateVecs(mat, &diag, NULL));
10836: PetscCall(MatGetDiagonal(mat, diag));
10837: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10838: PetscCall(VecGetArrayRead(diag, &a));
10839: for (r = 0; r < rEnd - rStart; ++r)
10840: if (a[r] == 0.0) ++nrow;
10841: PetscCall(PetscMalloc1(nrow, &rows));
10842: nrow = 0;
10843: for (r = 0; r < rEnd - rStart; ++r)
10844: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10845: PetscCall(VecRestoreArrayRead(diag, &a));
10846: PetscCall(VecDestroy(&diag));
10847: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10848: } else {
10849: PetscUseTypeMethod(mat, findzerodiagonals, is);
10850: }
10851: PetscFunctionReturn(PETSC_SUCCESS);
10852: }
10854: /*@
10855: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10857: Collective
10859: Input Parameter:
10860: . mat - the matrix
10862: Output Parameter:
10863: . is - contains the list of rows with off block diagonal entries
10865: Level: developer
10867: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10868: @*/
10869: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10870: {
10871: PetscFunctionBegin;
10874: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10875: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10877: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is); /* no generic fallback: the matrix type must implement this */
10878: PetscFunctionReturn(PETSC_SUCCESS);
10879: }
10881: /*@C
10882: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10884: Collective; No Fortran Support
10886: Input Parameter:
10887: . mat - the matrix
10889: Output Parameter:
10890: . values - the block inverses in column major order (FORTRAN-like)
10892: Level: advanced
10894: Notes:
10895: The size of the blocks is determined by the block size of the matrix.
10897: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10899: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
10901: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10902: @*/
10903: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10904: {
10905: PetscFunctionBegin;
10907: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10908: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10909: PetscUseTypeMethod(mat, invertblockdiagonal, values); /* NOTE(review): const out-pointer suggests mat owns the returned storage - confirm callers must not free it */
10910: PetscFunctionReturn(PETSC_SUCCESS);
10911: }
10913: /*@
10914: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10916: Collective; No Fortran Support
10918: Input Parameters:
10919: + mat - the matrix
10920: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10921: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10923: Output Parameter:
10924: . values - the block inverses in column major order (FORTRAN-like)
10926: Level: advanced
10928: Notes:
10929: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10931: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10933: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10934: @*/
10935: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10936: {
10937: PetscFunctionBegin;
10939: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10940: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10941: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values); /* no generic fallback: the matrix type must implement this */
10942: PetscFunctionReturn(PETSC_SUCCESS);
10943: }
10945: /*@
10946: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10948: Collective
10950: Input Parameters:
10951: + A - the matrix
10952: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10954: Level: advanced
10956: Note:
10957: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10959: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10960: @*/
10961: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10962: {
10963: const PetscScalar *vals;
10964: PetscInt *dnnz;
10965: PetscInt m, rstart, rend, bs, i, j;
10967: PetscFunctionBegin;
10968: PetscCall(MatInvertBlockDiagonal(A, &vals));
10969: PetscCall(MatGetBlockSize(A, &bs));
10970: PetscCall(MatGetLocalSize(A, &m, NULL));
10971: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10972: PetscCall(MatSetBlockSizes(C, A->rmap->bs, A->cmap->bs));
10973: PetscCall(PetscMalloc1(m / bs, &dnnz));
10974: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10975: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10976: PetscCall(PetscFree(dnnz));
10977: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10978: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10979: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10980: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
10981: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10982: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10983: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_FALSE));
10984: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10985: PetscFunctionReturn(PETSC_SUCCESS);
10986: }
10988: /*@
10989: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10990: via `MatTransposeColoringCreate()`.
10992: Collective
10994: Input Parameter:
10995: . c - coloring context
10997: Level: intermediate
10999: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
11000: @*/
11001: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
11002: {
11003: MatTransposeColoring matcolor = *c;
11005: PetscFunctionBegin;
11006: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
11007: if (--((PetscObject)matcolor)->refct > 0) {
11008: matcolor = NULL;
11009: PetscFunctionReturn(PETSC_SUCCESS);
11010: }
11012: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
11013: PetscCall(PetscFree(matcolor->rows));
11014: PetscCall(PetscFree(matcolor->den2sp));
11015: PetscCall(PetscFree(matcolor->colorforcol));
11016: PetscCall(PetscFree(matcolor->columns));
11017: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
11018: PetscCall(PetscHeaderDestroy(c));
11019: PetscFunctionReturn(PETSC_SUCCESS);
11020: }
/*@
  MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
  a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
  `MatTransposeColoring` to sparse `B`.

  Collective

  Input Parameters:
+ coloring - coloring context created with `MatTransposeColoringCreate()`
- B - sparse matrix

  Output Parameter:
. Btdense - dense matrix $B^T$

  Level: developer

  Note:
  These are used internally for some implementations of `MatRARt()`

.seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
@*/
PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
{
  PetscFunctionBegin;
  /* Call through B's function table directly: the usual PetscUseTypeMethod() macro
     cannot be used here because the operation's first argument is the coloring, not B.
     NOTE(review): no NULL check on B->ops->transcoloringapplysptoden is visible in
     this view -- presumably guaranteed by the matrix types that reach this path;
     confirm for types that do not implement the operation. */
  PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
  a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
  in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover sparse matrix
  $C_{sp}$ from $C_{den}$.

  Collective

  Input Parameters:
+ matcoloring - coloring context created with `MatTransposeColoringCreate()`
- Cden - matrix product of a sparse matrix and a dense matrix Btdense

  Output Parameter:
. Csp - sparse matrix

  Level: developer

  Note:
  These are used internally for some implementations of `MatRARt()`

.seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
@*/
PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
{
  PetscFunctionBegin;
  /* Direct function-table call: the operation's first argument is the coloring, so
     PetscUseTypeMethod() on Csp cannot express this signature */
  PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
  /* Ensure Csp is fully assembled before returning it to the caller */
  PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
11089: /*@
11090: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
11092: Collective
11094: Input Parameters:
11095: + mat - the matrix product C
11096: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
11098: Output Parameter:
11099: . color - the new coloring context
11101: Level: intermediate
11103: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
11104: `MatTransColoringApplyDenToSp()`
11105: @*/
11106: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
11107: {
11108: MatTransposeColoring c;
11109: MPI_Comm comm;
11111: PetscFunctionBegin;
11112: PetscAssertPointer(color, 3);
11114: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11115: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
11116: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
11117: c->ctype = iscoloring->ctype;
11118: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
11119: *color = c;
11120: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11121: PetscFunctionReturn(PETSC_SUCCESS);
11122: }
11124: /*@
11125: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
11126: matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.
11128: Not Collective
11130: Input Parameter:
11131: . mat - the matrix
11133: Output Parameter:
11134: . state - the current state
11136: Level: intermediate
11138: Notes:
11139: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
11140: different matrices
11142: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11144: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
11146: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11147: @*/
11148: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11149: {
11150: PetscFunctionBegin;
11152: *state = mat->nonzerostate;
11153: PetscFunctionReturn(PETSC_SUCCESS);
11154: }
11156: /*@
11157: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11158: matrices from each processor
11160: Collective
11162: Input Parameters:
11163: + comm - the communicators the parallel matrix will live on
11164: . seqmat - the input sequential matrices
11165: . n - number of local columns (or `PETSC_DECIDE`)
11166: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11168: Output Parameter:
11169: . mpimat - the parallel matrix generated
11171: Level: developer
11173: Note:
11174: The number of columns of the matrix in EACH processor MUST be the same.
11176: .seealso: [](ch_matrices), `Mat`
11177: @*/
11178: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11179: {
11180: PetscMPIInt size;
11182: PetscFunctionBegin;
11183: PetscCallMPI(MPI_Comm_size(comm, &size));
11184: if (size == 1) {
11185: if (reuse == MAT_INITIAL_MATRIX) {
11186: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11187: } else {
11188: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11189: }
11190: PetscFunctionReturn(PETSC_SUCCESS);
11191: }
11193: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11195: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11196: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11197: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11198: PetscFunctionReturn(PETSC_SUCCESS);
11199: }
11201: /*@
11202: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11204: Collective
11206: Input Parameters:
11207: + A - the matrix to create subdomains from
11208: - N - requested number of subdomains
11210: Output Parameters:
11211: + n - number of subdomains resulting on this MPI process
11212: - iss - `IS` list with indices of subdomains on this MPI process
11214: Level: advanced
11216: Note:
11217: The number of subdomains must be smaller than the communicator size
11219: .seealso: [](ch_matrices), `Mat`, `IS`
11220: @*/
11221: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11222: {
11223: MPI_Comm comm, subcomm;
11224: PetscMPIInt size, rank, color;
11225: PetscInt rstart, rend, k;
11227: PetscFunctionBegin;
11228: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11229: PetscCallMPI(MPI_Comm_size(comm, &size));
11230: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11231: PetscCheck(N >= 1 && N < size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11232: *n = 1;
11233: k = size / N + (size % N > 0); /* There are up to k ranks to a color */
11234: color = rank / k;
11235: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11236: PetscCall(PetscMalloc1(1, iss));
11237: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11238: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11239: PetscCallMPI(MPI_Comm_free(&subcomm));
11240: PetscFunctionReturn(PETSC_SUCCESS);
11241: }
11243: /*@
11244: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11246: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11247: If they are not the same, uses `MatMatMatMult()`.
11249: Once the coarse grid problem is constructed, correct for interpolation operators
11250: that are not of full rank, which can legitimately happen in the case of non-nested
11251: geometric multigrid.
11253: Input Parameters:
11254: + restrct - restriction operator
11255: . dA - fine grid matrix
11256: . interpolate - interpolation operator
11257: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11258: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_DETERMINE` if you do not have a good estimate
11260: Output Parameter:
11261: . A - the Galerkin coarse matrix
11263: Options Database Key:
11264: . -pc_mg_galerkin (both|pmat|mat|none) - for what matrices the Galerkin process should be used
11266: Level: developer
11268: Note:
11269: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
11271: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11272: @*/
11273: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11274: {
11275: IS zerorows;
11276: Vec diag;
11278: PetscFunctionBegin;
11279: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11280: /* Construct the coarse grid matrix */
11281: if (interpolate == restrct) {
11282: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11283: } else {
11284: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11285: }
11287: /* If the interpolation matrix is not of full rank, A will have zero rows.
11288: This can legitimately happen in the case of non-nested geometric multigrid.
11289: In that event, we set the rows of the matrix to the rows of the identity,
11290: ignoring the equations (as the RHS will also be zero). */
11292: PetscCall(MatFindZeroRows(*A, &zerorows));
11294: if (zerorows != NULL) { /* if there are any zero rows */
11295: PetscCall(MatCreateVecs(*A, &diag, NULL));
11296: PetscCall(MatGetDiagonal(*A, diag));
11297: PetscCall(VecISSet(diag, zerorows, 1.0));
11298: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11299: PetscCall(VecDestroy(&diag));
11300: PetscCall(ISDestroy(&zerorows));
11301: }
11302: PetscFunctionReturn(PETSC_SUCCESS);
11303: }
/*@C
  MatSetOperation - Allows user to set a matrix operation for any matrix type

  Logically Collective

  Input Parameters:
+ mat - the matrix
. op - the name of the operation
- f - the function that provides the operation

  Level: developer

  Example Usage:
.vb
  extern PetscErrorCode usermult(Mat, Vec, Vec);

  PetscCall(MatCreateXXX(comm, ..., &A));
  PetscCall(MatSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)usermult));
.ve

  Notes:
  See the file `include/petscmat.h` for a complete list of matrix
  operations, which all have the form MATOP_<OPERATION>, where
  <OPERATION> is the name (in all capital letters) of the
  user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).

  All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
  sequence as the usual matrix interface routines, since they
  are intended to be accessed via the usual matrix interface
  routines, e.g.,
.vb
  MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
.ve

  In particular each function MUST return `PETSC_SUCCESS` on success and
  nonzero on failure.

  This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.

.seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
@*/
PetscErrorCode MatSetOperation(Mat mat, MatOperation op, PetscErrorCodeFn *f)
{
  PetscFunctionBegin;
  /* When the user overrides MATOP_VIEW with a different function, stash the type's
     original view routine in viewnative (only once) so the built-in viewer remains reachable */
  if (op == MATOP_VIEW && !mat->ops->viewnative && f != (PetscErrorCodeFn *)mat->ops->view) mat->ops->viewnative = mat->ops->view;
  /* The ops table is a struct of function pointers; index it as a flat array keyed by op */
  (((PetscErrorCodeFn **)mat->ops)[op]) = f;
  PetscFunctionReturn(PETSC_SUCCESS);
}
11355: /*@C
11356: MatGetOperation - Gets a matrix operation for any matrix type.
11358: Not Collective
11360: Input Parameters:
11361: + mat - the matrix
11362: - op - the name of the operation
11364: Output Parameter:
11365: . f - the function that provides the operation
11367: Level: developer
11369: Example Usage:
11370: .vb
11371: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11373: MatGetOperation(A, MATOP_MULT, (PetscErrorCodeFn **)&usermult);
11374: .ve
11376: Notes:
11377: See the file `include/petscmat.h` for a complete list of matrix
11378: operations, which all have the form MATOP_<OPERATION>, where
11379: <OPERATION> is the name (in all capital letters) of the
11380: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11382: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11384: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11385: @*/
11386: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, PetscErrorCodeFn **f)
11387: {
11388: PetscFunctionBegin;
11390: *f = (((PetscErrorCodeFn **)mat->ops)[op]);
11391: PetscFunctionReturn(PETSC_SUCCESS);
11392: }
/*@
  MatHasOperation - Determines whether the given matrix supports the particular operation.

  Not Collective

  Input Parameters:
+ mat - the matrix
- op - the operation, for example, `MATOP_GET_DIAGONAL`

  Output Parameter:
. has - either `PETSC_TRUE` or `PETSC_FALSE`

  Level: advanced

  Note:
  See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.

.seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
@*/
PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
{
  PetscFunctionBegin;
  PetscAssertPointer(has, 3);
  if (mat->ops->hasoperation) {
    /* Some matrix types answer dynamically through their own hasoperation method */
    PetscUseTypeMethod(mat, hasoperation, op, has);
  } else {
    /* Default: inspect the function-pointer table directly */
    if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
    else {
      *has = PETSC_FALSE;
      if (op == MATOP_CREATE_SUBMATRIX) {
        PetscMPIInt size;

        PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
        /* On a single process a MatCreateSubMatrices() implementation can stand in
           for MatCreateSubMatrix(), so report support if that operation exists */
        if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatHasCongruentLayouts - Determines whether the rows and columns layouts of the matrix are congruent

  Collective

  Input Parameter:
. mat - the matrix

  Output Parameter:
. cong - either `PETSC_TRUE` or `PETSC_FALSE`

  Level: beginner

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
@*/
PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
{
  PetscFunctionBegin;
  PetscAssertPointer(cong, 2);
  if (!mat->rmap || !mat->cmap) {
    /* With a layout missing, the layouts are congruent only if they are the very
       same object (including both being NULL) */
    *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
    PetscCall(PetscLayoutSetUp(mat->rmap));
    PetscCall(PetscLayoutSetUp(mat->cmap));
    PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
    /* Cache the result in the matrix so subsequent calls skip the comparison */
    if (*cong) mat->congruentlayouts = 1;
    else mat->congruentlayouts = 0;
  } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* MatSetInf - dispatches to the matrix type's `setinf` implementation.
   NOTE(review): the exact semantics (presumably setting matrix entries to infinity,
   per the name) are defined entirely by the per-type implementation -- confirm there.
   PetscUseTypeMethod() raises an error if the type does not provide the operation. */
PetscErrorCode MatSetInf(Mat A)
{
  PetscFunctionBegin;
  PetscUseTypeMethod(A, setinf);
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatCreateGraph - create a scalar matrix (that is a matrix with one vertex for each block vertex in the original matrix), for use in graph algorithms
  and possibly removes small values from the graph structure.

  Collective

  Input Parameters:
+ A - the matrix
. sym - `PETSC_TRUE` indicates that the graph should be symmetrized
. scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
. filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
. num_idx - size of 'index' array
- index - array of block indices to use for graph strength of connection weight

  Output Parameter:
. graph - the resulting graph

  Level: advanced

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
@*/
PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
{
  PetscFunctionBegin;
  PetscAssertPointer(graph, 7);
  /* Time the type-specific graph construction under its own logging event */
  PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
  PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
  PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  MatEliminateZeros - eliminate the nondiagonal zero entries in place from the nonzero structure of a sparse `Mat` in place,
  meaning the same memory is used for the matrix, and no new memory is allocated.

  Collective

  Input Parameters:
+ A - the matrix
- keep - if for a given row of `A`, the diagonal coefficient is zero, indicates whether it should be left in the structure or eliminated as well

  Level: intermediate

  Developer Note:
  The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the end
  of the arrays in the data structure are unneeded.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
@*/
PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
{
  PetscFunctionBegin;
  /* Delegate to the type-specific implementation; PetscUseTypeMethod() raises an
     error if the matrix type does not support in-place zero elimination */
  PetscUseTypeMethod(A, eliminatezeros, keep);
  PetscFunctionReturn(PETSC_SUCCESS);
}
11537: /*@C
11538: MatGetCurrentMemType - Get the memory location of the matrix
11540: Not Collective, but the result will be the same on all MPI processes
11542: Input Parameter:
11543: . A - the matrix whose memory type we are checking
11545: Output Parameter:
11546: . m - the memory type
11548: Level: intermediate
11550: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`, `PetscMemType`
11551: @*/
11552: PetscErrorCode MatGetCurrentMemType(Mat A, PetscMemType *m)
11553: {
11554: PetscFunctionBegin;
11556: PetscAssertPointer(m, 2);
11557: if (A->ops->getcurrentmemtype) PetscUseTypeMethod(A, getcurrentmemtype, m);
11558: else *m = PETSC_MEMTYPE_HOST;
11559: PetscFunctionReturn(PETSC_SUCCESS);
11560: }