Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which case
59: one will be created internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
73: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
107: /*@
108: MatFactorGetErrorZeroPivot - Returns the pivot value that was determined to be zero and the row in which it occurred
110: Logically Collective
112: Input Parameter:
113: . mat - the factored matrix
115: Output Parameters:
116: + pivot - the pivot value computed
117: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
118: share the matrix
120: Level: advanced
122: Notes:
123: This routine does not work for factorizations done with external packages.
125: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
127: This can also be called on non-factored matrices, for example, matrices used in SOR.
129: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
130: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
131: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
132: @*/
133: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
134: {
135: PetscFunctionBegin;
137: PetscAssertPointer(pivot, 2);
138: PetscAssertPointer(row, 3);
139: *pivot = mat->factorerror_zeropivot_value;
140: *row = mat->factorerror_zeropivot_row;
141: PetscFunctionReturn(PETSC_SUCCESS);
142: }
144: /*@
145: MatFactorGetError - gets the error code from a factorization
147: Logically Collective
149: Input Parameter:
150: . mat - the factored matrix
152: Output Parameter:
153: . err - the error code
155: Level: advanced
157: Note:
158: This can also be called on non-factored matrices, for example, matrices used in SOR.
160: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
161: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
162: @*/
163: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
164: {
165: PetscFunctionBegin;
167: PetscAssertPointer(err, 2);
168: *err = mat->factorerrortype;
169: PetscFunctionReturn(PETSC_SUCCESS);
170: }
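/*
  A hedged sketch of how MatFactorGetError() and MatFactorGetErrorZeroPivot() might be
  combined after a numeric factorization; F is a hypothetical factored matrix.

    MatFactorError err;
    PetscCall(MatFactorGetError(F, &err));
    if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
      PetscReal pivot;
      PetscInt  row;
      PetscCall(MatFactorGetErrorZeroPivot(F, &pivot, &row));
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "zero pivot %g in row %" PetscInt_FMT "\n", (double)pivot, row));
    }
*/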
172: /*@
173: MatFactorClearError - clears the error code in a factorization
175: Logically Collective
177: Input Parameter:
178: . mat - the factored matrix
180: Level: developer
182: Note:
183: This can also be called on non-factored matrices, for example, matrices used in SOR.
185: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
186: `MatGetErrorCode()`, `MatFactorError`
187: @*/
188: PetscErrorCode MatFactorClearError(Mat mat)
189: {
190: PetscFunctionBegin;
192: mat->factorerrortype = MAT_FACTOR_NOERROR;
193: mat->factorerror_zeropivot_value = 0.0;
194: mat->factorerror_zeropivot_row = 0;
195: PetscFunctionReturn(PETSC_SUCCESS);
196: }
198: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
199: {
200: Vec r, l;
201: const PetscScalar *al;
202: PetscInt i, nz, gnz, N, n, st;
204: PetscFunctionBegin;
205: PetscCall(MatCreateVecs(mat, &r, &l));
206: if (!cols) { /* nonzero rows */
207: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
208: PetscCall(MatGetSize(mat, &N, NULL));
209: PetscCall(MatGetLocalSize(mat, &n, NULL));
210: PetscCall(VecSet(l, 0.0));
211: PetscCall(VecSetRandom(r, NULL));
212: PetscCall(MatMult(mat, r, l));
213: PetscCall(VecGetArrayRead(l, &al));
214: } else { /* nonzero columns */
215: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
216: PetscCall(MatGetSize(mat, NULL, &N));
217: PetscCall(MatGetLocalSize(mat, NULL, &n));
218: PetscCall(VecSet(r, 0.0));
219: PetscCall(VecSetRandom(l, NULL));
220: PetscCall(MatMultTranspose(mat, l, r));
221: PetscCall(VecGetArrayRead(r, &al));
222: }
223: if (tol <= 0.0) {
224: for (i = 0, nz = 0; i < n; i++)
225: if (al[i] != 0.0) nz++;
226: } else {
227: for (i = 0, nz = 0; i < n; i++)
228: if (PetscAbsScalar(al[i]) > tol) nz++;
229: }
230: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
231: if (gnz != N) {
232: PetscInt *nzr;
233: PetscCall(PetscMalloc1(nz, &nzr));
234: if (nz) {
235: if (tol < 0) {
236: for (i = 0, nz = 0; i < n; i++)
237: if (al[i] != 0.0) nzr[nz++] = i + st;
238: } else {
239: for (i = 0, nz = 0; i < n; i++)
240: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
241: }
242: }
243: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
244: } else *nonzero = NULL;
245: if (!cols) { /* nonzero rows */
246: PetscCall(VecRestoreArrayRead(l, &al));
247: } else {
248: PetscCall(VecRestoreArrayRead(r, &al));
249: }
250: PetscCall(VecDestroy(&l));
251: PetscCall(VecDestroy(&r));
252: PetscFunctionReturn(PETSC_SUCCESS);
253: }
255: /*@
256: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
258: Input Parameter:
259: . mat - the matrix
261: Output Parameter:
262: . keptrows - the rows that are not completely zero
264: Level: intermediate
266: Note:
267: `keptrows` is set to `NULL` if all rows are nonzero.
269: Developer Note:
270: If `keptrows` is not `NULL`, it must be sorted.
272: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
273: @*/
274: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
275: {
276: PetscFunctionBegin;
279: PetscAssertPointer(keptrows, 2);
280: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
281: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
282: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
283: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
284: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
285: PetscFunctionReturn(PETSC_SUCCESS);
286: }
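/*
  A small sketch using MatFindNonzeroRows(), assuming an assembled matrix A
  (hypothetical variable); note that keptrows is NULL when every row has a nonzero.

    IS keptrows;
    PetscCall(MatFindNonzeroRows(A, &keptrows));
    if (keptrows) {
      PetscCall(ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD));
      PetscCall(ISDestroy(&keptrows));
    }
*/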
288: /*@
289: MatFindZeroRows - Locate all rows that are completely zero in the matrix
291: Input Parameter:
292: . mat - the matrix
294: Output Parameter:
295: . zerorows - the rows that are completely zero
297: Level: intermediate
299: Note:
300: `zerorows` is set to `NULL` if no rows are zero.
302: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
303: @*/
304: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
305: {
306: IS keptrows;
307: PetscInt m, n;
309: PetscFunctionBegin;
312: PetscAssertPointer(zerorows, 2);
313: PetscCall(MatFindNonzeroRows(mat, &keptrows));
314: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
315: In keeping with this convention, we set zerorows to NULL if there are no zero
316: rows. */
317: if (keptrows == NULL) {
318: *zerorows = NULL;
319: } else {
320: PetscCall(MatGetOwnershipRange(mat, &m, &n));
321: PetscCall(ISComplement(keptrows, m, n, zerorows));
322: PetscCall(ISDestroy(&keptrows));
323: }
324: PetscFunctionReturn(PETSC_SUCCESS);
325: }
327: /*@
328: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
330: Not Collective
332: Input Parameter:
333: . A - the matrix
335: Output Parameter:
336: . a - the diagonal part (which is a SEQUENTIAL matrix)
338: Level: advanced
340: Notes:
341: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
343: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
345: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
346: @*/
347: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
348: {
349: PetscFunctionBegin;
352: PetscAssertPointer(a, 2);
353: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
354: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
355: else {
356: PetscMPIInt size;
358: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
359: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
360: *a = A;
361: }
362: PetscFunctionReturn(PETSC_SUCCESS);
363: }
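/*
  Usage sketch for MatGetDiagonalBlock(), assuming a parallel AIJ matrix A (hypothetical
  variable); the returned block is a sequential matrix owned by A, so the caller must not
  destroy it.

    Mat Ad;
    PetscCall(MatGetDiagonalBlock(A, &Ad));
    PetscCall(MatView(Ad, PETSC_VIEWER_STDOUT_SELF));
*/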
365: /*@
366: MatGetTrace - Gets the trace of a matrix, that is, the sum of its diagonal entries.
368: Collective
370: Input Parameter:
371: . mat - the matrix
373: Output Parameter:
374: . trace - the sum of the diagonal entries
376: Level: advanced
378: .seealso: [](ch_matrices), `Mat`
379: @*/
380: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
381: {
382: Vec diag;
384: PetscFunctionBegin;
386: PetscAssertPointer(trace, 2);
387: PetscCall(MatCreateVecs(mat, &diag, NULL));
388: PetscCall(MatGetDiagonal(mat, diag));
389: PetscCall(VecSum(diag, trace));
390: PetscCall(VecDestroy(&diag));
391: PetscFunctionReturn(PETSC_SUCCESS);
392: }
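/*
  Usage sketch for MatGetTrace(), assuming an assembled square matrix A (hypothetical
  variable).

    PetscScalar tr;
    PetscCall(MatGetTrace(A, &tr));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "trace = %g\n", (double)PetscRealPart(tr)));
*/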
394: /*@
395: MatRealPart - Zeros out the imaginary part of the matrix
397: Logically Collective
399: Input Parameter:
400: . mat - the matrix
402: Level: advanced
404: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
405: @*/
406: PetscErrorCode MatRealPart(Mat mat)
407: {
408: PetscFunctionBegin;
411: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
412: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
413: MatCheckPreallocated(mat, 1);
414: PetscUseTypeMethod(mat, realpart);
415: PetscFunctionReturn(PETSC_SUCCESS);
416: }
418: /*@C
419: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
421: Collective
423: Input Parameter:
424: . mat - the matrix
426: Output Parameters:
427: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
428: - ghosts - the global indices of the ghost points
430: Level: advanced
432: Note:
433: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
435: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
436: @*/
437: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
438: {
439: PetscFunctionBegin;
442: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
443: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
444: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
445: else {
446: if (nghosts) *nghosts = 0;
447: if (ghosts) *ghosts = NULL;
448: }
449: PetscFunctionReturn(PETSC_SUCCESS);
450: }
452: /*@
453: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
455: Logically Collective
457: Input Parameter:
458: . mat - the matrix
460: Level: advanced
462: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
463: @*/
464: PetscErrorCode MatImaginaryPart(Mat mat)
465: {
466: PetscFunctionBegin;
469: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
470: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
471: MatCheckPreallocated(mat, 1);
472: PetscUseTypeMethod(mat, imaginarypart);
473: PetscFunctionReturn(PETSC_SUCCESS);
474: }
476: /*@
477: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
479: Not Collective
481: Input Parameter:
482: . mat - the matrix
484: Output Parameters:
485: + missing - is any diagonal entry missing
486: - dd - the first diagonal entry that is missing on this process (optional)
488: Level: advanced
490: Note:
491: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
493: .seealso: [](ch_matrices), `Mat`
494: @*/
495: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
496: {
497: PetscFunctionBegin;
500: PetscAssertPointer(missing, 2);
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
504: PetscFunctionReturn(PETSC_SUCCESS);
505: }
507: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
508: /*@C
509: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
510: for each row that you get to ensure that your application does
511: not bleed memory.
513: Not Collective
515: Input Parameters:
516: + mat - the matrix
517: - row - the row to get
519: Output Parameters:
520: + ncols - if not `NULL`, the number of nonzeros in `row`
521: . cols - if not `NULL`, the column numbers
522: - vals - if not `NULL`, the numerical values
524: Level: advanced
526: Notes:
527: This routine is provided for people who need to have direct access
528: to the structure of a matrix. We hope that we provide enough
529: high-level matrix routines that few users will need it.
531: `MatGetRow()` always returns 0-based column indices, regardless of
532: whether the internal representation is 0-based (default) or 1-based.
534: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
535: not wish to extract these quantities.
537: The user can only examine the values extracted with `MatGetRow()`;
538: the values CANNOT be altered. To change the matrix entries, one
539: must use `MatSetValues()`.
541: You can only have one call to `MatGetRow()` outstanding for a particular
542: matrix at a time, per processor. `MatGetRow()` can only obtain rows
543: associated with the given processor, it cannot get rows from the
544: other processors; for that we suggest using `MatCreateSubMatrices()`, then
545: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
546: is in the global number of rows.
548: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
550: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
552: Fortran Note:
553: The calling sequence is
554: .vb
555: MatGetRow(matrix,row,ncols,cols,values,ierr)
556: Mat matrix (input)
557: PetscInt row (input)
558: PetscInt ncols (output)
559: PetscInt cols(maxcols) (output)
560: PetscScalar values(maxcols) (output)
561: .ve
562: where maxcols >= maximum nonzeros in any row of the matrix.
564: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
565: @*/
566: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
567: {
568: PetscInt incols;
570: PetscFunctionBegin;
573: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
574: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
575: MatCheckPreallocated(mat, 1);
576: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
577: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
578: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
579: if (ncols) *ncols = incols;
580: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
581: PetscFunctionReturn(PETSC_SUCCESS);
582: }
584: /*@
585: MatConjugate - replaces the matrix values with their complex conjugates
587: Logically Collective
589: Input Parameter:
590: . mat - the matrix
592: Level: advanced
594: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
595: @*/
596: PetscErrorCode MatConjugate(Mat mat)
597: {
598: PetscFunctionBegin;
600: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
601: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
602: PetscUseTypeMethod(mat, conjugate);
603: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
604: }
605: PetscFunctionReturn(PETSC_SUCCESS);
606: }
608: /*@C
609: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
611: Not Collective
613: Input Parameters:
614: + mat - the matrix
615: . row - the row to get
616: . ncols - the number of nonzeros
617: . cols - the columns of the nonzeros
618: - vals - if nonzero the column values
620: Level: advanced
622: Notes:
623: This routine should be called after you have finished examining the entries.
625: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
626: use of the arrays after they have been restored. If you pass `NULL`, it will
627: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
629: Fortran Note:
630: `MatRestoreRow()` MUST be called after `MatGetRow()`
631: before another call to `MatGetRow()` can be made.
633: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
634: @*/
635: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
636: {
637: PetscFunctionBegin;
639: if (ncols) PetscAssertPointer(ncols, 3);
640: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
641: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
642: PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
643: if (ncols) *ncols = 0;
644: if (cols) *cols = NULL;
645: if (vals) *vals = NULL;
646: PetscFunctionReturn(PETSC_SUCCESS);
647: }
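/*
  A sketch of the MatGetRow()/MatRestoreRow() pairing described above, assuming an
  assembled matrix A (hypothetical variable); only locally owned rows may be requested.

    PetscInt rstart, rend;
    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    for (PetscInt row = rstart; row < rend; row++) {
      PetscInt           ncols;
      const PetscInt    *cols;
      const PetscScalar *vals;
      PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
      // examine cols[0..ncols-1] and vals[0..ncols-1] here (read-only)
      PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
    }
*/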
649: /*@
650: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for a matrix in `MATSBAIJ` format.
651: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
653: Not Collective
655: Input Parameter:
656: . mat - the matrix
658: Level: advanced
660: Note:
661: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
663: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
664: @*/
665: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
666: {
667: PetscFunctionBegin;
670: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
671: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
672: MatCheckPreallocated(mat, 1);
673: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
674: PetscUseTypeMethod(mat, getrowuppertriangular);
675: PetscFunctionReturn(PETSC_SUCCESS);
676: }
678: /*@
679: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for a matrix in `MATSBAIJ` format.
681: Not Collective
683: Input Parameter:
684: . mat - the matrix
686: Level: advanced
688: Note:
689: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
691: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
692: @*/
693: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
694: {
695: PetscFunctionBegin;
698: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
699: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
700: MatCheckPreallocated(mat, 1);
701: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
702: PetscUseTypeMethod(mat, restorerowuppertriangular);
703: PetscFunctionReturn(PETSC_SUCCESS);
704: }
706: /*@
707: MatSetOptionsPrefix - Sets the prefix used for searching for all
708: `Mat` options in the database.
710: Logically Collective
712: Input Parameters:
713: + A - the matrix
714: - prefix - the prefix to prepend to all option names
716: Level: advanced
718: Notes:
719: A hyphen (-) must NOT be given at the beginning of the prefix name.
720: The first character of all runtime options is AUTOMATICALLY the hyphen.
722: This is NOT used for options for the factorization of the matrix. Normally the
723: prefix is automatically passed in from the PC calling the factorization. To set
724: it directly use `MatSetOptionsPrefixFactor()`
726: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
727: @*/
728: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
729: {
730: PetscFunctionBegin;
732: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
733: PetscFunctionReturn(PETSC_SUCCESS);
734: }
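/*
  Usage sketch for MatSetOptionsPrefix(), assuming a matrix A (hypothetical variable) that
  is then configured from the options database; options for A would be given with the
  prefix, e.g. -sys1_mat_type aij.

    PetscCall(MatSetOptionsPrefix(A, "sys1_"));
    PetscCall(MatSetFromOptions(A));
*/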
736: /*@
737: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
738: for matrices created with `MatGetFactor()`
740: Logically Collective
742: Input Parameters:
743: + A - the matrix
744: - prefix - the prefix to prepend to all option names for the factored matrix
746: Level: developer
748: Notes:
749: A hyphen (-) must NOT be given at the beginning of the prefix name.
750: The first character of all runtime options is AUTOMATICALLY the hyphen.
752: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
753: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
755: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
756: @*/
757: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
758: {
759: PetscFunctionBegin;
761: if (prefix) {
762: PetscAssertPointer(prefix, 2);
763: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
764: if (prefix != A->factorprefix) {
765: PetscCall(PetscFree(A->factorprefix));
766: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
767: }
768: } else PetscCall(PetscFree(A->factorprefix));
769: PetscFunctionReturn(PETSC_SUCCESS);
770: }
772: /*@
773: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
774: for matrices created with `MatGetFactor()`
776: Logically Collective
778: Input Parameters:
779: + A - the matrix
780: - prefix - the prefix to prepend to all option names for the factored matrix
782: Level: developer
784: Notes:
785: A hyphen (-) must NOT be given at the beginning of the prefix name.
786: The first character of all runtime options is AUTOMATICALLY the hyphen.
788: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
789: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
791: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
792: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
793: `MatSetOptionsPrefix()`
794: @*/
795: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
796: {
797: size_t len1, len2, new_len;
799: PetscFunctionBegin;
801: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
802: if (!A->factorprefix) {
803: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
804: PetscFunctionReturn(PETSC_SUCCESS);
805: }
806: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
808: PetscCall(PetscStrlen(A->factorprefix, &len1));
809: PetscCall(PetscStrlen(prefix, &len2));
810: new_len = len1 + len2 + 1;
811: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
812: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
813: PetscFunctionReturn(PETSC_SUCCESS);
814: }
816: /*@
817: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
818: matrix options in the database.
820: Logically Collective
822: Input Parameters:
823: + A - the matrix
824: - prefix - the prefix to prepend to all option names
826: Level: advanced
828: Note:
829: A hyphen (-) must NOT be given at the beginning of the prefix name.
830: The first character of all runtime options is AUTOMATICALLY the hyphen.
832: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
833: @*/
834: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
835: {
836: PetscFunctionBegin;
838: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
839: PetscFunctionReturn(PETSC_SUCCESS);
840: }
842: /*@
843: MatGetOptionsPrefix - Gets the prefix used for searching for all
844: matrix options in the database.
846: Not Collective
848: Input Parameter:
849: . A - the matrix
851: Output Parameter:
852: . prefix - pointer to the prefix string used
854: Level: advanced
856: Fortran Note:
857: The user should pass in a string `prefix` of
858: sufficient length to hold the prefix.
860: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
861: @*/
862: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
863: {
864: PetscFunctionBegin;
866: PetscAssertPointer(prefix, 2);
867: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
868: PetscFunctionReturn(PETSC_SUCCESS);
869: }
871: /*@
872: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
874: Not Collective
876: Input Parameter:
877: . A - the matrix
879: Output Parameter:
880: . state - the object state
882: Level: advanced
884: Note:
885: Object state is an integer which gets increased every time
886: the object is changed. By saving and later querying the object state
887: one can determine whether information about the object is still current.
889: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
891: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
892: @*/
893: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
894: {
895: PetscFunctionBegin;
897: PetscAssertPointer(state, 2);
898: PetscCall(PetscObjectStateGet((PetscObject)A, state));
899: PetscFunctionReturn(PETSC_SUCCESS);
900: }
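/*
  Sketch of using MatGetState() to detect whether a matrix changed between two points in a
  program, assuming a matrix A (hypothetical variable).

    PetscObjectState before, after;
    PetscCall(MatGetState(A, &before));
    // ... code that may modify A ...
    PetscCall(MatGetState(A, &after));
    if (after != before) {
      // A was changed; recompute anything derived from it
    }
*/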
902: /*@
903: MatResetPreallocation - Resets the matrix to use the original nonzero pattern provided by the user.
905: Collective
907: Input Parameter:
908: . A - the matrix
910: Level: beginner
912: Notes:
913: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
915: Users can reset the preallocation to access the original memory.
917: Currently only supported for `MATAIJ` matrices.
919: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
920: @*/
921: PetscErrorCode MatResetPreallocation(Mat A)
922: {
923: PetscFunctionBegin;
926: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
927: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
928: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
929: PetscFunctionReturn(PETSC_SUCCESS);
930: }
932: /*@
933: MatSetUp - Sets up the internal matrix data structures for later use.
935: Collective
937: Input Parameter:
938: . A - the matrix
940: Level: intermediate
942: Notes:
943: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
944: setting values in the matrix.
946: This routine is called internally by other matrix functions when needed, so it rarely needs to be called by users.
948: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
949: @*/
950: PetscErrorCode MatSetUp(Mat A)
951: {
952: PetscFunctionBegin;
954: if (!((PetscObject)A)->type_name) {
955: PetscMPIInt size;
957: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
958: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
959: }
960: if (!A->preallocated) PetscTryTypeMethod(A, setup);
961: PetscCall(PetscLayoutSetUp(A->rmap));
962: PetscCall(PetscLayoutSetUp(A->cmap));
963: A->preallocated = PETSC_TRUE;
964: PetscFunctionReturn(PETSC_SUCCESS);
965: }
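/*
  A sketch of a typical creation sequence in which MatSetUp() is called explicitly,
  assuming no preallocation routine is used; the sizes are illustrative only.

    Mat A;
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
    PetscCall(MatSetFromOptions(A));
    PetscCall(MatSetUp(A));
    // MatSetValues()/MatAssemblyBegin()/MatAssemblyEnd() may follow
*/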
967: #if defined(PETSC_HAVE_SAWS)
968: #include <petscviewersaws.h>
969: #endif
971: /*
972: If thread safety is on, extraneous matrices may be printed.
974: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix, which is passed into MatViewFromOptions().
975: */
976: #if !defined(PETSC_HAVE_THREADSAFETY)
977: static PetscInt insidematview = 0;
978: #endif
980: /*@
981: MatViewFromOptions - View properties of the matrix based on options set in the options database
983: Collective
985: Input Parameters:
986: + A - the matrix
987: . obj - optional additional object that provides the options prefix to use
988: - name - command line option
990: Options Database Key:
991: . -mat_view [viewertype]:... - the viewer and its options
993: Level: intermediate
995: Note:
996: .vb
997: If no value is provided, ascii:stdout is used
998: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
999: for example ascii::ascii_info prints just the information about the object, not all details
1000: unless :append is given, the file opens in write mode, overwriting what was already there
1001: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1002: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1003: socket[:port] defaults to the standard output port
1004: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1005: .ve
1007: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1008: @*/
1009: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1010: {
1011: PetscFunctionBegin;
1013: #if !defined(PETSC_HAVE_THREADSAFETY)
1014: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1015: #endif
1016: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1017: PetscFunctionReturn(PETSC_SUCCESS);
1018: }
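/*
  Sketch of MatViewFromOptions(), assuming a matrix A and a hypothetical option name
  -my_mat_view; the viewer and format are chosen at run time, e.g.
  -my_mat_view ascii::ascii_info.

    PetscCall(MatViewFromOptions(A, NULL, "-my_mat_view"));
*/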
1020: /*@
1021: MatView - Displays information about a matrix in a variety of ways
1023: Collective on viewer
1025: Input Parameters:
1026: + mat - the matrix
1027: - viewer - visualization context
1029: Options Database Keys:
1030: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1031: . -mat_view ::ascii_info_detail - Prints more detailed info
1032: . -mat_view - Prints matrix in ASCII format
1033: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1034: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1035: . -display <name> - Sets display name (default is host)
1036: . -draw_pause <sec> - Sets number of seconds to pause after display
1037: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1038: . -viewer_socket_machine <machine> - -
1039: . -viewer_socket_port <port> - -
1040: . -mat_view binary - save matrix to file in binary format
1041: - -viewer_binary_filename <name> - -
1043: Level: beginner
1045: Notes:
1046: The available visualization contexts include
1047: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1048: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1049: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1050: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1052: The user can open alternative visualization contexts with
1053: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1054: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1055: specified file; corresponding input uses `MatLoad()`
1056: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1057: an X window display
1058: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1059: Currently only the `MATSEQDENSE` and `MATAIJ`
1060: matrix types support the Socket viewer.
1062: The user can call `PetscViewerPushFormat()` to specify the output
1063: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1064: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1065: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1066: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1067: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1068: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1069: format common among all matrix types
1070: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1071: format (which is in many cases the same as the default)
1072: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1073: size and structure (not the matrix entries)
1074: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1075: the matrix structure (still not vector or matrix entries)
1077: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1078: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1080: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1082: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1083: viewer is used.
1085: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1086: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1088: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1089: and then use the following mouse functions.
1090: .vb
1091: left mouse: zoom in
1092: middle mouse: zoom out
1093: right mouse: continue with the simulation
1094: .ve
1096: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1097: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1098: @*/
1099: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1100: {
1101: PetscInt rows, cols, rbs, cbs;
1102: PetscBool isascii, isstring, issaws;
1103: PetscViewerFormat format;
1104: PetscMPIInt size;
1106: PetscFunctionBegin;
1109: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1112: PetscCall(PetscViewerGetFormat(viewer, &format));
1113: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1114: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1116: #if !defined(PETSC_HAVE_THREADSAFETY)
1117: insidematview++;
1118: #endif
1119: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1120: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1121: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1122: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1124: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1125: if (isascii) {
1126: if (!mat->preallocated) {
1127: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1128: #if !defined(PETSC_HAVE_THREADSAFETY)
1129: insidematview--;
1130: #endif
1131: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1132: PetscFunctionReturn(PETSC_SUCCESS);
1133: }
1134: if (!mat->assembled) {
1135: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1136: #if !defined(PETSC_HAVE_THREADSAFETY)
1137: insidematview--;
1138: #endif
1139: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1140: PetscFunctionReturn(PETSC_SUCCESS);
1141: }
1142: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1143: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1144: MatNullSpace nullsp, transnullsp;
1146: PetscCall(PetscViewerASCIIPushTab(viewer));
1147: PetscCall(MatGetSize(mat, &rows, &cols));
1148: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1149: if (rbs != 1 || cbs != 1) {
1150: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1151: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1152: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1153: if (mat->factortype) {
1154: MatSolverType solver;
1155: PetscCall(MatFactorGetSolverType(mat, &solver));
1156: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1157: }
1158: if (mat->ops->getinfo) {
1159: MatInfo info;
1160: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1161: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1162: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1163: }
1164: PetscCall(MatGetNullSpace(mat, &nullsp));
1165: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1166: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1167: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1168: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1169: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1170: PetscCall(PetscViewerASCIIPushTab(viewer));
1171: PetscCall(MatProductView(mat, viewer));
1172: PetscCall(PetscViewerASCIIPopTab(viewer));
1173: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1174: IS tmp;
1176: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1177: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1178: PetscCall(PetscViewerASCIIPushTab(viewer));
1179: PetscCall(ISView(tmp, viewer));
1180: PetscCall(PetscViewerASCIIPopTab(viewer));
1181: PetscCall(ISDestroy(&tmp));
1182: }
1183: }
1184: } else if (issaws) {
1185: #if defined(PETSC_HAVE_SAWS)
1186: PetscMPIInt rank;
1188: PetscCall(PetscObjectName((PetscObject)mat));
1189: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1190: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1191: #endif
1192: } else if (isstring) {
1193: const char *type;
1194: PetscCall(MatGetType(mat, &type));
1195: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1196: PetscTryTypeMethod(mat, view, viewer);
1197: }
1198: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1199: PetscCall(PetscViewerASCIIPushTab(viewer));
1200: PetscUseTypeMethod(mat, viewnative, viewer);
1201: PetscCall(PetscViewerASCIIPopTab(viewer));
1202: } else if (mat->ops->view) {
1203: PetscCall(PetscViewerASCIIPushTab(viewer));
1204: PetscUseTypeMethod(mat, view, viewer);
1205: PetscCall(PetscViewerASCIIPopTab(viewer));
1206: }
1207: if (isascii) {
1208: PetscCall(PetscViewerGetFormat(viewer, &format));
1209: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1210: }
1211: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1212: #if !defined(PETSC_HAVE_THREADSAFETY)
1213: insidematview--;
1214: #endif
1215: PetscFunctionReturn(PETSC_SUCCESS);
1216: }
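/*
  A minimal MatView() sketch, assuming an assembled matrix A (hypothetical variable);
  pushing a format changes how the ASCII viewer prints the matrix.

    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO));
    PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
*/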
1218: #if defined(PETSC_USE_DEBUG)
1219: #include <../src/sys/totalview/tv_data_display.h>
1220: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1221: {
1222: TV_add_row("Local rows", "int", &mat->rmap->n);
1223: TV_add_row("Local columns", "int", &mat->cmap->n);
1224: TV_add_row("Global rows", "int", &mat->rmap->N);
1225: TV_add_row("Global columns", "int", &mat->cmap->N);
1226: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1227: return TV_format_OK;
1228: }
1229: #endif
1231: /*@
1232: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1233: with `MatView()`. The matrix format is determined from the options database.
1234: Generates a parallel MPI matrix if the communicator has more than one
1235: processor. The default matrix type is `MATAIJ`.
1237: Collective
1239: Input Parameters:
1240: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1241: or some related function before a call to `MatLoad()`
1242: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1244: Options Database Key:
1245: . -matload_block_size <bs> - set block size
1247: Level: beginner
1249: Notes:
1250: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1251: `Mat` before calling this routine if you wish to set it from the options database.
1253: `MatLoad()` automatically loads into the options database any options
1254: given in the file filename.info where filename is the name of the file
1255: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1256: file will be ignored if you use the -viewer_binary_skip_info option.
1258: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1259: sets the default matrix type AIJ and sets the local and global sizes.
1260: If type and/or size is already set, then the same are used.
1262: In parallel, each processor can load a subset of rows (or the
1263: entire matrix). This routine is especially useful when a large
1264: matrix is stored on disk and only part of it is desired on each
1265: processor. For example, a parallel solver may access only some of
1266: the rows from each processor. The algorithm used here reads
1267: relatively small blocks of data rather than reading the entire
1268: matrix and then subsetting it.
1270: The viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1271: Such a viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1272: or the sequence like
1273: .vb
1274: `PetscViewer` v;
1275: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1276: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1277: `PetscViewerSetFromOptions`(v);
1278: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1279: `PetscViewerFileSetName`(v,"datafile");
1280: .ve
1281: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1282: $ -viewer_type {binary, hdf5}
1284: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1285: and src/mat/tutorials/ex10.c with the second approach.
1287: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1288: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1289: Multiple objects, both matrices and vectors, can be stored within the same file.
1290: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1292: Most users should not need to know the details of the binary storage
1293: format, since `MatLoad()` and `MatView()` completely hide these details.
1294: But for anyone who is interested, the standard binary matrix storage
1295: format is
1297: .vb
1298: PetscInt MAT_FILE_CLASSID
1299: PetscInt number of rows
1300: PetscInt number of columns
1301: PetscInt total number of nonzeros
1302: PetscInt *number nonzeros in each row
1303: PetscInt *column indices of all nonzeros (starting index is zero)
1304: PetscScalar *values of all nonzeros
1305: .ve
1306: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1307: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1308: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1310: PETSc automatically does the byte swapping for
1311: machines that store the bytes reversed. Thus if you write your own binary
1312: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1313: and `PetscBinaryWrite()` to see how this may be done.
1315: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1316: Each processor's chunk is loaded independently by its owning MPI process.
1317: Multiple objects, both matrices and vectors, can be stored within the same file.
1318: They are looked up by their PetscObject name.
1320: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1321: by default the same structure and naming of the AIJ arrays and column count
1322: within the HDF5 file. This means that a MAT file saved with the -v7.3 flag, e.g.
1323: $ save example.mat A b -v7.3
1324: can be directly read by this routine (see Reference 1 for details).
1326: Depending on your MATLAB version, this format might be the default;
1327: otherwise you can set it as the default in Preferences.
1329: Unless the -nocompression flag is used to save the file in MATLAB,
1330: PETSc must be configured with the ZLIB package.
1332: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1334: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1336: Corresponding `MatView()` is not yet implemented.
1338: The loaded matrix is actually a transpose of the original one in MATLAB,
1339: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1340: With this format, the matrix is automatically transposed by PETSc,
1341: unless the matrix is marked as SPD or symmetric
1342: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1344: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1346: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1347: @*/
1348: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1349: {
1350: PetscBool flg;
1352: PetscFunctionBegin;
1356: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1358: flg = PETSC_FALSE;
1359: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1360: if (flg) {
1361: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1362: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1363: }
1364: flg = PETSC_FALSE;
1365: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1366: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1368: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1369: PetscUseTypeMethod(mat, load, viewer);
1370: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1371: PetscFunctionReturn(PETSC_SUCCESS);
1372: }
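/*
  Sketch of loading a matrix written earlier with MatView(), assuming a binary file named
  "matrix.dat" (hypothetical filename).

    Mat         A;
    PetscViewer viewer;
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &viewer));
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetFromOptions(A));
    PetscCall(MatLoad(A, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
*/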
1374: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1375: {
1376: Mat_Redundant *redund = *redundant;
1378: PetscFunctionBegin;
1379: if (redund) {
1380: if (redund->matseq) { /* via MatCreateSubMatrices() */
1381: PetscCall(ISDestroy(&redund->isrow));
1382: PetscCall(ISDestroy(&redund->iscol));
1383: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1384: } else {
1385: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1386: PetscCall(PetscFree(redund->sbuf_j));
1387: PetscCall(PetscFree(redund->sbuf_a));
1388: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1389: PetscCall(PetscFree(redund->rbuf_j[i]));
1390: PetscCall(PetscFree(redund->rbuf_a[i]));
1391: }
1392: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1393: }
1395: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1396: PetscCall(PetscFree(redund));
1397: }
1398: PetscFunctionReturn(PETSC_SUCCESS);
1399: }
1401: /*@
1402: MatDestroy - Frees space taken by a matrix.
1404: Collective
1406: Input Parameter:
1407: . A - the matrix
1409: Level: beginner
1411: Developer Note:
1412: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1413: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1414: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1415: if changes are needed here.
1417: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1418: @*/
1419: PetscErrorCode MatDestroy(Mat *A)
1420: {
1421: PetscFunctionBegin;
1422: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1424: if (--((PetscObject)*A)->refct > 0) {
1425: *A = NULL;
1426: PetscFunctionReturn(PETSC_SUCCESS);
1427: }
1429: /* if memory was published with SAWs then destroy it */
1430: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1431: PetscTryTypeMethod(*A, destroy);
1433: PetscCall(PetscFree((*A)->factorprefix));
1434: PetscCall(PetscFree((*A)->defaultvectype));
1435: PetscCall(PetscFree((*A)->defaultrandtype));
1436: PetscCall(PetscFree((*A)->bsizes));
1437: PetscCall(PetscFree((*A)->solvertype));
1438: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1439: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1440: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1441: PetscCall(MatProductClear(*A));
1442: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1443: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1444: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1445: PetscCall(MatDestroy(&(*A)->schur));
1446: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1447: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1448: PetscCall(PetscHeaderDestroy(A));
1449: PetscFunctionReturn(PETSC_SUCCESS);
1450: }
1452: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1453: /*@
1454: MatSetValues - Inserts or adds a block of values into a matrix.
1455: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1456: MUST be called after all calls to `MatSetValues()` have been completed.
1458: Not Collective
1460: Input Parameters:
1461: + mat - the matrix
1462: . v - a logically two-dimensional array of values
1463: . m - the number of rows
1464: . idxm - the global indices of the rows
1465: . n - the number of columns
1466: . idxn - the global indices of the columns
1467: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1469: Level: beginner
1471: Notes:
1472: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1474: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1475: options cannot be mixed without intervening calls to the assembly
1476: routines.
1478: `MatSetValues()` uses 0-based row and column numbers in Fortran
1479: as well as in C.
1481: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1482: simply ignored. This allows easily inserting element stiffness matrices
1483: with homogeneous Dirichlet boundary conditions that you don't want represented
1484: in the matrix.
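   Example Usage:
   A minimal sketch (the matrix `A` is assumed to be preallocated already; the indices and values are illustrative only) that adds a 2x2 element contribution and then assembles the matrix:
.vb
   PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
   PetscScalar v[4]    = {4.0, -1.0, -1.0, 4.0}; /* row-oriented: v[i*2 + j] goes to entry (rows[i], cols[j]) */

   PetscCall(MatSetValues(A, 2, rows, 2, cols, v, ADD_VALUES));
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve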
1486: Efficiency Alert:
1487: The routine `MatSetValuesBlocked()` may offer much better efficiency
1488: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1490: Fortran Notes:
1491: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1492: .vb
1493: MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
1494: .ve
1496: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1498: Developer Note:
1499: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1500: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1502: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1503: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1504: @*/
1505: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1506: {
1507: PetscFunctionBeginHot;
1510: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1511: PetscAssertPointer(idxm, 3);
1512: PetscAssertPointer(idxn, 5);
1513: MatCheckPreallocated(mat, 1);
1515: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1516: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1518: if (PetscDefined(USE_DEBUG)) {
1519: PetscInt i, j;
1521: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1522: if (v) {
1523: for (i = 0; i < m; i++) {
1524: for (j = 0; j < n; j++) {
1525: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1526: #if defined(PETSC_USE_COMPLEX)
1527: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1528: #else
1529: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1530: #endif
1531: }
1532: }
1533: }
1534: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1535: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1536: }
1538: if (mat->assembled) {
1539: mat->was_assembled = PETSC_TRUE;
1540: mat->assembled = PETSC_FALSE;
1541: }
1542: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1543: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1544: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1545: PetscFunctionReturn(PETSC_SUCCESS);
1546: }
1548: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1549: /*@
1550: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1551: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1552: MUST be called after all calls to `MatSetValues()` have been completed.
1554: Not Collective
1556: Input Parameters:
1557: + mat - the matrix
1558: . v - a logically two-dimensional array of values
1559: . ism - the rows to provide
1560: . isn - the columns to provide
1561: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1563: Level: beginner
1565: Notes:
1566: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1568: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1569: options cannot be mixed without intervening calls to the assembly
1570: routines.
1572: `MatSetValues()` uses 0-based row and column numbers in Fortran
1573: as well as in C.
1575: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1576: simply ignored. This allows easily inserting element stiffness matrices
1577: with homogeneous Dirichlet boundary conditions that you don't want represented
1578: in the matrix.
1580: Efficiency Alert:
1581: The routine `MatSetValuesBlocked()` may offer much better efficiency
1582: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1584: This is currently not optimized for any particular `ISType`
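   Example Usage:
   A minimal sketch (assuming `A` is preallocated; the indices and values are illustrative only) that inserts a dense 2x3 block whose rows and columns are described by index sets:
.vb
   IS          ism, isn;
   PetscInt    rows[2] = {0, 1}, cols[3] = {0, 1, 2};
   PetscScalar v[6]    = {1, 2, 3, 4, 5, 6};

   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 3, cols, PETSC_COPY_VALUES, &isn));
   PetscCall(MatSetValuesIS(A, ism, isn, v, INSERT_VALUES));
   PetscCall(ISDestroy(&ism));
   PetscCall(ISDestroy(&isn));
.ve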
1586: Developer Note:
1587: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1588: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1590: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1591: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1592: @*/
1593: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1594: {
1595: PetscInt m, n;
1596: const PetscInt *rows, *cols;
1598: PetscFunctionBeginHot;
1600: PetscCall(ISGetIndices(ism, &rows));
1601: PetscCall(ISGetIndices(isn, &cols));
1602: PetscCall(ISGetLocalSize(ism, &m));
1603: PetscCall(ISGetLocalSize(isn, &n));
1604: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1605: PetscCall(ISRestoreIndices(ism, &rows));
1606: PetscCall(ISRestoreIndices(isn, &cols));
1607: PetscFunctionReturn(PETSC_SUCCESS);
1608: }
1610: /*@
1611: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1612: values into a matrix
1614: Not Collective
1616: Input Parameters:
1617: + mat - the matrix
1618: . row - the (block) row to set
1619: - v - a logically two-dimensional array of values
1621: Level: intermediate
1623: Notes:
1624: The values, `v`, are column-oriented (for the block version) and sorted
1626: All the nonzero values in `row` must be provided
1628: The matrix must have previously had its column indices set, likely by having been assembled.
1630: `row` must belong to this MPI process
1632: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1633: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1634: @*/
1635: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1636: {
1637: PetscInt globalrow;
1639: PetscFunctionBegin;
1642: PetscAssertPointer(v, 3);
1643: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1644: PetscCall(MatSetValuesRow(mat, globalrow, v));
1645: PetscFunctionReturn(PETSC_SUCCESS);
1646: }
1648: /*@
1649: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1650: values into a matrix
1652: Not Collective
1654: Input Parameters:
1655: + mat - the matrix
1656: . row - the (block) row to set
1657: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1659: Level: advanced
1661: Notes:
1662: The values, `v`, are column-oriented for the block version.
1664: All the nonzeros in `row` must be provided
1666: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.
1668: `row` must belong to this process
1670: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1671: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1672: @*/
1673: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1674: {
1675: PetscFunctionBeginHot;
1678: MatCheckPreallocated(mat, 1);
1679: PetscAssertPointer(v, 3);
1680: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1681: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1682: mat->insertmode = INSERT_VALUES;
1684: if (mat->assembled) {
1685: mat->was_assembled = PETSC_TRUE;
1686: mat->assembled = PETSC_FALSE;
1687: }
1688: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1689: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1690: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1691: PetscFunctionReturn(PETSC_SUCCESS);
1692: }
1694: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1695: /*@
1696: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1697: using structured grid indexing
1699: Not Collective
1701: Input Parameters:
1702: + mat - the matrix
1703: . m - number of rows being entered
1704: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1705: . n - number of columns being entered
1706: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1707: . v - a logically two-dimensional array of values
1708: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1710: Level: beginner
1712: Notes:
1713: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1715: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1716: options cannot be mixed without intervening calls to the assembly
1717: routines.
1719: The grid coordinates are across the entire grid, not just the local portion
1721: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1722: as well as in C.
1724: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1726: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1727: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1729: The columns and rows in the stencil passed in MUST be contained within the
1730: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1731: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1732: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1733: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1735: For periodic boundary conditions use negative indices for values to the left (below 0), which are
1736: obtained by wrapping values from the right edge. For values to the right of the last entry, use that entry's index plus one,
1737: etc., to obtain values wrapped around from the left edge. This works only for the
1738: `DM_BOUNDARY_PERIODIC` boundary type.
1740: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1741: a single value per point) you can skip filling those indices.
1743: Inspired by the structured grid interface to the HYPRE package
1744: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1746: Efficiency Alert:
1747: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1748: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
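   Example Usage:
   A minimal sketch of setting one row of a 2d 5-point Laplacian stencil for a matrix obtained with `DMCreateMatrix()` from a `DMDA` (the grid indices `i` and `j` are assumed to lie within this process's ghosted region; the values are illustrative):
.vb
   MatStencil  row, cols[5];
   PetscScalar v[5] = {-1.0, -1.0, 4.0, -1.0, -1.0};

   row.i = i;         row.j = j;
   cols[0].i = i;     cols[0].j = j - 1;
   cols[1].i = i - 1; cols[1].j = j;
   cols[2].i = i;     cols[2].j = j;
   cols[3].i = i + 1; cols[3].j = j;
   cols[4].i = i;     cols[4].j = j + 1;
   PetscCall(MatSetValuesStencil(A, 1, &row, 5, cols, v, INSERT_VALUES));
.ve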
1750: Fortran Note:
1751: `idxm` and `idxn` should be declared as
1752: $ MatStencil idxm(4,m),idxn(4,n)
1753: and the values inserted using
1754: .vb
1755: idxm(MatStencil_i,1) = i
1756: idxm(MatStencil_j,1) = j
1757: idxm(MatStencil_k,1) = k
1758: idxm(MatStencil_c,1) = c
1759: etc
1760: .ve
1762: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1763: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1764: @*/
1765: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1766: {
1767: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1768: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1769: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1771: PetscFunctionBegin;
1772: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1775: PetscAssertPointer(idxm, 3);
1776: PetscAssertPointer(idxn, 5);
1778: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1779: jdxm = buf;
1780: jdxn = buf + m;
1781: } else {
1782: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1783: jdxm = bufm;
1784: jdxn = bufn;
1785: }
1786: for (i = 0; i < m; i++) {
1787: for (j = 0; j < 3 - sdim; j++) dxm++;
1788: tmp = *dxm++ - starts[0];
1789: for (j = 0; j < dim - 1; j++) {
1790: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1791: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1792: }
1793: if (mat->stencil.noc) dxm++;
1794: jdxm[i] = tmp;
1795: }
1796: for (i = 0; i < n; i++) {
1797: for (j = 0; j < 3 - sdim; j++) dxn++;
1798: tmp = *dxn++ - starts[0];
1799: for (j = 0; j < dim - 1; j++) {
1800: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1801: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1802: }
1803: if (mat->stencil.noc) dxn++;
1804: jdxn[i] = tmp;
1805: }
1806: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1807: PetscCall(PetscFree2(bufm, bufn));
1808: PetscFunctionReturn(PETSC_SUCCESS);
1809: }
1811: /*@
1812: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1813: using structured grid indexing
1815: Not Collective
1817: Input Parameters:
1818: + mat - the matrix
1819: . m - number of rows being entered
1820: . idxm - grid coordinates for matrix rows being entered
1821: . n - number of columns being entered
1822: . idxn - grid coordinates for matrix columns being entered
1823: . v - a logically two-dimensional array of values
1824: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1826: Level: beginner
1828: Notes:
1829: By default the values, `v`, are row-oriented and unsorted.
1830: See `MatSetOption()` for other options.
1832: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1833: options cannot be mixed without intervening calls to the assembly
1834: routines.
1836: The grid coordinates are across the entire grid, not just the local portion
1838: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1839: as well as in C.
1841: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1843: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1844: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1846: The columns and rows in the stencil passed in MUST be contained within the
1847: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1848: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1849: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1850: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1852: Negative indices may be passed in idxm and idxn, these rows and columns are
1853: simply ignored. This allows easily inserting element stiffness matrices
1854: with homogeneous Dirichlet boundary conditions that you don't want represented
1855: in the matrix.
1857: Inspired by the structured grid interface to the HYPRE package
1858: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1860: Fortran Note:
1861: `idxm` and `idxn` should be declared as
1862: $ MatStencil idxm(4,m),idxn(4,n)
1863: and the values inserted using
1864: .vb
1865: idxm(MatStencil_i,1) = i
1866: idxm(MatStencil_j,1) = j
1867: idxm(MatStencil_k,1) = k
1868: etc
1869: .ve
1871: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1872: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1873: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1874: @*/
1875: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1876: {
1877: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1878: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1879: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1881: PetscFunctionBegin;
1882: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1885: PetscAssertPointer(idxm, 3);
1886: PetscAssertPointer(idxn, 5);
1887: PetscAssertPointer(v, 6);
1889: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1890: jdxm = buf;
1891: jdxn = buf + m;
1892: } else {
1893: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1894: jdxm = bufm;
1895: jdxn = bufn;
1896: }
1897: for (i = 0; i < m; i++) {
1898: for (j = 0; j < 3 - sdim; j++) dxm++;
1899: tmp = *dxm++ - starts[0];
1900: for (j = 0; j < sdim - 1; j++) {
1901: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1902: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1903: }
1904: dxm++;
1905: jdxm[i] = tmp;
1906: }
1907: for (i = 0; i < n; i++) {
1908: for (j = 0; j < 3 - sdim; j++) dxn++;
1909: tmp = *dxn++ - starts[0];
1910: for (j = 0; j < sdim - 1; j++) {
1911: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1912: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1913: }
1914: dxn++;
1915: jdxn[i] = tmp;
1916: }
1917: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1918: PetscCall(PetscFree2(bufm, bufn));
1919: PetscFunctionReturn(PETSC_SUCCESS);
1920: }
1922: /*@
1923: MatSetStencil - Sets the grid information for setting values into a matrix via
1924: `MatSetValuesStencil()`
1926: Not Collective
1928: Input Parameters:
1929: + mat - the matrix
1930: . dim - dimension of the grid 1, 2, or 3
1931: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1932: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1933: - dof - number of degrees of freedom per node
1935: Level: beginner
1937: Notes:
1938: Inspired by the structured grid interface to the HYPRE package
1939: (www.llnl.gov/CASC/hyper)
1941: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1942: user.
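   Example Usage:
   A minimal sketch for a 2d grid, assuming the ghost corners (gxs, gys) and ghosted sizes (gxm, gym) were obtained from `DMDAGetGhostCorners()` and there is one degree of freedom per node:
.vb
   PetscInt dims[2]   = {gxm, gym};
   PetscInt starts[2] = {gxs, gys};

   PetscCall(MatSetStencil(A, 2, dims, starts, 1));
.ve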
1944: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1945: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1946: @*/
1947: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1948: {
1949: PetscFunctionBegin;
1951: PetscAssertPointer(dims, 3);
1952: PetscAssertPointer(starts, 4);
1954: mat->stencil.dim = dim + (dof > 1);
1955: for (PetscInt i = 0; i < dim; i++) {
1956: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1957: mat->stencil.starts[i] = starts[dim - i - 1];
1958: }
1959: mat->stencil.dims[dim] = dof;
1960: mat->stencil.starts[dim] = 0;
1961: mat->stencil.noc = (PetscBool)(dof == 1);
1962: PetscFunctionReturn(PETSC_SUCCESS);
1963: }
1965: /*@
1966: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1968: Not Collective
1970: Input Parameters:
1971: + mat - the matrix
1972: . v - a logically two-dimensional array of values
1973: . m - the number of block rows
1974: . idxm - the global block indices
1975: . n - the number of block columns
1976: . idxn - the global block indices
1977: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1979: Level: intermediate
1981: Notes:
1982: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1983: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1985: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1986: NOT the total number of rows/columns; for example, if the block size is 2 and
1987: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
1988: The values in `idxm` would be 1 2; that is the first index for each block divided by
1989: the block size.
1991: You must call `MatSetBlockSize()` when constructing this matrix (before
1992: preallocating it).
1994: By default the values, `v`, are row-oriented, so the layout of
1995: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1997: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1998: options cannot be mixed without intervening calls to the assembly
1999: routines.
2001: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
2002: as well as in C.
2004: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
2005: simply ignored. This allows easily inserting element stiffness matrices
2006: with homogeneous Dirichlet boundary conditions that you don't want represented
2007: in the matrix.
2009: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2010: internal searching must be done to determine where to place the
2011: data in the matrix storage space. By instead inserting blocks of
2012: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2013: reduced.
2015: Example:
2016: .vb
2017: Suppose m=n=2 and block size(bs) = 2 The array is
2019: 1 2 | 3 4
2020: 5 6 | 7 8
2021: - - - | - - -
2022: 9 10 | 11 12
2023: 13 14 | 15 16
2025: v[] should be passed in like
2026: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2028: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2029: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2030: .ve
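   A minimal sketch of the corresponding call for the layout above, assuming block size 2 and illustrative block indices 0 and 1 for both rows and columns:
.vb
   PetscInt    idxm[2] = {0, 1}, idxn[2] = {0, 1};
   PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

   PetscCall(MatSetValuesBlocked(A, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve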
2032: Fortran Notes:
2033: If any of `idmx`, `idxn`, and `v` are scalars pass them using, for example,
2034: .vb
2035: MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
2036: .ve
2038: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2040: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2041: @*/
2042: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2043: {
2044: PetscFunctionBeginHot;
2047: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2048: PetscAssertPointer(idxm, 3);
2049: PetscAssertPointer(idxn, 5);
2050: MatCheckPreallocated(mat, 1);
2051: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2052: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2053: if (PetscDefined(USE_DEBUG)) {
2054: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2055: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2056: }
2057: if (PetscDefined(USE_DEBUG)) {
2058: PetscInt rbs, cbs, M, N, i;
2059: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2060: PetscCall(MatGetSize(mat, &M, &N));
2061: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2062: for (i = 0; i < n; i++)
2063: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2064: }
2065: if (mat->assembled) {
2066: mat->was_assembled = PETSC_TRUE;
2067: mat->assembled = PETSC_FALSE;
2068: }
2069: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2070: if (mat->ops->setvaluesblocked) {
2071: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2072: } else {
2073: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2074: PetscInt i, j, bs, cbs;
2076: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2077: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2078: iidxm = buf;
2079: iidxn = buf + m * bs;
2080: } else {
2081: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2082: iidxm = bufr;
2083: iidxn = bufc;
2084: }
2085: for (i = 0; i < m; i++) {
2086: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2087: }
2088: if (m != n || bs != cbs || idxm != idxn) {
2089: for (i = 0; i < n; i++) {
2090: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2091: }
2092: } else iidxn = iidxm;
2093: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2094: PetscCall(PetscFree2(bufr, bufc));
2095: }
2096: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2097: PetscFunctionReturn(PETSC_SUCCESS);
2098: }
2100: /*@
2101: MatGetValues - Gets a block of local values from a matrix.
2103: Not Collective; can only return values that are owned by the given process
2105: Input Parameters:
2106: + mat - the matrix
2107: . v - a logically two-dimensional array for storing the values
2108: . m - the number of rows
2109: . idxm - the global indices of the rows
2110: . n - the number of columns
2111: - idxn - the global indices of the columns
2113: Level: advanced
2115: Notes:
2116: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2117: The values, `v`, are then returned in a row-oriented format,
2118: analogous to that used by default in `MatSetValues()`.
2120: `MatGetValues()` uses 0-based row and column numbers in
2121: Fortran as well as in C.
2123: `MatGetValues()` requires that the matrix has been assembled
2124: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2125: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2126: without intermediate matrix assembly.
2128: Negative row or column indices will be ignored and those locations in `v` will be
2129: left unchanged.
2131: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2132: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2133: from `MatGetOwnershipRange`(mat,&rstart,&rend).
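   Example Usage:
   A minimal sketch that retrieves a locally owned 2x2 block from an assembled matrix, assuming `rstart` was obtained from `MatGetOwnershipRange()` and this process owns at least two rows:
.vb
   PetscInt    rows[2] = {rstart, rstart + 1}, cols[2] = {0, 1};
   PetscScalar vals[4]; /* filled row-oriented: vals[i*2 + j] = A(rows[i], cols[j]) */

   PetscCall(MatGetValues(A, 2, rows, 2, cols, vals));
.ve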
2135: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2136: @*/
2137: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2138: {
2139: PetscFunctionBegin;
2142: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2143: PetscAssertPointer(idxm, 3);
2144: PetscAssertPointer(idxn, 5);
2145: PetscAssertPointer(v, 6);
2146: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2147: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2148: MatCheckPreallocated(mat, 1);
2150: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2151: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2152: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2153: PetscFunctionReturn(PETSC_SUCCESS);
2154: }
2156: /*@
2157: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2158: defined previously by `MatSetLocalToGlobalMapping()`
2160: Not Collective
2162: Input Parameters:
2163: + mat - the matrix
2164: . nrow - number of rows
2165: . irow - the row local indices
2166: . ncol - number of columns
2167: - icol - the column local indices
2169: Output Parameter:
2170: . y - a logically two-dimensional array of values
2172: Level: advanced
2174: Notes:
2175: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2177: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2178: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2179: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2180: with `MatSetLocalToGlobalMapping()`.
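   Example Usage:
   A minimal sketch that retrieves one row of two entries using local indices, assuming the local-to-global mappings have been set (for instance by `DMCreateMatrix()` or `MatSetLocalToGlobalMapping()`):
.vb
   PetscInt    lrow = 0, lcols[2] = {0, 1};
   PetscScalar vals[2];

   PetscCall(MatGetValuesLocal(A, 1, &lrow, 2, lcols, vals));
.ve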
2182: Developer Note:
2183: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2184: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2186: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2187: `MatSetValuesLocal()`, `MatGetValues()`
2188: @*/
2189: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2190: {
2191: PetscFunctionBeginHot;
2194: MatCheckPreallocated(mat, 1);
2195: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2196: PetscAssertPointer(irow, 3);
2197: PetscAssertPointer(icol, 5);
2198: if (PetscDefined(USE_DEBUG)) {
2199: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2200: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2201: }
2202: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2203: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2204: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2205: else {
2206: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2207: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2208: irowm = buf;
2209: icolm = buf + nrow;
2210: } else {
2211: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2212: irowm = bufr;
2213: icolm = bufc;
2214: }
2215: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2216: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2217: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2218: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2219: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2220: PetscCall(PetscFree2(bufr, bufc));
2221: }
2222: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2223: PetscFunctionReturn(PETSC_SUCCESS);
2224: }
2226: /*@
2227: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2228: the same size. Currently, this can only be called once and creates the given matrix.
2230: Not Collective
2232: Input Parameters:
2233: + mat - the matrix
2234: . nb - the number of blocks
2235: . bs - the number of rows (and columns) in each block
2236: . rows - a concatenation of the rows for each block
2237: - v - a concatenation of logically two-dimensional arrays of values
2239: Level: advanced
2241: Notes:
2242: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2244: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
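   Example Usage:
   A minimal sketch that adds two 2x2 blocks in one call (the row indices and values are illustrative only):
.vb
   PetscInt    rows[4] = {0, 1, 2, 3};                 /* rows of block 0 followed by rows of block 1 */
   PetscScalar v[8]    = {4, -1, -1, 4, 4, -1, -1, 4}; /* two row-oriented 2x2 blocks, concatenated */

   PetscCall(MatSetValuesBatch(A, 2, 2, rows, v));
.ve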
2246: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2247: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2248: @*/
2249: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2250: {
2251: PetscFunctionBegin;
2254: PetscAssertPointer(rows, 4);
2255: PetscAssertPointer(v, 5);
2256: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2258: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2259: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2260: else {
2261: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2262: }
2263: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2264: PetscFunctionReturn(PETSC_SUCCESS);
2265: }
2267: /*@
2268: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2269: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2270: using a local (per-processor) numbering.
2272: Not Collective
2274: Input Parameters:
2275: + x - the matrix
2276: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2277: - cmapping - column mapping
2279: Level: intermediate
2281: Note:
2282: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
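   Example Usage:
   A minimal sketch, assuming `gidx` is an array of `nlocal` global indices describing this process's local numbering and the same mapping is used for rows and columns:
.vb
   ISLocalToGlobalMapping map;

   PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD, 1, nlocal, gidx, PETSC_COPY_VALUES, &map));
   PetscCall(MatSetLocalToGlobalMapping(A, map, map));
   PetscCall(ISLocalToGlobalMappingDestroy(&map));
.ve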
2284: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2285: @*/
2286: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2287: {
2288: PetscFunctionBegin;
2293: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2294: else {
2295: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2296: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2297: }
2298: PetscFunctionReturn(PETSC_SUCCESS);
2299: }
2301: /*@
2302: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2304: Not Collective
2306: Input Parameter:
2307: . A - the matrix
2309: Output Parameters:
2310: + rmapping - row mapping
2311: - cmapping - column mapping
2313: Level: advanced
2315: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2316: @*/
2317: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2318: {
2319: PetscFunctionBegin;
2322: if (rmapping) {
2323: PetscAssertPointer(rmapping, 2);
2324: *rmapping = A->rmap->mapping;
2325: }
2326: if (cmapping) {
2327: PetscAssertPointer(cmapping, 3);
2328: *cmapping = A->cmap->mapping;
2329: }
2330: PetscFunctionReturn(PETSC_SUCCESS);
2331: }
2333: /*@
2334: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2336: Logically Collective
2338: Input Parameters:
2339: + A - the matrix
2340: . rmap - row layout
2341: - cmap - column layout
2343: Level: advanced
2345: Note:
2346: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2348: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2349: @*/
2350: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2351: {
2352: PetscFunctionBegin;
2354: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2355: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2356: PetscFunctionReturn(PETSC_SUCCESS);
2357: }
2359: /*@
2360: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2362: Not Collective
2364: Input Parameter:
2365: . A - the matrix
2367: Output Parameters:
2368: + rmap - row layout
2369: - cmap - column layout
2371: Level: advanced
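   Example Usage:
   A minimal sketch of querying the row layout for this process's ownership range:
.vb
   PetscLayout rmap;
   PetscInt    rstart, rend;

   PetscCall(MatGetLayouts(A, &rmap, NULL));
   PetscCall(PetscLayoutGetRange(rmap, &rstart, &rend));
.ve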
2373: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2374: @*/
2375: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2376: {
2377: PetscFunctionBegin;
2380: if (rmap) {
2381: PetscAssertPointer(rmap, 2);
2382: *rmap = A->rmap;
2383: }
2384: if (cmap) {
2385: PetscAssertPointer(cmap, 3);
2386: *cmap = A->cmap;
2387: }
2388: PetscFunctionReturn(PETSC_SUCCESS);
2389: }
2391: /*@
2392: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2393: using a local numbering of the rows and columns.
2395: Not Collective
2397: Input Parameters:
2398: + mat - the matrix
2399: . nrow - number of rows
2400: . irow - the row local indices
2401: . ncol - number of columns
2402: . icol - the column local indices
2403: . y - a logically two-dimensional array of values
2404: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2406: Level: intermediate
2408: Notes:
2409: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2411: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2412: options cannot be mixed without intervening calls to the assembly
2413: routines.
2415: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2416: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
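   Example Usage:
   A minimal sketch that adds a 2x2 element contribution using local indices, assuming the local-to-global mapping was already set (for instance by `DMCreateMatrix()` or `MatSetLocalToGlobalMapping()`):
.vb
   PetscInt    lrows[2] = {0, 1}, lcols[2] = {0, 1};
   PetscScalar v[4]     = {4.0, -1.0, -1.0, 4.0};

   PetscCall(MatSetValuesLocal(A, 2, lrows, 2, lcols, v, ADD_VALUES));
.ve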
2418: Fortran Notes:
2419: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2420: .vb
2421: MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2422: .ve
2424: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2426: Developer Note:
2427: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2428: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2430: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2431: `MatGetValuesLocal()`
2432: @*/
2433: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2434: {
2435: PetscFunctionBeginHot;
2438: MatCheckPreallocated(mat, 1);
2439: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2440: PetscAssertPointer(irow, 3);
2441: PetscAssertPointer(icol, 5);
2442: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2443: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2444: if (PetscDefined(USE_DEBUG)) {
2445: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2446: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2447: }
2449: if (mat->assembled) {
2450: mat->was_assembled = PETSC_TRUE;
2451: mat->assembled = PETSC_FALSE;
2452: }
2453: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2454: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2455: else {
2456: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2457: const PetscInt *irowm, *icolm;
2459: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2460: bufr = buf;
2461: bufc = buf + nrow;
2462: irowm = bufr;
2463: icolm = bufc;
2464: } else {
2465: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2466: irowm = bufr;
2467: icolm = bufc;
2468: }
2469: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2470: else irowm = irow;
2471: if (mat->cmap->mapping) {
2472: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2473: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2474: } else icolm = irowm;
2475: } else icolm = icol;
2476: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2477: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2478: }
2479: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2480: PetscFunctionReturn(PETSC_SUCCESS);
2481: }
2483: /*@
2484: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2485: using a local ordering of the nodes a block at a time.
2487: Not Collective
2489: Input Parameters:
2490: + mat - the matrix
2491: . nrow - number of rows
2492: . irow - the row local indices
2493: . ncol - number of columns
2494: . icol - the column local indices
2495: . y - a logically two-dimensional array of values
2496: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2498: Level: intermediate
2500: Notes:
2501: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2502: before using this routine.
2504: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2505: options cannot be mixed without intervening calls to the assembly
2506: routines.
2508: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2509: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2511: Fortran Notes:
2512: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2513: .vb
2514: MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2515: .ve
2517: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2519: Developer Note:
2520: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2521: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2523: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2524: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2525: @*/
2526: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2527: {
2528: PetscFunctionBeginHot;
2531: MatCheckPreallocated(mat, 1);
2532: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2533: PetscAssertPointer(irow, 3);
2534: PetscAssertPointer(icol, 5);
2535: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2536: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2537: if (PetscDefined(USE_DEBUG)) {
2538: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2539: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2540: }
2542: if (mat->assembled) {
2543: mat->was_assembled = PETSC_TRUE;
2544: mat->assembled = PETSC_FALSE;
2545: }
2546: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2547: PetscInt irbs, rbs;
2548: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2549: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2550: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2551: }
2552: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2553: PetscInt icbs, cbs;
2554: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2555: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2556: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2557: }
2558: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2559: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2560: else {
2561: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2562: const PetscInt *irowm, *icolm;
2564: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2565: bufr = buf;
2566: bufc = buf + nrow;
2567: irowm = bufr;
2568: icolm = bufc;
2569: } else {
2570: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2571: irowm = bufr;
2572: icolm = bufc;
2573: }
2574: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2575: else irowm = irow;
2576: if (mat->cmap->mapping) {
2577: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2578: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2579: } else icolm = irowm;
2580: } else icolm = icol;
2581: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2582: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2583: }
2584: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2585: PetscFunctionReturn(PETSC_SUCCESS);
2586: }
2588: /*@
2589: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2591: Collective
2593: Input Parameters:
2594: + mat - the matrix
2595: - x - the vector to be multiplied
2597: Output Parameter:
2598: . y - the result
2600: Level: developer
2602: Note:
2603: The vectors `x` and `y` cannot be the same. I.e., one cannot
2604: call `MatMultDiagonalBlock`(A,y,y).
2606: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2607: @*/
2608: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2609: {
2610: PetscFunctionBegin;
2616: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2617: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2618: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2619: MatCheckPreallocated(mat, 1);
2621: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2622: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2623: PetscFunctionReturn(PETSC_SUCCESS);
2624: }
2626: /*@
2627: MatMult - Computes the matrix-vector product, $y = Ax$.
2629: Neighbor-wise Collective
2631: Input Parameters:
2632: + mat - the matrix
2633: - x - the vector to be multiplied
2635: Output Parameter:
2636: . y - the result
2638: Level: beginner
2640: Note:
2641: The vectors `x` and `y` cannot be the same. I.e., one cannot
2642: call `MatMult`(A,y,y).
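   Example Usage:
   A minimal sketch of computing $y = Ax$ with vectors created to be compatible with the matrix layouts:
.vb
   Vec x, y;

   PetscCall(MatCreateVecs(A, &x, &y)); /* x is compatible with the columns of A, y with the rows */
   PetscCall(VecSet(x, 1.0));
   PetscCall(MatMult(A, x, y));
.ve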
2644: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2645: @*/
2646: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2647: {
2648: PetscFunctionBegin;
2652: VecCheckAssembled(x);
2654: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2655: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2656: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2657: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2658: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2659: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2660: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2661: PetscCall(VecSetErrorIfLocked(y, 3));
2662: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2663: MatCheckPreallocated(mat, 1);
2665: PetscCall(VecLockReadPush(x));
2666: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2667: PetscUseTypeMethod(mat, mult, x, y);
2668: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2669: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2670: PetscCall(VecLockReadPop(x));
2671: PetscFunctionReturn(PETSC_SUCCESS);
2672: }
2674: /*@
2675: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2677: Neighbor-wise Collective
2679: Input Parameters:
2680: + mat - the matrix
2681: - x - the vector to be multiplied
2683: Output Parameter:
2684: . y - the result
2686: Level: beginner
2688: Notes:
2689: The vectors `x` and `y` cannot be the same. I.e., one cannot
2690: call `MatMultTranspose`(A,y,y).
2692: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2693: use `MatMultHermitianTranspose()` for that
2695: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2696: @*/
2697: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2698: {
2699: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2701: PetscFunctionBegin;
2705: VecCheckAssembled(x);
2708: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2709: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2710: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2711: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2712: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2713: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2714: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2715: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2716: MatCheckPreallocated(mat, 1);
2718: if (!mat->ops->multtranspose) {
2719: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2720: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2721: } else op = mat->ops->multtranspose;
2722: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2723: PetscCall(VecLockReadPush(x));
2724: PetscCall((*op)(mat, x, y));
2725: PetscCall(VecLockReadPop(x));
2726: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2727: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2728: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2729: PetscFunctionReturn(PETSC_SUCCESS);
2730: }
2732: /*@
2733: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2735: Neighbor-wise Collective
2737: Input Parameters:
2738: + mat - the matrix
2739: - x - the vector to be multiplied
2741: Output Parameter:
2742: . y - the result
2744: Level: beginner
2746: Notes:
2747: The vectors `x` and `y` cannot be the same. I.e., one cannot
2748: call `MatMultHermitianTranspose`(A,y,y).
2750: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2752: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2754: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2755: @*/
2756: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2757: {
2758: PetscFunctionBegin;
2764: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2765: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2766: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2767: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2768: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2769: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2770: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2771: MatCheckPreallocated(mat, 1);
2773: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2774: #if defined(PETSC_USE_COMPLEX)
2775: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2776: PetscCall(VecLockReadPush(x));
2777: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2778: else PetscUseTypeMethod(mat, mult, x, y);
2779: PetscCall(VecLockReadPop(x));
2780: } else {
2781: Vec w;
2782: PetscCall(VecDuplicate(x, &w));
2783: PetscCall(VecCopy(x, w));
2784: PetscCall(VecConjugate(w));
2785: PetscCall(MatMultTranspose(mat, w, y));
2786: PetscCall(VecDestroy(&w));
2787: PetscCall(VecConjugate(y));
2788: }
2789: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2790: #else
2791: PetscCall(MatMultTranspose(mat, x, y));
2792: #endif
2793: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2794: PetscFunctionReturn(PETSC_SUCCESS);
2795: }
2797: /*@
2798: MatMultAdd - Computes $v3 = v2 + A * v1$.
2800: Neighbor-wise Collective
2802: Input Parameters:
2803: + mat - the matrix
2804: . v1 - the vector to be multiplied by `mat`
2805: - v2 - the vector to be added to the result
2807: Output Parameter:
2808: . v3 - the result
2810: Level: beginner
2812: Note:
2813: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2814: call `MatMultAdd`(A,v1,v2,v1).
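   Example:
   A minimal sketch, assuming `A` is an assembled matrix
.vb
   Vec v1, v2, v3;

   PetscCall(MatCreateVecs(A, &v1, &v2)); /* v1 conforms with the columns of A, v2 with the rows */
   PetscCall(VecDuplicate(v2, &v3));
   PetscCall(VecSet(v1, 1.0));
   PetscCall(VecSet(v2, 2.0));
   PetscCall(MatMultAdd(A, v1, v2, v3)); /* v3 = v2 + A*v1 */
.ve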
2816: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2817: @*/
2818: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2819: {
2820: PetscFunctionBegin;
2827: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2828: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2829: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2830: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2831: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2832: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2833: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2834: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2835: MatCheckPreallocated(mat, 1);
2837: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2838: PetscCall(VecLockReadPush(v1));
2839: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2840: PetscCall(VecLockReadPop(v1));
2841: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2842: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2843: PetscFunctionReturn(PETSC_SUCCESS);
2844: }
2846: /*@
2847: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2849: Neighbor-wise Collective
2851: Input Parameters:
2852: + mat - the matrix
2853: . v1 - the vector to be multiplied by the transpose of the matrix
2854: - v2 - the vector to be added to the result
2856: Output Parameter:
2857: . v3 - the result
2859: Level: beginner
2861: Note:
2862: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2863: call `MatMultTransposeAdd`(A,v1,v2,v1).
2865: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2866: @*/
2867: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2868: {
2869: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2871: PetscFunctionBegin;
2878: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2879: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2880: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2881: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2882: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2883: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2884: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2885: MatCheckPreallocated(mat, 1);
2887: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2888: PetscCall(VecLockReadPush(v1));
2889: PetscCall((*op)(mat, v1, v2, v3));
2890: PetscCall(VecLockReadPop(v1));
2891: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2892: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2893: PetscFunctionReturn(PETSC_SUCCESS);
2894: }
2896: /*@
2897: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2899: Neighbor-wise Collective
2901: Input Parameters:
2902: + mat - the matrix
2903: . v1 - the vector to be multiplied by the Hermitian transpose
2904: - v2 - the vector to be added to the result
2906: Output Parameter:
2907: . v3 - the result
2909: Level: beginner
2911: Note:
2912: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2913: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2915: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2916: @*/
2917: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2918: {
2919: PetscFunctionBegin;
2926: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2927: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2928: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2929: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2930: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2931: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2932: MatCheckPreallocated(mat, 1);
2934: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2935: PetscCall(VecLockReadPush(v1));
2936: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2937: else {
2938: Vec w, z;
2939: PetscCall(VecDuplicate(v1, &w));
2940: PetscCall(VecCopy(v1, w));
2941: PetscCall(VecConjugate(w));
2942: PetscCall(VecDuplicate(v3, &z));
2943: PetscCall(MatMultTranspose(mat, w, z));
2944: PetscCall(VecDestroy(&w));
2945: PetscCall(VecConjugate(z));
2946: if (v2 != v3) {
2947: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2948: } else {
2949: PetscCall(VecAXPY(v3, 1.0, z));
2950: }
2951: PetscCall(VecDestroy(&z));
2952: }
2953: PetscCall(VecLockReadPop(v1));
2954: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2955: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2956: PetscFunctionReturn(PETSC_SUCCESS);
2957: }
2959: /*@
2960: MatGetFactorType - gets the type of factorization of a matrix
2962: Not Collective
2964: Input Parameter:
2965: . mat - the matrix
2967: Output Parameter:
2968: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2970: Level: intermediate
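   Example:
   A minimal sketch, assuming `F` is a matrix previously obtained with `MatGetFactor()`
.vb
   MatFactorType t;

   PetscCall(MatGetFactorType(F, &t));
   if (t == MAT_FACTOR_LU) {
     /* F holds (or will hold) an LU factorization */
   }
.ve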
2972: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2973: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2974: @*/
2975: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2976: {
2977: PetscFunctionBegin;
2980: PetscAssertPointer(t, 2);
2981: *t = mat->factortype;
2982: PetscFunctionReturn(PETSC_SUCCESS);
2983: }
2985: /*@
2986: MatSetFactorType - sets the type of factorization of a matrix
2988: Logically Collective
2990: Input Parameters:
2991: + mat - the matrix
2992: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2994: Level: intermediate
2996: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2997: `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2998: @*/
2999: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
3000: {
3001: PetscFunctionBegin;
3004: mat->factortype = t;
3005: PetscFunctionReturn(PETSC_SUCCESS);
3006: }
3008: /*@
3009: MatGetInfo - Returns information about matrix storage (number of
3010: nonzeros, memory, etc.).
3012: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3014: Input Parameters:
3015: + mat - the matrix
3016: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3018: Output Parameter:
3019: . info - matrix information context
3021: Options Database Key:
3022: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3024: Level: intermediate
3026: Notes:
3027: The `MatInfo` context contains a variety of matrix data, including
3028: number of nonzeros allocated and used, number of mallocs during
3029: matrix assembly, etc. Additional information for factored matrices
3030: is provided (such as the fill ratio, number of mallocs during
3031: factorization, etc.).
3033: Example:
3034: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3035: data within the `MatInfo` context. For example,
3036: .vb
3037: MatInfo info;
3038: Mat A;
3039: double mal, nz_a, nz_u;
3041: MatGetInfo(A, MAT_LOCAL, &info);
3042: mal = info.mallocs;
3043: nz_a = info.nz_allocated;
3044: .ve
3046: Fortran Note:
3047: Declare info as a `MatInfo` array of dimension `MAT_INFO_SIZE`, and then extract the parameters
3048: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
3049: for a complete list of parameter names.
3050: .vb
3051: MatInfo info(MAT_INFO_SIZE)
3052: double precision mal, nz_a
3053: Mat A
3054: integer ierr
3056: call MatGetInfo(A, MAT_LOCAL, info, ierr)
3057: mal = info(MAT_INFO_MALLOCS)
3058: nz_a = info(MAT_INFO_NZ_ALLOCATED)
3059: .ve
3061: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3062: @*/
3063: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3064: {
3065: PetscFunctionBegin;
3068: PetscAssertPointer(info, 3);
3069: MatCheckPreallocated(mat, 1);
3070: PetscUseTypeMethod(mat, getinfo, flag, info);
3071: PetscFunctionReturn(PETSC_SUCCESS);
3072: }
3074: /*
3075: This is used by external packages where it is not easy to get the info from the actual
3076: matrix factorization.
3077: */
3078: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3079: {
3080: PetscFunctionBegin;
3081: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3082: PetscFunctionReturn(PETSC_SUCCESS);
3083: }
3085: /*@
3086: MatLUFactor - Performs in-place LU factorization of a matrix.
3088: Collective
3090: Input Parameters:
3091: + mat - the matrix
3092: . row - row permutation
3093: . col - column permutation
3094: - info - options for factorization, includes
3095: .vb
3096: fill - expected fill as ratio of original fill.
3097: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3098: Run with the option -info to determine an optimal value to use
3099: .ve
3101: Level: developer
3103: Notes:
3104: Most users should employ the `KSP` interface for linear solvers
3105: instead of working directly with matrix algebra routines such as this.
3106: See, e.g., `KSPCreate()`.
3108: This changes the state of the matrix to a factored matrix; it cannot be used
3109: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3111: This is really in-place only for dense matrices; when not using `KSP`, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and
3112: `MatLUFactorNumeric()`.
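   Example:
   A minimal sketch, assuming `A` is an assembled square matrix of a type that supports in-place LU (for example `MATSEQDENSE`) and `b`, `x` are conforming vectors
.vb
   IS            row, col;
   MatFactorInfo info;

   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
   PetscCall(MatLUFactor(A, row, col, &info)); /* A now holds its own LU factors */
   PetscCall(MatSolve(A, b, x));
   PetscCall(ISDestroy(&row));
   PetscCall(ISDestroy(&col));
.ve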
3114: Developer Note:
3115: The Fortran interface is not autogenerated as the
3116: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3118: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3119: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3120: @*/
3121: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3122: {
3123: MatFactorInfo tinfo;
3125: PetscFunctionBegin;
3129: if (info) PetscAssertPointer(info, 4);
3131: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3132: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3133: MatCheckPreallocated(mat, 1);
3134: if (!info) {
3135: PetscCall(MatFactorInfoInitialize(&tinfo));
3136: info = &tinfo;
3137: }
3139: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3140: PetscUseTypeMethod(mat, lufactor, row, col, info);
3141: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3142: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3143: PetscFunctionReturn(PETSC_SUCCESS);
3144: }
3146: /*@
3147: MatILUFactor - Performs in-place ILU factorization of a matrix.
3149: Collective
3151: Input Parameters:
3152: + mat - the matrix
3153: . row - row permutation
3154: . col - column permutation
3155: - info - structure containing
3156: .vb
3157: levels - number of levels of fill.
3158: expected fill - as ratio of original fill.
3159: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3160: missing diagonal entries)
3161: .ve
3163: Level: developer
3165: Notes:
3166: Most users should employ the `KSP` interface for linear solvers
3167: instead of working directly with matrix algebra routines such as this.
3168: See, e.g., `KSPCreate()`.
3170: This is likely in-place only when the level of fill is zero; otherwise new space is allocated
3171: to store the factored matrix and the previous memory is freed. When not using `KSP`, the preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and
3172: `MatILUFactorNumeric()`.
3174: Developer Note:
3175: The Fortran interface is not autogenerated as the
3176: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3178: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3179: @*/
3180: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3181: {
3182: PetscFunctionBegin;
3186: PetscAssertPointer(info, 4);
3188: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3189: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3190: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3191: MatCheckPreallocated(mat, 1);
3193: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3194: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3195: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3196: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3197: PetscFunctionReturn(PETSC_SUCCESS);
3198: }
3200: /*@
3201: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3202: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3204: Collective
3206: Input Parameters:
3207: + fact - the factor matrix obtained with `MatGetFactor()`
3208: . mat - the matrix
3209: . row - the row permutation
3210: . col - the column permutation
3211: - info - options for factorization, includes
3212: .vb
3213: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3214: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3215: .ve
3217: Level: developer
3219: Notes:
3220: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3222: Most users should employ the simplified `KSP` interface for linear solvers
3223: instead of working directly with matrix algebra routines such as this.
3224: See, e.g., `KSPCreate()`.
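   Example:
   A sketch of the full out-of-place LU workflow, assuming `A` is an assembled `MATSEQAIJ` matrix and `b`, `x` are conforming vectors
.vb
   Mat           F;
   IS            row, col;
   MatFactorInfo info;

   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
   PetscCall(MatGetOrdering(A, MATORDERINGND, &row, &col));
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatLUFactorSymbolic(F, A, row, col, &info));
   PetscCall(MatLUFactorNumeric(F, A, &info));
   PetscCall(MatSolve(F, b, x));
   PetscCall(ISDestroy(&row));
   PetscCall(ISDestroy(&col));
   PetscCall(MatDestroy(&F));
.ve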
3226: Developer Note:
3227: The Fortran interface is not autogenerated as the
3228: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3230: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3231: @*/
3232: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3233: {
3234: MatFactorInfo tinfo;
3236: PetscFunctionBegin;
3241: if (info) PetscAssertPointer(info, 5);
3244: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3245: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3246: MatCheckPreallocated(mat, 2);
3247: if (!info) {
3248: PetscCall(MatFactorInfoInitialize(&tinfo));
3249: info = &tinfo;
3250: }
3252: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3253: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3254: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3255: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3256: PetscFunctionReturn(PETSC_SUCCESS);
3257: }
3259: /*@
3260: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3261: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3263: Collective
3265: Input Parameters:
3266: + fact - the factor matrix obtained with `MatGetFactor()`
3267: . mat - the matrix
3268: - info - options for factorization
3270: Level: developer
3272: Notes:
3273: See `MatLUFactor()` for in-place factorization. See
3274: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3276: Most users should employ the `KSP` interface for linear solvers
3277: instead of working directly with matrix algebra routines such as this.
3278: See, e.g., `KSPCreate()`.
3280: Developer Note:
3281: The Fortran interface is not autogenerated as the
3282: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3284: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3285: @*/
3286: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3287: {
3288: MatFactorInfo tinfo;
3290: PetscFunctionBegin;
3295: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3296: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3297: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3299: MatCheckPreallocated(mat, 2);
3300: if (!info) {
3301: PetscCall(MatFactorInfoInitialize(&tinfo));
3302: info = &tinfo;
3303: }
3305: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3306: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3307: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3308: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3309: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3310: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3311: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3312: PetscFunctionReturn(PETSC_SUCCESS);
3313: }
3315: /*@
3316: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3317: symmetric matrix.
3319: Collective
3321: Input Parameters:
3322: + mat - the matrix
3323: . perm - row and column permutations
3324: - info - expected fill as ratio of original fill
3326: Level: developer
3328: Notes:
3329: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3330: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3332: Most users should employ the `KSP` interface for linear solvers
3333: instead of working directly with matrix algebra routines such as this.
3334: See, e.g., `KSPCreate()`.
3336: Developer Note:
3337: The Fortran interface is not autogenerated as the
3338: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3340: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3341: `MatGetOrdering()`
3342: @*/
3343: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3344: {
3345: MatFactorInfo tinfo;
3347: PetscFunctionBegin;
3350: if (info) PetscAssertPointer(info, 3);
3352: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3353: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3354: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3355: MatCheckPreallocated(mat, 1);
3356: if (!info) {
3357: PetscCall(MatFactorInfoInitialize(&tinfo));
3358: info = &tinfo;
3359: }
3361: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3362: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3363: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3364: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3365: PetscFunctionReturn(PETSC_SUCCESS);
3366: }
3368: /*@
3369: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3370: of a symmetric matrix.
3372: Collective
3374: Input Parameters:
3375: + fact - the factor matrix obtained with `MatGetFactor()`
3376: . mat - the matrix
3377: . perm - row and column permutations
3378: - info - options for factorization, includes
3379: .vb
3380: fill - expected fill as ratio of original fill.
3381: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3382: Run with the option -info to determine an optimal value to use
3383: .ve
3385: Level: developer
3387: Notes:
3388: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3389: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3391: Most users should employ the `KSP` interface for linear solvers
3392: instead of working directly with matrix algebra routines such as this.
3393: See, e.g., `KSPCreate()`.
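   Example:
   A sketch of the out-of-place Cholesky workflow, assuming `A` is an assembled symmetric `MATSEQAIJ` (or `MATSEQSBAIJ`) matrix and `b`, `x` are conforming vectors
.vb
   Mat           F;
   IS            rperm, cperm;
   MatFactorInfo info;

   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
   PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm));
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatCholeskyFactorSymbolic(F, A, rperm, &info));
   PetscCall(MatCholeskyFactorNumeric(F, A, &info));
   PetscCall(MatSolve(F, b, x));
   PetscCall(ISDestroy(&rperm));
   PetscCall(ISDestroy(&cperm));
   PetscCall(MatDestroy(&F));
.ve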
3395: Developer Note:
3396: The Fortran interface is not autogenerated as the
3397: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3399: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3400: `MatGetOrdering()`
3401: @*/
3402: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3403: {
3404: MatFactorInfo tinfo;
3406: PetscFunctionBegin;
3410: if (info) PetscAssertPointer(info, 4);
3413: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3414: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3415: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3416: MatCheckPreallocated(mat, 2);
3417: if (!info) {
3418: PetscCall(MatFactorInfoInitialize(&tinfo));
3419: info = &tinfo;
3420: }
3422: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3423: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3424: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3425: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3426: PetscFunctionReturn(PETSC_SUCCESS);
3427: }
3429: /*@
3430: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3431: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3432: `MatCholeskyFactorSymbolic()`.
3434: Collective
3436: Input Parameters:
3437: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3438: . mat - the initial matrix that is to be factored
3439: - info - options for factorization
3441: Level: developer
3443: Note:
3444: Most users should employ the `KSP` interface for linear solvers
3445: instead of working directly with matrix algebra routines such as this.
3446: See, e.g., `KSPCreate()`.
3448: Developer Note:
3449: The Fortran interface is not autogenerated as the
3450: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3452: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3453: @*/
3454: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3455: {
3456: MatFactorInfo tinfo;
3458: PetscFunctionBegin;
3463: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3464: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3465: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3466: MatCheckPreallocated(mat, 2);
3467: if (!info) {
3468: PetscCall(MatFactorInfoInitialize(&tinfo));
3469: info = &tinfo;
3470: }
3472: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3473: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3474: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3475: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3476: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3477: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3478: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3479: PetscFunctionReturn(PETSC_SUCCESS);
3480: }
3482: /*@
3483: MatQRFactor - Performs in-place QR factorization of a matrix.
3485: Collective
3487: Input Parameters:
3488: + mat - the matrix
3489: . col - column permutation
3490: - info - options for factorization, includes
3491: .vb
3492: fill - expected fill as ratio of original fill.
3493: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3494: Run with the option -info to determine an optimal value to use
3495: .ve
3497: Level: developer
3499: Notes:
3500: Most users should employ the `KSP` interface for linear solvers
3501: instead of working directly with matrix algebra routines such as this.
3502: See, e.g., `KSPCreate()`.
3504: This changes the state of the matrix to a factored matrix; it cannot be used
3505: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3507: Developer Note:
3508: The Fortran interface is not autogenerated as the
3509: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3511: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3512: `MatSetUnfactored()`
3513: @*/
3514: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3515: {
3516: PetscFunctionBegin;
3519: if (info) PetscAssertPointer(info, 3);
3521: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3522: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3523: MatCheckPreallocated(mat, 1);
3524: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3525: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3526: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3527: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3528: PetscFunctionReturn(PETSC_SUCCESS);
3529: }
3531: /*@
3532: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3533: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3535: Collective
3537: Input Parameters:
3538: + fact - the factor matrix obtained with `MatGetFactor()`
3539: . mat - the matrix
3540: . col - column permutation
3541: - info - options for factorization, includes
3542: .vb
3543: fill - expected fill as ratio of original fill.
3544: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3545: Run with the option -info to determine an optimal value to use
3546: .ve
3548: Level: developer
3550: Note:
3551: Most users should employ the `KSP` interface for linear solvers
3552: instead of working directly with matrix algebra routines such as this.
3553: See, e.g., `KSPCreate()`.
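   Example:
   A sketch of the QR workflow, assuming `A` is a `MATSEQDENSE` matrix (for which the built-in PETSc QR is available and, as assumed here, a `NULL` column permutation is accepted) and `b`, `x` are conforming vectors
.vb
   Mat           F;
   MatFactorInfo info;

   PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F));
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatQRFactorSymbolic(F, A, NULL, &info)); /* NULL column permutation assumed acceptable for the dense QR */
   PetscCall(MatQRFactorNumeric(F, A, &info));
   PetscCall(MatSolve(F, b, x)); /* least-squares solve of A x = b */
   PetscCall(MatDestroy(&F));
.ve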
3555: Developer Note:
3556: The Fortran interface is not autogenerated as the
3557: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3559: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3560: @*/
3561: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3562: {
3563: MatFactorInfo tinfo;
3565: PetscFunctionBegin;
3569: if (info) PetscAssertPointer(info, 4);
3572: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3573: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3574: MatCheckPreallocated(mat, 2);
3575: if (!info) {
3576: PetscCall(MatFactorInfoInitialize(&tinfo));
3577: info = &tinfo;
3578: }
3580: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3581: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3582: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3583: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3584: PetscFunctionReturn(PETSC_SUCCESS);
3585: }
3587: /*@
3588: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3589: Call this routine after first calling `MatGetFactor()` and `MatQRFactorSymbolic()`.
3591: Collective
3593: Input Parameters:
3594: + fact - the factor matrix obtained with `MatGetFactor()`
3595: . mat - the matrix
3596: - info - options for factorization
3598: Level: developer
3600: Notes:
3601: See `MatQRFactor()` for in-place factorization.
3603: Most users should employ the `KSP` interface for linear solvers
3604: instead of working directly with matrix algebra routines such as this.
3605: See, e.g., `KSPCreate()`.
3607: Developer Note:
3608: The Fortran interface is not autogenerated as the
3609: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3611: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3612: @*/
3613: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3614: {
3615: MatFactorInfo tinfo;
3617: PetscFunctionBegin;
3622: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3623: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3624: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3626: MatCheckPreallocated(mat, 2);
3627: if (!info) {
3628: PetscCall(MatFactorInfoInitialize(&tinfo));
3629: info = &tinfo;
3630: }
3632: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3633: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3634: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3635: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3636: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3637: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3638: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3639: PetscFunctionReturn(PETSC_SUCCESS);
3640: }
3642: /*@
3643: MatSolve - Solves $A x = b$, given a factored matrix.
3645: Neighbor-wise Collective
3647: Input Parameters:
3648: + mat - the factored matrix
3649: - b - the right-hand-side vector
3651: Output Parameter:
3652: . x - the result vector
3654: Level: developer
3656: Notes:
3657: The vectors `b` and `x` cannot be the same. I.e., one cannot
3658: call `MatSolve`(A,x,x).
3660: Most users should employ the `KSP` interface for linear solvers
3661: instead of working directly with matrix algebra routines such as this.
3662: See, e.g., `KSPCreate()`.
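   Example:
   A minimal sketch, assuming `F` holds a completed factorization (obtained, e.g., with `MatGetFactor()` followed by `MatLUFactorSymbolic()` and `MatLUFactorNumeric()` on a matrix `A`)
.vb
   Vec b, x;

   PetscCall(MatCreateVecs(F, &x, &b)); /* x conforms with the columns of F, b with the rows */
   PetscCall(VecSet(b, 1.0));
   PetscCall(MatSolve(F, b, x)); /* x = A^{-1} b */
.ve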
3664: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3665: @*/
3666: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3667: {
3668: PetscFunctionBegin;
3673: PetscCheckSameComm(mat, 1, b, 2);
3674: PetscCheckSameComm(mat, 1, x, 3);
3675: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3676: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3677: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3678: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3679: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3680: MatCheckPreallocated(mat, 1);
3682: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3683: PetscCall(VecFlag(x, mat->factorerrortype));
3684: if (mat->factorerrortype) {
3685: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3686: } else PetscUseTypeMethod(mat, solve, b, x);
3687: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3688: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3689: PetscFunctionReturn(PETSC_SUCCESS);
3690: }
3692: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3693: {
3694: Vec b, x;
3695: PetscInt N, i;
3696: PetscErrorCode (*f)(Mat, Vec, Vec);
3697: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3699: PetscFunctionBegin;
3700: if (A->factorerrortype) {
3701: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3702: PetscCall(MatSetInf(X));
3703: PetscFunctionReturn(PETSC_SUCCESS);
3704: }
3705: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3706: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3707: PetscCall(MatBoundToCPU(A, &Abound));
3708: if (!Abound) {
3709: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3710: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3711: }
3712: #if PetscDefined(HAVE_CUDA)
3713: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3714: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3715: #elif PetscDefined(HAVE_HIP)
3716: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3717: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3718: #endif
3719: PetscCall(MatGetSize(B, NULL, &N));
3720: for (i = 0; i < N; i++) {
3721: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3722: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3723: PetscCall((*f)(A, b, x));
3724: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3725: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3726: }
3727: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3728: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3729: PetscFunctionReturn(PETSC_SUCCESS);
3730: }
3732: /*@
3733: MatMatSolve - Solves $A X = B$, given a factored matrix.
3735: Neighbor-wise Collective
3737: Input Parameters:
3738: + A - the factored matrix
3739: - B - the right-hand-side `MATDENSE` matrix (or a sparse `MATAIJ` matrix when using MUMPS)
3741: Output Parameter:
3742: . X - the result matrix (dense matrix)
3744: Level: developer
3746: Note:
3747: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3748: otherwise, `B` and `X` cannot be the same.
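   Example:
   A minimal sketch, assuming `F` holds a completed factorization of a square matrix `A` whose row and column layouts coincide, and `nrhs` right-hand sides are wanted
.vb
   Mat      B, X;
   PetscInt m, nrhs = 5;

   PetscCall(MatGetLocalSize(A, &m, NULL));
   PetscCall(MatCreateDense(PetscObjectComm((PetscObject)A), m, PETSC_DECIDE, PETSC_DETERMINE, nrhs, NULL, &B));
   PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
   /* ... insert the right-hand sides into the columns of B and assemble B ... */
   PetscCall(MatMatSolve(F, B, X)); /* X = A^{-1} B */
.ve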
3750: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3751: @*/
3752: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3753: {
3754: PetscFunctionBegin;
3759: PetscCheckSameComm(A, 1, B, 2);
3760: PetscCheckSameComm(A, 1, X, 3);
3761: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3762: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3763: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3764: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3765: MatCheckPreallocated(A, 1);
3767: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3768: if (!A->ops->matsolve) {
3769: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3770: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3771: } else PetscUseTypeMethod(A, matsolve, B, X);
3772: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3773: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3774: PetscFunctionReturn(PETSC_SUCCESS);
3775: }
3777: /*@
3778: MatMatSolveTranspose - Solves $A^T X = B$, given a factored matrix.
3780: Neighbor-wise Collective
3782: Input Parameters:
3783: + A - the factored matrix
3784: - B - the right-hand-side matrix (`MATDENSE` matrix)
3786: Output Parameter:
3787: . X - the result matrix (dense matrix)
3789: Level: developer
3791: Note:
3792: The matrices `B` and `X` cannot be the same. I.e., one cannot
3793: call `MatMatSolveTranspose`(A,X,X).
3795: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3796: @*/
3797: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3798: {
3799: PetscFunctionBegin;
3804: PetscCheckSameComm(A, 1, B, 2);
3805: PetscCheckSameComm(A, 1, X, 3);
3806: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3807: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3808: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3809: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3810: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix");
3811: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3812: MatCheckPreallocated(A, 1);
3814: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3815: if (!A->ops->matsolvetranspose) {
3816: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3817: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3818: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3819: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3820: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3821: PetscFunctionReturn(PETSC_SUCCESS);
3822: }
3824: /*@
3825: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3827: Neighbor-wise Collective
3829: Input Parameters:
3830: + A - the factored matrix
3831: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3833: Output Parameter:
3834: . X - the result matrix (dense matrix)
3836: Level: developer
3838: Note:
3839: MUMPS only supports the right-hand-side matrix in centralized sparse compressed column format on the host processor. The user must therefore create `Bt` in sparse compressed row
3840: format on the host processor and call `MatMatTransposeSolve()` to obtain the effect of MUMPS' `MatMatSolve()`.
3842: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3843: @*/
3844: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3845: {
3846: PetscFunctionBegin;
3851: PetscCheckSameComm(A, 1, Bt, 2);
3852: PetscCheckSameComm(A, 1, X, 3);
3854: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and Bt must be different matrices");
3855: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3856: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3857: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows");
3858: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3859: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3860: MatCheckPreallocated(A, 1);
3862: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3863: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3864: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3865: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3866: PetscFunctionReturn(PETSC_SUCCESS);
3867: }
3869: /*@
3870: MatForwardSolve - Solves $L x = b$, given a factored matrix $A = LU$, or
3871: $U^T D^{1/2} x = b$, given a factored symmetric matrix $A = U^T D U$.
3873: Neighbor-wise Collective
3875: Input Parameters:
3876: + mat - the factored matrix
3877: - b - the right-hand-side vector
3879: Output Parameter:
3880: . x - the result vector
3882: Level: developer
3884: Notes:
3885: `MatSolve()` should be used for most applications, as it performs
3886: a forward solve followed by a backward solve.
3888: The vectors `b` and `x` cannot be the same, i.e., one cannot
3889: call `MatForwardSolve`(A,x,x).
3891: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3892: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3893: `MatForwardSolve()` solves $U^T D y = b$, and
3894: `MatBackwardSolve()` solves $U x = y$.
3895: Thus they do not provide a symmetric preconditioner.
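   Example:
   A minimal sketch of a split triangular solve, assuming `F` holds a completed LU (or Cholesky) factorization whose matrix type implements the forward and backward solves
.vb
   Vec b, x, y;

   PetscCall(MatCreateVecs(F, &x, &b));
   PetscCall(VecDuplicate(b, &y));
   PetscCall(VecSet(b, 1.0));
   PetscCall(MatForwardSolve(F, b, y));  /* y = L^{-1} b */
   PetscCall(MatBackwardSolve(F, y, x)); /* x = U^{-1} y, i.e. x = A^{-1} b */
.ve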
3897: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3898: @*/
3899: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3900: {
3901: PetscFunctionBegin;
3906: PetscCheckSameComm(mat, 1, b, 2);
3907: PetscCheckSameComm(mat, 1, x, 3);
3908: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3909: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3910: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3911: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3912: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3913: MatCheckPreallocated(mat, 1);
3915: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3916: PetscUseTypeMethod(mat, forwardsolve, b, x);
3917: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3918: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3919: PetscFunctionReturn(PETSC_SUCCESS);
3920: }
3922: /*@
3923: MatBackwardSolve - Solves $U x = b$, given a factored matrix $A = LU$, or
3924: $D^{1/2} U x = b$, given a factored symmetric matrix $A = U^T D U$.
3926: Neighbor-wise Collective
3928: Input Parameters:
3929: + mat - the factored matrix
3930: - b - the right-hand-side vector
3932: Output Parameter:
3933: . x - the result vector
3935: Level: developer
3937: Notes:
3938: `MatSolve()` should be used for most applications, as it performs
3939: a forward solve followed by a backward solve.
3941: The vectors `b` and `x` cannot be the same. I.e., one cannot
3942: call `MatBackwardSolve`(A,x,x).
3944: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3945: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3946: `MatForwardSolve()` solves $U^T D y = b$, and
3947: `MatBackwardSolve()` solves $U x = y$.
3948: Thus they do not provide a symmetric preconditioner.
3950: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3951: @*/
3952: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3953: {
3954: PetscFunctionBegin;
3959: PetscCheckSameComm(mat, 1, b, 2);
3960: PetscCheckSameComm(mat, 1, x, 3);
3961: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3962: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3963: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3964: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3965: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3966: MatCheckPreallocated(mat, 1);
3968: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3969: PetscUseTypeMethod(mat, backwardsolve, b, x);
3970: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3971: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3972: PetscFunctionReturn(PETSC_SUCCESS);
3973: }
3975: /*@
3976: MatSolveAdd - Computes $x = y + A^{-1} b$, given a factored matrix.
3978: Neighbor-wise Collective
3980: Input Parameters:
3981: + mat - the factored matrix
3982: . b - the right-hand-side vector
3983: - y - the vector to be added to
3985: Output Parameter:
3986: . x - the result vector
3988: Level: developer
3990: Note:
3991: The vectors `b` and `x` cannot be the same. I.e., one cannot
3992: call `MatSolveAdd`(A,x,y,x).
3994: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3995: @*/
3996: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3997: {
3998: PetscScalar one = 1.0;
3999: Vec tmp;
4001: PetscFunctionBegin;
4007: PetscCheckSameComm(mat, 1, b, 2);
4008: PetscCheckSameComm(mat, 1, y, 3);
4009: PetscCheckSameComm(mat, 1, x, 4);
4010: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4011: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4012: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4013: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4014: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4015: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4016: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4017: MatCheckPreallocated(mat, 1);
4019: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4020: PetscCall(VecFlag(x, mat->factorerrortype));
4021: if (mat->factorerrortype) {
4022: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4023: } else if (mat->ops->solveadd) {
4024: PetscUseTypeMethod(mat, solveadd, b, y, x);
4025: } else {
4026: /* do the solve then the add manually */
4027: if (x != y) {
4028: PetscCall(MatSolve(mat, b, x));
4029: PetscCall(VecAXPY(x, one, y));
4030: } else {
4031: PetscCall(VecDuplicate(x, &tmp));
4032: PetscCall(VecCopy(x, tmp));
4033: PetscCall(MatSolve(mat, b, x));
4034: PetscCall(VecAXPY(x, one, tmp));
4035: PetscCall(VecDestroy(&tmp));
4036: }
4037: }
4038: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4039: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4040: PetscFunctionReturn(PETSC_SUCCESS);
4041: }
4043: /*@
4044: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4046: Neighbor-wise Collective
4048: Input Parameters:
4049: + mat - the factored matrix
4050: - b - the right-hand-side vector
4052: Output Parameter:
4053: . x - the result vector
4055: Level: developer
4057: Notes:
4058: The vectors `b` and `x` cannot be the same. I.e., one cannot
4059: call `MatSolveTranspose`(A,x,x).
4061: Most users should employ the `KSP` interface for linear solvers
4062: instead of working directly with matrix algebra routines such as this.
4063: See, e.g., `KSPCreate()`.
4065: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4066: @*/
4067: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4068: {
4069: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4071: PetscFunctionBegin;
4076: PetscCheckSameComm(mat, 1, b, 2);
4077: PetscCheckSameComm(mat, 1, x, 3);
4078: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4079: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4080: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4081: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4082: MatCheckPreallocated(mat, 1);
4083: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4084: PetscCall(VecFlag(x, mat->factorerrortype));
4085: if (mat->factorerrortype) {
4086: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4087: } else {
4088: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4089: PetscCall((*f)(mat, b, x));
4090: }
4091: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4092: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4093: PetscFunctionReturn(PETSC_SUCCESS);
4094: }
4096: /*@
4097: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4098: factored matrix.
4100: Neighbor-wise Collective
4102: Input Parameters:
4103: + mat - the factored matrix
4104: . b - the right-hand-side vector
4105: - y - the vector to be added to
4107: Output Parameter:
4108: . x - the result vector
4110: Level: developer
4112: Note:
4113: The vectors `b` and `x` cannot be the same. I.e., one cannot
4114: call `MatSolveTransposeAdd`(A,x,y,x).
4116: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4117: @*/
4118: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4119: {
4120: PetscScalar one = 1.0;
4121: Vec tmp;
4122: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4124: PetscFunctionBegin;
4130: PetscCheckSameComm(mat, 1, b, 2);
4131: PetscCheckSameComm(mat, 1, y, 3);
4132: PetscCheckSameComm(mat, 1, x, 4);
4133: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4134: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4135: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4136: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4137: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4138: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4139: MatCheckPreallocated(mat, 1);
4141: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4142: PetscCall(VecFlag(x, mat->factorerrortype));
4143: if (mat->factorerrortype) {
4144: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4145: } else if (f) {
4146: PetscCall((*f)(mat, b, y, x));
4147: } else {
4148: /* do the solve then the add manually */
4149: if (x != y) {
4150: PetscCall(MatSolveTranspose(mat, b, x));
4151: PetscCall(VecAXPY(x, one, y));
4152: } else {
4153: PetscCall(VecDuplicate(x, &tmp));
4154: PetscCall(VecCopy(x, tmp));
4155: PetscCall(MatSolveTranspose(mat, b, x));
4156: PetscCall(VecAXPY(x, one, tmp));
4157: PetscCall(VecDestroy(&tmp));
4158: }
4159: }
4160: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4161: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4162: PetscFunctionReturn(PETSC_SUCCESS);
4163: }
4165: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4166: /*@
4167: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4169: Neighbor-wise Collective
4171: Input Parameters:
4172: + mat - the matrix
4173: . b - the right-hand side
4174: . omega - the relaxation factor
4175: . flag - flag indicating the type of SOR (see below)
4176: . shift - diagonal shift
4177: . its - the number of iterations
4178: - lits - the number of local iterations
4180: Output Parameter:
4181: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4183: SOR Flags:
4184: + `SOR_FORWARD_SWEEP` - forward SOR
4185: . `SOR_BACKWARD_SWEEP` - backward SOR
4186: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4187: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4188: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4189: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4190: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4191: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4192: upper/lower triangular part of matrix to
4193: vector (with omega)
4194: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4196: Level: developer
4198: Notes:
4199: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4200: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4201: on each processor.
4203: Application programmers will not generally use `MatSOR()` directly,
4204: but instead will employ the `KSP`/`PC` interface.
4206: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4208: Most users should employ the `KSP` interface for linear solvers
4209: instead of working directly with matrix algebra routines such as this.
4210: See, e.g., `KSPCreate()`.
4212: Vectors `x` and `b` CANNOT be the same
4214: The flags are implemented as bitwise inclusive or operations.
4215: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4216: to specify a zero initial guess for SSOR.
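Example Usage:
A minimal sketch of one SSOR sweep with a zero initial guess, assuming `A` is an assembled `MATSEQAIJ` matrix and `b`, `x` were created with `MatCreateVecs()`
.vb
  PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
.ve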
4218: Developer Note:
4219: We should add block SOR support for `MATAIJ` matrices with block size greater than one and no inodes
4221: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4222: @*/
4223: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4224: {
4225: PetscFunctionBegin;
4230: PetscCheckSameComm(mat, 1, b, 2);
4231: PetscCheckSameComm(mat, 1, x, 8);
4232: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4233: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4234: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4235: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4236: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4237: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4238: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4239: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4241: MatCheckPreallocated(mat, 1);
4242: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4243: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4244: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4245: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4246: PetscFunctionReturn(PETSC_SUCCESS);
4247: }
4249: /*
4250: Default matrix copy routine.
4251: */
4252: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4253: {
4254: PetscInt i, rstart = 0, rend = 0, nz;
4255: const PetscInt *cwork;
4256: const PetscScalar *vwork;
4258: PetscFunctionBegin;
4259: if (B->assembled) PetscCall(MatZeroEntries(B));
4260: if (str == SAME_NONZERO_PATTERN) {
4261: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4262: for (i = rstart; i < rend; i++) {
4263: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4264: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4265: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4266: }
4267: } else {
4268: PetscCall(MatAYPX(B, 0.0, A, str));
4269: }
4270: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4271: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4272: PetscFunctionReturn(PETSC_SUCCESS);
4273: }
4275: /*@
4276: MatCopy - Copies a matrix to another matrix.
4278: Collective
4280: Input Parameters:
4281: + A - the matrix
4282: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4284: Output Parameter:
4285: . B - where the copy is put
4287: Level: intermediate
4289: Notes:
4290: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4292: `MatCopy()` copies the matrix entries of a matrix to another existing
4293: matrix (after first zeroing the second matrix). A related routine is
4294: `MatConvert()`, which first creates a new matrix and then copies the data.
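Example Usage:
A minimal sketch, assuming `A` is assembled; `B` is first created with the same nonzero pattern so that `SAME_NONZERO_PATTERN` is valid
.vb
  Mat B;

  PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
  PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN)); // B now holds the entries of A
.ve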
4296: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4297: @*/
4298: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4299: {
4300: PetscInt i;
4302: PetscFunctionBegin;
4307: PetscCheckSameComm(A, 1, B, 2);
4308: MatCheckPreallocated(B, 2);
4309: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4310: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4311: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4312: A->cmap->N, B->cmap->N);
4313: MatCheckPreallocated(A, 1);
4314: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4316: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4317: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4318: else PetscCall(MatCopy_Basic(A, B, str));
4320: B->stencil.dim = A->stencil.dim;
4321: B->stencil.noc = A->stencil.noc;
4322: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4323: B->stencil.dims[i] = A->stencil.dims[i];
4324: B->stencil.starts[i] = A->stencil.starts[i];
4325: }
4327: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4328: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4329: PetscFunctionReturn(PETSC_SUCCESS);
4330: }
4332: /*@
4333: MatConvert - Converts a matrix to another matrix, either of the same
4334: or different type.
4336: Collective
4338: Input Parameters:
4339: + mat - the matrix
4340: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4341: same type as the original matrix.
4342: - reuse - denotes if the destination matrix is to be created or reused.
4343: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use
4344: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4346: Output Parameter:
4347: . M - pointer to place new matrix
4349: Level: intermediate
4351: Notes:
4352: `MatConvert()` first creates a new matrix and then copies the data from
4353: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4354: entries of one matrix to another already existing matrix context.
4356: Cannot be used to convert a sequential matrix to parallel or parallel to sequential;
4357: the MPI communicator of the generated matrix is always the same as the communicator
4358: of the input matrix.
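Example Usage:
A minimal sketch converting an assembled matrix `A` to a dense matrix
.vb
  Mat Adense;

  PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
  // ... use Adense ...
  PetscCall(MatDestroy(&Adense));
.ve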
4360: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4361: @*/
4362: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4363: {
4364: PetscBool sametype, issame, flg;
4365: PetscBool3 issymmetric, ishermitian;
4366: char convname[256], mtype[256];
4367: Mat B;
4369: PetscFunctionBegin;
4372: PetscAssertPointer(M, 4);
4373: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4374: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4375: MatCheckPreallocated(mat, 1);
4377: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4378: if (flg) newtype = mtype;
4380: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4381: PetscCall(PetscStrcmp(newtype, "same", &issame));
4382: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4383: if (reuse == MAT_REUSE_MATRIX) {
4385: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4386: }
4388: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4389: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4390: PetscFunctionReturn(PETSC_SUCCESS);
4391: }
4393: /* Cache Mat options because some converters use MatHeaderReplace */
4394: issymmetric = mat->symmetric;
4395: ishermitian = mat->hermitian;
4397: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4398: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4399: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4400: } else {
4401: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4402: const char *prefix[3] = {"seq", "mpi", ""};
4403: PetscInt i;
4404: /*
4405: Order of precedence:
4406: 0) See if newtype is a superclass of the current matrix.
4407: 1) See if a specialized converter is known to the current matrix.
4408: 2) See if a specialized converter is known to the desired matrix class.
4409: 3) See if a good general converter is registered for the desired class
4410: (as of 6/27/03 only MATMPIADJ falls into this category).
4411: 4) See if a good general converter is known for the current matrix.
4412: 5) Use a really basic converter.
4413: */
4415: /* 0) See if newtype is a superclass of the current matrix.
4416: i.e. mat is mpiaij and newtype is aij */
4417: for (i = 0; i < 2; i++) {
4418: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4419: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4420: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4421: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4422: if (flg) {
4423: if (reuse == MAT_INPLACE_MATRIX) {
4424: PetscCall(PetscInfo(mat, "Early return\n"));
4425: PetscFunctionReturn(PETSC_SUCCESS);
4426: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4427: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4428: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4429: PetscFunctionReturn(PETSC_SUCCESS);
4430: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4431: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4432: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4433: PetscFunctionReturn(PETSC_SUCCESS);
4434: }
4435: }
4436: }
4437: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4438: for (i = 0; i < 3; i++) {
4439: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4440: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4441: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4442: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4443: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4444: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4445: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4446: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4447: if (conv) goto foundconv;
4448: }
4450: /* 2) See if a specialized converter is known to the desired matrix class. */
4451: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4452: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4453: PetscCall(MatSetType(B, newtype));
4454: for (i = 0; i < 3; i++) {
4455: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4456: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4457: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4458: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4459: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4460: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4461: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4462: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4463: if (conv) {
4464: PetscCall(MatDestroy(&B));
4465: goto foundconv;
4466: }
4467: }
4469: /* 3) See if a good general converter is registered for the desired class */
4470: conv = B->ops->convertfrom;
4471: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4472: PetscCall(MatDestroy(&B));
4473: if (conv) goto foundconv;
4475: /* 4) See if a good general converter is known for the current matrix */
4476: if (mat->ops->convert) conv = mat->ops->convert;
4477: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4478: if (conv) goto foundconv;
4480: /* 5) Use a really basic converter. */
4481: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4482: conv = MatConvert_Basic;
4484: foundconv:
4485: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4486: PetscCall((*conv)(mat, newtype, reuse, M));
4487: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4488: /* the block sizes must be same if the mappings are copied over */
4489: (*M)->rmap->bs = mat->rmap->bs;
4490: (*M)->cmap->bs = mat->cmap->bs;
4491: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4492: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4493: (*M)->rmap->mapping = mat->rmap->mapping;
4494: (*M)->cmap->mapping = mat->cmap->mapping;
4495: }
4496: (*M)->stencil.dim = mat->stencil.dim;
4497: (*M)->stencil.noc = mat->stencil.noc;
4498: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4499: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4500: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4501: }
4502: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4503: }
4504: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4506: /* Copy Mat options */
4507: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4508: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4509: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4510: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4511: PetscFunctionReturn(PETSC_SUCCESS);
4512: }
4514: /*@
4515: MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4517: Not Collective
4519: Input Parameter:
4520: . mat - the matrix, must be a factored matrix
4522: Output Parameter:
4523: . type - the string name of the package (do not free this string)
4525: Level: intermediate
4527: Fortran Note:
4528: Pass in an empty string that is long enough and the package name will be copied into it.
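Example Usage:
A minimal sketch, assuming `F` is a factor matrix obtained with `MatGetFactor()`
.vb
  MatSolverType stype;

  PetscCall(MatFactorGetSolverType(F, &stype));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Factorization provided by %s\n", stype));
.ve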
4530: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4531: @*/
4532: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4533: {
4534: PetscErrorCode (*conv)(Mat, MatSolverType *);
4536: PetscFunctionBegin;
4539: PetscAssertPointer(type, 2);
4540: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4541: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4542: if (conv) PetscCall((*conv)(mat, type));
4543: else *type = MATSOLVERPETSC;
4544: PetscFunctionReturn(PETSC_SUCCESS);
4545: }
4547: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4548: struct _MatSolverTypeForSpecifcType {
4549: MatType mtype;
4550: /* no entry for MAT_FACTOR_NONE */
4551: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4552: MatSolverTypeForSpecifcType next;
4553: };
4555: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4556: struct _MatSolverTypeHolder {
4557: char *name;
4558: MatSolverTypeForSpecifcType handlers;
4559: MatSolverTypeHolder next;
4560: };
4562: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4564: /*@C
4565: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4567: Logically Collective, No Fortran Support
4569: Input Parameters:
4570: + package - name of the package, for example petsc or superlu
4571: . mtype - the matrix type that works with this package
4572: . ftype - the type of factorization supported by the package
4573: - createfactor - routine that will create the factored matrix ready to be used
4575: Level: developer
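Example Usage:
A sketch of registering an LU factorization for `MATSEQAIJ` provided by a hypothetical package "mysolver"; `MatGetFactor_SeqAIJ_MySolver` is a placeholder name for the user's creation routine
.vb
  extern PetscErrorCode MatGetFactor_SeqAIJ_MySolver(Mat, MatFactorType, Mat *); // hypothetical user-provided routine

  PetscCall(MatSolverTypeRegister("mysolver", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MySolver));
.ve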
4577: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4578: `MatGetFactor()`
4579: @*/
4580: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4581: {
4582: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4583: PetscBool flg;
4584: MatSolverTypeForSpecifcType inext, iprev = NULL;
4586: PetscFunctionBegin;
4587: PetscCall(MatInitializePackage());
4588: if (!next) {
4589: PetscCall(PetscNew(&MatSolverTypeHolders));
4590: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4591: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4592: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4593: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4594: PetscFunctionReturn(PETSC_SUCCESS);
4595: }
4596: while (next) {
4597: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4598: if (flg) {
4599: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4600: inext = next->handlers;
4601: while (inext) {
4602: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4603: if (flg) {
4604: inext->createfactor[(int)ftype - 1] = createfactor;
4605: PetscFunctionReturn(PETSC_SUCCESS);
4606: }
4607: iprev = inext;
4608: inext = inext->next;
4609: }
4610: PetscCall(PetscNew(&iprev->next));
4611: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4612: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4613: PetscFunctionReturn(PETSC_SUCCESS);
4614: }
4615: prev = next;
4616: next = next->next;
4617: }
4618: PetscCall(PetscNew(&prev->next));
4619: PetscCall(PetscStrallocpy(package, &prev->next->name));
4620: PetscCall(PetscNew(&prev->next->handlers));
4621: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4622: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4623: PetscFunctionReturn(PETSC_SUCCESS);
4624: }
4626: /*@C
4627: MatSolverTypeGet - Gets the function that creates the factor matrix, if it exists
4629: Input Parameters:
4630: + type - name of the package, for example petsc or superlu, if this is 'NULL', then the first result that satisfies the other criteria is returned
4631: . ftype - the type of factorization supported by the type
4632: - mtype - the matrix type that works with this type
4634: Output Parameters:
4635: + foundtype - `PETSC_TRUE` if the type was registered
4636: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4637: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4639: Calling sequence of `createfactor`:
4640: + A - the matrix providing the factor matrix
4641: . ftype - the `MatFactorType` of the factor requested
4642: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4644: Level: developer
4646: Note:
4647: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4648: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4649: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4651: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4652: `MatInitializePackage()`
4653: @*/
4654: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4655: {
4656: MatSolverTypeHolder next = MatSolverTypeHolders;
4657: PetscBool flg;
4658: MatSolverTypeForSpecifcType inext;
4660: PetscFunctionBegin;
4661: if (foundtype) *foundtype = PETSC_FALSE;
4662: if (foundmtype) *foundmtype = PETSC_FALSE;
4663: if (createfactor) *createfactor = NULL;
4665: if (type) {
4666: while (next) {
4667: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4668: if (flg) {
4669: if (foundtype) *foundtype = PETSC_TRUE;
4670: inext = next->handlers;
4671: while (inext) {
4672: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4673: if (flg) {
4674: if (foundmtype) *foundmtype = PETSC_TRUE;
4675: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4676: PetscFunctionReturn(PETSC_SUCCESS);
4677: }
4678: inext = inext->next;
4679: }
4680: }
4681: next = next->next;
4682: }
4683: } else {
4684: while (next) {
4685: inext = next->handlers;
4686: while (inext) {
4687: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4688: if (flg && inext->createfactor[(int)ftype - 1]) {
4689: if (foundtype) *foundtype = PETSC_TRUE;
4690: if (foundmtype) *foundmtype = PETSC_TRUE;
4691: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4692: PetscFunctionReturn(PETSC_SUCCESS);
4693: }
4694: inext = inext->next;
4695: }
4696: next = next->next;
4697: }
4698: /* try with base classes inext->mtype */
4699: next = MatSolverTypeHolders;
4700: while (next) {
4701: inext = next->handlers;
4702: while (inext) {
4703: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4704: if (flg && inext->createfactor[(int)ftype - 1]) {
4705: if (foundtype) *foundtype = PETSC_TRUE;
4706: if (foundmtype) *foundmtype = PETSC_TRUE;
4707: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4708: PetscFunctionReturn(PETSC_SUCCESS);
4709: }
4710: inext = inext->next;
4711: }
4712: next = next->next;
4713: }
4714: }
4715: PetscFunctionReturn(PETSC_SUCCESS);
4716: }
4718: PetscErrorCode MatSolverTypeDestroy(void)
4719: {
4720: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4721: MatSolverTypeForSpecifcType inext, iprev;
4723: PetscFunctionBegin;
4724: while (next) {
4725: PetscCall(PetscFree(next->name));
4726: inext = next->handlers;
4727: while (inext) {
4728: PetscCall(PetscFree(inext->mtype));
4729: iprev = inext;
4730: inext = inext->next;
4731: PetscCall(PetscFree(iprev));
4732: }
4733: prev = next;
4734: next = next->next;
4735: PetscCall(PetscFree(prev));
4736: }
4737: MatSolverTypeHolders = NULL;
4738: PetscFunctionReturn(PETSC_SUCCESS);
4739: }
4741: /*@
4742: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4744: Logically Collective
4746: Input Parameter:
4747: . mat - the matrix
4749: Output Parameter:
4750: . flg - `PETSC_TRUE` if uses the ordering
4752: Level: developer
4754: Note:
4755: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4756: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4758: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4759: @*/
4760: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4761: {
4762: PetscFunctionBegin;
4763: *flg = mat->canuseordering;
4764: PetscFunctionReturn(PETSC_SUCCESS);
4765: }
4767: /*@
4768: MatFactorGetPreferredOrdering - Returns the preferred ordering for a particular matrix factor object
4770: Logically Collective
4772: Input Parameters:
4773: + mat - the matrix obtained with `MatGetFactor()`
4774: - ftype - the factorization type to be used
4776: Output Parameter:
4777: . otype - the preferred ordering type
4779: Level: developer
4781: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4782: @*/
4783: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4784: {
4785: PetscFunctionBegin;
4786: *otype = mat->preferredordering[ftype];
4787: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4788: PetscFunctionReturn(PETSC_SUCCESS);
4789: }
4791: /*@
4792: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4794: Collective
4796: Input Parameters:
4797: + mat - the matrix
4798: . type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available), if this is 'NULL', then the first result that satisfies
4799: the other criteria is returned
4800: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4802: Output Parameter:
4803: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4805: Options Database Keys:
4806: + -pc_factor_mat_solver_type <type> - choose the type at run time. When using `KSP` solvers
4807: - -mat_factor_bind_factorization <host, device> - where to do the matrix factorization. The default is device, which might consume more device memory;
4808: one can choose host to save device memory. Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4810: Level: intermediate
4812: Notes:
4813: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4814: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4816: Users usually access the factorization solvers via `KSP`
4818: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4819: such as pastix, superlu, mumps etc. PETSc must have been configured (with ./configure) to use the external solver, using the option --download-package or --with-package-dir
4821: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4822: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4823: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4825: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption
4826: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly, one can
4827: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
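Example Usage:
A minimal sketch of a direct LU solve done without `KSP`, assuming `A` is an assembled square `MATSEQAIJ` matrix and `b`, `x` are conforming vectors
.vb
  Mat           F;
  IS            rowperm, colperm;
  MatFactorInfo info;

  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&rowperm));
  PetscCall(ISDestroy(&colperm));
  PetscCall(MatDestroy(&F));
.ve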
4829: Developer Note:
4830: This should actually be called `MatCreateFactor()` since it creates a new factor object
4832: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4833: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4834: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4835: @*/
4836: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4837: {
4838: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4839: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4841: PetscFunctionBegin;
4845: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4846: MatCheckPreallocated(mat, 1);
4848: PetscCall(MatIsShell(mat, &shell));
4849: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4850: if (hasop) {
4851: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4852: PetscFunctionReturn(PETSC_SUCCESS);
4853: }
4855: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4856: if (!foundtype) {
4857: if (type) {
4858: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4859: ((PetscObject)mat)->type_name, type);
4860: } else {
4861: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4862: }
4863: }
4864: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4865: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4867: PetscCall((*conv)(mat, ftype, f));
4868: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4869: PetscFunctionReturn(PETSC_SUCCESS);
4870: }
4872: /*@
4873: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4875: Not Collective
4877: Input Parameters:
4878: + mat - the matrix
4879: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4880: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4882: Output Parameter:
4883: . flg - `PETSC_TRUE` if the factorization is available
4885: Level: intermediate
4887: Notes:
4888: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4889: such as pastix, superlu, mumps etc.
4891: PETSc must have been configured (with ./configure) to use the external solver, using the option --download-package
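Example Usage:
A minimal sketch that falls back to PETSc's own LU when a MUMPS installation is not available in the current configuration
.vb
  PetscBool     avail;
  MatSolverType stype = MATSOLVERPETSC;

  PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &avail));
  if (avail) stype = MATSOLVERMUMPS; // pass stype to MatGetFactor()
.ve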
4893: Developer Note:
4894: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4896: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4897: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4898: @*/
4899: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4900: {
4901: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4903: PetscFunctionBegin;
4905: PetscAssertPointer(flg, 4);
4907: *flg = PETSC_FALSE;
4908: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4910: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4911: MatCheckPreallocated(mat, 1);
4913: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4914: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4915: PetscFunctionReturn(PETSC_SUCCESS);
4916: }
4918: /*@
4919: MatDuplicate - Duplicates a matrix including the non-zero structure.
4921: Collective
4923: Input Parameters:
4924: + mat - the matrix
4925: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4926: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4928: Output Parameter:
4929: . M - pointer to place new matrix
4931: Level: intermediate
4933: Notes:
4934: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4936: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4938: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4940: When the original `mat` is the product of a matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4941: is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
4942: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
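Example Usage:
A minimal sketch, assuming `A` is an assembled matrix
.vb
  Mat B;

  PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B)); // B has the same nonzero structure and values as A
  PetscCall(MatScale(B, 2.0));                     // modifying B does not affect A
  PetscCall(MatDestroy(&B));
.ve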
4944: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4945: @*/
4946: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4947: {
4948: Mat B;
4949: VecType vtype;
4950: PetscInt i;
4951: PetscObject dm, container_h, container_d;
4952: void (*viewf)(void);
4954: PetscFunctionBegin;
4957: PetscAssertPointer(M, 3);
4958: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4959: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4960: MatCheckPreallocated(mat, 1);
4962: *M = NULL;
4963: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4964: PetscUseTypeMethod(mat, duplicate, op, M);
4965: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4966: B = *M;
4968: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4969: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4970: PetscCall(MatGetVecType(mat, &vtype));
4971: PetscCall(MatSetVecType(B, vtype));
4973: B->stencil.dim = mat->stencil.dim;
4974: B->stencil.noc = mat->stencil.noc;
4975: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4976: B->stencil.dims[i] = mat->stencil.dims[i];
4977: B->stencil.starts[i] = mat->stencil.starts[i];
4978: }
4980: B->nooffproczerorows = mat->nooffproczerorows;
4981: B->nooffprocentries = mat->nooffprocentries;
4983: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4984: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4985: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4986: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4987: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4988: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4989: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
4990: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4991: PetscFunctionReturn(PETSC_SUCCESS);
4992: }
4994: /*@
4995: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4997: Logically Collective
4999: Input Parameter:
5000: . mat - the matrix
5002: Output Parameter:
5003: . v - the diagonal of the matrix
5005: Level: intermediate
5007: Note:
5008: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5009: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5010: is larger than `ndiag`, the values of the remaining entries are unspecified.
5012: Currently only correct in parallel for square matrices.
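Example Usage:
A minimal sketch, assuming `A` is an assembled square matrix
.vb
  Vec diag;

  PetscCall(MatCreateVecs(A, &diag, NULL));
  PetscCall(MatGetDiagonal(A, diag));
  PetscCall(VecDestroy(&diag));
.ve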
5014: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5015: @*/
5016: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5017: {
5018: PetscFunctionBegin;
5022: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5023: MatCheckPreallocated(mat, 1);
5024: if (PetscDefined(USE_DEBUG)) {
5025: PetscInt nv, row, col, ndiag;
5027: PetscCall(VecGetLocalSize(v, &nv));
5028: PetscCall(MatGetLocalSize(mat, &row, &col));
5029: ndiag = PetscMin(row, col);
5030: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5031: }
5033: PetscUseTypeMethod(mat, getdiagonal, v);
5034: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5035: PetscFunctionReturn(PETSC_SUCCESS);
5036: }
5038: /*@
5039: MatGetRowMin - Gets the minimum value (of the real part) of each
5040: row of the matrix
5042: Logically Collective
5044: Input Parameter:
5045: . mat - the matrix
5047: Output Parameters:
5048: + v - the vector for storing the minimums
5049: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5051: Level: intermediate
5053: Note:
5054: The result of this call is the same as if one converted the matrix to dense format
5055: and found the minimum value in each row (i.e., the implicit zeros are counted as zeros).
5057: This code is only implemented for a couple of matrix formats.
5059: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5060: `MatGetRowMax()`
5061: @*/
5062: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5063: {
5064: PetscFunctionBegin;
5068: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5070: if (!mat->cmap->N) {
5071: PetscCall(VecSet(v, PETSC_MAX_REAL));
5072: if (idx) {
5073: PetscInt i, m = mat->rmap->n;
5074: for (i = 0; i < m; i++) idx[i] = -1;
5075: }
5076: } else {
5077: MatCheckPreallocated(mat, 1);
5078: }
5079: PetscUseTypeMethod(mat, getrowmin, v, idx);
5080: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5081: PetscFunctionReturn(PETSC_SUCCESS);
5082: }
5084: /*@
5085: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5086: row of the matrix
5088: Logically Collective
5090: Input Parameter:
5091: . mat - the matrix
5093: Output Parameters:
5094: + v - the vector for storing the minimums
5095: - idx - the indices of the column found for each row (or `NULL` if not needed)
5097: Level: intermediate
5099: Notes:
5100: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5101: row is 0 (the first column).
5103: This code is only implemented for a couple of matrix formats.
5105: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5106: @*/
5107: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5108: {
5109: PetscFunctionBegin;
5113: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5114: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5116: if (!mat->cmap->N) {
5117: PetscCall(VecSet(v, 0.0));
5118: if (idx) {
5119: PetscInt i, m = mat->rmap->n;
5120: for (i = 0; i < m; i++) idx[i] = -1;
5121: }
5122: } else {
5123: MatCheckPreallocated(mat, 1);
5124: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5125: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5126: }
5127: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5128: PetscFunctionReturn(PETSC_SUCCESS);
5129: }
5131: /*@
5132: MatGetRowMax - Gets the maximum value (of the real part) of each
5133: row of the matrix
5135: Logically Collective
5137: Input Parameter:
5138: . mat - the matrix
5140: Output Parameters:
5141: + v - the vector for storing the maximums
5142: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5144: Level: intermediate
5146: Notes:
5147: The result of this call is the same as if one converted the matrix to dense format
5148: and found the maximum value in each row (i.e., the implicit zeros are counted as zeros).
5150: This code is only implemented for a couple of matrix formats.
5152: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5153: @*/
5154: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5155: {
5156: PetscFunctionBegin;
5160: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5162: if (!mat->cmap->N) {
5163: PetscCall(VecSet(v, PETSC_MIN_REAL));
5164: if (idx) {
5165: PetscInt i, m = mat->rmap->n;
5166: for (i = 0; i < m; i++) idx[i] = -1;
5167: }
5168: } else {
5169: MatCheckPreallocated(mat, 1);
5170: PetscUseTypeMethod(mat, getrowmax, v, idx);
5171: }
5172: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5173: PetscFunctionReturn(PETSC_SUCCESS);
5174: }
5176: /*@
5177: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5178: row of the matrix
5180: Logically Collective
5182: Input Parameter:
5183: . mat - the matrix
5185: Output Parameters:
5186: + v - the vector for storing the maximums
5187: - idx - the indices of the column found for each row (or `NULL` if not needed)
5189: Level: intermediate
5191: Notes:
5192: If a row is completely empty or has only 0.0 values, then the `idx` value for that
5193: row is 0 (the first column).
5195: This code is only implemented for a couple of matrix formats.
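Example Usage:
A minimal sketch, assuming `A` is an assembled matrix; the column indices are not requested here
.vb
  Vec rowmax;

  PetscCall(MatCreateVecs(A, NULL, &rowmax)); // a vector conforming to the rows of A
  PetscCall(MatGetRowMaxAbs(A, rowmax, NULL));
  PetscCall(VecDestroy(&rowmax));
.ve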
5197: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5198: @*/
5199: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5200: {
5201: PetscFunctionBegin;
5205: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5207: if (!mat->cmap->N) {
5208: PetscCall(VecSet(v, 0.0));
5209: if (idx) {
5210: PetscInt i, m = mat->rmap->n;
5211: for (i = 0; i < m; i++) idx[i] = -1;
5212: }
5213: } else {
5214: MatCheckPreallocated(mat, 1);
5215: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5216: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5217: }
5218: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5219: PetscFunctionReturn(PETSC_SUCCESS);
5220: }
5222: /*@
5223: MatGetRowSumAbs - Gets the sum of the absolute values of the entries in each row of the matrix
5225: Logically Collective
5227: Input Parameter:
5228: . mat - the matrix
5230: Output Parameter:
5231: . v - the vector for storing the sum
5233: Level: intermediate
Note:
5235: This code is only implemented for a couple of matrix formats.
5237: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5238: @*/
5239: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5240: {
5241: PetscFunctionBegin;
5245: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5247: if (!mat->cmap->N) {
5248: PetscCall(VecSet(v, 0.0));
5249: } else {
5250: MatCheckPreallocated(mat, 1);
5251: PetscUseTypeMethod(mat, getrowsumabs, v);
5252: }
5253: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5254: PetscFunctionReturn(PETSC_SUCCESS);
5255: }
5257: /*@
5258: MatGetRowSum - Gets the sum of each row of the matrix
5260: Logically or Neighborhood Collective
5262: Input Parameter:
5263: . mat - the matrix
5265: Output Parameter:
5266: . v - the vector for storing the sum of rows
5268: Level: intermediate
5270: Note:
5271: This code is slow since it is not currently specialized for different formats
5273: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5274: @*/
5275: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5276: {
5277: Vec ones;
5279: PetscFunctionBegin;
5283: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5284: MatCheckPreallocated(mat, 1);
5285: PetscCall(MatCreateVecs(mat, &ones, NULL));
5286: PetscCall(VecSet(ones, 1.));
5287: PetscCall(MatMult(mat, ones, v));
5288: PetscCall(VecDestroy(&ones));
5289: PetscFunctionReturn(PETSC_SUCCESS);
5290: }
5292: /*@
5293: MatTransposeSetPrecursor - Set the matrix from which `B` will receive numerical transpose data in a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B),
5294: for use when `B` was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5296: Collective
5298: Input Parameter:
5299: . mat - the matrix to provide the transpose
5301: Output Parameter:
5302: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5304: Level: advanced
5306: Note:
5307: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5308: routine allows bypassing that call.
5310: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5311: @*/
5312: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5313: {
5314: MatParentState *rb = NULL;
5316: PetscFunctionBegin;
5317: PetscCall(PetscNew(&rb));
5318: rb->id = ((PetscObject)mat)->id;
5319: rb->state = 0;
5320: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5321: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscContainerUserDestroyDefault));
5322: PetscFunctionReturn(PETSC_SUCCESS);
5323: }
5325: /*@
5326: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5328: Collective
5330: Input Parameters:
5331: + mat - the matrix to transpose
5332: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5334: Output Parameter:
5335: . B - the transpose
5337: Level: intermediate
5339: Notes:
5340: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5342: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5343: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5345: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices, an error will be generated for some matrix types.
5347: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5349: If mat is unchanged from the last call this function returns immediately without recomputing the result
5351: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
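Example Usage:
A minimal sketch that forms the transpose once and later refreshes its numerical values after the entries of `A` change
.vb
  Mat At;

  PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));
  // ... change the entries of A without changing its nonzero structure ...
  PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));
  PetscCall(MatDestroy(&At));
.ve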
5353: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5354: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5355: @*/
5356: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5357: {
5358: PetscContainer rB = NULL;
5359: MatParentState *rb = NULL;
5361: PetscFunctionBegin;
5364: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5365: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5366: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5367: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5368: MatCheckPreallocated(mat, 1);
5369: if (reuse == MAT_REUSE_MATRIX) {
5370: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5371: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5372: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5373: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5374: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5375: }
5377: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5378: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5379: PetscUseTypeMethod(mat, transpose, reuse, B);
5380: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5381: }
5382: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5384: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5385: if (reuse != MAT_INPLACE_MATRIX) {
5386: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5387: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5388: rb->state = ((PetscObject)mat)->state;
5389: rb->nonzerostate = mat->nonzerostate;
5390: }
5391: PetscFunctionReturn(PETSC_SUCCESS);
5392: }
5394: /*@
5395: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5397: Collective
5399: Input Parameter:
5400: . A - the matrix to transpose
5402: Output Parameter:
5403: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5404: numerical portion.
5406: Level: intermediate
5408: Note:
5409: This is not supported for many matrix types, use `MatTranspose()` in those cases
5411: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5412: @*/
5413: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5414: {
5415: PetscFunctionBegin;
5418: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5419: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5420: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5421: PetscUseTypeMethod(A, transposesymbolic, B);
5422: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5424: PetscCall(MatTransposeSetPrecursor(A, *B));
5425: PetscFunctionReturn(PETSC_SUCCESS);
5426: }
5428: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5429: {
5430: PetscContainer rB;
5431: MatParentState *rb;
5433: PetscFunctionBegin;
5436: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5437: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5438: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5439: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5440: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5441: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5442: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5443: PetscFunctionReturn(PETSC_SUCCESS);
5444: }
5446: /*@
5447: MatIsTranspose - Test whether a matrix is another one's transpose,
5448: or its own, in which case it tests symmetry.
5450: Collective
5452: Input Parameters:
5453: + A - the matrix to test
5454: . B - the matrix to test against, this can equal the first parameter
5455: - tol - tolerance, differences between entries smaller than this are counted as zero
5457: Output Parameter:
5458: . flg - the result
5460: Level: intermediate
5462: Notes:
5463: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5464: test involves parallel copies of the block off-diagonal parts of the matrix.
5466: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5467: @*/
5468: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5469: {
5470: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5472: PetscFunctionBegin;
5475: PetscAssertPointer(flg, 4);
5476: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5477: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5478: *flg = PETSC_FALSE;
5479: if (f && g) {
5480: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5481: PetscCall((*f)(A, B, tol, flg));
5482: } else {
5483: MatType mattype;
5485: PetscCall(MatGetType(f ? B : A, &mattype));
5486: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5487: }
5488: PetscFunctionReturn(PETSC_SUCCESS);
5489: }
5491: /*@
5492: MatHermitianTranspose - Computes the in-place or out-of-place Hermitian (conjugate) transpose of a matrix.
5494: Collective
5496: Input Parameters:
5497: + mat - the matrix to transpose and complex conjugate
5498: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5500: Output Parameter:
5501: . B - the Hermitian transpose
5503: Level: intermediate
5505: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5506: @*/
5507: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5508: {
5509: PetscFunctionBegin;
5510: PetscCall(MatTranspose(mat, reuse, B));
5511: #if defined(PETSC_USE_COMPLEX)
5512: PetscCall(MatConjugate(*B));
5513: #endif
5514: PetscFunctionReturn(PETSC_SUCCESS);
5515: }
5517: /*@
5518: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5520: Collective
5522: Input Parameters:
5523: + A - the matrix to test
5524: . B - the matrix to test against, this can equal the first parameter
5525: - tol - tolerance, differences between entries smaller than this are counted as zero
5527: Output Parameter:
5528: . flg - the result
5530: Level: intermediate
5532: Notes:
5533: Only available for `MATAIJ` matrices.
5535: The sequential algorithm
5536: has a running time of the order of the number of nonzeros; the parallel
5537: test involves parallel copies of the block off-diagonal parts of the matrix.
5539: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5540: @*/
5541: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5542: {
5543: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5545: PetscFunctionBegin;
5548: PetscAssertPointer(flg, 4);
5549: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5550: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5551: if (f && g) {
5552: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5553: PetscCall((*f)(A, B, tol, flg));
5554: }
5555: PetscFunctionReturn(PETSC_SUCCESS);
5556: }
5558: /*@
5559: MatPermute - Creates a new matrix with rows and columns permuted from the
5560: original.
5562: Collective
5564: Input Parameters:
5565: + mat - the matrix to permute
5566: . row - row permutation, each processor supplies only the permutation for its rows
5567: - col - column permutation, each processor supplies only the permutation for its columns
5569: Output Parameter:
5570: . B - the permuted matrix
5572: Level: advanced
5574: Note:
5575: The index sets map from row/col of permuted matrix to row/col of original matrix.
5576: The index sets should be on the same communicator as mat and have the same local sizes.
5578: Developer Note:
5579: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5580: exploit the fact that row and col are permutations, consider implementing the
5581: more general `MatCreateSubMatrix()` instead.
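   Example Usage:
   A sketch that permutes a hypothetical, already assembled matrix `A` with a reordering computed by `MatGetOrdering()`:
.vb
  IS  rperm, cperm;
  Mat B;

  PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm));
  PetscCall(MatPermute(A, rperm, cperm, &B));
  // ... use B ...
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
  PetscCall(MatDestroy(&B));
.ve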
5583: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5584: @*/
5585: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5586: {
5587: PetscFunctionBegin;
5592: PetscAssertPointer(B, 4);
5593: PetscCheckSameComm(mat, 1, row, 2);
5594: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5596: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5597: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5598: MatCheckPreallocated(mat, 1);
5600: if (mat->ops->permute) {
5601: PetscUseTypeMethod(mat, permute, row, col, B);
5602: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5603: } else {
5604: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5605: }
5606: PetscFunctionReturn(PETSC_SUCCESS);
5607: }
5609: /*@
5610: MatEqual - Compares two matrices.
5612: Collective
5614: Input Parameters:
5615: + A - the first matrix
5616: - B - the second matrix
5618: Output Parameter:
5619: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5621: Level: intermediate
5623: .seealso: [](ch_matrices), `Mat`
5624: @*/
5625: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5626: {
5627: PetscFunctionBegin;
5632: PetscAssertPointer(flg, 3);
5633: PetscCheckSameComm(A, 1, B, 2);
5634: MatCheckPreallocated(A, 1);
5635: MatCheckPreallocated(B, 2);
5636: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5637: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5638: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5639: B->cmap->N);
5640: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5641: PetscUseTypeMethod(A, equal, B, flg);
5642: } else {
5643: PetscCall(MatMultEqual(A, B, 10, flg));
5644: }
5645: PetscFunctionReturn(PETSC_SUCCESS);
5646: }
5648: /*@
5649: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5650: matrices that are stored as vectors. Either of the two scaling
5651: matrices can be `NULL`.
5653: Collective
5655: Input Parameters:
5656: + mat - the matrix to be scaled
5657: . l - the left scaling vector (or `NULL`)
5658: - r - the right scaling vector (or `NULL`)
5660: Level: intermediate
5662: Note:
5663: `MatDiagonalScale()` computes $A = LAR$, where
5664: $L$ is a diagonal matrix (stored as the vector `l`) and $R$ is a diagonal matrix (stored as the vector `r`).
5665: $L$ scales the rows of the matrix and $R$ scales the columns of the matrix.
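   Example Usage:
   A minimal sketch that scales the rows by 2 and the columns by 1/2; the vectors `l` and `r` are created here only for illustration:
.vb
  Vec l, r;

  PetscCall(MatCreateVecs(mat, &r, &l)); // r is compatible with the columns, l with the rows
  PetscCall(VecSet(l, 2.0));
  PetscCall(VecSet(r, 0.5));
  PetscCall(MatDiagonalScale(mat, l, r));
  PetscCall(VecDestroy(&l));
  PetscCall(VecDestroy(&r));
.ve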
5667: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5668: @*/
5669: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5670: {
5671: PetscFunctionBegin;
5674: if (l) {
5676: PetscCheckSameComm(mat, 1, l, 2);
5677: }
5678: if (r) {
5680: PetscCheckSameComm(mat, 1, r, 3);
5681: }
5682: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5683: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5684: MatCheckPreallocated(mat, 1);
5685: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5687: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5688: PetscUseTypeMethod(mat, diagonalscale, l, r);
5689: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5690: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5691: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5692: PetscFunctionReturn(PETSC_SUCCESS);
5693: }
5695: /*@
5696: MatScale - Scales all elements of a matrix by a given number.
5698: Logically Collective
5700: Input Parameters:
5701: + mat - the matrix to be scaled
5702: - a - the scaling value
5704: Level: intermediate
5706: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5707: @*/
5708: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5709: {
5710: PetscFunctionBegin;
5713: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5714: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5716: MatCheckPreallocated(mat, 1);
5718: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5719: if (a != (PetscScalar)1.0) {
5720: PetscUseTypeMethod(mat, scale, a);
5721: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5722: }
5723: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5724: PetscFunctionReturn(PETSC_SUCCESS);
5725: }
5727: /*@
5728: MatNorm - Calculates various norms of a matrix.
5730: Collective
5732: Input Parameters:
5733: + mat - the matrix
5734: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5736: Output Parameter:
5737: . nrm - the resulting norm
5739: Level: intermediate
5741: .seealso: [](ch_matrices), `Mat`
5742: @*/
5743: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5744: {
5745: PetscFunctionBegin;
5748: PetscAssertPointer(nrm, 3);
5750: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5751: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5752: MatCheckPreallocated(mat, 1);
5754: PetscUseTypeMethod(mat, norm, type, nrm);
5755: PetscFunctionReturn(PETSC_SUCCESS);
5756: }
5758: /*
5759: This variable is used to prevent counting of MatAssemblyBegin() that
5760: are called from within a MatAssemblyEnd().
5761: */
5762: static PetscInt MatAssemblyEnd_InUse = 0;
5763: /*@
5764: MatAssemblyBegin - Begins assembling the matrix. This routine should
5765: be called after completing all calls to `MatSetValues()`.
5767: Collective
5769: Input Parameters:
5770: + mat - the matrix
5771: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5773: Level: beginner
5775: Notes:
5776: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5777: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5779: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5780: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5781: using the matrix.
5783: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5784: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5785: a global collective operation requiring all processes that share the matrix.
5787: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5788: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5789: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
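   Example Usage:
   A typical insert-then-assemble sketch; the indices `row`, `col` and the value `v` are hypothetical:
.vb
  PetscInt    row = 0, col = 0;
  PetscScalar v   = 1.0;

  PetscCall(MatSetValues(mat, 1, &row, 1, &col, &v, INSERT_VALUES));
  // ... further MatSetValues() calls ...
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve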
5791: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5792: @*/
5793: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5794: {
5795: PetscFunctionBegin;
5798: MatCheckPreallocated(mat, 1);
5799: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5800: if (mat->assembled) {
5801: mat->was_assembled = PETSC_TRUE;
5802: mat->assembled = PETSC_FALSE;
5803: }
5805: if (!MatAssemblyEnd_InUse) {
5806: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5807: PetscTryTypeMethod(mat, assemblybegin, type);
5808: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5809: } else PetscTryTypeMethod(mat, assemblybegin, type);
5810: PetscFunctionReturn(PETSC_SUCCESS);
5811: }
5813: /*@
5814: MatAssembled - Indicates if a matrix has been assembled and is ready for
5815: use; for example, in matrix-vector product.
5817: Not Collective
5819: Input Parameter:
5820: . mat - the matrix
5822: Output Parameter:
5823: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5825: Level: advanced
5827: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5828: @*/
5829: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5830: {
5831: PetscFunctionBegin;
5833: PetscAssertPointer(assembled, 2);
5834: *assembled = mat->assembled;
5835: PetscFunctionReturn(PETSC_SUCCESS);
5836: }
5838: /*@
5839: MatAssemblyEnd - Completes assembling the matrix. This routine should
5840: be called after `MatAssemblyBegin()`.
5842: Collective
5844: Input Parameters:
5845: + mat - the matrix
5846: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5848: Options Database Keys:
5849: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5850: . -mat_view ::ascii_info_detail - Prints more detailed info
5851: . -mat_view - Prints matrix in ASCII format
5852: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5853: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5854: . -display <name> - Sets display name (default is host)
5855: . -draw_pause <sec> - Sets number of seconds to pause after display
5856: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5857: . -viewer_socket_machine <machine> - Machine to use for socket
5858: . -viewer_socket_port <port> - Port number to use for socket
5859: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5861: Level: beginner
5863: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5864: @*/
5865: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5866: {
5867: static PetscInt inassm = 0;
5868: PetscBool flg = PETSC_FALSE;
5870: PetscFunctionBegin;
5874: inassm++;
5875: MatAssemblyEnd_InUse++;
5876: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5877: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5878: PetscTryTypeMethod(mat, assemblyend, type);
5879: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5880: } else PetscTryTypeMethod(mat, assemblyend, type);
5882: /* Flush assembly is not a true assembly */
5883: if (type != MAT_FLUSH_ASSEMBLY) {
5884: if (mat->num_ass) {
5885: if (!mat->symmetry_eternal) {
5886: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5887: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5888: }
5889: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5890: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5891: }
5892: mat->num_ass++;
5893: mat->assembled = PETSC_TRUE;
5894: mat->ass_nonzerostate = mat->nonzerostate;
5895: }
5897: mat->insertmode = NOT_SET_VALUES;
5898: MatAssemblyEnd_InUse--;
5899: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5900: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5901: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5903: if (mat->checksymmetryonassembly) {
5904: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5905: if (flg) {
5906: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5907: } else {
5908: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5909: }
5910: }
5911: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5912: }
5913: inassm--;
5914: PetscFunctionReturn(PETSC_SUCCESS);
5915: }
5917: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5918: /*@
5919: MatSetOption - Sets a parameter option for a matrix. Some options
5920: may be specific to certain storage formats. Some options
5921: determine how values will be inserted (or added). Sorted,
5922: row-oriented input will generally assemble the fastest. The default
5923: is row-oriented.
5925: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5927: Input Parameters:
5928: + mat - the matrix
5929: . op - the option, one of those listed below (and possibly others),
5930: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5932: Options Describing Matrix Structure:
5933: + `MAT_SPD` - symmetric positive definite
5934: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5935: . `MAT_HERMITIAN` - the matrix is equal to its complex conjugate transpose
5936: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5937: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5938: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5939: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5941: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that it
5942: does not need to be computed (usually at a high cost).
5944: Options For Use with `MatSetValues()`:
5945: Insert a logically dense subblock, which can be
5946: . `MAT_ROW_ORIENTED` - row-oriented (default)
5948: These options reflect the data you pass in with `MatSetValues()`; they have
5949: nothing to do with how the data is stored internally in the matrix
5950: data structure.
5952: When (re)assembling a matrix, we can restrict the input for
5953: efficiency/debugging purposes. These options include
5954: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5955: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5956: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5957: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5958: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5959: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5960: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5961: performance for very large process counts.
5962: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5963: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5964: functions, instead sending only neighbor messages.
5966: Level: intermediate
5968: Notes:
5969: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5971: Some options are relevant only for particular matrix types and
5972: are thus ignored by others. Other options are not supported by
5973: certain matrix types and will generate an error message if set.
5975: If using Fortran to compute a matrix, one may need to
5976: use the column-oriented option (or convert to the row-oriented
5977: format).
5979: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5980: that would generate a new entry in the nonzero structure is instead
5981: ignored. Thus, if memory has not already been allocated for this particular
5982: data, then the insertion is ignored. For dense matrices, in which
5983: the entire array is allocated, no entries are ever ignored.
5984: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one fewer global reduction.
5986: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5987: that would generate a new entry in the nonzero structure instead produces
5988: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one fewer global reduction.
5990: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5991: that would generate a new entry that has not been preallocated will
5992: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5993: only.) This is a useful flag when debugging matrix memory preallocation.
5994: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process requires one fewer global reduction.
5996: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5997: other processors should be dropped, rather than stashed.
5998: This is useful if you know that the "owning" processor is also
5999: always generating the correct matrix entries, so that PETSc need
6000: not transfer duplicate entries generated on another processor.
6002: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6003: searches during matrix assembly. When this flag is set, the hash table
6004: is created during the first matrix assembly. This hash table is
6005: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6006: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6007: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6008: supported by `MATMPIBAIJ` format only.
6010: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6011: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6013: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6014: a zero location in the matrix
6016: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6018: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6019: zero row routines and thus improves performance for very large process counts.
6021: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6022: part of the matrix (since they should match the upper triangular part).
6024: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6025: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6026: with finite difference schemes with non-periodic boundary conditions.
6028: Developer Note:
6029: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6030: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6031: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6032: not changed.
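   Example Usage:
   A sketch that records that a matrix is symmetric positive definite and that this property survives future assemblies; the
   last option shown is useful mainly while debugging preallocation:
.vb
  PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
  PetscCall(MatSetOption(mat, MAT_SPD_ETERNAL, PETSC_TRUE));
  PetscCall(MatSetOption(mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
.ve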
6034: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6035: @*/
6036: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6037: {
6038: PetscFunctionBegin;
6040: if (op > 0) {
6043: }
6045: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6047: switch (op) {
6048: case MAT_FORCE_DIAGONAL_ENTRIES:
6049: mat->force_diagonals = flg;
6050: PetscFunctionReturn(PETSC_SUCCESS);
6051: case MAT_NO_OFF_PROC_ENTRIES:
6052: mat->nooffprocentries = flg;
6053: PetscFunctionReturn(PETSC_SUCCESS);
6054: case MAT_SUBSET_OFF_PROC_ENTRIES:
6055: mat->assembly_subset = flg;
6056: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6057: #if !defined(PETSC_HAVE_MPIUNI)
6058: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6059: #endif
6060: mat->stash.first_assembly_done = PETSC_FALSE;
6061: }
6062: PetscFunctionReturn(PETSC_SUCCESS);
6063: case MAT_NO_OFF_PROC_ZERO_ROWS:
6064: mat->nooffproczerorows = flg;
6065: PetscFunctionReturn(PETSC_SUCCESS);
6066: case MAT_SPD:
6067: if (flg) {
6068: mat->spd = PETSC_BOOL3_TRUE;
6069: mat->symmetric = PETSC_BOOL3_TRUE;
6070: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6071: } else {
6072: mat->spd = PETSC_BOOL3_FALSE;
6073: }
6074: break;
6075: case MAT_SYMMETRIC:
6076: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6077: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6078: #if !defined(PETSC_USE_COMPLEX)
6079: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6080: #endif
6081: break;
6082: case MAT_HERMITIAN:
6083: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6084: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6085: #if !defined(PETSC_USE_COMPLEX)
6086: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6087: #endif
6088: break;
6089: case MAT_STRUCTURALLY_SYMMETRIC:
6090: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6091: break;
6092: case MAT_SYMMETRY_ETERNAL:
6093: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6094: mat->symmetry_eternal = flg;
6095: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6096: break;
6097: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6098: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6099: mat->structural_symmetry_eternal = flg;
6100: break;
6101: case MAT_SPD_ETERNAL:
6102: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6103: mat->spd_eternal = flg;
6104: if (flg) {
6105: mat->structural_symmetry_eternal = PETSC_TRUE;
6106: mat->symmetry_eternal = PETSC_TRUE;
6107: }
6108: break;
6109: case MAT_STRUCTURE_ONLY:
6110: mat->structure_only = flg;
6111: break;
6112: case MAT_SORTED_FULL:
6113: mat->sortedfull = flg;
6114: break;
6115: default:
6116: break;
6117: }
6118: PetscTryTypeMethod(mat, setoption, op, flg);
6119: PetscFunctionReturn(PETSC_SUCCESS);
6120: }
6122: /*@
6123: MatGetOption - Gets a parameter option that has been set for a matrix.
6125: Logically Collective
6127: Input Parameters:
6128: + mat - the matrix
6129: - op - the option, this only responds to certain options, check the code for which ones
6131: Output Parameter:
6132: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6134: Level: intermediate
6136: Notes:
6137: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6139: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6140: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6142: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6143: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6144: @*/
6145: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6146: {
6147: PetscFunctionBegin;
6151: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6152: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6154: switch (op) {
6155: case MAT_NO_OFF_PROC_ENTRIES:
6156: *flg = mat->nooffprocentries;
6157: break;
6158: case MAT_NO_OFF_PROC_ZERO_ROWS:
6159: *flg = mat->nooffproczerorows;
6160: break;
6161: case MAT_SYMMETRIC:
6162: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6163: break;
6164: case MAT_HERMITIAN:
6165: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6166: break;
6167: case MAT_STRUCTURALLY_SYMMETRIC:
6168: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6169: break;
6170: case MAT_SPD:
6171: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6172: break;
6173: case MAT_SYMMETRY_ETERNAL:
6174: *flg = mat->symmetry_eternal;
6175: break;
6176: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6177: *flg = mat->structural_symmetry_eternal;
6178: break;
6179: default:
6180: break;
6181: }
6182: PetscFunctionReturn(PETSC_SUCCESS);
6183: }
6185: /*@
6186: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6187: this routine retains the old nonzero structure.
6189: Logically Collective
6191: Input Parameter:
6192: . mat - the matrix
6194: Level: intermediate
6196: Note:
6197: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6198: See the Performance chapter of the users manual for information on preallocating matrices.
6200: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6201: @*/
6202: PetscErrorCode MatZeroEntries(Mat mat)
6203: {
6204: PetscFunctionBegin;
6207: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6208: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6209: MatCheckPreallocated(mat, 1);
6211: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6212: PetscUseTypeMethod(mat, zeroentries);
6213: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6214: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6215: PetscFunctionReturn(PETSC_SUCCESS);
6216: }
6218: /*@
6219: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6220: of a set of rows and columns of a matrix.
6222: Collective
6224: Input Parameters:
6225: + mat - the matrix
6226: . numRows - the number of rows/columns to zero
6227: . rows - the global row indices
6228: . diag - value put in the diagonal of the eliminated rows
6229: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6230: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6232: Level: intermediate
6234: Notes:
6235: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6237: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6238: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6240: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6241: Krylov method to take advantage of the known solution on the zeroed rows.
6243: For the parallel case, all processes that share the matrix (i.e.,
6244: those in the communicator used for matrix creation) MUST call this
6245: routine, regardless of whether any rows being zeroed are owned by
6246: them.
6248: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`; it merely zeros those entries in the matrix, but never
6249: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6250: missing.
6252: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6253: list only rows local to itself).
6255: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
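   Example Usage:
   A sketch of eliminating Dirichlet boundary conditions; the global indices in `bcrows` and the vectors `x` (holding the known
   boundary values) and `b` are hypothetical:
.vb
  PetscInt bcrows[] = {0, 7, 12};

  PetscCall(MatZeroRowsColumns(mat, 3, bcrows, 1.0, x, b));
.ve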
6257: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6258: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6259: @*/
6260: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6261: {
6262: PetscFunctionBegin;
6265: if (numRows) PetscAssertPointer(rows, 3);
6266: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6267: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6268: MatCheckPreallocated(mat, 1);
6270: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6271: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6272: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6273: PetscFunctionReturn(PETSC_SUCCESS);
6274: }
6276: /*@
6277: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6278: of a set of rows and columns of a matrix.
6280: Collective
6282: Input Parameters:
6283: + mat - the matrix
6284: . is - the rows to zero
6285: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6286: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6287: - b - optional vector of right-hand side, that will be adjusted by provided solution
6289: Level: intermediate
6291: Note:
6292: See `MatZeroRowsColumns()` for details on how this routine operates.
6294: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6295: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6296: @*/
6297: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6298: {
6299: PetscInt numRows;
6300: const PetscInt *rows;
6302: PetscFunctionBegin;
6307: PetscCall(ISGetLocalSize(is, &numRows));
6308: PetscCall(ISGetIndices(is, &rows));
6309: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6310: PetscCall(ISRestoreIndices(is, &rows));
6311: PetscFunctionReturn(PETSC_SUCCESS);
6312: }
6314: /*@
6315: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6316: of a set of rows of a matrix.
6318: Collective
6320: Input Parameters:
6321: + mat - the matrix
6322: . numRows - the number of rows to zero
6323: . rows - the global row indices
6324: . diag - value put in the diagonal of the zeroed rows
6325: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6326: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6328: Level: intermediate
6330: Notes:
6331: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6333: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6335: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6336: Krylov method to take advantage of the known solution on the zeroed rows.
6338: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6339: from the matrix).
6341: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6342: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6343: formats this does not alter the nonzero structure.
6345: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6346: of the matrix is not changed; the values are merely zeroed.
6349: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6350: can optionally remove the main diagonal entry from the
6351: nonzero structure as well, by passing 0.0 as the final argument).
6353: For the parallel case, all processes that share the matrix (i.e.,
6354: those in the communicator used for matrix creation) MUST call this
6355: routine, regardless of whether any rows being zeroed are owned by
6356: them.
6358: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6359: list only rows local to itself).
6361: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6362: owns that are to be zeroed. This saves a global synchronization in the implementation.
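   Example Usage:
   A sketch that zeros two (hypothetical) global rows, placing 1.0 on their diagonals and leaving the right-hand side untouched:
.vb
  PetscInt rows[] = {3, 4};

  PetscCall(MatZeroRows(mat, 2, rows, 1.0, NULL, NULL));
.ve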
6364: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6365: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6366: @*/
6367: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6368: {
6369: PetscFunctionBegin;
6372: if (numRows) PetscAssertPointer(rows, 3);
6373: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6374: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6375: MatCheckPreallocated(mat, 1);
6377: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6378: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6379: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6380: PetscFunctionReturn(PETSC_SUCCESS);
6381: }
6383: /*@
6384: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6385: of a set of rows of a matrix.
6387: Collective
6389: Input Parameters:
6390: + mat - the matrix
6391: . is - index set of rows to remove (if `NULL` then no row is removed)
6392: . diag - value put in all diagonals of eliminated rows
6393: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6394: - b - optional vector of right-hand side, that will be adjusted by provided solution
6396: Level: intermediate
6398: Note:
6399: See `MatZeroRows()` for details on how this routine operates.
6401: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6402: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6403: @*/
6404: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6405: {
6406: PetscInt numRows = 0;
6407: const PetscInt *rows = NULL;
6409: PetscFunctionBegin;
6412: if (is) {
6414: PetscCall(ISGetLocalSize(is, &numRows));
6415: PetscCall(ISGetIndices(is, &rows));
6416: }
6417: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6418: if (is) PetscCall(ISRestoreIndices(is, &rows));
6419: PetscFunctionReturn(PETSC_SUCCESS);
6420: }
6422: /*@
6423: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6424: of a set of rows of a matrix. These rows must be local to the process.
6426: Collective
6428: Input Parameters:
6429: + mat - the matrix
6430: . numRows - the number of rows to remove
6431: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6432: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6433: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6434: - b - optional vector of right-hand side, that will be adjusted by provided solution
6436: Level: intermediate
6438: Notes:
6439: See `MatZeroRows()` for details on how this routine operates.
6441: The grid coordinates are across the entire grid, not just the local portion
6443: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6444: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6445: etc., to obtain values that are obtained by wrapping from the left edge. This does not work for anything but the
6446: `DM_BOUNDARY_PERIODIC` boundary type.
6448: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6449: a single value per point) you can skip filling those indices.
6451: Fortran Note:
6452: `idxm` and `idxn` should be declared as
6453: $ MatStencil idxm(4, m)
6454: and the values inserted using
6455: .vb
6456: idxm(MatStencil_i, 1) = i
6457: idxm(MatStencil_j, 1) = j
6458: idxm(MatStencil_k, 1) = k
6459: idxm(MatStencil_c, 1) = c
6460: etc
6461: .ve
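   Example Usage:
   A sketch in C, assuming `mat` was obtained from `DMCreateMatrix()` on a 2d `DMDA` with one degree of freedom and that the
   hypothetical grid indices `i` and `j` belong to this process:
.vb
  MatStencil row;

  row.i = i;
  row.j = j;
  PetscCall(MatZeroRowsStencil(mat, 1, &row, 1.0, NULL, NULL));
.ve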
6463: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6464: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6465: @*/
6466: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6467: {
6468: PetscInt dim = mat->stencil.dim;
6469: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6470: PetscInt *dims = mat->stencil.dims + 1;
6471: PetscInt *starts = mat->stencil.starts;
6472: PetscInt *dxm = (PetscInt *)rows;
6473: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6475: PetscFunctionBegin;
6478: if (numRows) PetscAssertPointer(rows, 3);
6480: PetscCall(PetscMalloc1(numRows, &jdxm));
6481: for (i = 0; i < numRows; ++i) {
6482: /* Skip unused dimensions (they are ordered k, j, i, c) */
6483: for (j = 0; j < 3 - sdim; ++j) dxm++;
6484: /* Local index in X dir */
6485: tmp = *dxm++ - starts[0];
6486: /* Loop over remaining dimensions */
6487: for (j = 0; j < dim - 1; ++j) {
6488: /* If nonlocal, set index to be negative */
6489: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6490: /* Update local index */
6491: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6492: }
6493: /* Skip component slot if necessary */
6494: if (mat->stencil.noc) dxm++;
6495: /* Local row number */
6496: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6497: }
6498: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6499: PetscCall(PetscFree(jdxm));
6500: PetscFunctionReturn(PETSC_SUCCESS);
6501: }
6503: /*@
6504: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6505: of a set of rows and columns of a matrix.
6507: Collective
6509: Input Parameters:
6510: + mat - the matrix
6511: . numRows - the number of rows/columns to remove
6512: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6513: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6514: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6515: - b - optional vector of right-hand side, that will be adjusted by provided solution
6517: Level: intermediate
6519: Notes:
6520: See `MatZeroRowsColumns()` for details on how this routine operates.
6522: The grid coordinates are across the entire grid, not just the local portion
6524: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6525: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6526: etc., to obtain values that are obtained by wrapping from the left edge. This does not work for anything but the
6527: `DM_BOUNDARY_PERIODIC` boundary type.
6529: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6530: a single value per point) you can skip filling those indices.
6532: Fortran Note:
6533: `idxm` and `idxn` should be declared as
6534: $ MatStencil idxm(4, m)
6535: and the values inserted using
6536: .vb
6537: idxm(MatStencil_i, 1) = i
6538: idxm(MatStencil_j, 1) = j
6539: idxm(MatStencil_k, 1) = k
6540: idxm(MatStencil_c, 1) = c
6541: etc
6542: .ve
6544: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6545: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6546: @*/
6547: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6548: {
6549: PetscInt dim = mat->stencil.dim;
6550: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6551: PetscInt *dims = mat->stencil.dims + 1;
6552: PetscInt *starts = mat->stencil.starts;
6553: PetscInt *dxm = (PetscInt *)rows;
6554: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6556: PetscFunctionBegin;
6559: if (numRows) PetscAssertPointer(rows, 3);
6561: PetscCall(PetscMalloc1(numRows, &jdxm));
6562: for (i = 0; i < numRows; ++i) {
6563: /* Skip unused dimensions (they are ordered k, j, i, c) */
6564: for (j = 0; j < 3 - sdim; ++j) dxm++;
6565: /* Local index in X dir */
6566: tmp = *dxm++ - starts[0];
6567: /* Loop over remaining dimensions */
6568: for (j = 0; j < dim - 1; ++j) {
6569: /* If nonlocal, set index to be negative */
6570: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6571: /* Update local index */
6572: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6573: }
6574: /* Skip component slot if necessary */
6575: if (mat->stencil.noc) dxm++;
6576: /* Local row number */
6577: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6578: }
6579: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6580: PetscCall(PetscFree(jdxm));
6581: PetscFunctionReturn(PETSC_SUCCESS);
6582: }
6584: /*@
6585: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6586: of a set of rows of a matrix; using local numbering of rows.
6588: Collective
6590: Input Parameters:
6591: + mat - the matrix
6592: . numRows - the number of rows to remove
6593: . rows - the local row indices
6594: . diag - value put in all diagonals of eliminated rows
6595: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6596: - b - optional vector of right-hand side, that will be adjusted by provided solution
6598: Level: intermediate
6600: Notes:
6601: Before calling `MatZeroRowsLocal()`, the user must first set the
6602: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6604: See `MatZeroRows()` for details on how this routine operates.
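   Example Usage:
   A sketch using local row numbers; the mapping `ltog` and the local indices in `lrows` are hypothetical, and matrices
   obtained with `DMCreateMatrix()` already have a mapping set:
.vb
  PetscInt lrows[] = {0, 1};

  PetscCall(MatSetLocalToGlobalMapping(mat, ltog, ltog));
  PetscCall(MatZeroRowsLocal(mat, 2, lrows, 1.0, NULL, NULL));
.ve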
6606: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6607: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6608: @*/
6609: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6610: {
6611: PetscFunctionBegin;
6614: if (numRows) PetscAssertPointer(rows, 3);
6615: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6616: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6617: MatCheckPreallocated(mat, 1);
6619: if (mat->ops->zerorowslocal) {
6620: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6621: } else {
6622: IS is, newis;
6623: const PetscInt *newRows;
6625: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6626: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6627: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6628: PetscCall(ISGetIndices(newis, &newRows));
6629: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6630: PetscCall(ISRestoreIndices(newis, &newRows));
6631: PetscCall(ISDestroy(&newis));
6632: PetscCall(ISDestroy(&is));
6633: }
6634: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6635: PetscFunctionReturn(PETSC_SUCCESS);
6636: }
6638: /*@
6639: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6640: of a set of rows of a matrix; using local numbering of rows.
6642: Collective
6644: Input Parameters:
6645: + mat - the matrix
6646: . is - index set of rows to remove
6647: . diag - value put in all diagonals of eliminated rows
6648: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6649: - b - optional vector of right-hand side, that will be adjusted by provided solution
6651: Level: intermediate
6653: Notes:
6654: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6655: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6657: See `MatZeroRows()` for details on how this routine operates.
6659: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6660: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6661: @*/
6662: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6663: {
6664: PetscInt numRows;
6665: const PetscInt *rows;
6667: PetscFunctionBegin;
6671: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6672: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6673: MatCheckPreallocated(mat, 1);
6675: PetscCall(ISGetLocalSize(is, &numRows));
6676: PetscCall(ISGetIndices(is, &rows));
6677: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6678: PetscCall(ISRestoreIndices(is, &rows));
6679: PetscFunctionReturn(PETSC_SUCCESS);
6680: }
6682: /*@
6683: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6684: of a set of rows and columns of a matrix; using local numbering of rows.
6686: Collective
6688: Input Parameters:
6689: + mat - the matrix
6690: . numRows - the number of rows to remove
6691: . rows - the global row indices
6692: . diag - value put in all diagonals of eliminated rows
6693: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6694: - b - optional vector of right-hand side, that will be adjusted by provided solution
6696: Level: intermediate
6698: Notes:
6699: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6700: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6702: See `MatZeroRowsColumns()` for details on how this routine operates.
6704: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6705: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6706: @*/
6707: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6708: {
6709: IS is, newis;
6710: const PetscInt *newRows;
6712: PetscFunctionBegin;
6715: if (numRows) PetscAssertPointer(rows, 3);
6716: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6717: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6718: MatCheckPreallocated(mat, 1);
6720: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6721: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6722: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6723: PetscCall(ISGetIndices(newis, &newRows));
6724: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6725: PetscCall(ISRestoreIndices(newis, &newRows));
6726: PetscCall(ISDestroy(&newis));
6727: PetscCall(ISDestroy(&is));
6728: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6729: PetscFunctionReturn(PETSC_SUCCESS);
6730: }
6732: /*@
6733: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6734: of a set of rows and columns of a matrix; using local numbering of rows.
6736: Collective
6738: Input Parameters:
6739: + mat - the matrix
6740: . is - index set of rows to remove
6741: . diag - value put in all diagonals of eliminated rows
6742: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6743: - b - optional vector of right-hand side, that will be adjusted by provided solution
6745: Level: intermediate
6747: Notes:
6748: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6749: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6751: See `MatZeroRowsColumns()` for details on how this routine operates.
6753: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6754: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6755: @*/
6756: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6757: {
6758: PetscInt numRows;
6759: const PetscInt *rows;
6761: PetscFunctionBegin;
6765: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6766: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6767: MatCheckPreallocated(mat, 1);
6769: PetscCall(ISGetLocalSize(is, &numRows));
6770: PetscCall(ISGetIndices(is, &rows));
6771: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6772: PetscCall(ISRestoreIndices(is, &rows));
6773: PetscFunctionReturn(PETSC_SUCCESS);
6774: }
6776: /*@
6777: MatGetSize - Returns the numbers of rows and columns in a matrix.
6779: Not Collective
6781: Input Parameter:
6782: . mat - the matrix
6784: Output Parameters:
6785: + m - the number of global rows
6786: - n - the number of global columns
6788: Level: beginner
6790: Note:
6791: Both output parameters can be `NULL` on input.
6793: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6794: @*/
6795: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6796: {
6797: PetscFunctionBegin;
6799: if (m) *m = mat->rmap->N;
6800: if (n) *n = mat->cmap->N;
6801: PetscFunctionReturn(PETSC_SUCCESS);
6802: }
6804: /*@
6805: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6806: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6808: Not Collective
6810: Input Parameter:
6811: . mat - the matrix
6813: Output Parameters:
6814: + m - the number of local rows, use `NULL` to not obtain this value
6815: - n - the number of local columns, use `NULL` to not obtain this value
6817: Level: beginner
6819: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6820: @*/
6821: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6822: {
6823: PetscFunctionBegin;
6825: if (m) PetscAssertPointer(m, 2);
6826: if (n) PetscAssertPointer(n, 3);
6827: if (m) *m = mat->rmap->n;
6828: if (n) *n = mat->cmap->n;
6829: PetscFunctionReturn(PETSC_SUCCESS);
6830: }
6832: /*@
6833: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
6834: vector one multiplies this matrix by that are owned by this processor.
6836: Not Collective, unless matrix has not been allocated, then collective
6838: Input Parameter:
6839: . mat - the matrix
6841: Output Parameters:
6842: + m - the global index of the first local column, use `NULL` to not obtain this value
6843: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6845: Level: developer
6847: Notes:
6848: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6850: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6851: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6853: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6854: the local values in the matrix.
6856: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6857: Layouts](sec_matlayout) for details on matrix layouts.
6859: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6860: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6861: @*/
6862: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6863: {
6864: PetscFunctionBegin;
6867: if (m) PetscAssertPointer(m, 2);
6868: if (n) PetscAssertPointer(n, 3);
6869: MatCheckPreallocated(mat, 1);
6870: if (m) *m = mat->cmap->rstart;
6871: if (n) *n = mat->cmap->rend;
6872: PetscFunctionReturn(PETSC_SUCCESS);
6873: }
6875: /*@
6876: MatGetOwnershipRange - For matrices that own values by row, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6877: this MPI process.
6879: Not Collective
6881: Input Parameter:
6882: . mat - the matrix
6884: Output Parameters:
6885: + m - the global index of the first local row, use `NULL` to not obtain this value
6886: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6888: Level: beginner
6890: Notes:
6891: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6893: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6894: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6896: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6897: the local values in the matrix.
6899: The high argument is one more than the last element stored locally.
6901: For all matrices it returns the range of matrix rows associated with rows of a vector that
6902: would contain the result of a matrix vector product with this matrix. See [Matrix
6903: Layouts](sec_matlayout) for details on matrix layouts.
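Example Usage:
A common pattern, sketched under the assumption that `A` is a parallel `MATAIJ` matrix this process fills along its diagonal; `PetscCall()` error checking is omitted for brevity:
.vb
  PetscInt rstart, rend;
  MatGetOwnershipRange(A, &rstart, &rend);
  for (PetscInt i = rstart; i < rend; i++) {
    // insert values only into locally owned rows
    MatSetValue(A, i, i, 2.0, INSERT_VALUES);
  }
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
.ve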
6905: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6906: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6907: @*/
6908: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6909: {
6910: PetscFunctionBegin;
6913: if (m) PetscAssertPointer(m, 2);
6914: if (n) PetscAssertPointer(n, 3);
6915: MatCheckPreallocated(mat, 1);
6916: if (m) *m = mat->rmap->rstart;
6917: if (n) *n = mat->rmap->rend;
6918: PetscFunctionReturn(PETSC_SUCCESS);
6919: }
6921: /*@C
6922: MatGetOwnershipRanges - For matrices that own values by row, excluding `MATELEMENTAL` and
6923: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6925: Not Collective, unless matrix has not been allocated
6927: Input Parameter:
6928: . mat - the matrix
6930: Output Parameter:
6931: . ranges - start of each process's portion plus one more than the total length at the end, of length `size` + 1
6932: where `size` is the number of MPI processes used by `mat`
6934: Level: beginner
6936: Notes:
6937: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6939: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6940: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6942: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6943: the local values in the matrix.
6945: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6946: would contain the result of a matrix vector product with this matrix. See [Matrix
6947: Layouts](sec_matlayout) for details on matrix layouts.
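Example Usage:
A sketch that reports how many rows each rank owns; `A` is an assumed, already-created matrix and error checking is omitted:
.vb
  const PetscInt *ranges;
  PetscMPIInt     size;
  MPI_Comm_size(PetscObjectComm((PetscObject)A), &size);
  MatGetOwnershipRanges(A, &ranges);
  for (PetscMPIInt r = 0; r < size; r++) {
    // rows ranges[r] up to (but not including) ranges[r+1] are owned by rank r
    PetscPrintf(PETSC_COMM_WORLD, "rank %d owns %" PetscInt_FMT " rows\n", r, ranges[r + 1] - ranges[r]);
  }
.ve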
6949: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6950: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6951: `DMDAGetGhostCorners()`, `DM`
6952: @*/
6953: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
6954: {
6955: PetscFunctionBegin;
6958: MatCheckPreallocated(mat, 1);
6959: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6960: PetscFunctionReturn(PETSC_SUCCESS);
6961: }
6963: /*@C
6964: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with the rows, owned by each process, of a
6965: vector one multiplies this matrix by.
6967: Not Collective, unless matrix has not been allocated
6969: Input Parameter:
6970: . mat - the matrix
6972: Output Parameter:
6973: . ranges - start of each process's portion plus one more than the total length at the end
6975: Level: beginner
6977: Notes:
6978: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6980: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6981: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6983: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6984: the local values in the matrix.
6986: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6987: Layouts](sec_matlayout) for details on matrix layouts.
6989: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
6990: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
6991: `DMDAGetGhostCorners()`, `DM`
6992: @*/
6993: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
6994: {
6995: PetscFunctionBegin;
6998: MatCheckPreallocated(mat, 1);
6999: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
7000: PetscFunctionReturn(PETSC_SUCCESS);
7001: }
7003: /*@
7004: MatGetOwnershipIS - Gets the row and column ownership of a matrix's values as index sets.
7006: Not Collective
7008: Input Parameter:
7009: . A - matrix
7011: Output Parameters:
7012: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7013: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7015: Level: intermediate
7017: Notes:
7018: You should call `ISDestroy()` on the returned `IS` objects when they are no longer needed.
7020: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7021: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7022: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7023: details on matrix layouts.
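Example Usage:
A minimal sketch (assumes `A` is an already-created matrix; error checking omitted):
.vb
  IS       rows, cols;
  PetscInt nlocalrows;
  MatGetOwnershipIS(A, &rows, &cols);
  ISGetLocalSize(rows, &nlocalrows); // number of rows whose values this process owns
  ISDestroy(&rows);
  ISDestroy(&cols);
.ve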
7025: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7026: @*/
7027: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7028: {
7029: PetscErrorCode (*f)(Mat, IS *, IS *);
7031: PetscFunctionBegin;
7034: MatCheckPreallocated(A, 1);
7035: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7036: if (f) {
7037: PetscCall((*f)(A, rows, cols));
7038: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7039: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7040: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7041: }
7042: PetscFunctionReturn(PETSC_SUCCESS);
7043: }
7045: /*@
7046: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
7047: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7048: to complete the factorization.
7050: Collective
7052: Input Parameters:
7053: + fact - the factorized matrix obtained with `MatGetFactor()`
7054: . mat - the matrix
7055: . row - row permutation
7056: . col - column permutation
7057: - info - structure containing
7058: .vb
7059: levels - number of levels of fill.
7060: expected fill - as ratio of original fill.
7061: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7062: missing diagonal entries)
7063: .ve
7065: Level: developer
7067: Notes:
7068: See [Matrix Factorization](sec_matfactor) for additional information.
7070: Most users should employ the `KSP` interface for linear solvers
7071: instead of working directly with matrix algebra routines such as this.
7072: See, e.g., `KSPCreate()`.
7074: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
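Example Usage:
A rough sketch of driving an ILU(0) factorization and solve directly through the matrix interface (in practice `KSP`/`PC` performs these steps); `A`, `b`, and `x` are an assumed assembled matrix, right-hand side, and solution vector, and `PetscCall()` error checking is omitted:
.vb
  Mat           fact;
  IS            rowperm, colperm;
  MatFactorInfo info;
  MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &fact);
  MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm);
  MatFactorInfoInitialize(&info);
  info.levels = 0;   // ILU(0)
  info.fill   = 1.0; // expected fill as a ratio of the original fill
  MatILUFactorSymbolic(fact, A, rowperm, colperm, &info);
  MatLUFactorNumeric(fact, A, &info);
  MatSolve(fact, b, x);
  ISDestroy(&rowperm);
  ISDestroy(&colperm);
  MatDestroy(&fact);
.ve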
7076: Developer Note:
7077: The Fortran interface is not autogenerated as the
7078: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7080: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
7081: `MatGetOrdering()`, `MatFactorInfo`
7082: @*/
7083: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7084: {
7085: PetscFunctionBegin;
7090: PetscAssertPointer(info, 5);
7091: PetscAssertPointer(fact, 1);
7092: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7093: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7094: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7095: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7096: MatCheckPreallocated(mat, 2);
7098: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7099: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7100: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7101: PetscFunctionReturn(PETSC_SUCCESS);
7102: }
7104: /*@
7105: MatICCFactorSymbolic - Performs symbolic incomplete
7106: Cholesky factorization for a symmetric matrix. Use
7107: `MatCholeskyFactorNumeric()` to complete the factorization.
7109: Collective
7111: Input Parameters:
7112: + fact - the factorized matrix obtained with `MatGetFactor()`
7113: . mat - the matrix to be factored
7114: . perm - row and column permutation
7115: - info - structure containing
7116: .vb
7117: levels - number of levels of fill.
7118: expected fill - as ratio of original fill.
7119: .ve
7121: Level: developer
7123: Notes:
7124: Most users should employ the `KSP` interface for linear solvers
7125: instead of working directly with matrix algebra routines such as this.
7126: See, e.g., `KSPCreate()`.
7128: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7130: Developer Note:
7131: The Fortran interface is not autogenerated as the
7132: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7134: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7135: @*/
7136: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7137: {
7138: PetscFunctionBegin;
7142: PetscAssertPointer(info, 4);
7143: PetscAssertPointer(fact, 1);
7144: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7145: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7146: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7147: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7148: MatCheckPreallocated(mat, 2);
7150: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7151: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7152: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7153: PetscFunctionReturn(PETSC_SUCCESS);
7154: }
7156: /*@C
7157: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7158: points to an array of valid matrices, they may be reused to store the new
7159: submatrices.
7161: Collective
7163: Input Parameters:
7164: + mat - the matrix
7165: . n - the number of submatrices to be extracted (on this MPI process, may be zero)
7166: . irow - index set of rows to extract
7167: . icol - index set of columns to extract
7168: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7170: Output Parameter:
7171: . submat - the array of submatrices
7173: Level: advanced
7175: Notes:
7176: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7177: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7178: to extract a parallel submatrix.
7180: Some matrix types place restrictions on the row and column
7181: indices, such as that they be sorted or that they be equal to each other.
7183: The index sets may not have duplicate entries.
7185: When extracting submatrices from a parallel matrix, each processor can
7186: form a different submatrix by setting the rows and columns of its
7187: individual index sets according to the local submatrix desired.
7189: When finished using the submatrices, the user should destroy
7190: them with `MatDestroySubMatrices()`.
7192: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7193: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7195: This routine creates the matrices in submat; you should NOT create them before
7196: calling it. It also allocates the array of matrix pointers submat.
7198: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7199: request one row/column in a block, they must request all rows/columns that are in
7200: that block. For example, if the block size is 2 you cannot request just row 0 and
7201: column 0.
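Example Usage:
A sketch that extracts, on each process, the sequential submatrix of the locally owned rows (for simplicity `A` is assumed square so the same index set can serve for rows and columns); error checking omitted:
.vb
  IS       is;
  Mat     *submats;
  PetscInt rstart, rend;
  MatGetOwnershipRange(A, &rstart, &rend);
  ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is);
  MatCreateSubMatrices(A, 1, &is, &is, MAT_INITIAL_MATRIX, &submats);
  // submats[0] is a sequential matrix containing the requested rows and columns
  MatDestroySubMatrices(1, &submats);
  ISDestroy(&is);
.ve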
7203: Fortran Note:
7204: One must pass in as `submat` a `Mat` array of size at least `n`+1.
7206: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7207: @*/
7208: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7209: {
7210: PetscInt i;
7211: PetscBool eq;
7213: PetscFunctionBegin;
7216: if (n) {
7217: PetscAssertPointer(irow, 3);
7219: PetscAssertPointer(icol, 4);
7221: }
7222: PetscAssertPointer(submat, 6);
7223: if (n && scall == MAT_REUSE_MATRIX) {
7224: PetscAssertPointer(*submat, 6);
7226: }
7227: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7228: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7229: MatCheckPreallocated(mat, 1);
7230: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7231: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7232: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7233: for (i = 0; i < n; i++) {
7234: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7235: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7236: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7237: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7238: if (mat->boundtocpu && mat->bindingpropagates) {
7239: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7240: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7241: }
7242: #endif
7243: }
7244: PetscFunctionReturn(PETSC_SUCCESS);
7245: }
7247: /*@C
7248: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub-communicator of `mat` (by pairs of `IS` that may live on subcomms).
7250: Collective
7252: Input Parameters:
7253: + mat - the matrix
7254: . n - the number of submatrices to be extracted
7255: . irow - index set of rows to extract
7256: . icol - index set of columns to extract
7257: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7259: Output Parameter:
7260: . submat - the array of submatrices
7262: Level: advanced
7264: Note:
7265: This is used by `PCGASM`
7267: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7268: @*/
7269: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7270: {
7271: PetscInt i;
7272: PetscBool eq;
7274: PetscFunctionBegin;
7277: if (n) {
7278: PetscAssertPointer(irow, 3);
7280: PetscAssertPointer(icol, 4);
7282: }
7283: PetscAssertPointer(submat, 6);
7284: if (n && scall == MAT_REUSE_MATRIX) {
7285: PetscAssertPointer(*submat, 6);
7287: }
7288: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7289: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7290: MatCheckPreallocated(mat, 1);
7292: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7293: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7294: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7295: for (i = 0; i < n; i++) {
7296: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7297: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7298: }
7299: PetscFunctionReturn(PETSC_SUCCESS);
7300: }
7302: /*@C
7303: MatDestroyMatrices - Destroys an array of matrices.
7305: Collective
7307: Input Parameters:
7308: + n - the number of local matrices
7309: - mat - the matrices (this is a pointer to the array of matrices)
7311: Level: advanced
7313: Notes:
7314: Frees not only the matrices, but also the array that contains the matrices
7316: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7318: Fortran Note:
7319: Does not free the `mat` array.
7321: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7322: @*/
7323: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7324: {
7325: PetscInt i;
7327: PetscFunctionBegin;
7328: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7329: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7330: PetscAssertPointer(mat, 2);
7332: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7334: /* memory is allocated even if n = 0 */
7335: PetscCall(PetscFree(*mat));
7336: PetscFunctionReturn(PETSC_SUCCESS);
7337: }
7339: /*@C
7340: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7342: Collective
7344: Input Parameters:
7345: + n - the number of local matrices
7346: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7347: sequence of `MatCreateSubMatrices()`)
7349: Level: advanced
7351: Note:
7352: Frees not only the matrices, but also the array that contains the matrices
7354: Fortran Note:
7355: Does not free the `mat` array.
7357: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7358: @*/
7359: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7360: {
7361: Mat mat0;
7363: PetscFunctionBegin;
7364: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7365: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7366: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7367: PetscAssertPointer(mat, 2);
7369: mat0 = (*mat)[0];
7370: if (mat0 && mat0->ops->destroysubmatrices) {
7371: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7372: } else {
7373: PetscCall(MatDestroyMatrices(n, mat));
7374: }
7375: PetscFunctionReturn(PETSC_SUCCESS);
7376: }
7378: /*@
7379: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7381: Collective
7383: Input Parameter:
7384: . mat - the matrix
7386: Output Parameter:
7387: . matstruct - the sequential matrix with the nonzero structure of `mat`
7389: Level: developer
7391: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7392: @*/
7393: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7394: {
7395: PetscFunctionBegin;
7397: PetscAssertPointer(matstruct, 2);
7400: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7401: MatCheckPreallocated(mat, 1);
7403: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7404: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7405: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7406: PetscFunctionReturn(PETSC_SUCCESS);
7407: }
7409: /*@C
7410: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7412: Collective
7414: Input Parameter:
7415: . mat - the matrix
7417: Level: advanced
7419: Note:
7420: This is not needed; one can just call `MatDestroy()`
7422: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7423: @*/
7424: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7425: {
7426: PetscFunctionBegin;
7427: PetscAssertPointer(mat, 1);
7428: PetscCall(MatDestroy(mat));
7429: PetscFunctionReturn(PETSC_SUCCESS);
7430: }
7432: /*@
7433: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7434: replaces the index sets by larger ones that represent submatrices with
7435: additional overlap.
7437: Collective
7439: Input Parameters:
7440: + mat - the matrix
7441: . n - the number of index sets
7442: . is - the array of index sets (these index sets will be changed during the call)
7443: - ov - the additional overlap requested
7445: Options Database Key:
7446: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7448: Level: developer
7450: Note:
7451: The computed overlap preserves the matrix block sizes when the blocks are square.
7452: That is: if a matrix nonzero for a given block would increase the overlap, all columns associated with
7453: that block are included in the overlap regardless of whether each specific column would increase the overlap.
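Example Usage:
A sketch of the additive-Schwarz-style workflow this routine supports: start from the locally owned rows, grow the index set by one level of overlap, then extract the overlapping block (again assuming a square `A`; error checking omitted):
.vb
  IS       is;
  Mat     *submats;
  PetscInt rstart, rend;
  MatGetOwnershipRange(A, &rstart, &rend);
  ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &is);
  MatIncreaseOverlap(A, 1, &is, 1); // replaces is with a larger index set
  MatCreateSubMatrices(A, 1, &is, &is, MAT_INITIAL_MATRIX, &submats);
  MatDestroySubMatrices(1, &submats);
  ISDestroy(&is);
.ve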
7455: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7456: @*/
7457: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7458: {
7459: PetscInt i, bs, cbs;
7461: PetscFunctionBegin;
7465: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7466: if (n) {
7467: PetscAssertPointer(is, 3);
7469: }
7470: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7471: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7472: MatCheckPreallocated(mat, 1);
7474: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7475: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7476: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7477: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7478: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7479: if (bs == cbs) {
7480: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7481: }
7482: PetscFunctionReturn(PETSC_SUCCESS);
7483: }
7485: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7487: /*@
7488: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7489: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7490: additional overlap.
7492: Collective
7494: Input Parameters:
7495: + mat - the matrix
7496: . n - the number of index sets
7497: . is - the array of index sets (these index sets will be changed during the call)
7498: - ov - the additional overlap requested
7500: Options Database Key:
7501: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7503: Level: developer
7505: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7506: @*/
7507: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7508: {
7509: PetscInt i;
7511: PetscFunctionBegin;
7514: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7515: if (n) {
7516: PetscAssertPointer(is, 3);
7518: }
7519: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7520: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7521: MatCheckPreallocated(mat, 1);
7522: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7523: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7524: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7525: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7526: PetscFunctionReturn(PETSC_SUCCESS);
7527: }
7529: /*@
7530: MatGetBlockSize - Returns the matrix block size.
7532: Not Collective
7534: Input Parameter:
7535: . mat - the matrix
7537: Output Parameter:
7538: . bs - block size
7540: Level: intermediate
7542: Notes:
7543: Block row formats, `MATBAIJ` and `MATSBAIJ`, ALWAYS have square block storage in the matrix.
7545: If the block size has not been set yet, this routine returns 1.
7547: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7548: @*/
7549: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7550: {
7551: PetscFunctionBegin;
7553: PetscAssertPointer(bs, 2);
7554: *bs = PetscAbs(mat->rmap->bs);
7555: PetscFunctionReturn(PETSC_SUCCESS);
7556: }
7558: /*@
7559: MatGetBlockSizes - Returns the matrix block row and column sizes.
7561: Not Collective
7563: Input Parameter:
7564: . mat - the matrix
7566: Output Parameters:
7567: + rbs - row block size
7568: - cbs - column block size
7570: Level: intermediate
7572: Notes:
7573: Block row formats, `MATBAIJ` and `MATSBAIJ`, ALWAYS have square block storage in the matrix.
7574: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7576: If a block size has not been set yet, this routine returns 1.
7578: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7579: @*/
7580: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7581: {
7582: PetscFunctionBegin;
7584: if (rbs) PetscAssertPointer(rbs, 2);
7585: if (cbs) PetscAssertPointer(cbs, 3);
7586: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7587: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7588: PetscFunctionReturn(PETSC_SUCCESS);
7589: }
7591: /*@
7592: MatSetBlockSize - Sets the matrix block size.
7594: Logically Collective
7596: Input Parameters:
7597: + mat - the matrix
7598: - bs - block size
7600: Level: intermediate
7602: Notes:
7603: Block row formats, `MATBAIJ` and `MATSBAIJ`, ALWAYS have square block storage in the matrix.
7604: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7606: For the `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7607: is compatible with the matrix local sizes.
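Example Usage:
A sketch of setting the block size before preallocation; `M` and `N` are assumed global sizes divisible by the block size, and error checking is omitted:
.vb
  Mat A;
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, N);
  MatSetType(A, MATBAIJ);
  MatSetBlockSize(A, 2); // must precede MatSetUp()/preallocation for MATBAIJ
  MatSetUp(A);
.ve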
7609: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7610: @*/
7611: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7612: {
7613: PetscFunctionBegin;
7616: PetscCall(MatSetBlockSizes(mat, bs, bs));
7617: PetscFunctionReturn(PETSC_SUCCESS);
7618: }
7620: typedef struct {
7621: PetscInt n;
7622: IS *is;
7623: Mat *mat;
7624: PetscObjectState nonzerostate;
7625: Mat C;
7626: } EnvelopeData;
7628: static PetscErrorCode EnvelopeDataDestroy(void *ptr)
7629: {
7630: EnvelopeData *edata = (EnvelopeData *)ptr;
7632: PetscFunctionBegin;
7633: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7634: PetscCall(PetscFree(edata->is));
7635: PetscCall(PetscFree(edata));
7636: PetscFunctionReturn(PETSC_SUCCESS);
7637: }
7639: /*@
7640: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7641: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7643: Collective
7645: Input Parameter:
7646: . mat - the matrix
7648: Level: intermediate
7650: Notes:
7651: There can be zeros within the blocks
7653: The blocks can overlap between processes, including lying on more than two processes
7655: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7656: @*/
7657: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7658: {
7659: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7660: PetscInt *diag, *odiag, sc;
7661: VecScatter scatter;
7662: PetscScalar *seqv;
7663: const PetscScalar *parv;
7664: const PetscInt *ia, *ja;
7665: PetscBool set, flag, done;
7666: Mat AA = mat, A;
7667: MPI_Comm comm;
7668: PetscMPIInt rank, size, tag;
7669: MPI_Status status;
7670: PetscContainer container;
7671: EnvelopeData *edata;
7672: Vec seq, par;
7673: IS isglobal;
7675: PetscFunctionBegin;
7677: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7678: if (!set || !flag) {
7679: /* TODO: only needs nonzero structure of transpose */
7680: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7681: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7682: }
7683: PetscCall(MatAIJGetLocalMat(AA, &A));
7684: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7685: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7687: PetscCall(MatGetLocalSize(mat, &n, NULL));
7688: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7689: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7690: PetscCallMPI(MPI_Comm_size(comm, &size));
7691: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7693: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7695: if (rank > 0) {
7696: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7697: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7698: }
7699: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7700: for (i = 0; i < n; i++) {
7701: env = PetscMax(env, ja[ia[i + 1] - 1]);
7702: II = rstart + i;
7703: if (env == II) {
7704: starts[lblocks] = tbs;
7705: sizes[lblocks++] = 1 + II - tbs;
7706: tbs = 1 + II;
7707: }
7708: }
7709: if (rank < size - 1) {
7710: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7711: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7712: }
7714: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7715: if (!set || !flag) PetscCall(MatDestroy(&AA));
7716: PetscCall(MatDestroy(&A));
7718: PetscCall(PetscNew(&edata));
7719: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7720: edata->n = lblocks;
7721: /* create IS needed for extracting blocks from the original matrix */
7722: PetscCall(PetscMalloc1(lblocks, &edata->is));
7723: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7725: /* Create the resulting inverse matrix structure with preallocation information */
7726: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7727: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7728: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7729: PetscCall(MatSetType(edata->C, MATAIJ));
7731: /* Communicate the start and end of each row, from each block to the correct rank */
7732: /* TODO: Use PetscSF instead of VecScatter */
7733: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7734: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7735: PetscCall(VecGetArrayWrite(seq, &seqv));
7736: for (PetscInt i = 0; i < lblocks; i++) {
7737: for (PetscInt j = 0; j < sizes[i]; j++) {
7738: seqv[cnt] = starts[i];
7739: seqv[cnt + 1] = starts[i] + sizes[i];
7740: cnt += 2;
7741: }
7742: }
7743: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7744: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7745: sc -= cnt;
7746: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7747: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7748: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7749: PetscCall(ISDestroy(&isglobal));
7750: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7751: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7752: PetscCall(VecScatterDestroy(&scatter));
7753: PetscCall(VecDestroy(&seq));
7754: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7755: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7756: PetscCall(VecGetArrayRead(par, &parv));
7757: cnt = 0;
7758: PetscCall(MatGetSize(mat, NULL, &n));
7759: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7760: PetscInt start, end, d = 0, od = 0;
7762: start = (PetscInt)PetscRealPart(parv[cnt]);
7763: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7764: cnt += 2;
7766: if (start < cstart) {
7767: od += cstart - start + n - cend;
7768: d += cend - cstart;
7769: } else if (start < cend) {
7770: od += n - cend;
7771: d += cend - start;
7772: } else od += n - start;
7773: if (end <= cstart) {
7774: od -= cstart - end + n - cend;
7775: d -= cend - cstart;
7776: } else if (end < cend) {
7777: od -= n - cend;
7778: d -= cend - end;
7779: } else od -= n - end;
7781: odiag[i] = od;
7782: diag[i] = d;
7783: }
7784: PetscCall(VecRestoreArrayRead(par, &parv));
7785: PetscCall(VecDestroy(&par));
7786: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7787: PetscCall(PetscFree2(diag, odiag));
7788: PetscCall(PetscFree2(sizes, starts));
7790: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7791: PetscCall(PetscContainerSetPointer(container, edata));
7792: PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode (*)(void *))EnvelopeDataDestroy));
7793: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7794: PetscCall(PetscObjectDereference((PetscObject)container));
7795: PetscFunctionReturn(PETSC_SUCCESS);
7796: }
7798: /*@
7799: MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`
7801: Collective
7803: Input Parameters:
7804: + A - the matrix
7805: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7807: Output Parameter:
7808: . C - matrix with inverted block diagonal of `A`
7810: Level: advanced
7812: Note:
7813: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7815: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7816: @*/
7817: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7818: {
7819: PetscContainer container;
7820: EnvelopeData *edata;
7821: PetscObjectState nonzerostate;
7823: PetscFunctionBegin;
7824: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7825: if (!container) {
7826: PetscCall(MatComputeVariableBlockEnvelope(A));
7827: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7828: }
7829: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7830: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7831: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7832: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7834: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7835: *C = edata->C;
7837: for (PetscInt i = 0; i < edata->n; i++) {
7838: Mat D;
7839: PetscScalar *dvalues;
7841: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7842: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7843: PetscCall(MatSeqDenseInvert(D));
7844: PetscCall(MatDenseGetArray(D, &dvalues));
7845: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7846: PetscCall(MatDestroy(&D));
7847: }
7848: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7849: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7850: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7851: PetscFunctionReturn(PETSC_SUCCESS);
7852: }
7854: /*@
7855: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7857: Not Collective
7859: Input Parameters:
7860: + mat - the matrix
7861: . nblocks - the number of blocks on this process, each block can only exist on a single process
7862: - bsizes - the block sizes
7864: Level: intermediate
7866: Notes:
7867: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7869: Each variable point-block set of degrees of freedom must live on a single MPI process. That is, a point block cannot straddle two MPI processes.
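Example Usage:
A sketch for a process that owns 5 rows arranged as one 2x2 and one 3x3 diagonal block (the block sizes must sum to the local number of rows of `A`, an assumed matrix); error checking omitted:
.vb
  const PetscInt bsizes[] = {2, 3};
  MatSetVariableBlockSizes(A, 2, bsizes);
.ve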
7871: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7872: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7873: @*/
7874: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7875: {
7876: PetscInt ncnt = 0, nlocal;
7878: PetscFunctionBegin;
7880: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7881: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7882: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7883: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7884: PetscCall(PetscFree(mat->bsizes));
7885: mat->nblocks = nblocks;
7886: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7887: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7888: PetscFunctionReturn(PETSC_SUCCESS);
7889: }
7891: /*@C
7892: MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix that need not be of the same size
7894: Not Collective; No Fortran Support
7896: Input Parameter:
7897: . mat - the matrix
7899: Output Parameters:
7900: + nblocks - the number of blocks on this process
7901: - bsizes - the block sizes
7903: Level: intermediate
7905: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7906: @*/
7907: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7908: {
7909: PetscFunctionBegin;
7911: if (nblocks) *nblocks = mat->nblocks;
7912: if (bsizes) *bsizes = mat->bsizes;
7913: PetscFunctionReturn(PETSC_SUCCESS);
7914: }
7916: /*@
7917: MatSetBlockSizes - Sets the matrix block row and column sizes.
7919: Logically Collective
7921: Input Parameters:
7922: + mat - the matrix
7923: . rbs - row block size
7924: - cbs - column block size
7926: Level: intermediate
7928: Notes:
7929: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7930: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7931: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7933: For `MATAIJ` matrices this function can be called at a later stage, provided that the specified block sizes
7934: are compatible with the matrix local sizes.
7936: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
7938: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7939: @*/
7940: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7941: {
7942: PetscFunctionBegin;
7946: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7947: if (mat->rmap->refcnt) {
7948: ISLocalToGlobalMapping l2g = NULL;
7949: PetscLayout nmap = NULL;
7951: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7952: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7953: PetscCall(PetscLayoutDestroy(&mat->rmap));
7954: mat->rmap = nmap;
7955: mat->rmap->mapping = l2g;
7956: }
7957: if (mat->cmap->refcnt) {
7958: ISLocalToGlobalMapping l2g = NULL;
7959: PetscLayout nmap = NULL;
7961: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7962: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7963: PetscCall(PetscLayoutDestroy(&mat->cmap));
7964: mat->cmap = nmap;
7965: mat->cmap->mapping = l2g;
7966: }
7967: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7968: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7969: PetscFunctionReturn(PETSC_SUCCESS);
7970: }
7972: /*@
7973: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7975: Logically Collective
7977: Input Parameters:
7978: + mat - the matrix
7979: . fromRow - matrix from which to copy row block size
7980: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7982: Level: developer
7984: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7985: @*/
7986: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7987: {
7988: PetscFunctionBegin;
7992: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7993: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7994: PetscFunctionReturn(PETSC_SUCCESS);
7995: }
7997: /*@
7998: MatResidual - Default routine to calculate the residual r = b - Ax
8000: Collective
8002: Input Parameters:
8003: + mat - the matrix
8004: . b - the right-hand-side
8005: - x - the approximate solution
8007: Output Parameter:
8008: . r - location to store the residual
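Example Usage:
A minimal sketch; `A`, `b`, and `x` are an assumed assembled matrix, right-hand side, and approximate solution, and error checking is omitted:
.vb
  Vec       r;
  PetscReal norm;
  VecDuplicate(b, &r);
  MatResidual(A, b, x, r);   // r = b - A*x
  VecNorm(r, NORM_2, &norm); // size of the residual
  VecDestroy(&r);
.ve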
8010: Level: developer
8012: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8013: @*/
8014: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8015: {
8016: PetscFunctionBegin;
8022: MatCheckPreallocated(mat, 1);
8023: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8024: if (!mat->ops->residual) {
8025: PetscCall(MatMult(mat, x, r));
8026: PetscCall(VecAYPX(r, -1.0, b));
8027: } else {
8028: PetscUseTypeMethod(mat, residual, b, x, r);
8029: }
8030: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8031: PetscFunctionReturn(PETSC_SUCCESS);
8032: }
8034: /*MC
8035: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
8037: Synopsis:
8038: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8040: Not Collective
8042: Input Parameters:
8043: + A - the matrix
8044: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8045: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8046: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8047: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8048: always used.
8050: Output Parameters:
8051: + n - number of local rows in the (possibly compressed) matrix
8052: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8053: . ja - the column indices
8054: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8055: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8057: Level: developer
8059: Note:
8060: Use `MatRestoreRowIJF90()` when you no longer need access to the data
8062: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
8063: M*/
8065: /*MC
8066: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
8068: Synopsis:
8069: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8071: Not Collective
8073: Input Parameters:
8074: + A - the matrix
8075: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8076: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8077: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8078: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8079: always used.
8080: . n - number of local rows in the (possibly compressed) matrix
8081: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8082: . ja - the column indices
8083: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8084: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8086: Level: developer
8088: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
8089: M*/
8091: /*@C
8092: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8094: Collective
8096: Input Parameters:
8097: + mat - the matrix
8098: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8099: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8100: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8101: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8102: always used.
8104: Output Parameters:
8105: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8106: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8107: . ja - the column indices, use `NULL` if not needed
8108: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8109: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8111: Level: developer
8113: Notes:
8114: You CANNOT change any of the ia[] or ja[] values.
8116: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
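Example Usage:
A sketch of walking the compressed row structure of a matrix type that supports this query (e.g. `MATSEQAIJ`); the `done` flag must be checked since not all types provide the arrays, and error checking is omitted:
.vb
  PetscInt        nrows;
  const PetscInt *ia, *ja;
  PetscBool       done;
  MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done);
  if (done) {
    for (PetscInt i = 0; i < nrows; i++) {
      // the column indices of row i are ja[ia[i]] .. ja[ia[i + 1] - 1]
    }
  }
  MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done);
.ve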
8118: Fortran Notes:
8119: Use
8120: .vb
8121: PetscInt, pointer :: ia(:),ja(:)
8122: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8123: ! Access the ith and jth entries via ia(i) and ja(j)
8124: .ve
8126: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
8128: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8129: @*/
8130: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8131: {
8132: PetscFunctionBegin;
8135: if (n) PetscAssertPointer(n, 5);
8136: if (ia) PetscAssertPointer(ia, 6);
8137: if (ja) PetscAssertPointer(ja, 7);
8138: if (done) PetscAssertPointer(done, 8);
8139: MatCheckPreallocated(mat, 1);
8140: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8141: else {
8142: if (done) *done = PETSC_TRUE;
8143: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8144: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8145: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8146: }
8147: PetscFunctionReturn(PETSC_SUCCESS);
8148: }
8150: /*@C
8151: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8153: Collective
8155: Input Parameters:
8156: + mat - the matrix
8157: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8158: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8159: symmetrized
8160: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8161: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8162: always used.
8163: . n - number of columns in the (possibly compressed) matrix
8164: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
8165: - ja - the row indices
8167: Output Parameter:
8168: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8170: Level: developer
8172: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8173: @*/
8174: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8175: {
8176: PetscFunctionBegin;
8179: PetscAssertPointer(n, 5);
8180: if (ia) PetscAssertPointer(ia, 6);
8181: if (ja) PetscAssertPointer(ja, 7);
8182: PetscAssertPointer(done, 8);
8183: MatCheckPreallocated(mat, 1);
8184: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8185: else {
8186: *done = PETSC_TRUE;
8187: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8188: }
8189: PetscFunctionReturn(PETSC_SUCCESS);
8190: }
8192: /*@C
8193: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8195: Collective
8197: Input Parameters:
8198: + mat - the matrix
8199: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8200: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8201: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8202: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8203: always used.
8204: . n - size of (possibly compressed) matrix
8205: . ia - the row pointers
8206: - ja - the column indices
8208: Output Parameter:
8209: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8211: Level: developer
8213: Note:
8214: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8215: use of the array after it has been restored. If you pass `NULL`, it will
8216: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8218: Fortran Note:
8219: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8221: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8222: @*/
8223: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8224: {
8225: PetscFunctionBegin;
8228: if (ia) PetscAssertPointer(ia, 6);
8229: if (ja) PetscAssertPointer(ja, 7);
8230: if (done) PetscAssertPointer(done, 8);
8231: MatCheckPreallocated(mat, 1);
8233: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8234: else {
8235: if (done) *done = PETSC_TRUE;
8236: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8237: if (n) *n = 0;
8238: if (ia) *ia = NULL;
8239: if (ja) *ja = NULL;
8240: }
8241: PetscFunctionReturn(PETSC_SUCCESS);
8242: }
8244: /*@C
8245: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8247: Collective
8249: Input Parameters:
8250: + mat - the matrix
8251: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8252: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8253: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8254: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8255: always used.
8257: Output Parameters:
8258: + n - size of (possibly compressed) matrix
8259: . ia - the column pointers
8260: . ja - the row indices
8261: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8263: Level: developer
8265: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8266: @*/
8267: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8268: {
8269: PetscFunctionBegin;
8272: if (ia) PetscAssertPointer(ia, 6);
8273: if (ja) PetscAssertPointer(ja, 7);
8274: PetscAssertPointer(done, 8);
8275: MatCheckPreallocated(mat, 1);
8277: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8278: else {
8279: *done = PETSC_TRUE;
8280: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8281: if (n) *n = 0;
8282: if (ia) *ia = NULL;
8283: if (ja) *ja = NULL;
8284: }
8285: PetscFunctionReturn(PETSC_SUCCESS);
8286: }
8288: /*@
8289: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8290: `MatGetColumnIJ()`.
8292: Collective
8294: Input Parameters:
8295: + mat - the matrix
8296: . ncolors - maximum color value
8297: . n - number of entries in colorarray
8298: - colorarray - array indicating color for each column
8300: Output Parameter:
8301: . iscoloring - coloring generated using colorarray information
8303: Level: developer
8305: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8306: @*/
8307: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8308: {
8309: PetscFunctionBegin;
8312: PetscAssertPointer(colorarray, 4);
8313: PetscAssertPointer(iscoloring, 5);
8314: MatCheckPreallocated(mat, 1);
8316: if (!mat->ops->coloringpatch) {
8317: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8318: } else {
8319: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8320: }
8321: PetscFunctionReturn(PETSC_SUCCESS);
8322: }
8324: /*@
8325: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8327: Logically Collective
8329: Input Parameter:
8330: . mat - the factored matrix to be reset
8332: Level: developer
8334: Notes:
8335: This routine should be used only with factored matrices formed by in-place
8336: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8337: format). This option can save memory, for example, when solving nonlinear
8338: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8339: ILU(0) preconditioner.
8341: One can specify in-place ILU(0) factorization by calling
8342: .vb
8343: PCSetType(pc,PCILU);
8344: PCFactorSetUseInPlace(pc,PETSC_TRUE);
8345: .ve
8346: or by using the options -pc_type ilu -pc_factor_in_place
8348: In-place factorization ILU(0) can also be used as a local
8349: solver for the blocks within the block Jacobi or additive Schwarz
8350: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8351: for details on setting local solver options.
8353: Most users should employ the `KSP` interface for linear solvers
8354: instead of working directly with matrix algebra routines such as this.
8355: See, e.g., `KSPCreate()`.
8357: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8358: @*/
8359: PetscErrorCode MatSetUnfactored(Mat mat)
8360: {
8361: PetscFunctionBegin;
8364: MatCheckPreallocated(mat, 1);
8365: mat->factortype = MAT_FACTOR_NONE;
8366: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8367: PetscUseTypeMethod(mat, setunfactored);
8368: PetscFunctionReturn(PETSC_SUCCESS);
8369: }
8371: /*MC
8372: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8374: Synopsis:
8375: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8377: Not Collective
8379: Input Parameter:
8380: . x - matrix
8382: Output Parameters:
8383: + xx_v - the Fortran pointer to the array
8384: - ierr - error code
8386: Example of Usage:
8387: .vb
8388: PetscScalar, pointer :: xx_v(:,:)
8389: ....
8390: call MatDenseGetArrayF90(x,xx_v,ierr)
8391: a = xx_v(3,1)
8392: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8393: .ve
8395: Level: advanced
8397: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8398: M*/
8400: /*MC
8401: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8402: accessed with `MatDenseGetArrayF90()`.
8404: Synopsis:
8405: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8407: Not Collective
8409: Input Parameters:
8410: + x - matrix
8411: - xx_v - the Fortran90 pointer to the array
8413: Output Parameter:
8414: . ierr - error code
8416: Example of Usage:
8417: .vb
8418: PetscScalar, pointer :: xx_v(:,:)
8419: ....
8420: call MatDenseGetArrayF90(x,xx_v,ierr)
8421: a = xx_v(3,1)
8422: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8423: .ve
8425: Level: advanced
8427: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8428: M*/
8430: /*MC
8431: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8433: Synopsis:
8434: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8436: Not Collective
8438: Input Parameter:
8439: . x - matrix
8441: Output Parameters:
8442: + xx_v - the Fortran pointer to the array
8443: - ierr - error code
8445: Example of Usage:
8446: .vb
8447: PetscScalar, pointer :: xx_v(:)
8448: ....
8449: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8450: a = xx_v(3)
8451: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8452: .ve
8454: Level: advanced
8456: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8457: M*/
8459: /*MC
8460: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8461: accessed with `MatSeqAIJGetArrayF90()`.
8463: Synopsis:
8464: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8466: Not Collective
8468: Input Parameters:
8469: + x - matrix
8470: - xx_v - the Fortran90 pointer to the array
8472: Output Parameter:
8473: . ierr - error code
8475: Example of Usage:
8476: .vb
8477: PetscScalar, pointer :: xx_v(:)
8478: ....
8479: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8480: a = xx_v(3)
8481: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8482: .ve
8484: Level: advanced
8486: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8487: M*/
8489: /*@
8490: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8491: as the original matrix.
8493: Collective
8495: Input Parameters:
8496: + mat - the original matrix
8497: . isrow - parallel `IS` containing the rows this processor should obtain
8498: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" of the new matrix.
8499: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8501: Output Parameter:
8502: . newmat - the new submatrix, of the same type as the original matrix
8504: Level: advanced
8506: Notes:
8507: The submatrix can be multiplied by vectors that have the same parallel layout as `iscol`.
8509: Some matrix types place restrictions on the row and column indices, such
8510: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8511: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8513: The index sets may not have duplicate entries.
8515: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8516: and the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8517: to this routine with a mat of the same nonzero structure and with a cll of `MAT_REUSE_MATRIX`
8518: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8519: you are finished using it.
8521: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8522: the input matrix.
8524: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8526: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8527: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8529: Example usage:
8530: Consider the following 8x8 matrix with 34 non-zero values, which is
8531: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8532: proc1 owns 3 rows, and proc2 owns 2 rows. This division can be shown
8533: as follows
8534: .vb
8535: 1 2 0 | 0 3 0 | 0 4
8536: Proc0 0 5 6 | 7 0 0 | 8 0
8537: 9 0 10 | 11 0 0 | 12 0
8538: -------------------------------------
8539: 13 0 14 | 15 16 17 | 0 0
8540: Proc1 0 18 0 | 19 20 21 | 0 0
8541: 0 0 0 | 22 23 0 | 24 0
8542: -------------------------------------
8543: Proc2 25 26 27 | 0 0 28 | 29 0
8544: 30 0 0 | 31 32 33 | 0 34
8545: .ve
8547: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8549: .vb
8550: 2 0 | 0 3 0 | 0
8551: Proc0 5 6 | 7 0 0 | 8
8552: -------------------------------
8553: Proc1 18 0 | 19 20 21 | 0
8554: -------------------------------
8555: Proc2 26 27 | 0 0 28 | 29
8556: 0 0 | 31 32 33 | 0
8557: .ve
8559: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8560: @*/
8561: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8562: {
8563: PetscMPIInt size;
8564: Mat *local;
8565: IS iscoltmp;
8566: PetscBool flg;
8568: PetscFunctionBegin;
8572: PetscAssertPointer(newmat, 5);
8575: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8576: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8578: MatCheckPreallocated(mat, 1);
8579: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8581: if (!iscol || isrow == iscol) {
8582: PetscBool stride;
8583: PetscMPIInt grabentirematrix = 0, grab;
8584: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8585: if (stride) {
8586: PetscInt first, step, n, rstart, rend;
8587: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8588: if (step == 1) {
8589: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8590: if (rstart == first) {
8591: PetscCall(ISGetLocalSize(isrow, &n));
8592: if (n == rend - rstart) grabentirematrix = 1;
8593: }
8594: }
8595: }
8596: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8597: if (grab) {
8598: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8599: if (cll == MAT_INITIAL_MATRIX) {
8600: *newmat = mat;
8601: PetscCall(PetscObjectReference((PetscObject)mat));
8602: }
8603: PetscFunctionReturn(PETSC_SUCCESS);
8604: }
8605: }
8607: if (!iscol) {
8608: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8609: } else {
8610: iscoltmp = iscol;
8611: }
8613: /* if original matrix is on just one processor then use submatrix generated */
8614: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8615: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8616: goto setproperties;
8617: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8618: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8619: *newmat = *local;
8620: PetscCall(PetscFree(local));
8621: goto setproperties;
8622: } else if (!mat->ops->createsubmatrix) {
8623: /* Create a new matrix type that implements the operation using the full matrix */
8624: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8625: switch (cll) {
8626: case MAT_INITIAL_MATRIX:
8627: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8628: break;
8629: case MAT_REUSE_MATRIX:
8630: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8631: break;
8632: default:
8633: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8634: }
8635: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8636: goto setproperties;
8637: }
8639: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8640: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8641: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8643: setproperties:
8644: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8645: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8646: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8647: }
8648: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8649: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8650: PetscFunctionReturn(PETSC_SUCCESS);
8651: }
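/*
   Example (illustrative sketch): a typical call sequence for MatCreateSubMatrix(). Here `A` is
   assumed to be an assembled matrix and `rows`/`cols` are index sets supplied by the caller
   (created, for instance, with ISCreateStride() or ISCreateGeneral()).
.vb
   Mat sub;

   PetscCall(MatCreateSubMatrix(A, rows, cols, MAT_INITIAL_MATRIX, &sub)); // first call creates sub
   // ... change the numerical values of A without changing its nonzero structure ...
   PetscCall(MatCreateSubMatrix(A, rows, cols, MAT_REUSE_MATRIX, &sub));   // refills the existing sub
   PetscCall(MatDestroy(&sub));                                            // the caller destroys sub
.ve
*/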
8653: /*@
8654: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8656: Not Collective
8658: Input Parameters:
8659: + A - the matrix we wish to propagate options from
8660: - B - the matrix we wish to propagate options to
8662: Level: beginner
8664: Note:
8665: Propagates the options associated with `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8667: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8668: @*/
8669: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8670: {
8671: PetscFunctionBegin;
8674: B->symmetry_eternal = A->symmetry_eternal;
8675: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8676: B->symmetric = A->symmetric;
8677: B->structurally_symmetric = A->structurally_symmetric;
8678: B->spd = A->spd;
8679: B->hermitian = A->hermitian;
8680: PetscFunctionReturn(PETSC_SUCCESS);
8681: }
8683: /*@
8684: MatStashSetInitialSize - sets the sizes of the matrix stash, which is
8685: used during the assembly process to store values that belong to
8686: other processors.
8688: Not Collective
8690: Input Parameters:
8691: + mat - the matrix
8692: . size - the initial size of the stash.
8693: - bsize - the initial size of the block-stash (if used).
8695: Options Database Keys:
8696: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8697: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8699: Level: intermediate
8701: Notes:
8702: The block-stash is used for values set with `MatSetValuesBlocked()` while
8703: the stash is used for values set with `MatSetValues()`
8705: Run with the option -info and look for output of the form
8706: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8707: to determine the appropriate value, MM, to use for size and
8708: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8709: to determine the value, BMM, to use for bsize.
8711: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8712: @*/
8713: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8714: {
8715: PetscFunctionBegin;
8718: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8719: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8720: PetscFunctionReturn(PETSC_SUCCESS);
8721: }
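/*
   Example (sketch): presizing the stash before an assembly that generates many off-process entries.
   The sizes 10000 and 0 are placeholders that one would normally read off from the -info output
   described above; `A` is assumed to be a parallel matrix being assembled by the caller.
.vb
   PetscCall(MatStashSetInitialSize(A, 10000, 0)); // call before the MatSetValues() loop
   // ... MatSetValues() calls that touch rows owned by other processes ...
   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve
*/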
8723: /*@
8724: MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8725: the matrix
8727: Neighbor-wise Collective
8729: Input Parameters:
8730: + A - the matrix
8731: . x - the vector to be multiplied by the interpolation operator
8732: - y - the vector to be added to the result
8734: Output Parameter:
8735: . w - the resulting vector
8737: Level: intermediate
8739: Notes:
8740: `w` may be the same vector as `y`.
8742: This allows one to use either the restriction or interpolation (its transpose)
8743: matrix to do the interpolation
8745: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8746: @*/
8747: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8748: {
8749: PetscInt M, N, Ny;
8751: PetscFunctionBegin;
8756: PetscCall(MatGetSize(A, &M, &N));
8757: PetscCall(VecGetSize(y, &Ny));
8758: if (M == Ny) {
8759: PetscCall(MatMultAdd(A, x, y, w));
8760: } else {
8761: PetscCall(MatMultTransposeAdd(A, x, y, w));
8762: }
8763: PetscFunctionReturn(PETSC_SUCCESS);
8764: }
8766: /*@
8767: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8768: the matrix
8770: Neighbor-wise Collective
8772: Input Parameters:
8773: + A - the matrix
8774: - x - the vector to be interpolated
8776: Output Parameter:
8777: . y - the resulting vector
8779: Level: intermediate
8781: Note:
8782: This allows one to use either the restriction or interpolation (its transpose)
8783: matrix to do the interpolation
8785: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8786: @*/
8787: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8788: {
8789: PetscInt M, N, Ny;
8791: PetscFunctionBegin;
8795: PetscCall(MatGetSize(A, &M, &N));
8796: PetscCall(VecGetSize(y, &Ny));
8797: if (M == Ny) {
8798: PetscCall(MatMult(A, x, y));
8799: } else {
8800: PetscCall(MatMultTranspose(A, x, y));
8801: }
8802: PetscFunctionReturn(PETSC_SUCCESS);
8803: }
8805: /*@
8806: MatRestrict - $y = A*x$ or $A^T*x$
8808: Neighbor-wise Collective
8810: Input Parameters:
8811: + A - the matrix
8812: - x - the vector to be restricted
8814: Output Parameter:
8815: . y - the resulting vector
8817: Level: intermediate
8819: Note:
8820: This allows one to use either the restriction or interpolation (its transpose)
8821: matrix to do the restriction
8823: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8824: @*/
8825: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8826: {
8827: PetscInt M, N, Nx;
8829: PetscFunctionBegin;
8833: PetscCall(MatGetSize(A, &M, &N));
8834: PetscCall(VecGetSize(x, &Nx));
8835: if (M == Nx) {
8836: PetscCall(MatMultTranspose(A, x, y));
8837: } else {
8838: PetscCall(MatMult(A, x, y));
8839: }
8840: PetscFunctionReturn(PETSC_SUCCESS);
8841: }
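/*
   Example (sketch): using one rectangular interpolation operator P (stored fine-by-coarse) for both
   interpolation and restriction, as a multigrid cycle would. P, xcoarse, xfine, rfine, and rcoarse
   are assumed to have compatible sizes and layouts.
.vb
   PetscCall(MatInterpolate(P, xcoarse, xfine)); // applies P (or P^T, whichever sizes match)
   PetscCall(MatRestrict(P, rfine, rcoarse));    // applies the transposed action for restriction
.ve
*/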
8843: /*@
8844: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8846: Neighbor-wise Collective
8848: Input Parameters:
8849: + A - the matrix
8850: . x - the input dense matrix to be multiplied
8851: - w - the input dense matrix to be added to the result
8853: Output Parameter:
8854: . y - the output dense matrix
8856: Level: intermediate
8858: Note:
8859: This allows one to use either the restriction or interpolation (its transpose)
8860: matrix to do the interpolation. The `y` matrix can be reused if it was already created with the proper sizes;
8861: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8863: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8864: @*/
8865: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8866: {
8867: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8868: PetscBool trans = PETSC_TRUE;
8869: MatReuse reuse = MAT_INITIAL_MATRIX;
8871: PetscFunctionBegin;
8877: PetscCall(MatGetSize(A, &M, &N));
8878: PetscCall(MatGetSize(x, &Mx, &Nx));
8879: if (N == Mx) trans = PETSC_FALSE;
8880: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8881: Mo = trans ? N : M;
8882: if (*y) {
8883: PetscCall(MatGetSize(*y, &My, &Ny));
8884: if (Mo == My && Nx == Ny) {
8885: reuse = MAT_REUSE_MATRIX;
8886: } else {
8887: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8888: PetscCall(MatDestroy(y));
8889: }
8890: }
8892: if (w && *y == w) { /* this is to minimize changes in PCMG */
8893: PetscBool flg;
8895: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8896: if (w) {
8897: PetscInt My, Ny, Mw, Nw;
8899: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8900: PetscCall(MatGetSize(*y, &My, &Ny));
8901: PetscCall(MatGetSize(w, &Mw, &Nw));
8902: if (!flg || My != Mw || Ny != Nw) w = NULL;
8903: }
8904: if (!w) {
8905: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8906: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8907: PetscCall(PetscObjectDereference((PetscObject)w));
8908: } else {
8909: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8910: }
8911: }
8912: if (!trans) {
8913: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8914: } else {
8915: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8916: }
8917: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8918: PetscFunctionReturn(PETSC_SUCCESS);
8919: }
8921: /*@
8922: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8924: Neighbor-wise Collective
8926: Input Parameters:
8927: + A - the matrix
8928: - x - the input dense matrix
8930: Output Parameter:
8931: . y - the output dense matrix
8933: Level: intermediate
8935: Note:
8936: This allows one to use either the restriction or interpolation (its transpose)
8937: matrix to do the interpolation. The `y` matrix can be reused if it was already created with the proper sizes;
8938: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8940: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8941: @*/
8942: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8943: {
8944: PetscFunctionBegin;
8945: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8946: PetscFunctionReturn(PETSC_SUCCESS);
8947: }
8949: /*@
8950: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8952: Neighbor-wise Collective
8954: Input Parameters:
8955: + A - the matrix
8956: - x - the input dense matrix
8958: Output Parameter:
8959: . y - the output dense matrix
8961: Level: intermediate
8963: Note:
8964: This allows one to use either the restriction or interpolation (its transpose)
8965: matrix to do the restriction. The `y` matrix can be reused if it was already created with the proper sizes;
8966: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8968: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8969: @*/
8970: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8971: {
8972: PetscFunctionBegin;
8973: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8974: PetscFunctionReturn(PETSC_SUCCESS);
8975: }
8977: /*@
8978: MatGetNullSpace - retrieves the null space of a matrix.
8980: Logically Collective
8982: Input Parameters:
8983: + mat - the matrix
8984: - nullsp - the null space object
8986: Level: developer
8988: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8989: @*/
8990: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8991: {
8992: PetscFunctionBegin;
8994: PetscAssertPointer(nullsp, 2);
8995: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8996: PetscFunctionReturn(PETSC_SUCCESS);
8997: }
8999: /*@C
9000: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
9002: Logically Collective
9004: Input Parameters:
9005: + n - the number of matrices
9006: - mat - the array of matrices
9008: Output Parameters:
9009: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
9011: Level: developer
9013: Note:
9014: Call `MatRestoreNullSpaces()` to provide these to another array of matrices
9016: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9017: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
9018: @*/
9019: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9020: {
9021: PetscFunctionBegin;
9022: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9023: PetscAssertPointer(mat, 2);
9024: PetscAssertPointer(nullsp, 3);
9026: PetscCall(PetscCalloc1(3 * n, nullsp));
9027: for (PetscInt i = 0; i < n; i++) {
9029: (*nullsp)[i] = mat[i]->nullsp;
9030: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
9031: (*nullsp)[n + i] = mat[i]->nearnullsp;
9032: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
9033: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
9034: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
9035: }
9036: PetscFunctionReturn(PETSC_SUCCESS);
9037: }
9039: /*@C
9040: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
9042: Logically Collective
9044: Input Parameters:
9045: + n - the number of matrices
9046: . mat - the array of matrices
9047: - nullsp - an array of null spaces
9049: Level: developer
9051: Note:
9052: Call `MatGetNullSpaces()` to create `nullsp`
9054: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9055: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9056: @*/
9057: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9058: {
9059: PetscFunctionBegin;
9060: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9061: PetscAssertPointer(mat, 2);
9062: PetscAssertPointer(nullsp, 3);
9063: PetscAssertPointer(*nullsp, 3);
9065: for (PetscInt i = 0; i < n; i++) {
9067: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9068: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9069: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9070: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9071: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9072: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9073: }
9074: PetscCall(PetscFree(*nullsp));
9075: PetscFunctionReturn(PETSC_SUCCESS);
9076: }
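/*
   Example (sketch): stashing the null spaces, near null spaces, and transpose null spaces of an
   array of matrices before the matrices are rebuilt, then reattaching them. `nlev` and `mats[]`
   are assumed to be provided by the caller and unchanged in length between the two calls.
.vb
   MatNullSpace *nsp;

   PetscCall(MatGetNullSpaces(nlev, mats, &nsp));
   // ... recreate the matrices in mats[] (same sizes) ...
   PetscCall(MatRestoreNullSpaces(nlev, mats, &nsp));
.ve
*/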
9078: /*@
9079: MatSetNullSpace - attaches a null space to a matrix.
9081: Logically Collective
9083: Input Parameters:
9084: + mat - the matrix
9085: - nullsp - the null space object
9087: Level: advanced
9089: Notes:
9090: This null space is used by the `KSP` linear solvers to solve singular systems.
9092: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
9094: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9095: to zero, but the linear system will still be solved in a least squares sense.
9097: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that for a matrix $A$ mapping $R^n$ to $R^m$ ($m$ rows, $n$ columns),
9098: $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$. Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$.
9099: Hence the linear system $A x = b$ has a solution only if $b$ is in $R(A)$ (equivalently, $b$ is orthogonal to $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is also a solution for any $\alpha$.
9100: The minimum norm solution is orthogonal to $n(A)$. For problems without a solution, the solution that minimizes the norm of the residual (the least squares solution)
9101: can be obtained by solving $A x = \hat{b}$, where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
9102: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9104: If the matrix is known to be symmetric, because it is a `MATSBAIJ` matrix or because one has called
9105: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`), this
9106: routine also automatically calls `MatSetTransposeNullSpace()`.
9108: The user should call `MatNullSpaceDestroy()`.
9110: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9111: `KSPSetPCSide()`
9112: @*/
9113: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9114: {
9115: PetscFunctionBegin;
9118: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9119: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9120: mat->nullsp = nullsp;
9121: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9122: PetscFunctionReturn(PETSC_SUCCESS);
9123: }
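/*
   Example (sketch): attaching the constant null space to a singular operator (for instance a pure
   Neumann Laplacian) so that KSP can handle the singular system; `A` is assumed to be assembled.
.vb
   MatNullSpace nsp;

   PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nsp));
   PetscCall(MatSetNullSpace(A, nsp));
   PetscCall(MatNullSpaceDestroy(&nsp)); // the matrix holds its own reference
.ve
*/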
9125: /*@
9126: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9128: Logically Collective
9130: Input Parameters:
9131: + mat - the matrix
9132: - nullsp - the null space object
9134: Level: developer
9136: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9137: @*/
9138: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9139: {
9140: PetscFunctionBegin;
9143: PetscAssertPointer(nullsp, 2);
9144: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9145: PetscFunctionReturn(PETSC_SUCCESS);
9146: }
9148: /*@
9149: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9151: Logically Collective
9153: Input Parameters:
9154: + mat - the matrix
9155: - nullsp - the null space object
9157: Level: advanced
9159: Notes:
9160: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9162: See `MatSetNullSpace()`
9164: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9165: @*/
9166: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9167: {
9168: PetscFunctionBegin;
9171: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9172: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9173: mat->transnullsp = nullsp;
9174: PetscFunctionReturn(PETSC_SUCCESS);
9175: }
9177: /*@
9178: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9179: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9181: Logically Collective
9183: Input Parameters:
9184: + mat - the matrix
9185: - nullsp - the null space object
9187: Level: advanced
9189: Notes:
9190: Overwrites any previous near null space that may have been attached
9192: You can remove the null space by calling this routine with a `nullsp` of `NULL`
9194: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9195: @*/
9196: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9197: {
9198: PetscFunctionBegin;
9202: MatCheckPreallocated(mat, 1);
9203: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9204: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9205: mat->nearnullsp = nullsp;
9206: PetscFunctionReturn(PETSC_SUCCESS);
9207: }
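/*
   Example (sketch): supplying rigid body modes as a near null space for an elasticity operator so
   that an algebraic multigrid preconditioner such as PCGAMG can build better coarse spaces. `coords`
   is assumed to be a vector of nodal coordinates whose block size equals the spatial dimension.
.vb
   MatNullSpace nearnsp;

   PetscCall(MatNullSpaceCreateRigidBody(coords, &nearnsp));
   PetscCall(MatSetNearNullSpace(A, nearnsp));
   PetscCall(MatNullSpaceDestroy(&nearnsp));
.ve
*/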
9209: /*@
9210: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9212: Not Collective
9214: Input Parameter:
9215: . mat - the matrix
9217: Output Parameter:
9218: . nullsp - the null space object, `NULL` if not set
9220: Level: advanced
9222: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9223: @*/
9224: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9225: {
9226: PetscFunctionBegin;
9229: PetscAssertPointer(nullsp, 2);
9230: MatCheckPreallocated(mat, 1);
9231: *nullsp = mat->nearnullsp;
9232: PetscFunctionReturn(PETSC_SUCCESS);
9233: }
9235: /*@
9236: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9238: Collective
9240: Input Parameters:
9241: + mat - the matrix
9242: . row - row/column permutation
9243: - info - information on desired factorization process
9245: Level: developer
9247: Notes:
9248: This is probably truly in-place only when the level of fill is zero; otherwise new space is allocated
9249: to store the factored matrix and the previous memory is freed.
9251: Most users should employ the `KSP` interface for linear solvers
9252: instead of working directly with matrix algebra routines such as this.
9253: See, e.g., `KSPCreate()`.
9255: Developer Note:
9256: The Fortran interface is not autogenerated as the
9257: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9259: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9260: @*/
9261: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9262: {
9263: PetscFunctionBegin;
9267: PetscAssertPointer(info, 3);
9268: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9269: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9270: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9271: MatCheckPreallocated(mat, 1);
9272: PetscUseTypeMethod(mat, iccfactor, row, info);
9273: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9274: PetscFunctionReturn(PETSC_SUCCESS);
9275: }
9277: /*@
9278: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9279: ghosted ones.
9281: Not Collective
9283: Input Parameters:
9284: + mat - the matrix
9285: - diag - the diagonal values, including ghost ones
9287: Level: developer
9289: Notes:
9290: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9292: This allows one to avoid the communication that would otherwise be needed to perform the scaling with `MatDiagonalScale()`
9294: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9295: @*/
9296: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9297: {
9298: PetscMPIInt size;
9300: PetscFunctionBegin;
9305: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9306: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9307: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9308: if (size == 1) {
9309: PetscInt n, m;
9310: PetscCall(VecGetSize(diag, &n));
9311: PetscCall(MatGetSize(mat, NULL, &m));
9312: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9313: PetscCall(MatDiagonalScale(mat, NULL, diag));
9314: } else {
9315: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9316: }
9317: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9318: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9319: PetscFunctionReturn(PETSC_SUCCESS);
9320: }
9322: /*@
9323: MatGetInertia - Gets the inertia from a factored matrix
9325: Collective
9327: Input Parameter:
9328: . mat - the matrix
9330: Output Parameters:
9331: + nneg - number of negative eigenvalues
9332: . nzero - number of zero eigenvalues
9333: - npos - number of positive eigenvalues
9335: Level: advanced
9337: Note:
9338: Matrix must have been factored by `MatCholeskyFactor()`
9340: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9341: @*/
9342: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9343: {
9344: PetscFunctionBegin;
9347: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9348: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9349: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9350: PetscFunctionReturn(PETSC_SUCCESS);
9351: }
9353: /*@C
9354: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9356: Neighbor-wise Collective
9358: Input Parameters:
9359: + mat - the factored matrix obtained with `MatGetFactor()`
9360: - b - the right-hand-side vectors
9362: Output Parameter:
9363: . x - the result vectors
9365: Level: developer
9367: Note:
9368: The vectors `b` and `x` cannot be the same. I.e., one cannot
9369: call `MatSolves`(A,x,x).
9371: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9372: @*/
9373: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9374: {
9375: PetscFunctionBegin;
9378: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9379: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9380: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9382: MatCheckPreallocated(mat, 1);
9383: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9384: PetscUseTypeMethod(mat, solves, b, x);
9385: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9386: PetscFunctionReturn(PETSC_SUCCESS);
9387: }
9389: /*@
9390: MatIsSymmetric - Test whether a matrix is symmetric
9392: Collective
9394: Input Parameters:
9395: + A - the matrix to test
9396: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9398: Output Parameter:
9399: . flg - the result
9401: Level: intermediate
9403: Notes:
9404: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9406: If the matrix does not yet know whether it is symmetric, this can be an expensive operation; see also `MatIsSymmetricKnown()`
9408: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9409: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9411: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9412: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9413: @*/
9414: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9415: {
9416: PetscFunctionBegin;
9418: PetscAssertPointer(flg, 3);
9419: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9420: else {
9421: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9422: else PetscCall(MatIsTranspose(A, A, tol, flg));
9423: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9424: }
9425: PetscFunctionReturn(PETSC_SUCCESS);
9426: }
9428: /*@
9429: MatIsHermitian - Test whether a matrix is Hermitian
9431: Collective
9433: Input Parameters:
9434: + A - the matrix to test
9435: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9437: Output Parameter:
9438: . flg - the result
9440: Level: intermediate
9442: Notes:
9443: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9445: If the matrix does not yet know whether it is Hermitian, this can be an expensive operation; see also `MatIsHermitianKnown()`
9447: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9448: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9450: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9451: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9452: @*/
9453: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9454: {
9455: PetscFunctionBegin;
9457: PetscAssertPointer(flg, 3);
9458: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9459: else {
9460: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9461: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9462: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9463: }
9464: PetscFunctionReturn(PETSC_SUCCESS);
9465: }
9467: /*@
9468: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9470: Not Collective
9472: Input Parameter:
9473: . A - the matrix to check
9475: Output Parameters:
9476: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9477: - flg - the result (only valid if set is `PETSC_TRUE`)
9479: Level: advanced
9481: Notes:
9482: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9483: if you want it explicitly checked
9485: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9486: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9488: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9489: @*/
9490: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9491: {
9492: PetscFunctionBegin;
9494: PetscAssertPointer(set, 2);
9495: PetscAssertPointer(flg, 3);
9496: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9497: *set = PETSC_TRUE;
9498: *flg = PetscBool3ToBool(A->symmetric);
9499: } else {
9500: *set = PETSC_FALSE;
9501: }
9502: PetscFunctionReturn(PETSC_SUCCESS);
9503: }
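/*
   Example (sketch): querying the cached symmetry state first and only falling back to the possibly
   expensive explicit test when the state is unknown; `A` is assumed to be assembled.
.vb
   PetscBool set, symm;

   PetscCall(MatIsSymmetricKnown(A, &set, &symm));
   if (!set) PetscCall(MatIsSymmetric(A, 0.0, &symm)); // explicit check; with tol 0.0 the result is cached
.ve
*/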
9505: /*@
9506: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9508: Not Collective
9510: Input Parameter:
9511: . A - the matrix to check
9513: Output Parameters:
9514: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9515: - flg - the result (only valid if set is `PETSC_TRUE`)
9517: Level: advanced
9519: Notes:
9520: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9522: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9523: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9525: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9526: @*/
9527: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9528: {
9529: PetscFunctionBegin;
9531: PetscAssertPointer(set, 2);
9532: PetscAssertPointer(flg, 3);
9533: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9534: *set = PETSC_TRUE;
9535: *flg = PetscBool3ToBool(A->spd);
9536: } else {
9537: *set = PETSC_FALSE;
9538: }
9539: PetscFunctionReturn(PETSC_SUCCESS);
9540: }
9542: /*@
9543: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9545: Not Collective
9547: Input Parameter:
9548: . A - the matrix to check
9550: Output Parameters:
9551: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9552: - flg - the result (only valid if set is `PETSC_TRUE`)
9554: Level: advanced
9556: Notes:
9557: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9558: if you want it explicitly checked
9560: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9561: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9563: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9564: @*/
9565: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9566: {
9567: PetscFunctionBegin;
9569: PetscAssertPointer(set, 2);
9570: PetscAssertPointer(flg, 3);
9571: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9572: *set = PETSC_TRUE;
9573: *flg = PetscBool3ToBool(A->hermitian);
9574: } else {
9575: *set = PETSC_FALSE;
9576: }
9577: PetscFunctionReturn(PETSC_SUCCESS);
9578: }
9580: /*@
9581: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9583: Collective
9585: Input Parameter:
9586: . A - the matrix to test
9588: Output Parameter:
9589: . flg - the result
9591: Level: intermediate
9593: Notes:
9594: If the matrix does not yet know whether it is structurally symmetric, this can be an expensive operation; see also `MatIsStructurallySymmetricKnown()`
9596: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9597: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9599: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9600: @*/
9601: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9602: {
9603: PetscFunctionBegin;
9605: PetscAssertPointer(flg, 2);
9606: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9607: *flg = PetscBool3ToBool(A->structurally_symmetric);
9608: } else {
9609: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9610: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9611: }
9612: PetscFunctionReturn(PETSC_SUCCESS);
9613: }
9615: /*@
9616: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9618: Not Collective
9620: Input Parameter:
9621: . A - the matrix to check
9623: Output Parameters:
9624: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9625: - flg - the result (only valid if set is `PETSC_TRUE`)
9627: Level: advanced
9629: Notes:
9630: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9631: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9633: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9635: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9636: @*/
9637: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9638: {
9639: PetscFunctionBegin;
9641: PetscAssertPointer(set, 2);
9642: PetscAssertPointer(flg, 3);
9643: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9644: *set = PETSC_TRUE;
9645: *flg = PetscBool3ToBool(A->structurally_symmetric);
9646: } else {
9647: *set = PETSC_FALSE;
9648: }
9649: PetscFunctionReturn(PETSC_SUCCESS);
9650: }
9652: /*@
9653: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. that need
9654: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9656: Not Collective
9658: Input Parameter:
9659: . mat - the matrix
9661: Output Parameters:
9662: + nstash - the size of the stash
9663: . reallocs - the number of additional mallocs incurred.
9664: . bnstash - the size of the block stash
9665: - breallocs - the number of additional mallocs incurred in the block stash
9667: Level: advanced
9669: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9670: @*/
9671: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9672: {
9673: PetscFunctionBegin;
9674: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9675: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9676: PetscFunctionReturn(PETSC_SUCCESS);
9677: }
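/*
   Example (sketch): inspecting the stash after an assembly to decide whether later assemblies should
   presize it with MatStashSetInitialSize(); `A` is assumed to have just been assembled.
.vb
   PetscInt nstash, reallocs, bnstash, breallocs;

   PetscCall(MatStashGetInfo(A, &nstash, &reallocs, &bnstash, &breallocs));
   PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "stash entries %" PetscInt_FMT ", mallocs %" PetscInt_FMT "\n", nstash, reallocs));
.ve
*/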
9679: /*@
9680: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9681: parallel layout, `PetscLayout` for rows and columns
9683: Collective
9685: Input Parameter:
9686: . mat - the matrix
9688: Output Parameters:
9689: + right - (optional) vector that the matrix can be multiplied against
9690: - left - (optional) vector that the matrix vector product can be stored in
9692: Level: advanced
9694: Notes:
9695: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9697: These are new vectors that are not owned by the mat; they should be destroyed with `VecDestroy()` when no longer needed
9699: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9700: @*/
9701: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9702: {
9703: PetscFunctionBegin;
9706: if (mat->ops->getvecs) {
9707: PetscUseTypeMethod(mat, getvecs, right, left);
9708: } else {
9709: if (right) {
9710: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9711: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9712: PetscCall(VecSetType(*right, mat->defaultvectype));
9713: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9714: if (mat->boundtocpu && mat->bindingpropagates) {
9715: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9716: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9717: }
9718: #endif
9719: }
9720: if (left) {
9721: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9722: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9723: PetscCall(VecSetType(*left, mat->defaultvectype));
9724: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9725: if (mat->boundtocpu && mat->bindingpropagates) {
9726: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9727: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9728: }
9729: #endif
9730: }
9731: }
9732: PetscFunctionReturn(PETSC_SUCCESS);
9733: }
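/*
   Example (sketch): creating layout-compatible work vectors for a possibly rectangular matrix and
   using them in a product; the caller destroys both vectors when finished.
.vb
   Vec x, y;

   PetscCall(MatCreateVecs(A, &x, &y)); // x is compatible with the columns, y with the rows
   PetscCall(VecSet(x, 1.0));
   PetscCall(MatMult(A, x, y));
   PetscCall(VecDestroy(&x));
   PetscCall(VecDestroy(&y));
.ve
*/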
9735: /*@
9736: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9737: with default values.
9739: Not Collective
9741: Input Parameter:
9742: . info - the `MatFactorInfo` data structure
9744: Level: developer
9746: Notes:
9747: The solvers are generally used through the `KSP` and `PC` objects, for example
9748: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9750: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9752: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9753: @*/
9754: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9755: {
9756: PetscFunctionBegin;
9757: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9758: PetscFunctionReturn(PETSC_SUCCESS);
9759: }
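/*
   Example (sketch): initializing a MatFactorInfo and adjusting the expected fill before a symbolic
   factorization. The value 5.0 is a placeholder, `F` is assumed to come from MatGetFactor(), and
   `rowperm`/`colperm` are orderings obtained elsewhere (for example with MatGetOrdering()).
.vb
   MatFactorInfo info;

   PetscCall(MatFactorInfoInitialize(&info));
   info.fill = 5.0; // expected (nonzeros in factor)/(nonzeros in matrix)
   PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
   PetscCall(MatLUFactorNumeric(F, A, &info));
.ve
*/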
9761: /*@
9762: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9764: Collective
9766: Input Parameters:
9767: + mat - the factored matrix
9768: - is - the index set defining the Schur indices (0-based)
9770: Level: advanced
9772: Notes:
9773: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9775: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9777: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9779: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9780: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9781: @*/
9782: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9783: {
9784: PetscErrorCode (*f)(Mat, IS);
9786: PetscFunctionBegin;
9791: PetscCheckSameComm(mat, 1, is, 2);
9792: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9793: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9794: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9795: PetscCall(MatDestroy(&mat->schur));
9796: PetscCall((*f)(mat, is));
9797: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9798: PetscFunctionReturn(PETSC_SUCCESS);
9799: }
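/*
   Example (sketch): computing a Schur complement on a set of indices through a MUMPS LU
   factorization. `A` is assumed to be the assembled matrix and `schur_is` an IS listing the
   (0-based) Schur rows/columns; MUMPS is expected to ignore the NULL ordering arguments.
.vb
   Mat           F, S;
   MatFactorInfo info;

   PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F));
   PetscCall(MatFactorSetSchurIS(F, schur_is));
   PetscCall(MatFactorInfoInitialize(&info));
   PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
   PetscCall(MatLUFactorNumeric(F, A, &info));
   PetscCall(MatFactorCreateSchurComplement(F, &S, NULL)); // the caller owns the copy S
   // alternatively, solve with it in place: MatFactorSolveSchurComplement(F, rhs, sol)
   PetscCall(MatDestroy(&S));
   PetscCall(MatDestroy(&F));
.ve
*/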
9801: /*@
9802: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9804: Logically Collective
9806: Input Parameters:
9807: + F - the factored matrix obtained by calling `MatGetFactor()`
9808: . S - location where to return the Schur complement, can be `NULL`
9809: - status - the status of the Schur complement matrix, can be `NULL`
9811: Level: advanced
9813: Notes:
9814: You must call `MatFactorSetSchurIS()` before calling this routine.
9816: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9818: The routine provides a copy of the Schur matrix stored within the solver data structures.
9819: The caller must destroy the object when it is no longer needed.
9820: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9822: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9824: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9826: Developer Note:
9827: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9828: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9830: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9831: @*/
9832: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9833: {
9834: PetscFunctionBegin;
9836: if (S) PetscAssertPointer(S, 2);
9837: if (status) PetscAssertPointer(status, 3);
9838: if (S) {
9839: PetscErrorCode (*f)(Mat, Mat *);
9841: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9842: if (f) {
9843: PetscCall((*f)(F, S));
9844: } else {
9845: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9846: }
9847: }
9848: if (status) *status = F->schur_status;
9849: PetscFunctionReturn(PETSC_SUCCESS);
9850: }
9852: /*@
9853: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9855: Logically Collective
9857: Input Parameters:
9858: + F - the factored matrix obtained by calling `MatGetFactor()`
9859: . S - location where to return the Schur complement, can be `NULL`
9860: - status - the status of the Schur complement matrix, can be `NULL`
9862: Level: advanced
9864: Notes:
9865: You must call `MatFactorSetSchurIS()` before calling this routine.
9867: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9869: The routine returns the Schur complement stored within the data structures of the solver.
9871: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9873: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9875: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9877: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9879: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9880: @*/
9881: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9882: {
9883: PetscFunctionBegin;
9885: if (S) {
9886: PetscAssertPointer(S, 2);
9887: *S = F->schur;
9888: }
9889: if (status) {
9890: PetscAssertPointer(status, 3);
9891: *status = F->schur_status;
9892: }
9893: PetscFunctionReturn(PETSC_SUCCESS);
9894: }
9896: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9897: {
9898: Mat S = F->schur;
9900: PetscFunctionBegin;
9901: switch (F->schur_status) {
9902: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9903: case MAT_FACTOR_SCHUR_INVERTED:
9904: if (S) {
9905: S->ops->solve = NULL;
9906: S->ops->matsolve = NULL;
9907: S->ops->solvetranspose = NULL;
9908: S->ops->matsolvetranspose = NULL;
9909: S->ops->solveadd = NULL;
9910: S->ops->solvetransposeadd = NULL;
9911: S->factortype = MAT_FACTOR_NONE;
9912: PetscCall(PetscFree(S->solvertype));
9913: }
9914: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9915: break;
9916: default:
9917: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9918: }
9919: PetscFunctionReturn(PETSC_SUCCESS);
9920: }
9922: /*@
9923: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9925: Logically Collective
9927: Input Parameters:
9928: + F - the factored matrix obtained by calling `MatGetFactor()`
9929: . S - location where the Schur complement is stored
9930: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9932: Level: advanced
9934: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9935: @*/
9936: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9937: {
9938: PetscFunctionBegin;
9940: if (S) {
9942: *S = NULL;
9943: }
9944: F->schur_status = status;
9945: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9946: PetscFunctionReturn(PETSC_SUCCESS);
9947: }
9949: /*@
9950: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9952: Logically Collective
9954: Input Parameters:
9955: + F - the factored matrix obtained by calling `MatGetFactor()`
9956: . rhs - location where the right-hand side of the Schur complement system is stored
9957: - sol - location where the solution of the Schur complement system has to be returned
9959: Level: advanced
9961: Notes:
9962: The sizes of the vectors should match the size of the Schur complement
9964: Must be called after `MatFactorSetSchurIS()`
9966: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9967: @*/
9968: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9969: {
9970: PetscFunctionBegin;
9977: PetscCheckSameComm(F, 1, rhs, 2);
9978: PetscCheckSameComm(F, 1, sol, 3);
9979: PetscCall(MatFactorFactorizeSchurComplement(F));
9980: switch (F->schur_status) {
9981: case MAT_FACTOR_SCHUR_FACTORED:
9982: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9983: break;
9984: case MAT_FACTOR_SCHUR_INVERTED:
9985: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9986: break;
9987: default:
9988: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9989: }
9990: PetscFunctionReturn(PETSC_SUCCESS);
9991: }
9993: /*@
9994: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9996: Logically Collective
9998: Input Parameters:
9999: + F - the factored matrix obtained by calling `MatGetFactor()`
10000: . rhs - location where the right-hand side of the Schur complement system is stored
10001: - sol - location where the solution of the Schur complement system has to be returned
10003: Level: advanced
10005: Notes:
10006: The sizes of the vectors should match the size of the Schur complement
10008: Must be called after `MatFactorSetSchurIS()`
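   A minimal usage sketch (assuming `F` has been factored after `MatFactorSetSchurIS()` and `rhs`, `sol` are `Vec`s sized to match the Schur complement):
.vb
   MatFactorSolveSchurComplement(F, rhs, sol);  // solves S * sol = rhs with the internal Schur complement S
.ve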
10010: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
10011: @*/
10012: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
10013: {
10014: PetscFunctionBegin;
10021: PetscCheckSameComm(F, 1, rhs, 2);
10022: PetscCheckSameComm(F, 1, sol, 3);
10023: PetscCall(MatFactorFactorizeSchurComplement(F));
10024: switch (F->schur_status) {
10025: case MAT_FACTOR_SCHUR_FACTORED:
10026: PetscCall(MatSolve(F->schur, rhs, sol));
10027: break;
10028: case MAT_FACTOR_SCHUR_INVERTED:
10029: PetscCall(MatMult(F->schur, rhs, sol));
10030: break;
10031: default:
10032: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10033: }
10034: PetscFunctionReturn(PETSC_SUCCESS);
10035: }
10037: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
10038: #if PetscDefined(HAVE_CUDA)
10039: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
10040: #endif
10042: /* Schur status updated in the interface */
10043: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
10044: {
10045: Mat S = F->schur;
10047: PetscFunctionBegin;
10048: if (S) {
10049: PetscMPIInt size;
10050: PetscBool isdense, isdensecuda;
10052: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
10053: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
10054: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
10055: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
10056: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
10057: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
10058: if (isdense) {
10059: PetscCall(MatSeqDenseInvertFactors_Private(S));
10060: } else if (isdensecuda) {
10061: #if defined(PETSC_HAVE_CUDA)
10062: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
10063: #endif
10064: }
10065:     // TODO: add HIP support for inverting the Schur complement
10066: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
10067: }
10068: PetscFunctionReturn(PETSC_SUCCESS);
10069: }
10071: /*@
10072: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10074: Logically Collective
10076: Input Parameter:
10077: . F - the factored matrix obtained by calling `MatGetFactor()`
10079: Level: advanced
10081: Notes:
10082: Must be called after `MatFactorSetSchurIS()`.
10084: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
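   A minimal usage sketch (hypothetical variable `Sinv`):
.vb
   MatFactorInvertSchurComplement(F);              // compute the explicit inverse internally
   MatFactorCreateSchurComplement(F, &Sinv, NULL); // Sinv is a copy of the inverted Schur complement
   MatDestroy(&Sinv);
.ve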
10086: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10087: @*/
10088: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10089: {
10090: PetscFunctionBegin;
10093: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
10094: PetscCall(MatFactorFactorizeSchurComplement(F));
10095: PetscCall(MatFactorInvertSchurComplement_Private(F));
10096: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10097: PetscFunctionReturn(PETSC_SUCCESS);
10098: }
10100: /*@
10101: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10103: Logically Collective
10105: Input Parameter:
10106: . F - the factored matrix obtained by calling `MatGetFactor()`
10108: Level: advanced
10110: Note:
10111: Must be called after `MatFactorSetSchurIS()`
10113: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10114: @*/
10115: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10116: {
10117: MatFactorInfo info;
10119: PetscFunctionBegin;
10122: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10123: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10124: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10125: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10126: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10127: } else {
10128: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10129: }
10130: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10131: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10132: PetscFunctionReturn(PETSC_SUCCESS);
10133: }
10135: /*@
10136: MatPtAP - Creates the matrix product $C = P^T * A * P$
10138: Neighbor-wise Collective
10140: Input Parameters:
10141: + A - the matrix
10142: . P - the projection matrix
10143: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10144: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10145: if the result is a dense matrix this is irrelevant
10147: Output Parameter:
10148: . C - the product matrix
10150: Level: intermediate
10152: Notes:
10153: C will be created and must be destroyed by the user with `MatDestroy()`.
10155: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10157: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
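   A minimal usage sketch (`C` is created on the first call and reused when only the numerical values of `A` change):
.vb
   MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C);
   // ... modify the values of A, keeping its nonzero pattern ...
   MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &C);
   MatDestroy(&C);
.ve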
10159: Developer Note:
10160: For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.
10162: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10163: @*/
10164: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10165: {
10166: PetscFunctionBegin;
10167: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10168: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10170: if (scall == MAT_INITIAL_MATRIX) {
10171: PetscCall(MatProductCreate(A, P, NULL, C));
10172: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10173: PetscCall(MatProductSetAlgorithm(*C, "default"));
10174: PetscCall(MatProductSetFill(*C, fill));
10176: (*C)->product->api_user = PETSC_TRUE;
10177: PetscCall(MatProductSetFromOptions(*C));
10178: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10179: PetscCall(MatProductSymbolic(*C));
10180: } else { /* scall == MAT_REUSE_MATRIX */
10181: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10182: }
10184: PetscCall(MatProductNumeric(*C));
10185: (*C)->symmetric = A->symmetric;
10186: (*C)->spd = A->spd;
10187: PetscFunctionReturn(PETSC_SUCCESS);
10188: }
10190: /*@
10191: MatRARt - Creates the matrix product $C = R * A * R^T$
10193: Neighbor-wise Collective
10195: Input Parameters:
10196: + A - the matrix
10197: . R - the projection matrix
10198: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10199: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10200: if the result is a dense matrix this is irrelevant
10202: Output Parameter:
10203: . C - the product matrix
10205: Level: intermediate
10207: Notes:
10208: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10210: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10212: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10213: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10214: parallel `MatRARt()` is implemented via explicit transpose of `R`, which could be very expensive.
10215: We recommend using `MatPtAP()`.
10217: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10219: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10220: @*/
10221: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10222: {
10223: PetscFunctionBegin;
10224: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10225: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10227: if (scall == MAT_INITIAL_MATRIX) {
10228: PetscCall(MatProductCreate(A, R, NULL, C));
10229: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10230: PetscCall(MatProductSetAlgorithm(*C, "default"));
10231: PetscCall(MatProductSetFill(*C, fill));
10233: (*C)->product->api_user = PETSC_TRUE;
10234: PetscCall(MatProductSetFromOptions(*C));
10235: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10236: PetscCall(MatProductSymbolic(*C));
10237: } else { /* scall == MAT_REUSE_MATRIX */
10238: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10239: }
10241: PetscCall(MatProductNumeric(*C));
10242: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10243: PetscFunctionReturn(PETSC_SUCCESS);
10244: }
10246: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10247: {
10248: PetscBool flg = PETSC_TRUE;
10250: PetscFunctionBegin;
10251: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10252: if (scall == MAT_INITIAL_MATRIX) {
10253: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10254: PetscCall(MatProductCreate(A, B, NULL, C));
10255: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10256: PetscCall(MatProductSetFill(*C, fill));
10257: } else { /* scall == MAT_REUSE_MATRIX */
10258: Mat_Product *product = (*C)->product;
10260: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10261: if (flg && product && product->type != ptype) {
10262: PetscCall(MatProductClear(*C));
10263: product = NULL;
10264: }
10265: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10266:     if (!product) { /* the user provided the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10267: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10268: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10269: product = (*C)->product;
10270: product->fill = fill;
10271: product->clear = PETSC_TRUE;
10272: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10273: flg = PETSC_FALSE;
10274: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10275: }
10276: }
10277: if (flg) {
10278: (*C)->product->api_user = PETSC_TRUE;
10279: PetscCall(MatProductSetType(*C, ptype));
10280: PetscCall(MatProductSetFromOptions(*C));
10281: PetscCall(MatProductSymbolic(*C));
10282: }
10283: PetscCall(MatProductNumeric(*C));
10284: PetscFunctionReturn(PETSC_SUCCESS);
10285: }
10287: /*@
10288: MatMatMult - Performs matrix-matrix multiplication C=A*B.
10290: Neighbor-wise Collective
10292: Input Parameters:
10293: + A - the left matrix
10294: . B - the right matrix
10295: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10296: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10297: if the result is a dense matrix this is irrelevant
10299: Output Parameter:
10300: . C - the product matrix
10302: Notes:
10303: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10305: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10306: call to this function with `MAT_INITIAL_MATRIX`.
10308: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10310: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10311: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10313: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10315: Example of Usage:
10316: .vb
10317: MatProductCreate(A,B,NULL,&C);
10318: MatProductSetType(C,MATPRODUCT_AB);
MatProductSetFromOptions(C);
10319: MatProductSymbolic(C);
10320: MatProductNumeric(C); // compute C=A * B
10321: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10322: MatProductNumeric(C);
10323: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10324: MatProductNumeric(C);
10325: .ve
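   Equivalently, a minimal sketch using this routine directly:
.vb
   MatMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C);
   // ... modify the values of A or B, keeping their nonzero patterns ...
   MatMatMult(A, B, MAT_REUSE_MATRIX, PETSC_CURRENT, &C);
   MatDestroy(&C);
.ve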
10327: Level: intermediate
10329: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10330: @*/
10331: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10332: {
10333: PetscFunctionBegin;
10334: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10335: PetscFunctionReturn(PETSC_SUCCESS);
10336: }
10338: /*@
10339: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10341: Neighbor-wise Collective
10343: Input Parameters:
10344: + A - the left matrix
10345: . B - the right matrix
10346: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10347: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10349: Output Parameter:
10350: . C - the product matrix
10352: Options Database Key:
10353: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10354: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10355: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10357: Level: intermediate
10359: Notes:
10360: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10362: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10364: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10365: actually needed.
10367: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10368: and for pairs of `MATMPIDENSE` matrices.
10370: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10372: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10374: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10375: @*/
10376: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10377: {
10378: PetscFunctionBegin;
10379: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10380: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10381: PetscFunctionReturn(PETSC_SUCCESS);
10382: }
10384: /*@
10385: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10387: Neighbor-wise Collective
10389: Input Parameters:
10390: + A - the left matrix
10391: . B - the right matrix
10392: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10393: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10395: Output Parameter:
10396: . C - the product matrix
10398: Level: intermediate
10400: Notes:
10401: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10403: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10405: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10407: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10408: actually needed.
10410: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10411: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10413: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
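   A minimal usage sketch:
.vb
   MatTransposeMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C);  // C = A^T * B
   // ... modify the values of A or B, keeping their nonzero patterns ...
   MatTransposeMatMult(A, B, MAT_REUSE_MATRIX, PETSC_CURRENT, &C);
   MatDestroy(&C);
.ve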
10415: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10416: @*/
10417: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10418: {
10419: PetscFunctionBegin;
10420: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10421: PetscFunctionReturn(PETSC_SUCCESS);
10422: }
10424: /*@
10425: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10427: Neighbor-wise Collective
10429: Input Parameters:
10430: + A - the left matrix
10431: . B - the middle matrix
10432: . C - the right matrix
10433: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10434: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10435: if the result is a dense matrix this is irrelevant
10437: Output Parameter:
10438: . D - the product matrix
10440: Level: intermediate
10442: Notes:
10443: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10445: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10447: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10449: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10450: actually needed.
10452: If you have many matrices with the same non-zero structure to multiply, you
10453: should use `MAT_REUSE_MATRIX` in all calls but the first
10455: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10457: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10458: @*/
10459: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10460: {
10461: PetscFunctionBegin;
10462: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10463: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10465: if (scall == MAT_INITIAL_MATRIX) {
10466: PetscCall(MatProductCreate(A, B, C, D));
10467: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10468: PetscCall(MatProductSetAlgorithm(*D, "default"));
10469: PetscCall(MatProductSetFill(*D, fill));
10471: (*D)->product->api_user = PETSC_TRUE;
10472: PetscCall(MatProductSetFromOptions(*D));
10473: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10474: ((PetscObject)C)->type_name);
10475: PetscCall(MatProductSymbolic(*D));
10476: } else { /* user may change input matrices when REUSE */
10477: PetscCall(MatProductReplaceMats(A, B, C, *D));
10478: }
10479: PetscCall(MatProductNumeric(*D));
10480: PetscFunctionReturn(PETSC_SUCCESS);
10481: }
10483: /*@
10484: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10486: Collective
10488: Input Parameters:
10489: + mat - the matrix
10490: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10491: . subcomm - MPI communicator split from the communicator in which mat resides (or `MPI_COMM_NULL` if nsubcomm is used)
10492: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10494: Output Parameter:
10495: . matredundant - redundant matrix
10497: Level: advanced
10499: Notes:
10500: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10501: original matrix has not changed from that last call to `MatCreateRedundantMatrix()`.
10503: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10504: calling it.
10506: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
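   A minimal usage sketch (for example, with 8 MPI processes and 2 requested redundant copies, each subcommunicator of 4 processes ends up with a full copy of `mat`):
.vb
   MatCreateRedundantMatrix(mat, 2, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &matredundant);
   // ... use matredundant on its subcommunicator ...
   MatDestroy(&matredundant);
.ve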
10508: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10509: @*/
10510: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10511: {
10512: MPI_Comm comm;
10513: PetscMPIInt size;
10514: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10515: Mat_Redundant *redund = NULL;
10516: PetscSubcomm psubcomm = NULL;
10517: MPI_Comm subcomm_in = subcomm;
10518: Mat *matseq;
10519: IS isrow, iscol;
10520: PetscBool newsubcomm = PETSC_FALSE;
10522: PetscFunctionBegin;
10524: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10525: PetscAssertPointer(*matredundant, 5);
10527: }
10529: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10530: if (size == 1 || nsubcomm == 1) {
10531: if (reuse == MAT_INITIAL_MATRIX) {
10532: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10533: } else {
10534: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10535: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10536: }
10537: PetscFunctionReturn(PETSC_SUCCESS);
10538: }
10540: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10541: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10542: MatCheckPreallocated(mat, 1);
10544: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10545: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10546: /* create psubcomm, then get subcomm */
10547: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10548: PetscCallMPI(MPI_Comm_size(comm, &size));
10549:     PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10551: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10552: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10553: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10554: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10555: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10556: newsubcomm = PETSC_TRUE;
10557: PetscCall(PetscSubcommDestroy(&psubcomm));
10558: }
10560: /* get isrow, iscol and a local sequential matrix matseq[0] */
10561: if (reuse == MAT_INITIAL_MATRIX) {
10562: mloc_sub = PETSC_DECIDE;
10563: nloc_sub = PETSC_DECIDE;
10564: if (bs < 1) {
10565: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10566: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10567: } else {
10568: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10569: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10570: }
10571: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10572: rstart = rend - mloc_sub;
10573: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10574: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10575: PetscCall(ISSetIdentity(iscol));
10576: } else { /* reuse == MAT_REUSE_MATRIX */
10577: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10578: /* retrieve subcomm */
10579: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10580: redund = (*matredundant)->redundant;
10581: isrow = redund->isrow;
10582: iscol = redund->iscol;
10583: matseq = redund->matseq;
10584: }
10585: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10587: /* get matredundant over subcomm */
10588: if (reuse == MAT_INITIAL_MATRIX) {
10589: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10591: /* create a supporting struct and attach it to C for reuse */
10592: PetscCall(PetscNew(&redund));
10593: (*matredundant)->redundant = redund;
10594: redund->isrow = isrow;
10595: redund->iscol = iscol;
10596: redund->matseq = matseq;
10597: if (newsubcomm) {
10598: redund->subcomm = subcomm;
10599: } else {
10600: redund->subcomm = MPI_COMM_NULL;
10601: }
10602: } else {
10603: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10604: }
10605: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10606: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10607: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10608: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10609: }
10610: #endif
10611: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10612: PetscFunctionReturn(PETSC_SUCCESS);
10613: }
10615: /*@C
10616: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10617: a given `Mat`. Each submatrix can span multiple procs.
10619: Collective
10621: Input Parameters:
10622: + mat - the matrix
10623: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10624: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10626: Output Parameter:
10627: . subMat - parallel sub-matrices each spanning a given `subcomm`
10629: Level: advanced
10631: Notes:
10632: The submatrix partition across processes is dictated by `subComm`, a
10633: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10634: is not restricted to be grouped with consecutive original MPI processes.
10636: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10637: maps directly to the layout of the original matrix [with respect to the local
10638: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10639: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10640: the `subMat`. However, the offDiagMat loses some columns, and these are
10641: reconstructed with `MatSetValues()`
10643: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10645: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10646: @*/
10647: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10648: {
10649: PetscMPIInt commsize, subCommSize;
10651: PetscFunctionBegin;
10652: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10653: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10654:   PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10656: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10657: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10658: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10659: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10660: PetscFunctionReturn(PETSC_SUCCESS);
10661: }
10663: /*@
10664: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10666: Not Collective
10668: Input Parameters:
10669: + mat - matrix to extract local submatrix from
10670: . isrow - local row indices for submatrix
10671: - iscol - local column indices for submatrix
10673: Output Parameter:
10674: . submat - the submatrix
10676: Level: intermediate
10678: Notes:
10679: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10681: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10682: the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communicator of `mat`'s.
10684: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10685: `MatSetValuesBlockedLocal()` will also be implemented.
10687: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10688: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
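   A minimal usage sketch (assuming `mat` has a local-to-global mapping set and `isrow`, `iscol` are local index sets; `row`, `col`, `v` are hypothetical):
.vb
   MatGetLocalSubMatrix(mat, isrow, iscol, &submat);
   MatSetValuesLocal(submat, 1, &row, 1, &col, &v, ADD_VALUES);
   MatRestoreLocalSubMatrix(mat, isrow, iscol, &submat);
.ve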
10690: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10691: @*/
10692: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10693: {
10694: PetscFunctionBegin;
10698: PetscCheckSameComm(isrow, 2, iscol, 3);
10699: PetscAssertPointer(submat, 4);
10700: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10702: if (mat->ops->getlocalsubmatrix) {
10703: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10704: } else {
10705: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10706: }
10707: PetscFunctionReturn(PETSC_SUCCESS);
10708: }
10710: /*@
10711: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10713: Not Collective
10715: Input Parameters:
10716: + mat - matrix to extract local submatrix from
10717: . isrow - local row indices for submatrix
10718: . iscol - local column indices for submatrix
10719: - submat - the submatrix
10721: Level: intermediate
10723: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10724: @*/
10725: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10726: {
10727: PetscFunctionBegin;
10731: PetscCheckSameComm(isrow, 2, iscol, 3);
10732: PetscAssertPointer(submat, 4);
10735: if (mat->ops->restorelocalsubmatrix) {
10736: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10737: } else {
10738: PetscCall(MatDestroy(submat));
10739: }
10740: *submat = NULL;
10741: PetscFunctionReturn(PETSC_SUCCESS);
10742: }
10744: /*@
10745: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10747: Collective
10749: Input Parameter:
10750: . mat - the matrix
10752: Output Parameter:
10753: . is - if any rows have zero diagonals this contains the list of them
10755: Level: developer
10757: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10758: @*/
10759: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10760: {
10761: PetscFunctionBegin;
10764: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10765: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10767: if (!mat->ops->findzerodiagonals) {
10768: Vec diag;
10769: const PetscScalar *a;
10770: PetscInt *rows;
10771: PetscInt rStart, rEnd, r, nrow = 0;
10773: PetscCall(MatCreateVecs(mat, &diag, NULL));
10774: PetscCall(MatGetDiagonal(mat, diag));
10775: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10776: PetscCall(VecGetArrayRead(diag, &a));
10777: for (r = 0; r < rEnd - rStart; ++r)
10778: if (a[r] == 0.0) ++nrow;
10779: PetscCall(PetscMalloc1(nrow, &rows));
10780: nrow = 0;
10781: for (r = 0; r < rEnd - rStart; ++r)
10782: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10783: PetscCall(VecRestoreArrayRead(diag, &a));
10784: PetscCall(VecDestroy(&diag));
10785: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10786: } else {
10787: PetscUseTypeMethod(mat, findzerodiagonals, is);
10788: }
10789: PetscFunctionReturn(PETSC_SUCCESS);
10790: }
10792: /*@
10793: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10795: Collective
10797: Input Parameter:
10798: . mat - the matrix
10800: Output Parameter:
10801: . is - contains the list of rows with off block diagonal entries
10803: Level: developer
10805: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10806: @*/
10807: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10808: {
10809: PetscFunctionBegin;
10812: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10813: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10815: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10816: PetscFunctionReturn(PETSC_SUCCESS);
10817: }
10819: /*@C
10820: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10822: Collective; No Fortran Support
10824: Input Parameter:
10825: . mat - the matrix
10827: Output Parameter:
10828: . values - the block inverses in column major order (FORTRAN-like)
10830: Level: advanced
10832: Notes:
10833: The size of the blocks is determined by the block size of the matrix.
10835: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10837: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
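   A minimal usage sketch (with block size `bs`, block `i` of the result starts at offset `i*bs*bs` and is stored in column major order):
.vb
   const PetscScalar *values;
   MatInvertBlockDiagonal(mat, &values);
   // values[i*bs*bs + r + c*bs] is entry (r,c) of the inverse of diagonal block i
.ve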
10839: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10840: @*/
10841: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10842: {
10843: PetscFunctionBegin;
10845: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10846: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10847: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10848: PetscFunctionReturn(PETSC_SUCCESS);
10849: }
10851: /*@
10852: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10854: Collective; No Fortran Support
10856: Input Parameters:
10857: + mat - the matrix
10858: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10859: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10861: Output Parameter:
10862: . values - the block inverses in column major order (FORTRAN-like)
10864: Level: advanced
10866: Notes:
10867: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10869: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10871: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10872: @*/
10873: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10874: {
10875: PetscFunctionBegin;
10877: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10878: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10879: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10880: PetscFunctionReturn(PETSC_SUCCESS);
10881: }
10883: /*@
10884: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10886: Collective
10888: Input Parameters:
10889: + A - the matrix
10890: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10892: Level: advanced
10894: Note:
10895: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10897: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10898: @*/
10899: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10900: {
10901: const PetscScalar *vals;
10902: PetscInt *dnnz;
10903: PetscInt m, rstart, rend, bs, i, j;
10905: PetscFunctionBegin;
10906: PetscCall(MatInvertBlockDiagonal(A, &vals));
10907: PetscCall(MatGetBlockSize(A, &bs));
10908: PetscCall(MatGetLocalSize(A, &m, NULL));
10909: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10910: PetscCall(PetscMalloc1(m / bs, &dnnz));
10911: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10912: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10913: PetscCall(PetscFree(dnnz));
10914: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10915: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10916: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10917: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10918: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10919: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10920: PetscFunctionReturn(PETSC_SUCCESS);
10921: }
10923: /*@
10924: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10925: via `MatTransposeColoringCreate()`.
10927: Collective
10929: Input Parameter:
10930: . c - coloring context
10932: Level: intermediate
10934: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10935: @*/
10936: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10937: {
10938: MatTransposeColoring matcolor = *c;
10940: PetscFunctionBegin;
10941: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10942: if (--((PetscObject)matcolor)->refct > 0) {
10943: matcolor = NULL;
10944: PetscFunctionReturn(PETSC_SUCCESS);
10945: }
10947: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10948: PetscCall(PetscFree(matcolor->rows));
10949: PetscCall(PetscFree(matcolor->den2sp));
10950: PetscCall(PetscFree(matcolor->colorforcol));
10951: PetscCall(PetscFree(matcolor->columns));
10952: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10953: PetscCall(PetscHeaderDestroy(c));
10954: PetscFunctionReturn(PETSC_SUCCESS);
10955: }
10957: /*@
10958: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10959: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10960: `MatTransposeColoring` to sparse `B`.
10962: Collective
10964: Input Parameters:
10965: + coloring - coloring context created with `MatTransposeColoringCreate()`
10966: - B - sparse matrix
10968: Output Parameter:
10969: . Btdense - dense matrix $B^T$
10971: Level: developer
10973: Note:
10974: These are used internally for some implementations of `MatRARt()`
10976: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10977: @*/
10978: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10979: {
10980: PetscFunctionBegin;
10985: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10986: PetscFunctionReturn(PETSC_SUCCESS);
10987: }
10989: /*@
10990: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10991: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10992: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover the sparse matrix
10993: $C_{sp}$ from $C_{den}$.
10995: Collective
10997: Input Parameters:
10998: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10999: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
11001: Output Parameter:
11002: . Csp - sparse matrix
11004: Level: developer
11006: Note:
11007: These are used internally for some implementations of `MatRARt()`
11009: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
11010: @*/
11011: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
11012: {
11013: PetscFunctionBegin;
11018: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
11019: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
11020: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
11021: PetscFunctionReturn(PETSC_SUCCESS);
11022: }
11024: /*@
11025: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
11027: Collective
11029: Input Parameters:
11030: + mat - the matrix product C
11031: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
11033: Output Parameter:
11034: . color - the new coloring context
11036: Level: intermediate
11038: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
11039: `MatTransColoringApplyDenToSp()`
11040: @*/
11041: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
11042: {
11043: MatTransposeColoring c;
11044: MPI_Comm comm;
11046: PetscFunctionBegin;
11047: PetscAssertPointer(color, 3);
11049: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11050: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
11051: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
11052: c->ctype = iscoloring->ctype;
11053: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
11054: *color = c;
11055: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11056: PetscFunctionReturn(PETSC_SUCCESS);
11057: }
11059: /*@
11060: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
11061: matrix has had new nonzero locations added to (or removed from) the matrix since the previous call, the value will be larger.
11063: Not Collective
11065: Input Parameter:
11066: . mat - the matrix
11068: Output Parameter:
11069: . state - the current state
11071: Level: intermediate
11073: Notes:
11074: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
11075: different matrices
11077: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11079: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
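   A minimal usage sketch (hypothetical states `state0`, `state1`):
.vb
   PetscObjectState state0, state1;
   MatGetNonzeroState(mat, &state0);
   // ... assemble or otherwise modify mat ...
   MatGetNonzeroState(mat, &state1);
   if (state1 > state0) { /* new nonzero locations were introduced */ }
.ve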
11081: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11082: @*/
11083: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11084: {
11085: PetscFunctionBegin;
11087: *state = mat->nonzerostate;
11088: PetscFunctionReturn(PETSC_SUCCESS);
11089: }
11091: /*@
11092: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11093: matrices from each processor
11095: Collective
11097: Input Parameters:
11098: + comm - the communicators the parallel matrix will live on
11099: . seqmat - the input sequential matrices
11100: . n - number of local columns (or `PETSC_DECIDE`)
11101: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11103: Output Parameter:
11104: . mpimat - the parallel matrix generated
11106: Level: developer
11108: Note:
11109: The number of columns of the matrix in EACH processor MUST be the same.
11111: .seealso: [](ch_matrices), `Mat`
11112: @*/
11113: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11114: {
11115: PetscMPIInt size;
11117: PetscFunctionBegin;
11118: PetscCallMPI(MPI_Comm_size(comm, &size));
11119: if (size == 1) {
11120: if (reuse == MAT_INITIAL_MATRIX) {
11121: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11122: } else {
11123: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11124: }
11125: PetscFunctionReturn(PETSC_SUCCESS);
11126: }
11128: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11130: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11131: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11132: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11133: PetscFunctionReturn(PETSC_SUCCESS);
11134: }
11136: /*@
11137: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11139: Collective
11141: Input Parameters:
11142: + A - the matrix to create subdomains from
11143: - N - requested number of subdomains
11145: Output Parameters:
11146: + n - number of subdomains resulting on this MPI process
11147: - iss - `IS` list with indices of subdomains on this MPI process
11149: Level: advanced
11151: Note:
11152: The number of subdomains must be smaller than the communicator size
11154: .seealso: [](ch_matrices), `Mat`, `IS`
11155: @*/
11156: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11157: {
11158: MPI_Comm comm, subcomm;
11159: PetscMPIInt size, rank, color;
11160: PetscInt rstart, rend, k;
11162: PetscFunctionBegin;
11163: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11164: PetscCallMPI(MPI_Comm_size(comm, &size));
11165: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11166: PetscCheck(N >= 1 && N < (PetscInt)size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11167: *n = 1;
11168: k = ((PetscInt)size) / N + ((PetscInt)size % N > 0); /* There are up to k ranks to a color */
11169: color = rank / k;
11170: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11171: PetscCall(PetscMalloc1(1, iss));
11172: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11173: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11174: PetscCallMPI(MPI_Comm_free(&subcomm));
11175: PetscFunctionReturn(PETSC_SUCCESS);
11176: }
11178: /*@
11179: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11181: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11182: If they are not the same, uses `MatMatMatMult()`.
11184: Once the coarse grid problem is constructed, correct for interpolation operators
11185: that are not of full rank, which can legitimately happen in the case of non-nested
11186: geometric multigrid.
11188: Input Parameters:
11189: + restrct - restriction operator
11190: . dA - fine grid matrix
11191: . interpolate - interpolation operator
11192: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11193: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11195: Output Parameter:
11196: . A - the Galerkin coarse matrix
11198: Options Database Key:
11199: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11201: Level: developer
11203: Note:
11204: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
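   A minimal usage sketch (hypothetical names; `R` is the restriction, `P` the interpolation, and `Af` the fine-grid matrix):
.vb
   MatGalerkin(R, Af, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Ac);  // Ac = R * Af * P (or P^T * Af * P when R == P)
.ve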
11206: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11207: @*/
11208: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11209: {
11210: IS zerorows;
11211: Vec diag;
11213: PetscFunctionBegin;
11214: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
11215: /* Construct the coarse grid matrix */
11216: if (interpolate == restrct) {
11217: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11218: } else {
11219: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11220: }
11222: /* If the interpolation matrix is not of full rank, A will have zero rows.
11223: This can legitimately happen in the case of non-nested geometric multigrid.
11224: In that event, we set the rows of the matrix to the rows of the identity,
11225: ignoring the equations (as the RHS will also be zero). */
11227: PetscCall(MatFindZeroRows(*A, &zerorows));
11229: if (zerorows != NULL) { /* if there are any zero rows */
11230: PetscCall(MatCreateVecs(*A, &diag, NULL));
11231: PetscCall(MatGetDiagonal(*A, diag));
11232: PetscCall(VecISSet(diag, zerorows, 1.0));
11233: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11234: PetscCall(VecDestroy(&diag));
11235: PetscCall(ISDestroy(&zerorows));
11236: }
11237: PetscFunctionReturn(PETSC_SUCCESS);
11238: }
11240: /*@C
11241: MatSetOperation - Allows user to set a matrix operation for any matrix type
11243: Logically Collective
11245: Input Parameters:
11246: + mat - the matrix
11247: . op - the name of the operation
11248: - f - the function that provides the operation
11250: Level: developer
11252: Example Usage:
11253: .vb
11254: extern PetscErrorCode usermult(Mat, Vec, Vec);
11256: PetscCall(MatCreateXXX(comm, ..., &A));
11257: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11258: .ve
11260: Notes:
11261: See the file `include/petscmat.h` for a complete list of matrix
11262: operations, which all have the form MATOP_<OPERATION>, where
11263: <OPERATION> is the name (in all capital letters) of the
11264: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11266: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11267: sequence as the usual matrix interface routines, since they
11268: are intended to be accessed via the usual matrix interface
11269: routines, e.g.,
11270: .vb
11271: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11272: .ve
11274: In particular each function MUST return `PETSC_SUCCESS` on success and
11275: nonzero on failure.
11277: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11279: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11280: @*/
11281: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11282: {
11283: PetscFunctionBegin;
11285: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11286: (((void (**)(void))mat->ops)[op]) = f;
11287: PetscFunctionReturn(PETSC_SUCCESS);
11288: }
11290: /*@C
11291: MatGetOperation - Gets a matrix operation for any matrix type.
11293: Not Collective
11295: Input Parameters:
11296: + mat - the matrix
11297: - op - the name of the operation
11299: Output Parameter:
11300: . f - the function that provides the operation
11302: Level: developer
11304: Example Usage:
11305: .vb
11306: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11308: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11309: .ve
11311: Notes:
11312: See the file include/petscmat.h for a complete list of matrix
11313: operations, which all have the form MATOP_<OPERATION>, where
11314: <OPERATION> is the name (in all capital letters) of the
11315: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11317: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11319: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11320: @*/
11321: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11322: {
11323: PetscFunctionBegin;
11325: *f = (((void (**)(void))mat->ops)[op]);
11326: PetscFunctionReturn(PETSC_SUCCESS);
11327: }
11329: /*@
11330: MatHasOperation - Determines whether the given matrix supports the particular operation.
11332: Not Collective
11334: Input Parameters:
11335: + mat - the matrix
11336: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11338: Output Parameter:
11339: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11341: Level: advanced
11343: Note:
11344: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
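   Example Usage:
   A minimal hedged sketch (it assumes `A` is an assembled matrix and `diag` a compatible vector created elsewhere):
.vb
  PetscBool has;

  PetscCall(MatHasOperation(A, MATOP_GET_DIAGONAL, &has));
  if (has) PetscCall(MatGetDiagonal(A, diag));
.ve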
11346: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11347: @*/
11348: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11349: {
11350: PetscFunctionBegin;
11352: PetscAssertPointer(has, 3);
11353: if (mat->ops->hasoperation) {
11354: PetscUseTypeMethod(mat, hasoperation, op, has);
11355: } else {
11356: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11357: else {
11358: *has = PETSC_FALSE;
11359: if (op == MATOP_CREATE_SUBMATRIX) {
11360: PetscMPIInt size;
11362: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11363: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11364: }
11365: }
11366: }
11367: PetscFunctionReturn(PETSC_SUCCESS);
11368: }
11370: /*@
11371: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11373: Collective
11375: Input Parameter:
11376: . mat - the matrix
11378: Output Parameter:
11379: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11381: Level: beginner
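   Example Usage:
   A minimal hedged sketch (assuming `mat` has already been created and its sizes set):
.vb
  PetscBool cong;

  PetscCall(MatHasCongruentLayouts(mat, &cong));
  // when cong is PETSC_TRUE the rows and columns share the same parallel layout,
  // so, e.g., the input and output vectors of MatMult() can be laid out identically
.ve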
11383: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11384: @*/
11385: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11386: {
11387: PetscFunctionBegin;
11390: PetscAssertPointer(cong, 2);
11391: if (!mat->rmap || !mat->cmap) {
11392: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11393: PetscFunctionReturn(PETSC_SUCCESS);
11394: }
11395: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11396: PetscCall(PetscLayoutSetUp(mat->rmap));
11397: PetscCall(PetscLayoutSetUp(mat->cmap));
11398: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11399: if (*cong) mat->congruentlayouts = 1;
11400: else mat->congruentlayouts = 0;
11401: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11402: PetscFunctionReturn(PETSC_SUCCESS);
11403: }
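/* Developer-level routine without a manual page: dispatches to the matrix type's "setinf" implementation */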
11405: PetscErrorCode MatSetInf(Mat A)
11406: {
11407: PetscFunctionBegin;
11408: PetscUseTypeMethod(A, setinf);
11409: PetscFunctionReturn(PETSC_SUCCESS);
11410: }
11412: /*@
11413: MatCreateGraph - creates a scalar matrix (that is, a matrix with one vertex for each block vertex in the original matrix) for use in graph algorithms,
11414: possibly removing small values from the graph structure.
11416: Collective
11418: Input Parameters:
11419: + A - the matrix
11420: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11421: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11422: . filter - filter value: < 0 does nothing; == 0 removes only exact 0.0 entries; otherwise removes entries whose absolute value is <= filter
11423: . num_idx - size of the `index` array
11424: - index - array of block indices to use when computing the graph's strength-of-connection weights
11426: Output Parameter:
11427: . graph - the resulting graph
11429: Level: advanced
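   Example Usage:
   A hedged sketch, assuming `A` is an assembled blocked matrix and that using the first block index for the
   strength-of-connection weights is meaningful for the problem (the filter value 0.0 simply drops exact zeros):
.vb
  Mat      G;
  PetscInt idx[] = {0}; // block (dof) indices used for the strength-of-connection weights

  PetscCall(MatCreateGraph(A, PETSC_TRUE, PETSC_TRUE, 0.0, 1, idx, &G));
  // ... use G, for example inside a coarsening algorithm ...
  PetscCall(MatDestroy(&G));
.ve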
11431: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11432: @*/
11433: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11434: {
11435: PetscFunctionBegin;
11439: PetscAssertPointer(graph, 7);
11440: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11441: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11442: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11443: PetscFunctionReturn(PETSC_SUCCESS);
11444: }
11446: /*@
11447: MatEliminateZeros - eliminates the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11448: meaning the same memory is used for the matrix and no new memory is allocated.
11450: Collective
11452: Input Parameters:
11453: + A - the matrix
11454: - keep - if the diagonal coefficient of a given row of `A` is zero, indicates whether it should be kept in the structure or eliminated as well
11456: Level: intermediate
11458: Developer Note:
11459: The entries in the sparse matrix data structure are shifted to fill in the locations freed by the eliminated entries, so the tail
11460: ends of the arrays in the data structure are left unused.
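   Example Usage:
   A minimal hedged sketch (assuming `A` is an assembled `MATAIJ` matrix whose structure contains some stored zeros):
.vb
  PetscCall(MatEliminateZeros(A, PETSC_TRUE)); // keep zero diagonal entries in the nonzero structure
.ve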
11462: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11463: @*/
11464: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11465: {
11466: PetscFunctionBegin;
11468: PetscUseTypeMethod(A, eliminatezeros, keep);
11469: PetscFunctionReturn(PETSC_SUCCESS);
11470: }